// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <asm/unaligned.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/chacha.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/poly1305.h>
#include <crypto/sha.h>
#include <crypto/sm3.h>
#include <crypto/sm4.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;
	u8 blocksz;
	u32 ivmask;
	u32 ctrinit;

	__le32 key[16];
	u32 nonce;
	unsigned int key_len, xts;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;

	struct crypto_cipher *hkaes;
	struct crypto_aead *fback;
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;
	int nr_src, nr_dst;
};

static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return 4;
	}
	if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;
		return 4;
	}

	cdesc->control_data.options |= ctx->ivmask;
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
	return ctx->blocksz / sizeof(u32);
}

static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    struct safexcel_token *atoken,
				    u32 length)
{
	struct safexcel_token *token;
	int ivlen;

	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
	if (ivlen == 4) {
		/* No space in cdesc, instruction moves to atoken */
		cdesc->additional_cdata_size = 1;
		token = atoken;
	} else {
		/* Everything fits in cdesc */
		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
		/* Need to pad with NOP */
		eip197_noop_token(&token[1]);
	}

	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token->packet_length = length;
	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
		      EIP197_TOKEN_STAT_LAST_HASH;
	token->instructions = EIP197_TOKEN_INS_LAST |
			      EIP197_TOKEN_INS_TYPE_CRYPTO |
			      EIP197_TOKEN_INS_TYPE_OUTPUT;
}

static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}

static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
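	/* For IPsec ESP modes, ctx->aadskip is assumed to hold the size of the
	 * explicit ESP IV carried at the end of the AAD, which must not be
	 * authenticated; it is zero otherwise.
	 */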
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

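		/* The B0 block built above occupies atoken[1]..atoken[4]; the
		 * 2 byte AAD length field, when present, sits in atoken[5] and
		 * costs one extra token word.
		 */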
		if (likely(assocadj)) {
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

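		/* Keep only the residue of the crypto data modulo the 16 byte
		 * block size; used below to pad the hash input for CCM.
		 */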
		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}

static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

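	/* If the engine's record cache may still hold the old context, a key
	 * change means the cached context record must be invalidated.
	 */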
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
				unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_authenc_keys keys;
	struct crypto_aes_ctx aes;
	int err = -EINVAL, i;
	const char *alg;

	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
		goto badkey;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		/* Must have at least space for the nonce here */
		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
			goto badkey;
		/* last 4 bytes of key are the nonce! */
		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
				      CTR_RFC3686_NONCE_SIZE);
		/* exclude the nonce here */
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}

	/* Encryption key */
	switch (ctx->alg) {
	case SAFEXCEL_DES:
		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_3DES:
		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_AES:
		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_SM4:
		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
		goto badkey;
	}

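	/* As for the skcipher case above: with the record cache enabled,
	 * detect a changed encryption key so the existing context record gets
	 * invalidated.
	 */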
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) !=
			    ((u32 *)keys.enckey)[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		alg = "safexcel-sha1";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		alg = "safexcel-sha224";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		alg = "safexcel-sha256";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		alg = "safexcel-sha384";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		alg = "safexcel-sha512";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
		alg = "safexcel-sm3";
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
				 alg, ctx->state_sz))
		goto badkey;

	/* Now copy the keys into the context */
	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
	ctx->key_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			return 0;
		} else {
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
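		/* For XTS (ctx->xts == 1) the stored key is the concatenation
		 * of two AES keys, so halve the length to get the size of a
		 * single key.
		 */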
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

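	/* Consume every result descriptor produced for this request, keeping
	 * the first error reported, if any.
	 */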
	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}

static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	struct safexcel_token *atoken;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       &ctx->base.ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember actual input length, source buffer length may be
	 * updated in case of inline operation below.
	 */
	totlen = totlen_src;
	queued = totlen_src;

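	/* Map the scatterlists for DMA; for in-place operation the same list
	 * is mapped once, bidirectionally.
	 */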
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 730) if (src == dst) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 731) sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 732) sreq->nr_dst = sreq->nr_src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 733) if (unlikely((totlen_src || totlen_dst) &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 734) (sreq->nr_src <= 0))) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 735) dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 736) max(totlen_src, totlen_dst));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 737) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 738) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 739) dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 740) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 741) if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 742) dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 743) totlen_src);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 744) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 745) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 746) dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 747)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 748) if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 749) dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 750) totlen_dst);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 751) dma_unmap_sg(priv->dev, src, sreq->nr_src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 752) DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 753) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 754) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 755) dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 756) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 757)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 758) memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 759)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 760) if (!totlen) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 761) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 762) * The EIP97 cannot deal with zero length input packets!
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 763) * So stuff a dummy command descriptor indicating a 1 byte
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 764) * (dummy) input packet, using the context record as source.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 765) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 766) first_cdesc = safexcel_add_cdesc(priv, ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 767) 1, 1, ctx->base.ctxr_dma,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 768) 1, 1, ctx->base.ctxr_dma,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 769) &atoken);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 770) if (IS_ERR(first_cdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 771) /* No space left in the command descriptor ring */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 772) ret = PTR_ERR(first_cdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 773) goto cdesc_rollback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 774) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 775) n_cdesc = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 776) goto skip_cdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 777) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 778)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 779) /* command descriptors */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 780) for_each_sg(src, sg, sreq->nr_src, i) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 781) int len = sg_dma_len(sg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 782)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 783) /* Do not overflow the request */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 784) if (queued < len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 785) len = queued;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 786)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 787) cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 788) !(queued - len),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 789) sg_dma_address(sg), len, totlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 790) ctx->base.ctxr_dma, &atoken);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 791) if (IS_ERR(cdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 792) /* No space left in the command descriptor ring */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 793) ret = PTR_ERR(cdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 794) goto cdesc_rollback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 795) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 796)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 797) if (!n_cdesc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 798) first_cdesc = cdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 799)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 800) n_cdesc++;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 801) queued -= len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 802) if (!queued)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 803) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 804) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 805) skip_cdesc:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 806) /* Add context control words and token to first command descriptor */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 807) safexcel_context_control(ctx, base, sreq, first_cdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 808) if (ctx->aead)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 809) safexcel_aead_token(ctx, iv, first_cdesc, atoken,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 810) sreq->direction, cryptlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 811) assoclen, digestsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 812) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 813) safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 814) cryptlen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 815)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 816) /* result descriptors */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 817) for_each_sg(dst, sg, sreq->nr_dst, i) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 818) bool last = (i == sreq->nr_dst - 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 819) u32 len = sg_dma_len(sg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 820)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 821) /* only allow the part of the buffer we know we need */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 822) if (len > totlen_dst)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 823) len = totlen_dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 824) if (unlikely(!len))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 825) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 826) totlen_dst -= len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 827)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 828) /* skip over AAD space in buffer - not written */
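		/*
		 * Example: with assoclen 24 and a first 16 byte DMA segment,
		 * the whole segment is skipped (assoclen drops to 8) and the
		 * next segment is used starting 8 bytes in, for len - 8 bytes.
		 */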
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 829) if (assoclen) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 830) if (assoclen >= len) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 831) assoclen -= len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 832) continue;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 833) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 834) rdesc = safexcel_add_rdesc(priv, ring, first, last,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 835) sg_dma_address(sg) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 836) assoclen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 837) len - assoclen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 838) assoclen = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 839) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 840) rdesc = safexcel_add_rdesc(priv, ring, first, last,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 841) sg_dma_address(sg),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 842) len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 843) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 844) if (IS_ERR(rdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 845) /* No space left in the result descriptor ring */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 846) ret = PTR_ERR(rdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 847) goto rdesc_rollback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 848) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 849) if (first) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 850) first_rdesc = rdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 851) first = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 852) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 853) n_rdesc++;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 854) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 855)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 856) if (unlikely(first)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 857) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 858) * Special case: AEAD decrypt with only AAD data.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 859) * In this case there is NO output data from the engine,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 860) * but the engine still needs a result descriptor!
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 861) * Create a dummy one just for catching the result token.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 862) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 863) rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 864) if (IS_ERR(rdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 865) /* No space left in the result descriptor ring */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 866) ret = PTR_ERR(rdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 867) goto rdesc_rollback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 868) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 869) first_rdesc = rdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 870) n_rdesc = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 871) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 872)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 873) safexcel_rdr_req_set(priv, ring, first_rdesc, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 874)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 875) *commands = n_cdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 876) *results = n_rdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 877) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 878)
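/* Roll back any descriptors already written and undo the DMA mappings */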
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 879) rdesc_rollback:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 880) for (i = 0; i < n_rdesc; i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 881) safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 882) cdesc_rollback:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 883) for (i = 0; i < n_cdesc; i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 884) safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 885)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 886) if (src == dst) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 887) dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 888) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 889) dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 890) dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 891) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 892)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 893) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 894) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 895)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 896) static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 897) int ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 898) struct crypto_async_request *base,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 899) struct safexcel_cipher_req *sreq,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 900) bool *should_complete, int *ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 901) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 902) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 903) struct safexcel_result_desc *rdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 904) int ndesc = 0, enq_ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 905)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 906) *ret = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 907)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 908) if (unlikely(!sreq->rdescs))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 909) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 910)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 911) while (sreq->rdescs--) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 912) rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 913) if (IS_ERR(rdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 914) dev_err(priv->dev,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 915) "cipher: invalidate: could not retrieve the result descriptor\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 916) *ret = PTR_ERR(rdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 917) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 918) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 919)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 920) if (likely(!*ret))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 921) *ret = safexcel_rdesc_check_errors(priv, rdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 922)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 923) ndesc++;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 924) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 925)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 926) safexcel_complete(priv, ring);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 927)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 928) if (ctx->base.exit_inv) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 929) dma_pool_free(priv->context_pool, ctx->base.ctxr,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 930) ctx->base.ctxr_dma);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 931)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 932) *should_complete = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 933)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 934) return ndesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 935) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 936)
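	/*
	 * Not a final (exit) invalidation: the cached context record was
	 * invalidated because the key changed, so re-queue the request that
	 * triggered it and let the actual cipher operation run now.
	 */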
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 937) ring = safexcel_select_ring(priv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 938) ctx->base.ring = ring;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 939)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 940) spin_lock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 941) enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 942) spin_unlock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 943)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 944) if (enq_ret != -EINPROGRESS)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 945) *ret = enq_ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 946)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 947) queue_work(priv->ring[ring].workqueue,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 948) &priv->ring[ring].work_data.work);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 949)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 950) *should_complete = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 951)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 952) return ndesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 953) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 954)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 955) static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 956) int ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 957) struct crypto_async_request *async,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 958) bool *should_complete, int *ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 959) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 960) struct skcipher_request *req = skcipher_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 961) struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 962) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 963)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 964) if (sreq->needs_inv) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 965) sreq->needs_inv = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 966) err = safexcel_handle_inv_result(priv, ring, async, sreq,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 967) should_complete, ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 968) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 969) err = safexcel_handle_req_result(priv, ring, async, req->src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 970) req->dst, req->cryptlen, sreq,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 971) should_complete, ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 972) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 973)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 974) return err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 975) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 976)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 977) static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 978) int ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 979) struct crypto_async_request *async,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 980) bool *should_complete, int *ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 981) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 982) struct aead_request *req = aead_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 983) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 984) struct safexcel_cipher_req *sreq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 985) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 986)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 987) if (sreq->needs_inv) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 988) sreq->needs_inv = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 989) err = safexcel_handle_inv_result(priv, ring, async, sreq,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 990) should_complete, ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 991) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 992) err = safexcel_handle_req_result(priv, ring, async, req->src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 993) req->dst,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 994) req->cryptlen + crypto_aead_authsize(tfm),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 995) sreq, should_complete, ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 996) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 997)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 998) return err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 999) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1000)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1001) static int safexcel_cipher_send_inv(struct crypto_async_request *base,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1002) int ring, int *commands, int *results)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1003) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1004) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1005) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1006) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1007)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1008) ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1009) if (unlikely(ret))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1010) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1011)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1012) *commands = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1013) *results = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1014)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1015) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1016) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1017)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1018) static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1019) int *commands, int *results)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1020) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1021) struct skcipher_request *req = skcipher_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1022) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1023) struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1024) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1025) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1026)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1027) BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1028)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1029) if (sreq->needs_inv) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1030) ret = safexcel_cipher_send_inv(async, ring, commands, results);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1031) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1032) struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1033) u8 input_iv[AES_BLOCK_SIZE];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1034)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1035) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1036) * Save input IV in case of CBC decrypt mode
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1037) * Will be overwritten with output IV prior to use!
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1038) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1039) memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1040)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1041) ret = safexcel_send_req(async, ring, sreq, req->src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1042) req->dst, req->cryptlen, 0, 0, input_iv,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1043) commands, results);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1044) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1045)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1046) sreq->rdescs = *results;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1047) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1048) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1049)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1050) static int safexcel_aead_send(struct crypto_async_request *async, int ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1051) int *commands, int *results)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1052) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1053) struct aead_request *req = aead_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1054) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1055) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1056) struct safexcel_cipher_req *sreq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1057) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1058) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1059)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1060) BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1061)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1062) if (sreq->needs_inv)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1063) ret = safexcel_cipher_send_inv(async, ring, commands, results);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1064) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1065) ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1066) req->cryptlen, req->assoclen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1067) crypto_aead_authsize(tfm), req->iv,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1068) commands, results);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1069) sreq->rdescs = *results;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1070) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1071) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1072)
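/*
 * Synchronously invalidate the context record: queue a request that only
 * carries the invalidation and wait for its completion callback before
 * returning.
 */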
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1073) static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1074) struct crypto_async_request *base,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1075) struct safexcel_cipher_req *sreq,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1076) struct safexcel_inv_result *result)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1077) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1078) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1079) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1080) int ring = ctx->base.ring;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1081)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1082) init_completion(&result->completion);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1083)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1084) ctx = crypto_tfm_ctx(base->tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1085) ctx->base.exit_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1086) sreq->needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1087)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1088) spin_lock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1089) crypto_enqueue_request(&priv->ring[ring].queue, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1090) spin_unlock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1091)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1092) queue_work(priv->ring[ring].workqueue,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1093) &priv->ring[ring].work_data.work);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1094)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1095) wait_for_completion(&result->completion);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1096)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1097) if (result->error) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1098) dev_warn(priv->dev,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1099) "cipher: sync: invalidate: completion error %d\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1100) result->error);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1101) return result->error;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1102) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1103)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1104) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1105) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1106)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1107) static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1108) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1109) EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1110) struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1111) struct safexcel_inv_result result = {};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1112)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1113) memset(req, 0, sizeof(struct skcipher_request));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1114)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1115) skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1116) safexcel_inv_complete, &result);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1117) skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1118)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1119) return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1120) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1121)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1122) static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1123) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1124) EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1125) struct safexcel_cipher_req *sreq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1126) struct safexcel_inv_result result = {};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1127)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1128) memset(req, 0, sizeof(struct aead_request));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1129)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1130) aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1131) safexcel_inv_complete, &result);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1132) aead_request_set_tfm(req, __crypto_aead_cast(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1133)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1134) return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1135) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1136)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1137) static int safexcel_queue_req(struct crypto_async_request *base,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1138) struct safexcel_cipher_req *sreq,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1139) enum safexcel_cipher_direction dir)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1140) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1141) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1142) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1143) int ret, ring;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1144)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1145) sreq->needs_inv = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1146) sreq->direction = dir;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1147)
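	/*
	 * If a context record already exists, a pending key change means the
	 * engine's cached copy must be invalidated first (EIP197 with TRC
	 * only). Otherwise allocate a fresh context record and pick a ring.
	 */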
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1148) if (ctx->base.ctxr) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1149) if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1150) sreq->needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1151) ctx->base.needs_inv = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1152) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1153) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1154) ctx->base.ring = safexcel_select_ring(priv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1155) ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1156) EIP197_GFP_FLAGS(*base),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1157) &ctx->base.ctxr_dma);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1158) if (!ctx->base.ctxr)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1159) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1160) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1161)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1162) ring = ctx->base.ring;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1163)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1164) spin_lock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1165) ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1166) spin_unlock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1167)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1168) queue_work(priv->ring[ring].workqueue,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1169) &priv->ring[ring].work_data.work);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1170)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1171) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1172) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1173)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1174) static int safexcel_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1175) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1176) return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1177) SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1178) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1179)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1180) static int safexcel_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1181) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1182) return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1183) SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1184) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1185)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1186) static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1187) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1188) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1189) struct safexcel_alg_template *tmpl =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1190) container_of(tfm->__crt_alg, struct safexcel_alg_template,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1191) alg.skcipher.base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1192)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1193) crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1194) sizeof(struct safexcel_cipher_req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1195)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1196) ctx->base.priv = tmpl->priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1197)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1198) ctx->base.send = safexcel_skcipher_send;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1199) ctx->base.handle_result = safexcel_skcipher_handle_result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1200) ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1201) ctx->ctrinit = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1202) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1203) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1204)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1205) static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1206) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1207) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1208)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1209) memzero_explicit(ctx->key, sizeof(ctx->key));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1210)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1211) /* context not allocated, nothing to invalidate; nonzero return tells caller to skip */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1212) if (!ctx->base.ctxr)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1213) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1214)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1215) memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1216) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1217) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1218)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1219) static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1220) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1221) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1222) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1223) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1224)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1225) if (safexcel_cipher_cra_exit(tfm))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1226) return;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1227)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1228) if (priv->flags & EIP197_TRC_CACHE) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1229) ret = safexcel_skcipher_exit_inv(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1230) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1231) dev_warn(priv->dev, "skcipher: invalidation error %d\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1232) ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1233) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1234) dma_pool_free(priv->context_pool, ctx->base.ctxr,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1235) ctx->base.ctxr_dma);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1236) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1237) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1238)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1239) static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1240) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1241) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1242) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1243) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1244)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1245) if (safexcel_cipher_cra_exit(tfm))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1246) return;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1247)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1248) if (priv->flags & EIP197_TRC_CACHE) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1249) ret = safexcel_aead_exit_inv(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1250) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1251) dev_warn(priv->dev, "aead: invalidation error %d\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1252) ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1253) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1254) dma_pool_free(priv->context_pool, ctx->base.ctxr,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1255) ctx->base.ctxr_dma);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1256) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1257) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1258)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1259) static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1260) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1261) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1262)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1263) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1264) ctx->alg = SAFEXCEL_AES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1265) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1266) ctx->blocksz = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1267) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1268) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1269) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1270)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1271) struct safexcel_alg_template safexcel_alg_ecb_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1272) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1273) .algo_mask = SAFEXCEL_ALG_AES,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1274) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1275) .setkey = safexcel_skcipher_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1276) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1277) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1278) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1279) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1280) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1281) .cra_name = "ecb(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1282) .cra_driver_name = "safexcel-ecb-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1283) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1284) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1285) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1286) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1287) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1288) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1289) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1290) .cra_init = safexcel_skcipher_aes_ecb_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1291) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1292) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1293) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1294) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1295) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1296)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1297) static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1298) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1299) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1300)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1301) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1302) ctx->alg = SAFEXCEL_AES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1303) ctx->blocksz = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1304) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1305) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1306) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1307)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1308) struct safexcel_alg_template safexcel_alg_cbc_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1309) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1310) .algo_mask = SAFEXCEL_ALG_AES,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1311) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1312) .setkey = safexcel_skcipher_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1313) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1314) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1315) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1316) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1317) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1318) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1319) .cra_name = "cbc(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1320) .cra_driver_name = "safexcel-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1321) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1322) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1323) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1324) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1325) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1326) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1327) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1328) .cra_init = safexcel_skcipher_aes_cbc_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1329) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1330) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1331) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1332) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1333) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1334)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1335) static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1336) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1337) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1338)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1339) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1340) ctx->alg = SAFEXCEL_AES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1341) ctx->blocksz = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1342) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1343) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1344) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1345)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1346) struct safexcel_alg_template safexcel_alg_cfb_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1347) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1348) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1349) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1350) .setkey = safexcel_skcipher_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1351) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1352) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1353) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1354) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1355) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1356) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1357) .cra_name = "cfb(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1358) .cra_driver_name = "safexcel-cfb-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1359) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1360) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1361) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1362) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1363) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1364) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1365) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1366) .cra_init = safexcel_skcipher_aes_cfb_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1367) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1368) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1369) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1370) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1371) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1372)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1373) static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1374) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1375) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1376)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1377) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1378) ctx->alg = SAFEXCEL_AES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1379) ctx->blocksz = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1380) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1381) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1382) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1383)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1384) struct safexcel_alg_template safexcel_alg_ofb_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1385) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1386) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1387) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1388) .setkey = safexcel_skcipher_aes_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1389) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1390) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1391) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1392) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1393) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1394) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1395) .cra_name = "ofb(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1396) .cra_driver_name = "safexcel-ofb-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1397) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1398) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1399) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1400) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1401) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1402) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1403) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1404) .cra_init = safexcel_skcipher_aes_ofb_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1405) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1406) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1407) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1408) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1409) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1410)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1411) static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1412) const u8 *key, unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1413) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1414) struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1415) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1416) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1417) struct crypto_aes_ctx aes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1418) int ret, i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1419) unsigned int keylen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1420)
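	/*
	 * rfc3686(ctr(aes)) keys as passed in consist of the AES key followed
	 * by a 4 byte nonce. The nonce is stored separately and, combined with
	 * the 8 byte per-request IV and a 32 bit block counter starting at 1,
	 * forms the initial counter block.
	 */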
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1421) /* last 4 bytes of key are the nonce! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1422) ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1423) /* exclude the nonce here */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1424) keylen = len - CTR_RFC3686_NONCE_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1425) ret = aes_expandkey(&aes, key, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1426) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1427) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1428)
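	/* if context exists and key changed, it needs to be invalidated */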
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1429) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1430) for (i = 0; i < keylen / sizeof(u32); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1431) if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1432) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1433) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1434) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1435) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1436) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1437)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1438) for (i = 0; i < keylen / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1439) ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1440)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1441) ctx->key_len = keylen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1442)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1443) memzero_explicit(&aes, sizeof(aes));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1444) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1445) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1446)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1447) static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1448) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1449) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1450)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1451) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1452) ctx->alg = SAFEXCEL_AES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1453) ctx->blocksz = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1454) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1455) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1456) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1457)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1458) struct safexcel_alg_template safexcel_alg_ctr_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1459) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1460) .algo_mask = SAFEXCEL_ALG_AES,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1461) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1462) .setkey = safexcel_skcipher_aesctr_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1463) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1464) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1465) /* Add nonce size */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1466) .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1467) .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1468) .ivsize = CTR_RFC3686_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1469) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1470) .cra_name = "rfc3686(ctr(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1471) .cra_driver_name = "safexcel-ctr-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1472) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1473) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1474) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1475) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1476) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1477) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1478) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1479) .cra_init = safexcel_skcipher_aes_ctr_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1480) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1481) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1482) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1483) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1484) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1485)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1486) static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1487) unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1488) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1489) struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1490) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1491) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1492)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1493) ret = verify_skcipher_des_key(ctfm, key);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1494) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1495) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1496)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1497) /* if context exists and key changed, it needs to be invalidated */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1498) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1499) if (memcmp(ctx->key, key, len))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1500) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1501)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1502) memcpy(ctx->key, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1503) ctx->key_len = len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1504)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1505) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1506) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1507)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1508) static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1509) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1510) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1511)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1512) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1513) ctx->alg = SAFEXCEL_DES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1514) ctx->blocksz = DES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1515) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1516) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1517) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1518) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1519)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1520) struct safexcel_alg_template safexcel_alg_cbc_des = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1521) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1522) .algo_mask = SAFEXCEL_ALG_DES,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1523) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1524) .setkey = safexcel_des_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1525) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1526) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1527) .min_keysize = DES_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1528) .max_keysize = DES_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1529) .ivsize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1530) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1531) .cra_name = "cbc(des)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1532) .cra_driver_name = "safexcel-cbc-des",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1533) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1534) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1535) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1536) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1537) .cra_blocksize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1538) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1539) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1540) .cra_init = safexcel_skcipher_des_cbc_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1541) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1542) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1543) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1544) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1545) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1546)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1547) static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1548) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1549) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1550)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1551) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1552) ctx->alg = SAFEXCEL_DES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1553) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1554) ctx->blocksz = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1555) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1556) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1557) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1558)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1559) struct safexcel_alg_template safexcel_alg_ecb_des = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1560) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1561) .algo_mask = SAFEXCEL_ALG_DES,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1562) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1563) .setkey = safexcel_des_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1564) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1565) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1566) .min_keysize = DES_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1567) .max_keysize = DES_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1568) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1569) .cra_name = "ecb(des)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1570) .cra_driver_name = "safexcel-ecb-des",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1571) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1572) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1573) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1574) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1575) .cra_blocksize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1576) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1577) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1578) .cra_init = safexcel_skcipher_des_ecb_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1579) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1580) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1581) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1582) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1583) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1584)
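/*
 * 3DES setkey: reject weak/invalid keys via verify_skcipher_des3_key() and,
 * if a cached context record already exists with a different key, mark it
 * for invalidation before the new key is stored.
 */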
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1585) static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1586) const u8 *key, unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1587) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1588) struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1589) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1590) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1591)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1592) err = verify_skcipher_des3_key(ctfm, key);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1593) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1594) return err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1595)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1596) 	/* if context exists and key changed, need to invalidate it */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1597) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1598) if (memcmp(ctx->key, key, len))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1599) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1600)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1601) memcpy(ctx->key, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1602) ctx->key_len = len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1603)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1604) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1605) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1606)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1607) static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1608) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1609) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1610)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1611) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1612) ctx->alg = SAFEXCEL_3DES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1613) ctx->blocksz = DES3_EDE_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1614) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1615) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1616) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1617) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1618)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1619) struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1620) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1621) .algo_mask = SAFEXCEL_ALG_DES,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1622) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1623) .setkey = safexcel_des3_ede_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1624) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1625) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1626) .min_keysize = DES3_EDE_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1627) .max_keysize = DES3_EDE_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1628) .ivsize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1629) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1630) .cra_name = "cbc(des3_ede)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1631) .cra_driver_name = "safexcel-cbc-des3_ede",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1632) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1633) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1634) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1635) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1636) .cra_blocksize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1637) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1638) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1639) .cra_init = safexcel_skcipher_des3_cbc_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1640) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1641) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1642) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1643) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1644) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1645)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1646) static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1647) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1648) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1649)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1650) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1651) ctx->alg = SAFEXCEL_3DES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1652) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1653) ctx->blocksz = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1654) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1655) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1656) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1657)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1658) struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1659) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1660) .algo_mask = SAFEXCEL_ALG_DES,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1661) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1662) .setkey = safexcel_des3_ede_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1663) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1664) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1665) .min_keysize = DES3_EDE_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1666) .max_keysize = DES3_EDE_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1667) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1668) .cra_name = "ecb(des3_ede)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1669) .cra_driver_name = "safexcel-ecb-des3_ede",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1670) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1671) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1672) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1673) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1674) .cra_blocksize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1675) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1676) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1677) .cra_init = safexcel_skcipher_des3_ecb_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1678) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1679) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1680) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1681) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1682) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1683)
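/* Thin wrappers that queue the AEAD request to the engine in either direction */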
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1684) static int safexcel_aead_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1685) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1686) struct safexcel_cipher_req *creq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1687)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1688) return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1689) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1690)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1691) static int safexcel_aead_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1692) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1693) struct safexcel_cipher_req *creq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1694)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1695) return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1696) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1697)
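/*
 * Common AEAD transform init: sets the request size and the AES-CBC
 * defaults; the per-algorithm init helpers below override the cipher,
 * mode, block size and hash state as needed.
 */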
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1698) static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1699) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1700) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1701) struct safexcel_alg_template *tmpl =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1702) container_of(tfm->__crt_alg, struct safexcel_alg_template,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1703) alg.aead.base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1704)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1705) crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1706) sizeof(struct safexcel_cipher_req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1707)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1708) ctx->base.priv = tmpl->priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1709)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1710) ctx->alg = SAFEXCEL_AES; /* default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1711) ctx->blocksz = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1712) ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1713) ctx->ctrinit = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1714) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1715) ctx->aead = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1716) ctx->base.send = safexcel_aead_send;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1717) ctx->base.handle_result = safexcel_aead_handle_result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1718) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1719) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1720)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1721) static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1722) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1723) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1724)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1725) safexcel_aead_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1726) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1727) ctx->state_sz = SHA1_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1728) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1729) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1730)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1731) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1732) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1733) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1734) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1735) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1736) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1737) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1738) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1739) .maxauthsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1740) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1741) .cra_name = "authenc(hmac(sha1),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1742) .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1743) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1744) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1745) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1746) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1747) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1748) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1749) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1750) .cra_init = safexcel_aead_sha1_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1751) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1752) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1753) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1754) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1755) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1756)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1757) static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1758) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1759) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1760)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1761) safexcel_aead_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1762) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1763) ctx->state_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1764) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1765) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1766)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1767) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1768) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1769) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1770) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1771) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1772) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1773) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1774) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1775) .maxauthsize = SHA256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1776) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1777) .cra_name = "authenc(hmac(sha256),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1778) .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1779) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1780) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1781) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1782) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1783) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1784) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1785) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1786) .cra_init = safexcel_aead_sha256_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1787) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1788) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1789) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1790) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1791) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1792)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1793) static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1794) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1795) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1796)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1797) safexcel_aead_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1798) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1799) ctx->state_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1800) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1801) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1802)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1803) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1804) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1805) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1806) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1807) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1808) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1809) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1810) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1811) .maxauthsize = SHA224_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1812) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1813) .cra_name = "authenc(hmac(sha224),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1814) .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1815) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1816) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1817) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1818) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1819) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1820) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1821) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1822) .cra_init = safexcel_aead_sha224_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1823) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1824) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1825) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1826) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1827) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1828)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1829) static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1830) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1831) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1832)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1833) safexcel_aead_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1834) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1835) ctx->state_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1836) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1837) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1838)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1839) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1840) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1841) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1842) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1843) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1844) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1845) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1846) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1847) .maxauthsize = SHA512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1848) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1849) .cra_name = "authenc(hmac(sha512),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1850) .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1851) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1852) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1853) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1854) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1855) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1856) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1857) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1858) .cra_init = safexcel_aead_sha512_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1859) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1860) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1861) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1862) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1863) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1864)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1865) static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1866) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1867) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1868)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1869) safexcel_aead_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1870) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1871) ctx->state_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1872) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1873) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1874)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1875) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1876) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1877) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1878) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1879) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1880) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1881) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1882) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1883) .maxauthsize = SHA384_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1884) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1885) .cra_name = "authenc(hmac(sha384),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1886) .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1887) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1888) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1889) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1890) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1891) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1892) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1893) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1894) .cra_init = safexcel_aead_sha384_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1895) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1896) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1897) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1898) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1899) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1900)
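/* 3DES-CBC AEAD variants: reuse the per-hash init, then override the cipher */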
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1901) static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1902) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1903) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1904)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1905) safexcel_aead_sha1_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1906) ctx->alg = SAFEXCEL_3DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1907) ctx->blocksz = DES3_EDE_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1908) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1909) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1910) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1911)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1912) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1913) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1914) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1915) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1916) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1917) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1918) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1919) .ivsize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1920) .maxauthsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1921) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1922) .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1923) .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1924) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1925) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1926) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1927) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1928) .cra_blocksize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1929) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1930) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1931) .cra_init = safexcel_aead_sha1_des3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1932) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1933) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1934) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1935) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1936) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1937)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1938) static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1939) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1940) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1941)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1942) safexcel_aead_sha256_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1943) ctx->alg = SAFEXCEL_3DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1944) ctx->blocksz = DES3_EDE_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1945) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1946) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1947) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1948)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1949) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1950) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1951) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1952) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1953) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1954) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1955) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1956) .ivsize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1957) .maxauthsize = SHA256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1958) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1959) .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1960) .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1961) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1962) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1963) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1964) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1965) .cra_blocksize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1966) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1967) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1968) .cra_init = safexcel_aead_sha256_des3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1969) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1970) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1971) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1972) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1973) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1974)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1975) static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1976) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1977) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1978)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1979) safexcel_aead_sha224_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1980) ctx->alg = SAFEXCEL_3DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1981) ctx->blocksz = DES3_EDE_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1982) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1983) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1984) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1985)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1986) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1987) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1988) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1989) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1990) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1991) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1992) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1993) .ivsize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1994) .maxauthsize = SHA224_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1995) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1996) .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1997) .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1998) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1999) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2000) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2001) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2002) .cra_blocksize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2003) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2004) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2005) .cra_init = safexcel_aead_sha224_des3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2006) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2007) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2008) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2009) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2010) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2011)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2012) static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2013) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2014) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2015)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2016) safexcel_aead_sha512_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2017) ctx->alg = SAFEXCEL_3DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2018) ctx->blocksz = DES3_EDE_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2019) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2020) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2021) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2022)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2023) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2024) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2025) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2026) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2027) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2028) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2029) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2030) .ivsize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2031) .maxauthsize = SHA512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2032) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2033) .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2034) .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2035) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2036) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2037) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2038) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2039) .cra_blocksize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2040) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2041) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2042) .cra_init = safexcel_aead_sha512_des3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2043) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2044) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2045) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2046) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2047) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2048)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2049) static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2050) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2051) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2052)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2053) safexcel_aead_sha384_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2054) ctx->alg = SAFEXCEL_3DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2055) ctx->blocksz = DES3_EDE_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2056) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2057) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2058) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2059)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2060) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2061) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2062) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2063) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2064) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2065) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2066) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2067) .ivsize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2068) .maxauthsize = SHA384_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2069) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2070) .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2071) .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2072) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2073) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2074) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2075) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2076) .cra_blocksize = DES3_EDE_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2077) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2078) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2079) .cra_init = safexcel_aead_sha384_des3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2080) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2081) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2082) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2083) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2084) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2085)
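/* Single DES-CBC AEAD variants, following the same pattern as the 3DES ones above */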
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2086) static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2087) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2088) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2089)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2090) safexcel_aead_sha1_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2091) ctx->alg = SAFEXCEL_DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2092) ctx->blocksz = DES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2093) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2094) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2095) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2096)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2097) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2098) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2099) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2100) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2101) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2102) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2103) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2104) .ivsize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2105) .maxauthsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2106) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2107) .cra_name = "authenc(hmac(sha1),cbc(des))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2108) .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2109) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2110) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2111) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2112) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2113) .cra_blocksize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2114) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2115) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2116) .cra_init = safexcel_aead_sha1_des_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2117) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2118) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2119) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2120) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2121) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2122)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2123) static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2124) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2125) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2126)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2127) safexcel_aead_sha256_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2128) ctx->alg = SAFEXCEL_DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2129) ctx->blocksz = DES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2130) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2131) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2132) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2133)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2134) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2135) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2136) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2137) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2138) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2139) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2140) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2141) .ivsize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2142) .maxauthsize = SHA256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2143) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2144) .cra_name = "authenc(hmac(sha256),cbc(des))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2145) .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2146) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2147) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2148) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2149) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2150) .cra_blocksize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2151) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2152) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2153) .cra_init = safexcel_aead_sha256_des_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2154) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2155) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2156) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2157) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2158) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2159)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2160) static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2161) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2162) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2163)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2164) safexcel_aead_sha224_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2165) ctx->alg = SAFEXCEL_DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2166) ctx->blocksz = DES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2167) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2168) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2169) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2170)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2171) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2172) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2173) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2174) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2175) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2176) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2177) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2178) .ivsize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2179) .maxauthsize = SHA224_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2180) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2181) .cra_name = "authenc(hmac(sha224),cbc(des))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2182) .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2183) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2184) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2185) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2186) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2187) .cra_blocksize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2188) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2189) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2190) .cra_init = safexcel_aead_sha224_des_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2191) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2192) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2193) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2194) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2195) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2196)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2197) static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2198) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2199) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2200)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2201) safexcel_aead_sha512_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2202) ctx->alg = SAFEXCEL_DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2203) ctx->blocksz = DES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2204) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2205) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2206) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2207)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2208) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2209) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2210) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2211) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2212) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2213) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2214) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2215) .ivsize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2216) .maxauthsize = SHA512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2217) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2218) .cra_name = "authenc(hmac(sha512),cbc(des))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2219) .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2220) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2221) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2222) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2223) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2224) .cra_blocksize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2225) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2226) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2227) .cra_init = safexcel_aead_sha512_des_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2228) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2229) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2230) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2231) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2232) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2233)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2234) static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2235) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2236) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2237)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2238) safexcel_aead_sha384_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2239) ctx->alg = SAFEXCEL_DES; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2240) ctx->blocksz = DES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2241) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2242) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2243) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2244)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2245) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2246) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2247) .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2248) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2249) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2250) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2251) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2252) .ivsize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2253) .maxauthsize = SHA384_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2254) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2255) .cra_name = "authenc(hmac(sha384),cbc(des))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2256) .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2257) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2258) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2259) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2260) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2261) .cra_blocksize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2262) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2263) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2264) .cra_init = safexcel_aead_sha384_des_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2265) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2266) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2267) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2268) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2269) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2270)
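/* AES-CTR (RFC3686) AEAD variants: keep the AES defaults but switch to counter mode */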
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2271) static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2272) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2273) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2274)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2275) safexcel_aead_sha1_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2276) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2277) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2278) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2279)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2280) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2281) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2282) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2283) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2284) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2285) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2286) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2287) .ivsize = CTR_RFC3686_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2288) .maxauthsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2289) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2290) .cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2291) .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2292) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2293) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2294) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2295) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2296) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2297) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2298) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2299) .cra_init = safexcel_aead_sha1_ctr_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2300) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2301) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2302) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2303) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2304) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2305)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2306) static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2307) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2308) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2309)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2310) safexcel_aead_sha256_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2311) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2312) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2313) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2314)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2315) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2316) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2317) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2318) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2319) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2320) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2321) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2322) .ivsize = CTR_RFC3686_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2323) .maxauthsize = SHA256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2324) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2325) .cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2326) .cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2327) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2328) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2329) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2330) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2331) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2332) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2333) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2334) .cra_init = safexcel_aead_sha256_ctr_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2335) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2336) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2337) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2338) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2339) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2340)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2341) static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2342) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2343) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2344)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2345) safexcel_aead_sha224_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2346) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2347) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2348) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2349)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2350) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2351) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2352) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2353) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2354) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2355) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2356) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2357) .ivsize = CTR_RFC3686_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2358) .maxauthsize = SHA224_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2359) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2360) .cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2361) .cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2362) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2363) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2364) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2365) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2366) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2367) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2368) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2369) .cra_init = safexcel_aead_sha224_ctr_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2370) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2371) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2372) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2373) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2374) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2375)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2376) static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2377) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2378) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2379)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2380) safexcel_aead_sha512_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2381) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2382) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2383) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2384)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2385) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2386) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2387) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2388) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2389) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2390) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2391) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2392) .ivsize = CTR_RFC3686_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2393) .maxauthsize = SHA512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2394) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2395) .cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2396) .cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2397) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2398) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2399) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2400) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2401) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2402) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2403) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2404) .cra_init = safexcel_aead_sha512_ctr_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2405) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2406) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2407) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2408) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2409) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2410)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2411) static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2412) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2413) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2414)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2415) safexcel_aead_sha384_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2416) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2417) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2418) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2419)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2420) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2421) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2422) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2423) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2424) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2425) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2426) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2427) .ivsize = CTR_RFC3686_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2428) .maxauthsize = SHA384_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2429) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2430) .cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2431) .cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2432) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2433) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2434) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2435) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2436) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2437) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2438) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2439) .cra_init = safexcel_aead_sha384_ctr_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2440) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2441) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2442) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2443) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2444) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2445)
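/*
 * XTS-AES setkey: the key blob is twice the AES key size. After
 * xts_verify_key() rejects invalid keys, the first half is expanded as the
 * data key and the second half as the tweak key, and both are stored
 * back-to-back in ctx->key. A changed key invalidates any cached context
 * record when the transform record cache is in use.
 */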
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2446) static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2447) const u8 *key, unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2448) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2449) struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2450) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2451) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2452) struct crypto_aes_ctx aes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2453) int ret, i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2454) unsigned int keylen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2455)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2456) /* Check for illegal XTS keys */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2457) ret = xts_verify_key(ctfm, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2458) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2459) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2460)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2461) /* Only half of the key data is cipher key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2462) keylen = (len >> 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2463) ret = aes_expandkey(&aes, key, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2464) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2465) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2466)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2467) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2468) for (i = 0; i < keylen / sizeof(u32); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2469) if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2470) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2471) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2472) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2473) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2474) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2475)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2476) for (i = 0; i < keylen / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2477) ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2478)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2479) /* The other half is the tweak key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2480) ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2481) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2482) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2483)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2484) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2485) for (i = 0; i < keylen / sizeof(u32); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2486) if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2487) aes.key_enc[i]) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2488) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2489) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2490) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2491) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2492) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2493)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2494) for (i = 0; i < keylen / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2495) ctx->key[i + keylen / sizeof(u32)] =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2496) cpu_to_le32(aes.key_enc[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2497)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2498) ctx->key_len = keylen << 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2499)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2500) memzero_explicit(&aes, sizeof(aes));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2501) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2502) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2503)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2504) static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2505) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2506) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2507)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2508) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2509) ctx->alg = SAFEXCEL_AES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2510) ctx->blocksz = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2511) ctx->xts = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2512) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2513) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2514) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2515)
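/* XTS needs at least one full AES block of payload; reject shorter requests. */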
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2516) static int safexcel_encrypt_xts(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2517) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2518) if (req->cryptlen < XTS_BLOCK_SIZE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2519) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2520) return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2521) SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2522) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2523)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2524) static int safexcel_decrypt_xts(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2525) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2526) if (req->cryptlen < XTS_BLOCK_SIZE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2527) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2528) return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2529) SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2530) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2531)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2532) struct safexcel_alg_template safexcel_alg_xts_aes = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2533) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2534) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2535) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2536) .setkey = safexcel_skcipher_aesxts_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2537) .encrypt = safexcel_encrypt_xts,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2538) .decrypt = safexcel_decrypt_xts,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2539) /* XTS actually uses 2 AES keys glued together */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2540) .min_keysize = AES_MIN_KEY_SIZE * 2,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2541) .max_keysize = AES_MAX_KEY_SIZE * 2,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2542) .ivsize = XTS_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2543) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2544) .cra_name = "xts(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2545) .cra_driver_name = "safexcel-xts-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2546) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2547) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2548) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2549) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2550) .cra_blocksize = XTS_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2551) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2552) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2553) .cra_init = safexcel_skcipher_aes_xts_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2554) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2555) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2556) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2557) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2558) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2559)
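/*
 * GCM setkey: expand and store the AES key, then derive the GHASH hash key H
 * by encrypting an all-zero block with the software AES cipher in ctx->hkaes.
 * H is kept big-endian in the ipad area of the context; a change of either
 * the key or H invalidates a cached context record.
 */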
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2560) static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2561) unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2562) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2563) struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2564) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2565) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2566) struct crypto_aes_ctx aes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2567) u32 hashkey[AES_BLOCK_SIZE >> 2];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2568) int ret, i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2569)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2570) ret = aes_expandkey(&aes, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2571) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2572) memzero_explicit(&aes, sizeof(aes));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2573) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2574) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2575)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2576) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2577) for (i = 0; i < len / sizeof(u32); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2578) if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2579) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2580) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2581) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2582) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2583) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2584)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2585) for (i = 0; i < len / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2586) ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2587)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2588) ctx->key_len = len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2589)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2590) /* Compute hash key by encrypting zeroes with cipher key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2591) crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2592) crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2593) CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2594) ret = crypto_cipher_setkey(ctx->hkaes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2597)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2598) memset(hashkey, 0, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2599) crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2600)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2601) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2602) for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2603) if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2604) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2605) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2606) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2607) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2608) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2609)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2610) for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2611) ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2612)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2613) memzero_explicit(hashkey, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2614) memzero_explicit(&aes, sizeof(aes));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2615) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2616) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2617)
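/*
 * GCM transform init: select GHASH authentication and XCM mode, and allocate
 * the plain "aes" cipher that setkey uses to compute the hash key.
 */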
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2618) static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2619) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2620) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2621)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2622) safexcel_aead_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2623) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2624) ctx->state_sz = GHASH_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2625) ctx->xcm = EIP197_XCM_MODE_GCM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2626) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2627)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2628) ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2629) return PTR_ERR_OR_ZERO(ctx->hkaes);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2630) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2631)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2632) static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2633) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2634) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2635)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2636) crypto_free_cipher(ctx->hkaes);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2637) safexcel_aead_cra_exit(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2638) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2639)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2640) static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2641) unsigned int authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2642) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2643) return crypto_gcm_check_authsize(authsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2644) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2645)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2646) struct safexcel_alg_template safexcel_alg_gcm = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2647) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2648) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2649) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2650) .setkey = safexcel_aead_gcm_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2651) .setauthsize = safexcel_aead_gcm_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2652) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2653) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2654) .ivsize = GCM_AES_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2655) .maxauthsize = GHASH_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2656) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2657) .cra_name = "gcm(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2658) .cra_driver_name = "safexcel-gcm-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2659) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2660) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2661) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2662) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2663) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2664) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2665) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2666) .cra_init = safexcel_aead_gcm_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2667) .cra_exit = safexcel_aead_gcm_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2668) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2669) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2670) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2671) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2672)
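/*
 * CCM setkey: the AES key doubles as the CBC-MAC key, so it is stored both as
 * the cipher key and, big-endian, at offset 2 * AES_BLOCK_SIZE in the ipad
 * hash state. The XCBC hash algorithm variant is chosen by AES key size.
 */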
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2673) static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2674) unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2675) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2676) struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2677) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2678) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2679) struct crypto_aes_ctx aes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2680) int ret, i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2681)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2682) ret = aes_expandkey(&aes, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2683) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2684) memzero_explicit(&aes, sizeof(aes));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2685) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2686) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2687)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2688) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2689) for (i = 0; i < len / sizeof(u32); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2690) if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2691) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2692) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2693) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2694) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2695) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2696)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2697) for (i = 0; i < len / sizeof(u32); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2698) ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2699) ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2700) cpu_to_be32(aes.key_enc[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2701) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2702)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2703) ctx->key_len = len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2704) ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2705)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2706) if (len == AES_KEYSIZE_192)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2707) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2708) else if (len == AES_KEYSIZE_256)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2709) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2710) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2711) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2712)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2713) memzero_explicit(&aes, sizeof(aes));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2714) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2715) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2716)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2717) static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2718) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2719) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2720)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2721) safexcel_aead_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2722) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2723) ctx->state_sz = 3 * AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2724) ctx->xcm = EIP197_XCM_MODE_CCM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2725) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2726) ctx->ctrinit = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2727) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2728) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2729)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2730) static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2731) unsigned int authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2732) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2733) /* Borrowed from crypto/ccm.c */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2734) switch (authsize) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2735) case 4:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2736) case 6:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2737) case 8:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2738) case 10:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2739) case 12:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2740) case 14:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2741) case 16:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2742) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2743) default:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2744) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2745) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2746)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2747) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2748) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2749)
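/*
 * CCM requests: iv[0] holds the CCM length-field size minus one (L - 1 as per
 * RFC 3610), so only values 1..7 are valid.
 */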
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2750) static int safexcel_ccm_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2751) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2752) struct safexcel_cipher_req *creq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2753)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2754) if (req->iv[0] < 1 || req->iv[0] > 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2755) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2756)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2757) return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2758) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2759)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2760) static int safexcel_ccm_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2761) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2762) struct safexcel_cipher_req *creq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2763)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2764) if (req->iv[0] < 1 || req->iv[0] > 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2765) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2766)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2767) return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2768) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2769)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2770) struct safexcel_alg_template safexcel_alg_ccm = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2771) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2772) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2773) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2774) .setkey = safexcel_aead_ccm_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2775) .setauthsize = safexcel_aead_ccm_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2776) .encrypt = safexcel_ccm_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2777) .decrypt = safexcel_ccm_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2778) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2779) .maxauthsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2780) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2781) .cra_name = "ccm(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2782) .cra_driver_name = "safexcel-ccm-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2783) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2784) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2785) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2786) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2787) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2788) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2789) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2790) .cra_init = safexcel_aead_ccm_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2791) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2792) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2793) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2794) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2795) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2796)
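/*
 * Store the 256-bit ChaCha20 key and invalidate a cached context record if
 * the key changed.
 */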
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2797) static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2798) const u8 *key)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2799) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2800) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2801)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2802) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2803) if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2804) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2805)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2806) memcpy(ctx->key, key, CHACHA_KEY_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2807) ctx->key_len = CHACHA_KEY_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2808) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2809)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2810) static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2811) const u8 *key, unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2812) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2813) struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2814)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2815) if (len != CHACHA_KEY_SIZE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2816) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2817)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2818) safexcel_chacha20_setkey(ctx, key);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2819)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2820) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2821) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2822)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2823) static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2824) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2825) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2826)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2827) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2828) ctx->alg = SAFEXCEL_CHACHA20;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2829) ctx->ctrinit = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2830) ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2831) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2832) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2833)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2834) struct safexcel_alg_template safexcel_alg_chacha20 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2835) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2836) .algo_mask = SAFEXCEL_ALG_CHACHA20,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2837) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2838) .setkey = safexcel_skcipher_chacha20_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2839) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2840) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2841) .min_keysize = CHACHA_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2842) .max_keysize = CHACHA_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2843) .ivsize = CHACHA_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2844) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2845) .cra_name = "chacha20",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2846) .cra_driver_name = "safexcel-chacha20",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2847) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2848) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2849) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2850) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2851) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2852) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2853) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2854) .cra_init = safexcel_skcipher_chacha20_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2855) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2856) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2857) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2858) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2859) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2860)
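/*
 * ChaCha20-Poly1305 setkey: for the rfc7539esp variant the last four key
 * bytes are the implicit nonce, which is saved in ctx->nonce before the
 * remaining 256-bit ChaCha20 key is checked and stored.
 */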
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2861) static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2862) const u8 *key, unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2863) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2864) struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2865)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2866) if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2867) len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2868) /* ESP variant has nonce appended to key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2869) len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
		ctx->nonce = get_unaligned((const u32 *)(key + len));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2871) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2872) if (len != CHACHA_KEY_SIZE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2873) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2874)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2875) safexcel_chacha20_setkey(ctx, key);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2876)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2877) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2878) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2879)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2880) static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2881) unsigned int authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2882) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2883) if (authsize != POLY1305_DIGEST_SIZE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2884) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2885) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2886) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2887)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2888) static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2889) enum safexcel_cipher_direction dir)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2890) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2891) struct safexcel_cipher_req *creq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2892) struct crypto_aead *aead = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2893) struct crypto_tfm *tfm = crypto_aead_tfm(aead);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2894) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2895) struct aead_request *subreq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2896) u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2897) int ret = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2898)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2899) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2900) * Instead of wasting time detecting umpteen silly corner cases,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2901) * just dump all "small" requests to the fallback implementation.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2902) * HW would not be faster on such small requests anyway.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2903) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2904) if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2905) req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2906) req->cryptlen > POLY1305_DIGEST_SIZE)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2907) return safexcel_queue_req(&req->base, creq, dir);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2908) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2909)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2910) /* HW cannot do full (AAD+payload) zero length, use fallback */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2911) memcpy(key, ctx->key, CHACHA_KEY_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2912) if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2913) /* ESP variant has nonce appended to the key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2914) key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2915) ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2916) CHACHA_KEY_SIZE +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2917) EIP197_AEAD_IPSEC_NONCE_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2918) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2919) ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2920) CHACHA_KEY_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2921) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2922) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2923) crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2924) crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2925) CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2926) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2927) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2928)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2929) aead_request_set_tfm(subreq, ctx->fback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2930) aead_request_set_callback(subreq, req->base.flags, req->base.complete,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2931) req->base.data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2932) aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2933) req->iv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2934) aead_request_set_ad(subreq, req->assoclen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2935)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2936) return (dir == SAFEXCEL_ENCRYPT) ?
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2937) crypto_aead_encrypt(subreq) :
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2938) crypto_aead_decrypt(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2939) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2940)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2941) static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2942) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2943) return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2944) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2945)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2946) static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2947) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2948) return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2949) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2950)
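/*
 * Common init for AEADs with a software fallback: allocate a synchronous
 * implementation of the same algorithm that does not itself need a fallback,
 * and size the request context to fit either a driver request or a fallback
 * sub-request.
 */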
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2951) static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2952) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2953) struct crypto_aead *aead = __crypto_aead_cast(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2954) struct aead_alg *alg = crypto_aead_alg(aead);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2955) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2956)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2957) safexcel_aead_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2958)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2959) /* Allocate fallback implementation */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2960) ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2961) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2962) CRYPTO_ALG_NEED_FALLBACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2963) if (IS_ERR(ctx->fback))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2964) return PTR_ERR(ctx->fback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2965)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2966) crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2967) sizeof(struct aead_request) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2968) crypto_aead_reqsize(ctx->fback)));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2969)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2970) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2971) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2972)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2973) static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2974) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2975) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2976)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2977) safexcel_aead_fallback_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2978) ctx->alg = SAFEXCEL_CHACHA20;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2979) ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2980) CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2981) ctx->ctrinit = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2982) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2983) ctx->state_sz = 0; /* Precomputed by HW */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2984) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2985) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2986)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2987) static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2988) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2989) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2990)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2991) crypto_free_aead(ctx->fback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2992) safexcel_aead_cra_exit(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2993) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2994)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2995) struct safexcel_alg_template safexcel_alg_chachapoly = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2996) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2997) .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2998) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2999) .setkey = safexcel_aead_chachapoly_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3000) .setauthsize = safexcel_aead_chachapoly_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3001) .encrypt = safexcel_aead_chachapoly_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3002) .decrypt = safexcel_aead_chachapoly_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3003) .ivsize = CHACHAPOLY_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3004) .maxauthsize = POLY1305_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3005) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3006) .cra_name = "rfc7539(chacha20,poly1305)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3007) .cra_driver_name = "safexcel-chacha20-poly1305",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3008) /* +1 to put it above HW chacha + SW poly */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3009) .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3010) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3011) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3012) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3013) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3014) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3015) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3016) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3017) .cra_init = safexcel_aead_chachapoly_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3018) .cra_exit = safexcel_aead_fallback_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3019) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3020) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3021) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3022) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3023)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3024) static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3025) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3026) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3027) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3028)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3029) ret = safexcel_aead_chachapoly_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3030) ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3031) ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3032) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3033) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3034)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3035) struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3036) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3037) .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3038) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3039) .setkey = safexcel_aead_chachapoly_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3040) .setauthsize = safexcel_aead_chachapoly_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3041) .encrypt = safexcel_aead_chachapoly_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3042) .decrypt = safexcel_aead_chachapoly_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3043) .ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3044) .maxauthsize = POLY1305_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3045) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3046) .cra_name = "rfc7539esp(chacha20,poly1305)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3047) .cra_driver_name = "safexcel-chacha20-poly1305-esp",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3048) /* +1 to put it above HW chacha + SW poly */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3049) .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3050) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3051) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3052) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3053) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3054) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3055) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3056) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3057) .cra_init = safexcel_aead_chachapolyesp_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3058) .cra_exit = safexcel_aead_fallback_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3059) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3060) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3061) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3062) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3063)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3064) static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3065) const u8 *key, unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3066) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3067) struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3068) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3069) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3070)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3071) if (len != SM4_KEY_SIZE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3072) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3073)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3074) if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3075) if (memcmp(ctx->key, key, SM4_KEY_SIZE))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3076) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3077)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3078) memcpy(ctx->key, key, SM4_KEY_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3079) ctx->key_len = SM4_KEY_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3080)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3081) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3082) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3083)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3084) static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3085) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3086) /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3087) if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3088) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3089) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3090) return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3091) SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3092) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3093)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3094) static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3095) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3096) /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3097) if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3098) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3099) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3100) return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3101) SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3102) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3103)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3104) static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3105) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3106) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3107)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3108) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3109) ctx->alg = SAFEXCEL_SM4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3110) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
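/* ECB has no IV */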
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3111) ctx->blocksz = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3112) ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3113) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3114) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3115)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3116) struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3117) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3118) .algo_mask = SAFEXCEL_ALG_SM4,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3119) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3120) .setkey = safexcel_skcipher_sm4_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3121) .encrypt = safexcel_sm4_blk_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3122) .decrypt = safexcel_sm4_blk_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3123) .min_keysize = SM4_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3124) .max_keysize = SM4_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3125) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3126) .cra_name = "ecb(sm4)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3127) .cra_driver_name = "safexcel-ecb-sm4",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3128) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3129) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3130) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3131) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3132) .cra_blocksize = SM4_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3133) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3134) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3135) .cra_init = safexcel_skcipher_sm4_ecb_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3136) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3137) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3138) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3139) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3140) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3141)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3142) static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3143) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3144) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3145)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3146) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3147) ctx->alg = SAFEXCEL_SM4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3148) ctx->blocksz = SM4_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3149) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3150) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3151) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3152)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3153) struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3154) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3155) .algo_mask = SAFEXCEL_ALG_SM4,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3156) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3157) .setkey = safexcel_skcipher_sm4_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3158) .encrypt = safexcel_sm4_blk_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3159) .decrypt = safexcel_sm4_blk_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3160) .min_keysize = SM4_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3161) .max_keysize = SM4_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3162) .ivsize = SM4_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3163) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3164) .cra_name = "cbc(sm4)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3165) .cra_driver_name = "safexcel-cbc-sm4",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3166) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3167) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3168) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3169) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3170) .cra_blocksize = SM4_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3171) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3172) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3173) .cra_init = safexcel_skcipher_sm4_cbc_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3174) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3175) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3176) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3177) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3178) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3179)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3180) static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3181) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3182) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3183)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3184) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3185) ctx->alg = SAFEXCEL_SM4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3186) ctx->blocksz = SM4_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3187) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3188) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3189) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3190)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3191) struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3192) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3193) .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3194) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3195) .setkey = safexcel_skcipher_sm4_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3196) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3197) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3198) .min_keysize = SM4_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3199) .max_keysize = SM4_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3200) .ivsize = SM4_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3201) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3202) .cra_name = "ofb(sm4)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3203) .cra_driver_name = "safexcel-ofb-sm4",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3204) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3205) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3206) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3207) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3208) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3209) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3210) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3211) .cra_init = safexcel_skcipher_sm4_ofb_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3212) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3213) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3214) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3215) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3216) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3217)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3218) static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3219) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3220) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3221)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3222) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3223) ctx->alg = SAFEXCEL_SM4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3224) ctx->blocksz = SM4_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3225) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3226) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3227) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3228)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3229) struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3230) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3231) .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3232) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3233) .setkey = safexcel_skcipher_sm4_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3234) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3235) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3236) .min_keysize = SM4_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3237) .max_keysize = SM4_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3238) .ivsize = SM4_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3239) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3240) .cra_name = "cfb(sm4)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3241) .cra_driver_name = "safexcel-cfb-sm4",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3242) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3243) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3244) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3245) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3246) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3247) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3248) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3249) .cra_init = safexcel_skcipher_sm4_cfb_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3250) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3251) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3252) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3253) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3254) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3255)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3256) static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3257) const u8 *key, unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3258) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3259) struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3260) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3261)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3262) /* last 4 bytes of key are the nonce! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3263) ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3264) /* exclude the nonce here */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3265) len -= CTR_RFC3686_NONCE_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3266)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3267) return safexcel_skcipher_sm4_setkey(ctfm, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3268) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3269)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3270) static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3271) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3272) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3273)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3274) safexcel_skcipher_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3275) ctx->alg = SAFEXCEL_SM4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3276) ctx->blocksz = SM4_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3277) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3278) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3279) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3280)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3281) struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3282) .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3283) .algo_mask = SAFEXCEL_ALG_SM4,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3284) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3285) .setkey = safexcel_skcipher_sm4ctr_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3286) .encrypt = safexcel_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3287) .decrypt = safexcel_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3288) /* Add nonce size */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3289) .min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3290) .max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3291) .ivsize = CTR_RFC3686_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3292) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3293) .cra_name = "rfc3686(ctr(sm4))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3294) .cra_driver_name = "safexcel-ctr-sm4",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3295) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3296) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3297) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3298) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3299) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3300) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3301) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3302) .cra_init = safexcel_skcipher_sm4_ctr_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3303) .cra_exit = safexcel_skcipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3304) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3305) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3306) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3307) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3308)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3309) static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3310) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3311) /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3312) if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3313) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3314)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3315) return safexcel_queue_req(&req->base, aead_request_ctx(req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3316) SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3317) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3318)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3319) static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3320) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3321) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3322)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3323) /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3324) if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3325) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3326)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3327) return safexcel_queue_req(&req->base, aead_request_ctx(req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3328) SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3329) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3330)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3331) static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3332) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3333) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3334)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3335) safexcel_aead_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3336) ctx->alg = SAFEXCEL_SM4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3337) ctx->blocksz = SM4_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3338) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3339) ctx->state_sz = SHA1_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3340) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3341) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3342)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3343) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3344) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3345) .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3346) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3347) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3348) .encrypt = safexcel_aead_sm4_blk_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3349) .decrypt = safexcel_aead_sm4_blk_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3350) .ivsize = SM4_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3351) .maxauthsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3352) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3353) .cra_name = "authenc(hmac(sha1),cbc(sm4))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3354) .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3355) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3356) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3357) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3358) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3359) .cra_blocksize = SM4_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3360) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3361) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3362) .cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3363) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3364) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3365) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3366) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3367) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3368)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3369) static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3370) const u8 *key, unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3371) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3372) struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3373) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3374)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3375) /* Keep fallback cipher synchronized */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3376) return crypto_aead_setkey(ctx->fback, key, len) ?:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3377) safexcel_aead_setkey(ctfm, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3378) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3379)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3380) static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3381) unsigned int authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3382) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3383) struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3384) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3385)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3386) /* Keep fallback cipher synchronized */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3387) return crypto_aead_setauthsize(ctx->fback, authsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3388) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3389)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3390) static int safexcel_aead_fallback_crypt(struct aead_request *req,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3391) enum safexcel_cipher_direction dir)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3392) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3393) struct crypto_aead *aead = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3394) struct crypto_tfm *tfm = crypto_aead_tfm(aead);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3395) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3396) struct aead_request *subreq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3397)
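/*
 * Hand the request over to the software fallback transform, mirroring
 * the original request's callback, buffers and AAD length.
 */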
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3398) aead_request_set_tfm(subreq, ctx->fback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3399) aead_request_set_callback(subreq, req->base.flags, req->base.complete,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3400) req->base.data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3401) aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3402) req->iv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3403) aead_request_set_ad(subreq, req->assoclen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3404)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3405) return (dir == SAFEXCEL_ENCRYPT) ?
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3406) crypto_aead_encrypt(subreq) :
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3407) crypto_aead_decrypt(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3408) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3409)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3410) static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3411) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3412) struct safexcel_cipher_req *creq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3413)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3414) /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3415) if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3416) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3417) else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3418) return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3419)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3420) /* HW cannot handle a fully zero length (no AAD, no payload) request, use fallback */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3421) return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3422) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3423)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3424) static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3425) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3426) struct safexcel_cipher_req *creq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3427) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3428)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3429) /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3430) if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3431) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3432) else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3433) /* If input length > 0 only */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3434) return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3435)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3436) /* HW cannot handle a fully zero length (no AAD, no payload) request, use fallback */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3437) return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3438) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3439)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3440) static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3441) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3442) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3443)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3444) safexcel_aead_fallback_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3445) ctx->alg = SAFEXCEL_SM4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3446) ctx->blocksz = SM4_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3447) ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3448) ctx->state_sz = SM3_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3449) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3450) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3451)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3452) struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3453) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3454) .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3455) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3456) .setkey = safexcel_aead_fallback_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3457) .setauthsize = safexcel_aead_fallback_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3458) .encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3459) .decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3460) .ivsize = SM4_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3461) .maxauthsize = SM3_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3462) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3463) .cra_name = "authenc(hmac(sm3),cbc(sm4))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3464) .cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3465) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3466) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3467) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3468) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3469) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3470) .cra_blocksize = SM4_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3471) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3472) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3473) .cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3474) .cra_exit = safexcel_aead_fallback_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3475) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3476) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3477) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3478) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3479)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3480) static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3481) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3482) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3483)
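/* Reuse the CBC + SHA1 setup, then switch the cipher mode to CTR */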
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3484) safexcel_aead_sm4cbc_sha1_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3485) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3486) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3487) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3488)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3489) struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3490) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3491) .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3492) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3493) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3494) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3495) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3496) .ivsize = CTR_RFC3686_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3497) .maxauthsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3498) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3499) .cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3500) .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3501) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3502) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3503) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3504) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3505) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3506) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3507) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3508) .cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3509) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3510) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3511) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3512) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3513) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3514)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3515) static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3516) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3517) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3518)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3519) safexcel_aead_sm4cbc_sm3_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3520) ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3521) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3522) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3523)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3524) struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3525) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3526) .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3527) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3528) .setkey = safexcel_aead_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3529) .encrypt = safexcel_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3530) .decrypt = safexcel_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3531) .ivsize = CTR_RFC3686_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3532) .maxauthsize = SM3_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3533) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3534) .cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3535) .cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3536) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3537) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3538) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3539) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3540) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3541) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3542) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3543) .cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3544) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3545) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3546) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3547) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3548) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3549)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3550) static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3551) unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3552) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3553) struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3554) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3555)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3556) /* last 4 bytes of key are the nonce! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3557) ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3558)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3559) len -= CTR_RFC3686_NONCE_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3560) return safexcel_aead_gcm_setkey(ctfm, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3561) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3562)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3563) static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3564) unsigned int authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3565) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3566) return crypto_rfc4106_check_authsize(authsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3567) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3568)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3569) static int safexcel_rfc4106_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3570) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3571) return crypto_ipsec_check_assoclen(req->assoclen) ?:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3572) safexcel_aead_encrypt(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3573) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3574)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3575) static int safexcel_rfc4106_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3576) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3577) return crypto_ipsec_check_assoclen(req->assoclen) ?:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3578) safexcel_aead_decrypt(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3579) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3580)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3581) static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3582) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3583) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3584) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3585)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3586) ret = safexcel_aead_gcm_cra_init(tfm);
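/*
 * For IPsec ESP, req->assoclen also covers the 8 byte ESP IV at the
 * end of the AAD; it is not authenticated, so it must be skipped.
 */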
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3587) ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3588) ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3589) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3590) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3591)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3592) struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3593) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3594) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3595) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3596) .setkey = safexcel_rfc4106_gcm_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3597) .setauthsize = safexcel_rfc4106_gcm_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3598) .encrypt = safexcel_rfc4106_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3599) .decrypt = safexcel_rfc4106_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3600) .ivsize = GCM_RFC4106_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3601) .maxauthsize = GHASH_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3602) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3603) .cra_name = "rfc4106(gcm(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3604) .cra_driver_name = "safexcel-rfc4106-gcm-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3605) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3606) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3607) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3608) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3609) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3610) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3611) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3612) .cra_init = safexcel_rfc4106_gcm_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3613) .cra_exit = safexcel_aead_gcm_cra_exit,
.cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3614) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3615) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3616) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3617)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3618) static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3619) unsigned int authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3620) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3621) if (authsize != GHASH_DIGEST_SIZE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3622) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3623)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3624) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3625) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3626)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3627) static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3628) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3629) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3630) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3631)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3632) ret = safexcel_aead_gcm_cra_init(tfm);
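/* rfc4543 is GMAC: all input is authenticated only, nothing is encrypted */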
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3633) ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3634) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3635) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3636)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3637) struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3638) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3639) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3640) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3641) .setkey = safexcel_rfc4106_gcm_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3642) .setauthsize = safexcel_rfc4543_gcm_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3643) .encrypt = safexcel_rfc4106_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3644) .decrypt = safexcel_rfc4106_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3645) .ivsize = GCM_RFC4543_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3646) .maxauthsize = GHASH_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3647) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3648) .cra_name = "rfc4543(gcm(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3649) .cra_driver_name = "safexcel-rfc4543-gcm-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3650) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3651) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3652) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3653) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3654) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3655) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3656) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3657) .cra_init = safexcel_rfc4543_gcm_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3658) .cra_exit = safexcel_aead_gcm_cra_exit,
.cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3659) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3660) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3661) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3662)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3663) static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3664) unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3665) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3666) struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3667) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3668)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3669) /* First byte of the nonce = L - 1 = 3 for RFC4309 (4 byte counter) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3670) *(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3671) /* last 3 bytes of key are the nonce! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3672) memcpy((u8 *)&ctx->nonce + 1, key + len -
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3673) EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3674) EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3675)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3676) len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3677) return safexcel_aead_ccm_setkey(ctfm, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3678) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3679)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3680) static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3681) unsigned int authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3682) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3683) /* Borrowed from crypto/ccm.c */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3684) switch (authsize) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3685) case 8:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3686) case 12:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3687) case 16:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3688) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3689) default:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3690) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3691) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3692)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3693) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3694) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3695)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3696) static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3697) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3698) struct safexcel_cipher_req *creq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3699)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3700) /* Borrowed from crypto/ccm.c */
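/* 8 byte SPI/sequence number or 12 byte ESN header, plus the 8 byte IV */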
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3701) if (req->assoclen != 16 && req->assoclen != 20)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3702) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3703)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3704) return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3705) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3706)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3707) static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3708) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3709) struct safexcel_cipher_req *creq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3710)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3711) /* Borrowed from crypto/ccm.c */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3712) if (req->assoclen != 16 && req->assoclen != 20)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3713) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3714)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3715) return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3716) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3717)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3718) static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3719) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3720) struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3721) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3722)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3723) ret = safexcel_aead_ccm_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3724) ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3725) ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3726) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3727) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3728)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3729) struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3730) .type = SAFEXCEL_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3731) .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3732) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3733) .setkey = safexcel_rfc4309_ccm_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3734) .setauthsize = safexcel_rfc4309_ccm_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3735) .encrypt = safexcel_rfc4309_ccm_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3736) .decrypt = safexcel_rfc4309_ccm_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3737) .ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3738) .maxauthsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3739) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3740) .cra_name = "rfc4309(ccm(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3741) .cra_driver_name = "safexcel-rfc4309-ccm-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3742) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3743) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3744) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3745) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3746) .cra_blocksize = 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3747) .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3748) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3749) .cra_init = safexcel_rfc4309_ccm_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3750) .cra_exit = safexcel_aead_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3751) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3752) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3753) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3754) };