^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) // SPDX-License-Identifier: GPL-2.0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) * Copyright (C) 2017 Marvell
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) * Antoine Tenart <antoine.tenart@free-electrons.com>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8) #include <crypto/aes.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9) #include <crypto/hmac.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10) #include <crypto/md5.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11) #include <crypto/sha.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12) #include <crypto/sha3.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13) #include <crypto/skcipher.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14) #include <crypto/sm3.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 15) #include <crypto/internal/cipher.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 16) #include <linux/device.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 17) #include <linux/dma-mapping.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 18) #include <linux/dmapool.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 19)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 20) #include "safexcel.h"
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 21)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 22) struct safexcel_ahash_ctx {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 23) struct safexcel_context base;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 24)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 25) u32 alg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 26) u8 key_sz;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 27) bool cbcmac;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 28) bool do_fallback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 29) bool fb_init_done;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 30) bool fb_do_setkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 31)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 32) struct crypto_cipher *kaes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 33) struct crypto_ahash *fback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 34) struct crypto_shash *shpre;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 35) struct shash_desc *shdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 36) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 37)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 38) struct safexcel_ahash_req {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 39) bool last_req;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 40) bool finish;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 41) bool hmac;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 42) bool needs_inv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 43) bool hmac_zlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 44) bool len_is_le;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 45) bool not_first;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 46) bool xcbcmac;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 47)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 48) int nents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 49) dma_addr_t result_dma;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 50)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 51) u32 digest;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 52)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 53) u8 state_sz; /* expected state size, only set once */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 54) u8 block_sz; /* block size, only set once */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 55) u8 digest_sz; /* output digest size, only set once */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 56) __le32 state[SHA3_512_BLOCK_SIZE /
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 57) sizeof(__le32)] __aligned(sizeof(__le32));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 58)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 59) u64 len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 60) u64 processed;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 61)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 62) u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 63) dma_addr_t cache_dma;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 64) unsigned int cache_sz;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 65)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 66) u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 67) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 68)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 69) static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 70) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 71) return req->len - req->processed;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 72) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 73)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 74) static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 75) u32 input_length, u32 result_length,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 76) bool cbcmac)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 77) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 78) struct safexcel_token *token =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 79) (struct safexcel_token *)cdesc->control_data.token;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 80)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 81) token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 82) token[0].packet_length = input_length;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 83) token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 84)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 85) input_length &= 15;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 86) if (unlikely(cbcmac && input_length)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 87) token[0].stat = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 88) token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 89) token[1].packet_length = 16 - input_length;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 90) token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 91) token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 92) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 93) token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 94) eip197_noop_token(&token[1]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 95) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 96)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 97) token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 98) token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 99) EIP197_TOKEN_STAT_LAST_PACKET;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 100) token[2].packet_length = result_length;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 101) token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 102) EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 103)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 104) eip197_noop_token(&token[3]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 105) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 106)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 107) static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 108) struct safexcel_ahash_req *req,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 109) struct safexcel_command_desc *cdesc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 110) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 111) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 112) u64 count = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 113)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 114) cdesc->control_data.control0 = ctx->alg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 115) cdesc->control_data.control1 = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 116)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 117) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 118) * Copy the input digest if needed, and setup the context
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 119) * fields. Do this now as we need it to setup the first command
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 120) * descriptor.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 121) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 122) if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 123) if (req->xcbcmac)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 124) memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 125) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 126) memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 127)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 128) if (!req->finish && req->xcbcmac)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 129) cdesc->control_data.control0 |=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 130) CONTEXT_CONTROL_DIGEST_XCM |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 131) CONTEXT_CONTROL_TYPE_HASH_OUT |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 132) CONTEXT_CONTROL_NO_FINISH_HASH |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 133) CONTEXT_CONTROL_SIZE(req->state_sz /
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 134) sizeof(u32));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 135) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 136) cdesc->control_data.control0 |=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 137) CONTEXT_CONTROL_DIGEST_XCM |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 138) CONTEXT_CONTROL_TYPE_HASH_OUT |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 139) CONTEXT_CONTROL_SIZE(req->state_sz /
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 140) sizeof(u32));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 141) return;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 142) } else if (!req->processed) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 143) /* First - and possibly only - block of basic hash only */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 144) if (req->finish)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 145) cdesc->control_data.control0 |= req->digest |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 146) CONTEXT_CONTROL_TYPE_HASH_OUT |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 147) CONTEXT_CONTROL_RESTART_HASH |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 148) /* ensure its not 0! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 149) CONTEXT_CONTROL_SIZE(1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 150) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 151) cdesc->control_data.control0 |= req->digest |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 152) CONTEXT_CONTROL_TYPE_HASH_OUT |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 153) CONTEXT_CONTROL_RESTART_HASH |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 154) CONTEXT_CONTROL_NO_FINISH_HASH |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 155) /* ensure its not 0! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 156) CONTEXT_CONTROL_SIZE(1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 157) return;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 158) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 159)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 160) /* Hash continuation or HMAC, setup (inner) digest from state */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 161) memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 162)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 163) if (req->finish) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 164) /* Compute digest count for hash/HMAC finish operations */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 165) if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 166) req->hmac_zlen || (req->processed != req->block_sz)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 167) count = req->processed / EIP197_COUNTER_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 168)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 169) /* This is a hardware limitation, as the
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 170) * counter must fit into an u32. This represents
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 171) * a fairly big amount of input data, so we
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 172) * shouldn't see this.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 173) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 174) if (unlikely(count & 0xffffffff00000000ULL)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 175) dev_warn(priv->dev,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 176) "Input data is too big\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 177) return;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 178) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 179) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 180)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 181) if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 182) /* Special case: zero length HMAC */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 183) req->hmac_zlen ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 184) /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 185) (req->processed != req->block_sz)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 186) /* Basic hash continue operation, need digest + cnt */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 187) cdesc->control_data.control0 |=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 188) CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 189) CONTEXT_CONTROL_TYPE_HASH_OUT |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 190) CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 191) /* For zero-len HMAC, don't finalize, already padded! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 192) if (req->hmac_zlen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 193) cdesc->control_data.control0 |=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 194) CONTEXT_CONTROL_NO_FINISH_HASH;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 195) cdesc->control_data.control1 |=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 196) CONTEXT_CONTROL_DIGEST_CNT;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 197) ctx->base.ctxr->data[req->state_sz >> 2] =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 198) cpu_to_le32(count);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 199) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 200)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 201) /* Clear zero-length HMAC flag for next operation! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 202) req->hmac_zlen = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 203) } else { /* HMAC */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 204) /* Need outer digest for HMAC finalization */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 205) memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 206) &ctx->base.opad, req->state_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 207)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 208) /* Single pass HMAC - no digest count */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 209) cdesc->control_data.control0 |=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 210) CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 211) CONTEXT_CONTROL_TYPE_HASH_OUT |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 212) CONTEXT_CONTROL_DIGEST_HMAC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 213) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 214) } else { /* Hash continuation, do not finish yet */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 215) cdesc->control_data.control0 |=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 216) CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 217) CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 218) CONTEXT_CONTROL_TYPE_HASH_OUT |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 219) CONTEXT_CONTROL_NO_FINISH_HASH;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 220) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 221) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 222)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 223) static int safexcel_ahash_enqueue(struct ahash_request *areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 224)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 225) static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 226) int ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 227) struct crypto_async_request *async,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 228) bool *should_complete, int *ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 229) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 230) struct safexcel_result_desc *rdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 231) struct ahash_request *areq = ahash_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 232) struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 233) struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 234) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 235) u64 cache_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 236)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 237) *ret = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 238)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 239) rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 240) if (IS_ERR(rdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 241) dev_err(priv->dev,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 242) "hash: result: could not retrieve the result descriptor\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 243) *ret = PTR_ERR(rdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 244) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 245) *ret = safexcel_rdesc_check_errors(priv, rdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 246) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 247)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 248) safexcel_complete(priv, ring);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 249)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 250) if (sreq->nents) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 251) dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 252) sreq->nents = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 253) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 254)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 255) if (sreq->result_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 256) dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 257) DMA_FROM_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 258) sreq->result_dma = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 259) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 260)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 261) if (sreq->cache_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 262) dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 263) DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 264) sreq->cache_dma = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 265) sreq->cache_sz = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 266) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 267)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 268) if (sreq->finish) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 269) if (sreq->hmac &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 270) (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 271) /* Faking HMAC using hash - need to do outer hash */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 272) memcpy(sreq->cache, sreq->state,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 273) crypto_ahash_digestsize(ahash));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 274)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 275) memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 276)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 277) sreq->len = sreq->block_sz +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 278) crypto_ahash_digestsize(ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 279) sreq->processed = sreq->block_sz;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 280) sreq->hmac = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 281)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 282) if (priv->flags & EIP197_TRC_CACHE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 283) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 284) areq->nbytes = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 285) safexcel_ahash_enqueue(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 286)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 287) *should_complete = false; /* Not done yet */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 288) return 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 289) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 290)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 291) if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 292) ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 293) /* Undo final XOR with 0xffffffff ...*/
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 294) *(__le32 *)areq->result = ~sreq->state[0];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 295) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 296) memcpy(areq->result, sreq->state,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 297) crypto_ahash_digestsize(ahash));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 298) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 299) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 300)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 301) cache_len = safexcel_queued_len(sreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 302) if (cache_len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 303) memcpy(sreq->cache, sreq->cache_next, cache_len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 304)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 305) *should_complete = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 306)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 307) return 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 308) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 309)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 310) static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 311) int *commands, int *results)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 312) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 313) struct ahash_request *areq = ahash_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 314) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 315) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 316) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 317) struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 318) struct safexcel_result_desc *rdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 319) struct scatterlist *sg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 320) struct safexcel_token *dmmy;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 321) int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 322) u64 queued, len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 323)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 324) queued = safexcel_queued_len(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 325) if (queued <= HASH_CACHE_SIZE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 326) cache_len = queued;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 327) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 328) cache_len = queued - areq->nbytes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 329)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 330) if (!req->finish && !req->last_req) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 331) /* If this is not the last request and the queued data does not
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 332) * fit into full cache blocks, cache it for the next send call.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 333) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 334) extra = queued & (HASH_CACHE_SIZE - 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 335)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 336) /* If this is not the last request and the queued data
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 337) * is a multiple of a block, cache the last one for now.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 338) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 339) if (!extra)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 340) extra = HASH_CACHE_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 341)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 342) sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 343) req->cache_next, extra,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 344) areq->nbytes - extra);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 345)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 346) queued -= extra;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 347)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 348) if (!queued) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 349) *commands = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 350) *results = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 351) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 352) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 353)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 354) extra = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 355) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 356)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 357) if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 358) if (unlikely(cache_len < AES_BLOCK_SIZE)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 359) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 360) * Cache contains less than 1 full block, complete.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 361) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 362) extra = AES_BLOCK_SIZE - cache_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 363) if (queued > cache_len) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 364) /* More data follows: borrow bytes */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 365) u64 tmp = queued - cache_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 366)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 367) skip = min_t(u64, tmp, extra);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 368) sg_pcopy_to_buffer(areq->src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 369) sg_nents(areq->src),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 370) req->cache + cache_len,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 371) skip, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 372) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 373) extra -= skip;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 374) memset(req->cache + cache_len + skip, 0, extra);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 375) if (!ctx->cbcmac && extra) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 376) // 10- padding for XCBCMAC & CMAC
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 377) req->cache[cache_len + skip] = 0x80;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 378) // HW will use K2 iso K3 - compensate!
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 379) for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 380) u32 *cache = (void *)req->cache;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 381) u32 *ipad = ctx->base.ipad.word;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 382) u32 x;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 383)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 384) x = ipad[i] ^ ipad[i + 4];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 385) cache[i] ^= swab(x);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 386) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 387) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 388) cache_len = AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 389) queued = queued + extra;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 390) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 391)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 392) /* XCBC continue: XOR previous result into 1st word */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 393) crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 394) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 395)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 396) len = queued;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 397) /* Add a command descriptor for the cached data, if any */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 398) if (cache_len) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 399) req->cache_dma = dma_map_single(priv->dev, req->cache,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 400) cache_len, DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 401) if (dma_mapping_error(priv->dev, req->cache_dma))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 402) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 403)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 404) req->cache_sz = cache_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 405) first_cdesc = safexcel_add_cdesc(priv, ring, 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 406) (cache_len == len),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 407) req->cache_dma, cache_len,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 408) len, ctx->base.ctxr_dma,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 409) &dmmy);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 410) if (IS_ERR(first_cdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 411) ret = PTR_ERR(first_cdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 412) goto unmap_cache;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 413) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 414) n_cdesc++;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 415)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 416) queued -= cache_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 417) if (!queued)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 418) goto send_command;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 419) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 420)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 421) /* Now handle the current ahash request buffer(s) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 422) req->nents = dma_map_sg(priv->dev, areq->src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 423) sg_nents_for_len(areq->src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 424) areq->nbytes),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 425) DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 426) if (!req->nents) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 427) ret = -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 428) goto cdesc_rollback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 429) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 430)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 431) for_each_sg(areq->src, sg, req->nents, i) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 432) int sglen = sg_dma_len(sg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 433)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 434) if (unlikely(sglen <= skip)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 435) skip -= sglen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 436) continue;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 437) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 438)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 439) /* Do not overflow the request */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 440) if ((queued + skip) <= sglen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 441) sglen = queued;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 442) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 443) sglen -= skip;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 444)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 445) cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 446) !(queued - sglen),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 447) sg_dma_address(sg) + skip, sglen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 448) len, ctx->base.ctxr_dma, &dmmy);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 449) if (IS_ERR(cdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 450) ret = PTR_ERR(cdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 451) goto unmap_sg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 452) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 453)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 454) if (!n_cdesc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 455) first_cdesc = cdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 456) n_cdesc++;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 457)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 458) queued -= sglen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 459) if (!queued)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 460) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 461) skip = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 462) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 463)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 464) send_command:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 465) /* Setup the context options */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 466) safexcel_context_control(ctx, req, first_cdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 467)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 468) /* Add the token */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 469) safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 470)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 471) req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 472) DMA_FROM_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 473) if (dma_mapping_error(priv->dev, req->result_dma)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 474) ret = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 475) goto unmap_sg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 476) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 477)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 478) /* Add a result descriptor */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 479) rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 480) req->digest_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 481) if (IS_ERR(rdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 482) ret = PTR_ERR(rdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 483) goto unmap_result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 484) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 485)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 486) safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 487)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 488) req->processed += len - extra;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 489)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 490) *commands = n_cdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 491) *results = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 492) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 493)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 494) unmap_result:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 495) dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 496) DMA_FROM_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 497) unmap_sg:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 498) if (req->nents) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 499) dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 500) req->nents = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 501) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 502) cdesc_rollback:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 503) for (i = 0; i < n_cdesc; i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 504) safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 505) unmap_cache:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 506) if (req->cache_dma) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 507) dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 508) DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 509) req->cache_dma = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 510) req->cache_sz = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 511) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 512)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 513) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 514) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 515)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 516) static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 517) int ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 518) struct crypto_async_request *async,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 519) bool *should_complete, int *ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 520) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 521) struct safexcel_result_desc *rdesc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 522) struct ahash_request *areq = ahash_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 523) struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 524) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 525) int enq_ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 526)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 527) *ret = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 528)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 529) rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 530) if (IS_ERR(rdesc)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 531) dev_err(priv->dev,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 532) "hash: invalidate: could not retrieve the result descriptor\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 533) *ret = PTR_ERR(rdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 534) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 535) *ret = safexcel_rdesc_check_errors(priv, rdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 536) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 537)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 538) safexcel_complete(priv, ring);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 539)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 540) if (ctx->base.exit_inv) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 541) dma_pool_free(priv->context_pool, ctx->base.ctxr,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 542) ctx->base.ctxr_dma);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 543)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 544) *should_complete = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 545) return 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 546) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 547)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 548) ring = safexcel_select_ring(priv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 549) ctx->base.ring = ring;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 550)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 551) spin_lock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 552) enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 553) spin_unlock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 554)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 555) if (enq_ret != -EINPROGRESS)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 556) *ret = enq_ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 557)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 558) queue_work(priv->ring[ring].workqueue,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 559) &priv->ring[ring].work_data.work);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 560)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 561) *should_complete = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 562)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 563) return 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 564) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 565)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 566) static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 567) struct crypto_async_request *async,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 568) bool *should_complete, int *ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 569) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 570) struct ahash_request *areq = ahash_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 571) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 572) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 573)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 574) BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 575)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 576) if (req->needs_inv) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 577) req->needs_inv = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 578) err = safexcel_handle_inv_result(priv, ring, async,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 579) should_complete, ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 580) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 581) err = safexcel_handle_req_result(priv, ring, async,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 582) should_complete, ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 583) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 584)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 585) return err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 586) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 587)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 588) static int safexcel_ahash_send_inv(struct crypto_async_request *async,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 589) int ring, int *commands, int *results)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 590) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 591) struct ahash_request *areq = ahash_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 592) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 593) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 594)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 595) ret = safexcel_invalidate_cache(async, ctx->base.priv,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 596) ctx->base.ctxr_dma, ring);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 597) if (unlikely(ret))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 598) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 599)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 600) *commands = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 601) *results = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 602)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 603) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 604) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 605)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 606) static int safexcel_ahash_send(struct crypto_async_request *async,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 607) int ring, int *commands, int *results)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 608) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 609) struct ahash_request *areq = ahash_request_cast(async);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 610) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 611) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 612)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 613) if (req->needs_inv)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 614) ret = safexcel_ahash_send_inv(async, ring, commands, results);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 615) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 616) ret = safexcel_ahash_send_req(async, ring, commands, results);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 617)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 618) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 619) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 620)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 621) static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 622) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 623) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 624) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 625) EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 626) struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 627) struct safexcel_inv_result result = {};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 628) int ring = ctx->base.ring;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 629)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 630) memset(req, 0, EIP197_AHASH_REQ_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 631)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 632) /* create invalidation request */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 633) init_completion(&result.completion);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 634) ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 635) safexcel_inv_complete, &result);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 636)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 637) ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 638) ctx = crypto_tfm_ctx(req->base.tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 639) ctx->base.exit_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 640) rctx->needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 641)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 642) spin_lock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 643) crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 644) spin_unlock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 645)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 646) queue_work(priv->ring[ring].workqueue,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 647) &priv->ring[ring].work_data.work);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 648)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 649) wait_for_completion(&result.completion);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 650)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 651) if (result.error) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 652) dev_warn(priv->dev, "hash: completion error (%d)\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 653) result.error);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 654) return result.error;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 655) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 656)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 657) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 658) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 659)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 660) /* safexcel_ahash_cache: cache data until at least one request can be sent to
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 661) * the engine, aka. when there is at least 1 block size in the pipe.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 662) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 663) static int safexcel_ahash_cache(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 664) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 665) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 666) u64 cache_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 667)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 668) /* cache_len: everything accepted by the driver but not sent yet,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 669) * tot sz handled by update() - last req sz - tot sz handled by send()
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 670) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 671) cache_len = safexcel_queued_len(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 672)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 673) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 674) * In case there isn't enough bytes to proceed (less than a
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 675) * block size), cache the data until we have enough.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 676) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 677) if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 678) sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 679) req->cache + cache_len,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 680) areq->nbytes, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 681) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 682) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 683)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 684) /* We couldn't cache all the data */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 685) return -E2BIG;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 686) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 687)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 688) static int safexcel_ahash_enqueue(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 689) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 690) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 691) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 692) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 693) int ret, ring;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 694)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 695) req->needs_inv = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 696)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 697) if (ctx->base.ctxr) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 698) if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 699) /* invalidate for *any* non-XCBC continuation */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 700) ((req->not_first && !req->xcbcmac) ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 701) /* invalidate if (i)digest changed */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 702) memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 703) /* invalidate for HMAC finish with odigest changed */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 704) (req->finish && req->hmac &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 705) memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 706) &ctx->base.opad, req->state_sz))))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 707) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 708) * We're still setting needs_inv here, even though it is
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 709) * cleared right away, because the needs_inv flag can be
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 710) * set in other functions and we want to keep the same
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 711) * logic.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 712) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 713) ctx->base.needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 714)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 715) if (ctx->base.needs_inv) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 716) ctx->base.needs_inv = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 717) req->needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 718) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 719) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 720) ctx->base.ring = safexcel_select_ring(priv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 721) ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 722) EIP197_GFP_FLAGS(areq->base),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 723) &ctx->base.ctxr_dma);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 724) if (!ctx->base.ctxr)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 725) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 726) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 727) req->not_first = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 728)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 729) ring = ctx->base.ring;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 730)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 731) spin_lock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 732) ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 733) spin_unlock_bh(&priv->ring[ring].queue_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 734)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 735) queue_work(priv->ring[ring].workqueue,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 736) &priv->ring[ring].work_data.work);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 737)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 738) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 739) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 740)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 741) static int safexcel_ahash_update(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 742) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 743) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 744) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 745)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 746) /* If the request is 0 length, do nothing */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 747) if (!areq->nbytes)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 748) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 749)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 750) /* Add request to the cache if it fits */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 751) ret = safexcel_ahash_cache(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 752)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 753) /* Update total request length */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 754) req->len += areq->nbytes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 755)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 756) /* If not all data could fit into the cache, go process the excess.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 757) * Also go process immediately for an HMAC IV precompute, which
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 758) * will never be finished at all, but needs to be processed anyway.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 759) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 760) if ((ret && !req->finish) || req->last_req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 761) return safexcel_ahash_enqueue(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 762)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 763) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 764) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 765)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 766) static int safexcel_ahash_final(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 767) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 768) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 769) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 770)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 771) req->finish = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 772)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 773) if (unlikely(!req->len && !areq->nbytes)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 774) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 775) * If we have an overall 0 length *hash* request:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 776) * The HW cannot do 0 length hash, so we provide the correct
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 777) * result directly here.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 778) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 779) if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 780) memcpy(areq->result, md5_zero_message_hash,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 781) MD5_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 782) else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 783) memcpy(areq->result, sha1_zero_message_hash,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 784) SHA1_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 785) else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 786) memcpy(areq->result, sha224_zero_message_hash,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 787) SHA224_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 788) else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 789) memcpy(areq->result, sha256_zero_message_hash,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 790) SHA256_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 791) else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 792) memcpy(areq->result, sha384_zero_message_hash,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 793) SHA384_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 794) else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 795) memcpy(areq->result, sha512_zero_message_hash,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 796) SHA512_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 797) else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 798) memcpy(areq->result,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 799) EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 800) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 801)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 802) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 803) } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 804) ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 805) req->len == sizeof(u32) && !areq->nbytes)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 806) /* Zero length CRC32 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 807) memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 808) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 809) } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 810) !areq->nbytes)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 811) /* Zero length CBC MAC */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 812) memset(areq->result, 0, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 813) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 814) } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 815) !areq->nbytes)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 816) /* Zero length (X)CBC/CMAC */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 817) int i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 818)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 819) for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 820) u32 *result = (void *)areq->result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 821)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 822) /* K3 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 823) result[i] = swab(ctx->base.ipad.word[i + 4]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 824) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 825) areq->result[0] ^= 0x80; // 10- padding
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 826) crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 827) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 828) } else if (unlikely(req->hmac &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 829) (req->len == req->block_sz) &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 830) !areq->nbytes)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 831) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 832) * If we have an overall 0 length *HMAC* request:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 833) * For HMAC, we need to finalize the inner digest
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 834) * and then perform the outer hash.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 835) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 836)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 837) /* generate pad block in the cache */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 838) /* start with a hash block of all zeroes */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 839) memset(req->cache, 0, req->block_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 840) /* set the first byte to 0x80 to 'append a 1 bit' */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 841) req->cache[0] = 0x80;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 842) /* add the length in bits in the last 2 bytes */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 843) if (req->len_is_le) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 844) /* Little endian length word (e.g. MD5) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 845) req->cache[req->block_sz-8] = (req->block_sz << 3) &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 846) 255;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 847) req->cache[req->block_sz-7] = (req->block_sz >> 5);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 848) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 849) /* Big endian length word (e.g. any SHA) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 850) req->cache[req->block_sz-2] = (req->block_sz >> 5);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 851) req->cache[req->block_sz-1] = (req->block_sz << 3) &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 852) 255;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 853) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 854)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 855) req->len += req->block_sz; /* plus 1 hash block */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 856)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 857) /* Set special zero-length HMAC flag */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 858) req->hmac_zlen = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 859)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 860) /* Finalize HMAC */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 861) req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 862) } else if (req->hmac) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 863) /* Finalize HMAC */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 864) req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 865) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 866)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 867) return safexcel_ahash_enqueue(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 868) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 869)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 870) static int safexcel_ahash_finup(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 871) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 872) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 873)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 874) req->finish = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 875)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 876) safexcel_ahash_update(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 877) return safexcel_ahash_final(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 878) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 879)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 880) static int safexcel_ahash_export(struct ahash_request *areq, void *out)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 881) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 882) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 883) struct safexcel_ahash_export_state *export = out;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 884)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 885) export->len = req->len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 886) export->processed = req->processed;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 887)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 888) export->digest = req->digest;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 889)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 890) memcpy(export->state, req->state, req->state_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 891) memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 892)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 893) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 894) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 895)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 896) static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 897) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 898) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 899) const struct safexcel_ahash_export_state *export = in;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 900) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 901)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 902) ret = crypto_ahash_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 903) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 904) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 905)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 906) req->len = export->len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 907) req->processed = export->processed;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 908)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 909) req->digest = export->digest;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 910)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 911) memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 912) memcpy(req->state, export->state, req->state_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 913)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 914) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 915) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 916)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 917) static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 918) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 919) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 920) struct safexcel_alg_template *tmpl =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 921) container_of(__crypto_ahash_alg(tfm->__crt_alg),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 922) struct safexcel_alg_template, alg.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 923)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 924) ctx->base.priv = tmpl->priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 925) ctx->base.send = safexcel_ahash_send;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 926) ctx->base.handle_result = safexcel_handle_result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 927) ctx->fb_do_setkey = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 928)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 929) crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 930) sizeof(struct safexcel_ahash_req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 931) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 932) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 933)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 934) static int safexcel_sha1_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 935) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 936) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 937) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 938)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 939) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 940)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 941) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 942) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 943) req->state_sz = SHA1_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 944) req->digest_sz = SHA1_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 945) req->block_sz = SHA1_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 946)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 947) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 948) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 949)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 950) static int safexcel_sha1_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 951) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 952) int ret = safexcel_sha1_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 953)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 954) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 955) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 956)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 957) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 958) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 959)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 960) static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 961) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 962) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 963) struct safexcel_crypto_priv *priv = ctx->base.priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 964) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 965)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 966) /* context not allocated, skip invalidation */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 967) if (!ctx->base.ctxr)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 968) return;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 969)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 970) if (priv->flags & EIP197_TRC_CACHE) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 971) ret = safexcel_ahash_exit_inv(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 972) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 973) dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 974) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 975) dma_pool_free(priv->context_pool, ctx->base.ctxr,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 976) ctx->base.ctxr_dma);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 977) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 978) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 979)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 980) struct safexcel_alg_template safexcel_alg_sha1 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 981) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 982) .algo_mask = SAFEXCEL_ALG_SHA1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 983) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 984) .init = safexcel_sha1_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 985) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 986) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 987) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 988) .digest = safexcel_sha1_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 989) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 990) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 991) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 992) .digestsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 993) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 994) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 995) .cra_name = "sha1",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 996) .cra_driver_name = "safexcel-sha1",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 997) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 998) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 999) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1000) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1001) .cra_blocksize = SHA1_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1002) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1003) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1004) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1005) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1006) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1007) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1008) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1009) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1010)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1011) static int safexcel_hmac_sha1_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1012) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1013) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1014) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1015)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1016) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1017)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1018) /* Start from ipad precompute */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1019) memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1020) /* Already processed the key^ipad part now! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1021) req->len = SHA1_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1022) req->processed = SHA1_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1023)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1024) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1025) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1026) req->state_sz = SHA1_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1027) req->digest_sz = SHA1_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1028) req->block_sz = SHA1_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1029) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1030)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1031) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1032) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1033)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1034) static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1035) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1036) int ret = safexcel_hmac_sha1_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1037)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1038) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1039) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1040)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1041) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1042) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1043)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1044) struct safexcel_ahash_result {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1045) struct completion completion;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1046) int error;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1047) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1048)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1049) static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1050) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1051) struct safexcel_ahash_result *result = req->data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1052)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1053) if (error == -EINPROGRESS)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1054) return;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1055)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1056) result->error = error;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1057) complete(&result->completion);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1058) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1059)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1060) static int safexcel_hmac_init_pad(struct ahash_request *areq,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1061) unsigned int blocksize, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1062) unsigned int keylen, u8 *ipad, u8 *opad)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1063) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1064) struct safexcel_ahash_result result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1065) struct scatterlist sg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1066) int ret, i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1067) u8 *keydup;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1068)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1069) if (keylen <= blocksize) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1070) memcpy(ipad, key, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1071) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1072) keydup = kmemdup(key, keylen, GFP_KERNEL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1073) if (!keydup)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1074) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1075)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1076) ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1077) safexcel_ahash_complete, &result);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1078) sg_init_one(&sg, keydup, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1079) ahash_request_set_crypt(areq, &sg, ipad, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1080) init_completion(&result.completion);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1081)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1082) ret = crypto_ahash_digest(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1083) if (ret == -EINPROGRESS || ret == -EBUSY) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1084) wait_for_completion_interruptible(&result.completion);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1085) ret = result.error;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1086) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1087)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1088) /* Avoid leaking */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1089) kfree_sensitive(keydup);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1090)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1091) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1092) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1093)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1094) keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1095) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1096)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1097) memset(ipad + keylen, 0, blocksize - keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1098) memcpy(opad, ipad, blocksize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1099)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1100) for (i = 0; i < blocksize; i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1101) ipad[i] ^= HMAC_IPAD_VALUE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1102) opad[i] ^= HMAC_OPAD_VALUE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1103) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1104)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1105) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1106) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1107)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1108) static int safexcel_hmac_init_iv(struct ahash_request *areq,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1109) unsigned int blocksize, u8 *pad, void *state)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1110) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1111) struct safexcel_ahash_result result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1112) struct safexcel_ahash_req *req;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1113) struct scatterlist sg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1114) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1115)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1116) ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1117) safexcel_ahash_complete, &result);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1118) sg_init_one(&sg, pad, blocksize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1119) ahash_request_set_crypt(areq, &sg, pad, blocksize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1120) init_completion(&result.completion);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1121)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1122) ret = crypto_ahash_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1123) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1124) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1125)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1126) req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1127) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1128) req->last_req = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1129)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1130) ret = crypto_ahash_update(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1131) if (ret && ret != -EINPROGRESS && ret != -EBUSY)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1132) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1133)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1134) wait_for_completion_interruptible(&result.completion);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1135) if (result.error)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1136) return result.error;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1137)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1138) return crypto_ahash_export(areq, state);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1139) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1140)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1141) static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1142) unsigned int keylen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1143) void *istate, void *ostate)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1144) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1145) struct ahash_request *areq;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1146) struct crypto_ahash *tfm;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1147) unsigned int blocksize;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1148) u8 *ipad, *opad;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1149) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1150)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1151) tfm = crypto_alloc_ahash(alg, 0, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1152) if (IS_ERR(tfm))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1153) return PTR_ERR(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1154)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1155) areq = ahash_request_alloc(tfm, GFP_KERNEL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1156) if (!areq) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1157) ret = -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1158) goto free_ahash;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1159) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1160)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1161) crypto_ahash_clear_flags(tfm, ~0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1162) blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1163)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1164) ipad = kcalloc(2, blocksize, GFP_KERNEL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1165) if (!ipad) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1166) ret = -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1167) goto free_request;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1168) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1169)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1170) opad = ipad + blocksize;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1171)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1172) ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1173) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1174) goto free_ipad;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1175)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1176) ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1177) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1178) goto free_ipad;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1179)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1180) ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1181)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1182) free_ipad:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1183) kfree(ipad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1184) free_request:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1185) ahash_request_free(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1186) free_ahash:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1187) crypto_free_ahash(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1188)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1189) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1190) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1191)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1192) int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1193) unsigned int keylen, const char *alg,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1194) unsigned int state_sz)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1195) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1196) struct safexcel_crypto_priv *priv = base->priv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1197) struct safexcel_ahash_export_state istate, ostate;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1198) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1199)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1200) ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1201) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1202) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1203)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1204) if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1205) (memcmp(&base->ipad, istate.state, state_sz) ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1206) memcmp(&base->opad, ostate.state, state_sz)))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1207) base->needs_inv = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1208)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1209) memcpy(&base->ipad, &istate.state, state_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1210) memcpy(&base->opad, &ostate.state, state_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1211)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1212) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1213) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1214)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1215) static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1216) unsigned int keylen, const char *alg,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1217) unsigned int state_sz)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1218) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1219) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1220)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1221) return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1222) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1223)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1224) static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1225) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1226) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1227) return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1228) SHA1_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1229) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1230)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1231) struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1232) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1233) .algo_mask = SAFEXCEL_ALG_SHA1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1234) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1235) .init = safexcel_hmac_sha1_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1236) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1237) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1238) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1239) .digest = safexcel_hmac_sha1_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1240) .setkey = safexcel_hmac_sha1_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1241) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1242) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1243) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1244) .digestsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1245) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1246) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1247) .cra_name = "hmac(sha1)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1248) .cra_driver_name = "safexcel-hmac-sha1",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1249) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1250) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1251) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1252) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1253) .cra_blocksize = SHA1_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1254) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1255) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1256) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1257) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1258) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1259) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1260) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1261) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1262)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1263) static int safexcel_sha256_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1264) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1265) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1266) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1267)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1268) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1269)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1270) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1271) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1272) req->state_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1273) req->digest_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1274) req->block_sz = SHA256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1275)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1276) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1277) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1278)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1279) static int safexcel_sha256_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1280) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1281) int ret = safexcel_sha256_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1282)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1283) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1284) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1285)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1286) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1287) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1288)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1289) struct safexcel_alg_template safexcel_alg_sha256 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1290) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1291) .algo_mask = SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1292) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1293) .init = safexcel_sha256_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1294) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1295) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1296) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1297) .digest = safexcel_sha256_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1298) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1299) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1300) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1301) .digestsize = SHA256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1302) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1303) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1304) .cra_name = "sha256",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1305) .cra_driver_name = "safexcel-sha256",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1306) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1307) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1308) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1309) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1310) .cra_blocksize = SHA256_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1311) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1312) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1313) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1314) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1315) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1316) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1317) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1318) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1319)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1320) static int safexcel_sha224_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1321) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1322) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1323) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1324)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1325) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1326)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1327) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1328) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1329) req->state_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1330) req->digest_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1331) req->block_sz = SHA256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1332)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1333) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1334) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1335)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1336) static int safexcel_sha224_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1337) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1338) int ret = safexcel_sha224_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1339)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1340) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1341) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1342)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1343) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1344) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1345)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1346) struct safexcel_alg_template safexcel_alg_sha224 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1347) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1348) .algo_mask = SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1349) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1350) .init = safexcel_sha224_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1351) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1352) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1353) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1354) .digest = safexcel_sha224_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1355) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1356) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1357) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1358) .digestsize = SHA224_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1359) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1360) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1361) .cra_name = "sha224",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1362) .cra_driver_name = "safexcel-sha224",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1363) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1364) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1365) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1366) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1367) .cra_blocksize = SHA224_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1368) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1369) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1370) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1371) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1372) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1373) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1374) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1375) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1376)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1377) static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1378) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1379) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1380) return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1381) SHA256_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1382) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1383)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1384) static int safexcel_hmac_sha224_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1385) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1386) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1387) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1388)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1389) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1390)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1391) /* Start from ipad precompute */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1392) memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1393) /* Already processed the key^ipad part now! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1394) req->len = SHA256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1395) req->processed = SHA256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1396)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1397) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1398) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1399) req->state_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1400) req->digest_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1401) req->block_sz = SHA256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1402) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1403)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1404) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1405) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1406)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1407) static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1408) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1409) int ret = safexcel_hmac_sha224_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1410)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1411) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1412) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1413)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1414) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1415) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1416)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1417) struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1418) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1419) .algo_mask = SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1420) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1421) .init = safexcel_hmac_sha224_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1422) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1423) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1424) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1425) .digest = safexcel_hmac_sha224_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1426) .setkey = safexcel_hmac_sha224_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1427) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1428) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1429) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1430) .digestsize = SHA224_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1431) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1432) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1433) .cra_name = "hmac(sha224)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1434) .cra_driver_name = "safexcel-hmac-sha224",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1435) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1436) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1437) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1438) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1439) .cra_blocksize = SHA224_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1440) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1441) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1442) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1443) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1444) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1445) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1446) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1447) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1448)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1449) static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1450) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1451) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1452) return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1453) SHA256_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1454) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1455)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1456) static int safexcel_hmac_sha256_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1457) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1458) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1459) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1460)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1461) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1462)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1463) /* Start from ipad precompute */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1464) memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1465) /* Already processed the key^ipad part now! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1466) req->len = SHA256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1467) req->processed = SHA256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1468)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1469) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1470) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1471) req->state_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1472) req->digest_sz = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1473) req->block_sz = SHA256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1474) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1475)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1476) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1477) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1478)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1479) static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1480) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1481) int ret = safexcel_hmac_sha256_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1482)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1483) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1484) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1485)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1486) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1487) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1488)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1489) struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1490) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1491) .algo_mask = SAFEXCEL_ALG_SHA2_256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1492) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1493) .init = safexcel_hmac_sha256_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1494) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1495) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1496) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1497) .digest = safexcel_hmac_sha256_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1498) .setkey = safexcel_hmac_sha256_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1499) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1500) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1501) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1502) .digestsize = SHA256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1503) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1504) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1505) .cra_name = "hmac(sha256)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1506) .cra_driver_name = "safexcel-hmac-sha256",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1507) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1508) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1509) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1510) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1511) .cra_blocksize = SHA256_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1512) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1513) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1514) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1515) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1516) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1517) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1518) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1519) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1520)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1521) static int safexcel_sha512_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1522) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1523) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1524) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1525)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1526) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1527)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1528) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1529) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1530) req->state_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1531) req->digest_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1532) req->block_sz = SHA512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1533)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1534) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1535) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1536)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1537) static int safexcel_sha512_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1538) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1539) int ret = safexcel_sha512_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1540)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1541) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1542) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1543)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1544) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1545) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1546)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1547) struct safexcel_alg_template safexcel_alg_sha512 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1548) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1549) .algo_mask = SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1550) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1551) .init = safexcel_sha512_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1552) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1553) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1554) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1555) .digest = safexcel_sha512_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1556) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1557) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1558) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1559) .digestsize = SHA512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1560) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1561) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1562) .cra_name = "sha512",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1563) .cra_driver_name = "safexcel-sha512",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1564) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1565) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1566) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1567) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1568) .cra_blocksize = SHA512_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1569) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1570) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1571) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1572) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1573) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1574) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1575) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1576) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1577)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1578) static int safexcel_sha384_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1579) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1580) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1581) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1582)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1583) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1584)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1585) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1586) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1587) req->state_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1588) req->digest_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1589) req->block_sz = SHA512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1590)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1591) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1592) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1593)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1594) static int safexcel_sha384_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1595) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1596) int ret = safexcel_sha384_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1597)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1598) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1599) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1600)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1601) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1602) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1603)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1604) struct safexcel_alg_template safexcel_alg_sha384 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1605) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1606) .algo_mask = SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1607) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1608) .init = safexcel_sha384_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1609) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1610) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1611) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1612) .digest = safexcel_sha384_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1613) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1614) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1615) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1616) .digestsize = SHA384_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1617) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1618) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1619) .cra_name = "sha384",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1620) .cra_driver_name = "safexcel-sha384",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1621) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1622) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1623) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1624) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1625) .cra_blocksize = SHA384_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1626) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1627) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1628) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1629) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1630) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1631) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1632) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1633) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1634)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1635) static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1636) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1637) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1638) return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1639) SHA512_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1640) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1641)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1642) static int safexcel_hmac_sha512_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1643) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1644) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1645) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1646)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1647) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1648)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1649) /* Start from ipad precompute */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1650) memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1651) /* Already processed the key^ipad part now! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1652) req->len = SHA512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1653) req->processed = SHA512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1654)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1655) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1656) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1657) req->state_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1658) req->digest_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1659) req->block_sz = SHA512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1660) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1661)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1662) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1663) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1664)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1665) static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1666) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1667) int ret = safexcel_hmac_sha512_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1668)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1669) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1670) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1671)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1672) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1673) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1674)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1675) struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1676) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1677) .algo_mask = SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1678) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1679) .init = safexcel_hmac_sha512_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1680) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1681) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1682) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1683) .digest = safexcel_hmac_sha512_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1684) .setkey = safexcel_hmac_sha512_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1685) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1686) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1687) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1688) .digestsize = SHA512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1689) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1690) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1691) .cra_name = "hmac(sha512)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1692) .cra_driver_name = "safexcel-hmac-sha512",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1693) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1694) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1695) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1696) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1697) .cra_blocksize = SHA512_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1698) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1699) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1700) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1701) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1702) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1703) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1704) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1705) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1706)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1707) static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1708) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1709) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1710) return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1711) SHA512_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1712) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1713)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1714) static int safexcel_hmac_sha384_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1715) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1716) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1717) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1718)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1719) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1720)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1721) /* Start from ipad precompute */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1722) memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1723) /* Already processed the key^ipad part now! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1724) req->len = SHA512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1725) req->processed = SHA512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1726)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1727) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1728) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1729) req->state_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1730) req->digest_sz = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1731) req->block_sz = SHA512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1732) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1733)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1734) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1735) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1736)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1737) static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1738) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1739) int ret = safexcel_hmac_sha384_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1740)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1741) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1742) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1743)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1744) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1745) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1746)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1747) struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1748) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1749) .algo_mask = SAFEXCEL_ALG_SHA2_512,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1750) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1751) .init = safexcel_hmac_sha384_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1752) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1753) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1754) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1755) .digest = safexcel_hmac_sha384_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1756) .setkey = safexcel_hmac_sha384_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1757) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1758) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1759) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1760) .digestsize = SHA384_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1761) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1762) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1763) .cra_name = "hmac(sha384)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1764) .cra_driver_name = "safexcel-hmac-sha384",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1765) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1766) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1767) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1768) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1769) .cra_blocksize = SHA384_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1770) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1771) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1772) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1773) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1774) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1775) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1776) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1777) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1778)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1779) static int safexcel_md5_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1780) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1781) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1782) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1783)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1784) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1785)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1786) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1787) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1788) req->state_sz = MD5_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1789) req->digest_sz = MD5_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1790) req->block_sz = MD5_HMAC_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1791)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1792) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1793) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1794)
/* One-shot MD5: (re)initialize the request, then hash everything at once */
static int safexcel_md5_digest(struct ahash_request *areq)
{
	return safexcel_md5_init(areq) ?: safexcel_ahash_finup(areq);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1804)
/* Plain MD5 ahash algorithm, offloaded to the SafeXcel engine */
struct safexcel_alg_template safexcel_alg_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_md5_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "safexcel-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1835)
/*
 * Begin an HMAC-MD5 request by resuming from the precomputed inner
 * (ipad) digest that setkey cached in the tfm context: one full block
 * of key ^ ipad is accounted for as already processed.
 */
static int safexcel_hmac_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = MD5_HMAC_BLOCK_SIZE;
	req->processed = MD5_HMAC_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;
	req->len_is_le = true; /* MD5 is little endian! ... */
	req->hmac = true;

	return 0;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1859)
/*
 * HMAC-MD5 setkey: delegate to the generic HMAC setkey helper, which
 * precomputes the ipad/opad state using the "safexcel-md5" base hash.
 */
static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
					MD5_DIGEST_SIZE);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1866)
/* One-shot HMAC-MD5: (re)initialize the request, then hash in one pass */
static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
	return safexcel_hmac_md5_init(areq) ?: safexcel_ahash_finup(areq);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1876)
/* HMAC-MD5 ahash algorithm, offloaded to the SafeXcel engine */
struct safexcel_alg_template safexcel_alg_hmac_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_hmac_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_md5_digest,
		.setkey = safexcel_hmac_md5_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "safexcel-hmac-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1908)
/* tfm init for CRC32: common ahash init plus an all-zeroes default seed */
static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret = safexcel_ahash_cra_init(tfm);

	/* Default 'key' is all zeroes */
	memset(&ctx->base.ipad, 0, sizeof(u32));
	return ret;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1918)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1919) static int safexcel_crc32_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1920) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1921) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1922) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1923)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1924) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1925)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1926) /* Start from loaded key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1927) req->state[0] = cpu_to_le32(~ctx->base.ipad.word[0]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1928) /* Set processed to non-zero to enable invalidation detection */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1929) req->len = sizeof(u32);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1930) req->processed = sizeof(u32);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1931)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1932) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1933) req->digest = CONTEXT_CONTROL_DIGEST_XCM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1934) req->state_sz = sizeof(u32);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1935) req->digest_sz = sizeof(u32);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1936) req->block_sz = sizeof(u32);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1937)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1938) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1939) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1940)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1941) static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1942) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1943) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1944) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1945)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1946) if (keylen != sizeof(u32))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1947) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1948)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1949) memcpy(&ctx->base.ipad, key, sizeof(u32));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1950) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1951) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1952)
/* One-shot CRC32: (re)initialize the request, then hash all data at once */
static int safexcel_crc32_digest(struct ahash_request *areq)
{
	return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1957)
/* CRC32 "hash" (keyed seed), offloaded to the SafeXcel engine */
struct safexcel_alg_template safexcel_alg_crc32 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_crc32_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_crc32_digest,
		.setkey = safexcel_crc32_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = sizeof(u32),
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "crc32",
				.cra_driver_name = "safexcel-crc32",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_crc32_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1990)
/*
 * Begin an AES-CBCMAC/XCBC/CMAC request, resuming from the key material
 * that the corresponding setkey handler stored in ctx->base.ipad.
 */
static int safexcel_cbcmac_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded keys */
	memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
	/* Set processed to non-zero to enable invalidation detection */
	req->len = AES_BLOCK_SIZE;
	req->processed = AES_BLOCK_SIZE;

	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = ctx->key_sz;
	req->digest_sz = AES_BLOCK_SIZE;
	req->block_sz = AES_BLOCK_SIZE;
	req->xcbcmac = true;

	return 0;
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2012)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2013) static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2014) unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2015) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2016) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2017) struct crypto_aes_ctx aes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2018) int ret, i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2019)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2020) ret = aes_expandkey(&aes, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2021) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2022) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2023)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2024) memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2025) for (i = 0; i < len / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2026) ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2027)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2028) if (len == AES_KEYSIZE_192) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2029) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2030) ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2031) } else if (len == AES_KEYSIZE_256) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2032) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2033) ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2034) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2035) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2036) ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2037) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2038) ctx->cbcmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2039)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2040) memzero_explicit(&aes, sizeof(aes));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2041) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2042) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2043)
/* One-shot CBCMAC: (re)initialize the request, then hash all data at once */
static int safexcel_cbcmac_digest(struct ahash_request *areq)
{
	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2048)
/* AES-CBCMAC ahash algorithm, offloaded to the SafeXcel engine */
struct safexcel_alg_template safexcel_alg_cbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cbcmac(aes)",
				.cra_driver_name = "safexcel-cbcmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2080)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2081) static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2082) unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2083) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2084) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2085) struct crypto_aes_ctx aes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2086) u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2087) int ret, i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2088)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2089) ret = aes_expandkey(&aes, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2090) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2091) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2092)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2093) /* precompute the XCBC key material */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2094) crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2095) crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2096) CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2097) ret = crypto_cipher_setkey(ctx->kaes, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2098) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2099) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2100)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2101) crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2102) "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2103) crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2104) "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2105) crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2106) "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2107) for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2108) ctx->base.ipad.word[i] = swab(key_tmp[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2109)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2110) crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2111) crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2112) CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2113) ret = crypto_cipher_setkey(ctx->kaes,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2114) (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2115) AES_MIN_KEY_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2116) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2117) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2118)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2119) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2120) ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2121) ctx->cbcmac = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2122)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2123) memzero_explicit(&aes, sizeof(aes));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2124) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2125) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2126)
/*
 * tfm init for XCBC/CMAC: common ahash init plus a software AES cipher
 * ("kaes") that the setkey handlers use to derive the subkeys.
 */
static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);
	ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
	return PTR_ERR_OR_ZERO(ctx->kaes);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2135)
/* tfm exit: release the software AES helper, then common ahash teardown */
static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->kaes);
	safexcel_ahash_cra_exit(tfm);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2143)
/* AES-XCBC-MAC ahash algorithm, offloaded to the SafeXcel engine */
struct safexcel_alg_template safexcel_alg_xcbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_xcbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "xcbc(aes)",
				.cra_driver_name = "safexcel-xcbc-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2175)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2176) static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2177) unsigned int len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2178) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2179) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2180) struct crypto_aes_ctx aes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2181) __be64 consts[4];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2182) u64 _const[2];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2183) u8 msb_mask, gfmask;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2184) int ret, i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2185)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2186) ret = aes_expandkey(&aes, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2187) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2188) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2189)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2190) for (i = 0; i < len / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2191) ctx->base.ipad.word[i + 8] = swab(aes.key_enc[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2192)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2193) /* precompute the CMAC key material */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2194) crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2195) crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2196) CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2197) ret = crypto_cipher_setkey(ctx->kaes, key, len);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2198) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2199) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2200)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2201) /* code below borrowed from crypto/cmac.c */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2202) /* encrypt the zero block */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2203) memset(consts, 0, AES_BLOCK_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2204) crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2205)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2206) gfmask = 0x87;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2207) _const[0] = be64_to_cpu(consts[1]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2208) _const[1] = be64_to_cpu(consts[0]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2209)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2210) /* gf(2^128) multiply zero-ciphertext with u and u^2 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2211) for (i = 0; i < 4; i += 2) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2212) msb_mask = ((s64)_const[1] >> 63) & gfmask;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2213) _const[1] = (_const[1] << 1) | (_const[0] >> 63);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2214) _const[0] = (_const[0] << 1) ^ msb_mask;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2215)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2216) consts[i + 0] = cpu_to_be64(_const[1]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2217) consts[i + 1] = cpu_to_be64(_const[0]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2218) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2219) /* end of code borrowed from crypto/cmac.c */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2220)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2221) for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2222) ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2223)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2224) if (len == AES_KEYSIZE_192) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2225) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2226) ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2227) } else if (len == AES_KEYSIZE_256) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2228) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2229) ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2230) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2231) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2232) ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2233) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2234) ctx->cbcmac = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2235)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2236) memzero_explicit(&aes, sizeof(aes));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2237) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2238) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2239)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2240) struct safexcel_alg_template safexcel_alg_cmac = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2241) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2242) .algo_mask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2243) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2244) .init = safexcel_cbcmac_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2245) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2246) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2247) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2248) .digest = safexcel_cbcmac_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2249) .setkey = safexcel_cmac_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2250) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2251) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2252) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2253) .digestsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2254) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2255) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2256) .cra_name = "cmac(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2257) .cra_driver_name = "safexcel-cmac-aes",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2258) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2259) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2260) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2261) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2262) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2263) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2264) .cra_init = safexcel_xcbcmac_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2265) .cra_exit = safexcel_xcbcmac_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2266) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2267) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2268) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2269) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2270) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2271)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2272) static int safexcel_sm3_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2273) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2274) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2275) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2276)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2277) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2278)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2279) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2280) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2281) req->state_sz = SM3_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2282) req->digest_sz = SM3_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2283) req->block_sz = SM3_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2284)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2285) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2286) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2287)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2288) static int safexcel_sm3_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2289) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2290) int ret = safexcel_sm3_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2291)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2292) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2293) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2294)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2295) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2296) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2297)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2298) struct safexcel_alg_template safexcel_alg_sm3 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2299) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2300) .algo_mask = SAFEXCEL_ALG_SM3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2301) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2302) .init = safexcel_sm3_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2303) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2304) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2305) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2306) .digest = safexcel_sm3_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2307) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2308) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2309) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2310) .digestsize = SM3_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2311) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2312) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2313) .cra_name = "sm3",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2314) .cra_driver_name = "safexcel-sm3",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2315) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2316) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2317) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2318) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2319) .cra_blocksize = SM3_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2320) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2321) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2322) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2323) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2324) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2325) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2326) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2327) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2328)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2329) static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2330) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2331) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2332) return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2333) SM3_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2334) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2335)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2336) static int safexcel_hmac_sm3_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2337) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2338) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2339) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2340)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2341) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2342)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2343) /* Start from ipad precompute */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2344) memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2345) /* Already processed the key^ipad part now! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2346) req->len = SM3_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2347) req->processed = SM3_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2348)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2349) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2350) req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2351) req->state_sz = SM3_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2352) req->digest_sz = SM3_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2353) req->block_sz = SM3_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2354) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2355)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2356) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2357) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2358)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2359) static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2360) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2361) int ret = safexcel_hmac_sm3_init(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2362)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2363) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2364) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2365)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2366) return safexcel_ahash_finup(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2367) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2368)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2369) struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2370) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2371) .algo_mask = SAFEXCEL_ALG_SM3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2372) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2373) .init = safexcel_hmac_sm3_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2374) .update = safexcel_ahash_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2375) .final = safexcel_ahash_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2376) .finup = safexcel_ahash_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2377) .digest = safexcel_hmac_sm3_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2378) .setkey = safexcel_hmac_sm3_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2379) .export = safexcel_ahash_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2380) .import = safexcel_ahash_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2381) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2382) .digestsize = SM3_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2383) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2384) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2385) .cra_name = "hmac(sm3)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2386) .cra_driver_name = "safexcel-hmac-sm3",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2387) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2388) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2389) CRYPTO_ALG_ALLOCATES_MEMORY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2390) CRYPTO_ALG_KERN_DRIVER_ONLY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2391) .cra_blocksize = SM3_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2392) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2393) .cra_init = safexcel_ahash_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2394) .cra_exit = safexcel_ahash_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2395) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2396) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2397) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2398) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2399) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2400)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2401) static int safexcel_sha3_224_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2402) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2403) struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2404) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2405) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2406)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2407) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2408)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2409) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2410) req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2411) req->state_sz = SHA3_224_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2412) req->digest_sz = SHA3_224_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2413) req->block_sz = SHA3_224_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2414) ctx->do_fallback = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2415) ctx->fb_init_done = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2416) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2417) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2418)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2419) static int safexcel_sha3_fbcheck(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2420) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2421) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2422) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2423) struct ahash_request *subreq = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2424) int ret = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2425)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2426) if (ctx->do_fallback) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2427) ahash_request_set_tfm(subreq, ctx->fback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2428) ahash_request_set_callback(subreq, req->base.flags,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2429) req->base.complete, req->base.data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2430) ahash_request_set_crypt(subreq, req->src, req->result,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2431) req->nbytes);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2432) if (!ctx->fb_init_done) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2433) if (ctx->fb_do_setkey) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2434) /* Set fallback cipher HMAC key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2435) u8 key[SHA3_224_BLOCK_SIZE];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2436)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2437) memcpy(key, &ctx->base.ipad,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2438) crypto_ahash_blocksize(ctx->fback) / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2439) memcpy(key +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2440) crypto_ahash_blocksize(ctx->fback) / 2,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2441) &ctx->base.opad,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2442) crypto_ahash_blocksize(ctx->fback) / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2443) ret = crypto_ahash_setkey(ctx->fback, key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2444) crypto_ahash_blocksize(ctx->fback));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2445) memzero_explicit(key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2446) crypto_ahash_blocksize(ctx->fback));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2447) ctx->fb_do_setkey = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2448) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2449) ret = ret ?: crypto_ahash_init(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2450) ctx->fb_init_done = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2451) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2452) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2453) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2454) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2455)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2456) static int safexcel_sha3_update(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2457) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2458) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2459) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2460) struct ahash_request *subreq = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2461)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2462) ctx->do_fallback = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2463) return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2464) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2465)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2466) static int safexcel_sha3_final(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2467) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2468) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2469) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2470) struct ahash_request *subreq = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2471)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2472) ctx->do_fallback = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2473) return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2474) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2475)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2476) static int safexcel_sha3_finup(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2477) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2478) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2479) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2480) struct ahash_request *subreq = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2481)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2482) ctx->do_fallback |= !req->nbytes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2483) if (ctx->do_fallback)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2484) /* Update or ex/import happened or len 0, cannot use the HW */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2485) return safexcel_sha3_fbcheck(req) ?:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2486) crypto_ahash_finup(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2487) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2488) return safexcel_ahash_finup(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2489) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2490)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2491) static int safexcel_sha3_digest_fallback(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2492) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2493) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2494) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2495) struct ahash_request *subreq = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2496)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2497) ctx->do_fallback = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2498) ctx->fb_init_done = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2499) return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2500) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2501)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2502) static int safexcel_sha3_224_digest(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2503) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2504) if (req->nbytes)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2505) return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2506)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2507) /* HW cannot do zero length hash, use fallback instead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2508) return safexcel_sha3_digest_fallback(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2509) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2510)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2511) static int safexcel_sha3_export(struct ahash_request *req, void *out)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2512) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2513) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2514) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2515) struct ahash_request *subreq = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2516)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2517) ctx->do_fallback = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2518) return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2519) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2520)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2521) static int safexcel_sha3_import(struct ahash_request *req, const void *in)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2522) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2523) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2524) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2525) struct ahash_request *subreq = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2526)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2527) ctx->do_fallback = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2528) return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2529) // return safexcel_ahash_import(req, in);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2530) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2531)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2532) static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2533) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2534) struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2535) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2536)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2537) safexcel_ahash_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2538)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2539) /* Allocate fallback implementation */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2540) ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2541) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2542) CRYPTO_ALG_NEED_FALLBACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2543) if (IS_ERR(ctx->fback))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2544) return PTR_ERR(ctx->fback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2545)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2546) /* Update statesize from fallback algorithm! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2547) crypto_hash_alg_common(ahash)->statesize =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2548) crypto_ahash_statesize(ctx->fback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2549) crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2550) sizeof(struct ahash_request) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2551) crypto_ahash_reqsize(ctx->fback)));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2552) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2553) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2554)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2555) static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2556) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2557) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2558)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2559) crypto_free_ahash(ctx->fback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2560) safexcel_ahash_cra_exit(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2561) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2562)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2563) struct safexcel_alg_template safexcel_alg_sha3_224 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2564) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2565) .algo_mask = SAFEXCEL_ALG_SHA3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2566) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2567) .init = safexcel_sha3_224_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2568) .update = safexcel_sha3_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2569) .final = safexcel_sha3_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2570) .finup = safexcel_sha3_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2571) .digest = safexcel_sha3_224_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2572) .export = safexcel_sha3_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2573) .import = safexcel_sha3_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2574) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2575) .digestsize = SHA3_224_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2576) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2577) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2578) .cra_name = "sha3-224",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2579) .cra_driver_name = "safexcel-sha3-224",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2580) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2581) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2582) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2583) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2584) .cra_blocksize = SHA3_224_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2585) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2586) .cra_init = safexcel_sha3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2587) .cra_exit = safexcel_sha3_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2588) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2589) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2590) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2591) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2592) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2593)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2594) static int safexcel_sha3_256_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2595) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2596) struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2597) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2598) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2599)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2600) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2601)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2602) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2603) req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2604) req->state_sz = SHA3_256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2605) req->digest_sz = SHA3_256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2606) req->block_sz = SHA3_256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2607) ctx->do_fallback = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2608) ctx->fb_init_done = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2609) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2610) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2611)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2612) static int safexcel_sha3_256_digest(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2613) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2614) if (req->nbytes)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2615) return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2616)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2617) /* HW cannot do zero length hash, use fallback instead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2618) return safexcel_sha3_digest_fallback(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2619) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2620)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2621) struct safexcel_alg_template safexcel_alg_sha3_256 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2622) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2623) .algo_mask = SAFEXCEL_ALG_SHA3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2624) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2625) .init = safexcel_sha3_256_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2626) .update = safexcel_sha3_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2627) .final = safexcel_sha3_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2628) .finup = safexcel_sha3_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2629) .digest = safexcel_sha3_256_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2630) .export = safexcel_sha3_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2631) .import = safexcel_sha3_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2632) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2633) .digestsize = SHA3_256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2634) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2635) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2636) .cra_name = "sha3-256",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2637) .cra_driver_name = "safexcel-sha3-256",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2638) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2639) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2640) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2641) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2642) .cra_blocksize = SHA3_256_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2643) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2644) .cra_init = safexcel_sha3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2645) .cra_exit = safexcel_sha3_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2646) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2647) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2648) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2649) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2650) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2651)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2652) static int safexcel_sha3_384_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2653) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2654) struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2655) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2656) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2657)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2658) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2659)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2660) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2661) req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2662) req->state_sz = SHA3_384_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2663) req->digest_sz = SHA3_384_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2664) req->block_sz = SHA3_384_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2665) ctx->do_fallback = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2666) ctx->fb_init_done = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2667) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2668) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2669)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2670) static int safexcel_sha3_384_digest(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2671) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2672) if (req->nbytes)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2673) return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2674)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2675) /* HW cannot do zero length hash, use fallback instead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2676) return safexcel_sha3_digest_fallback(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2677) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2678)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2679) struct safexcel_alg_template safexcel_alg_sha3_384 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2680) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2681) .algo_mask = SAFEXCEL_ALG_SHA3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2682) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2683) .init = safexcel_sha3_384_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2684) .update = safexcel_sha3_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2685) .final = safexcel_sha3_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2686) .finup = safexcel_sha3_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2687) .digest = safexcel_sha3_384_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2688) .export = safexcel_sha3_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2689) .import = safexcel_sha3_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2690) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2691) .digestsize = SHA3_384_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2692) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2693) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2694) .cra_name = "sha3-384",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2695) .cra_driver_name = "safexcel-sha3-384",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2696) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2697) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2698) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2699) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2700) .cra_blocksize = SHA3_384_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2701) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2702) .cra_init = safexcel_sha3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2703) .cra_exit = safexcel_sha3_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2704) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2705) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2706) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2707) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2708) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2709)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2710) static int safexcel_sha3_512_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2711) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2712) struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2713) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2714) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2715)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2716) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2717)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2718) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2719) req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2720) req->state_sz = SHA3_512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2721) req->digest_sz = SHA3_512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2722) req->block_sz = SHA3_512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2723) ctx->do_fallback = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2724) ctx->fb_init_done = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2725) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2726) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2727)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2728) static int safexcel_sha3_512_digest(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2729) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2730) if (req->nbytes)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2731) return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2732)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2733) /* HW cannot do zero length hash, use fallback instead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2734) return safexcel_sha3_digest_fallback(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2735) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2736)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2737) struct safexcel_alg_template safexcel_alg_sha3_512 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2738) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2739) .algo_mask = SAFEXCEL_ALG_SHA3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2740) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2741) .init = safexcel_sha3_512_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2742) .update = safexcel_sha3_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2743) .final = safexcel_sha3_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2744) .finup = safexcel_sha3_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2745) .digest = safexcel_sha3_512_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2746) .export = safexcel_sha3_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2747) .import = safexcel_sha3_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2748) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2749) .digestsize = SHA3_512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2750) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2751) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2752) .cra_name = "sha3-512",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2753) .cra_driver_name = "safexcel-sha3-512",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2754) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2755) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2756) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2757) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2758) .cra_blocksize = SHA3_512_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2759) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2760) .cra_init = safexcel_sha3_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2761) .cra_exit = safexcel_sha3_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2762) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2763) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2764) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2765) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2766) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2767)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2768) static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2769) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2770) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2771) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2772)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2773) ret = safexcel_sha3_cra_init(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2774) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2775) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2776)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2777) /* Allocate precalc basic digest implementation */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2778) ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2779) if (IS_ERR(ctx->shpre))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2780) return PTR_ERR(ctx->shpre);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2781)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2782) ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2783) crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2784) if (!ctx->shdesc) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2785) crypto_free_shash(ctx->shpre);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2786) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2787) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2788) ctx->shdesc->tfm = ctx->shpre;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2789) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2790) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2791)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2792) static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2793) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2794) struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2795)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2796) crypto_free_ahash(ctx->fback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2797) crypto_free_shash(ctx->shpre);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2798) kfree(ctx->shdesc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2799) safexcel_ahash_cra_exit(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2800) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2801)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2802) static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2803) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2804) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2805) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2806) int ret = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2807)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2808) if (keylen > crypto_ahash_blocksize(tfm)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2809) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2810) * If the key is larger than the blocksize, then hash it
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2811) * first using our fallback cipher
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2812) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2813) ret = crypto_shash_digest(ctx->shdesc, key, keylen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2814) ctx->base.ipad.byte);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2815) keylen = crypto_shash_digestsize(ctx->shpre);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2816)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2817) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2818) * If the digest is larger than half the blocksize, we need to
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2819) * move the rest to opad due to the way our HMAC infra works.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2820) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2821) if (keylen > crypto_ahash_blocksize(tfm) / 2)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2822) /* Buffers overlap, need to use memmove iso memcpy! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2823) memmove(&ctx->base.opad,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2824) ctx->base.ipad.byte +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2825) crypto_ahash_blocksize(tfm) / 2,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2826) keylen - crypto_ahash_blocksize(tfm) / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2827) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2828) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2829) * Copy the key to our ipad & opad buffers
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2830) * Note that ipad and opad each contain one half of the key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2831) * to match the existing HMAC driver infrastructure.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2832) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2833) if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2834) memcpy(&ctx->base.ipad, key, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2835) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2836) memcpy(&ctx->base.ipad, key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2837) crypto_ahash_blocksize(tfm) / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2838) memcpy(&ctx->base.opad,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2839) key + crypto_ahash_blocksize(tfm) / 2,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2840) keylen - crypto_ahash_blocksize(tfm) / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2841) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2842) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2843)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2844) /* Pad key with zeroes */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2845) if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2846) memset(ctx->base.ipad.byte + keylen, 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2847) crypto_ahash_blocksize(tfm) / 2 - keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2848) memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2849) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2850) memset(ctx->base.opad.byte + keylen -
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2851) crypto_ahash_blocksize(tfm) / 2, 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2852) crypto_ahash_blocksize(tfm) - keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2853) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2854)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2855) /* If doing fallback, still need to set the new key! */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2856) ctx->fb_do_setkey = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2857) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2858) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2859)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2860) static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2861) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2862) struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2863) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2864) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2865)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2866) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2867)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2868) /* Copy (half of) the key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2869) memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2870) /* Start of HMAC should have len == processed == blocksize */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2871) req->len = SHA3_224_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2872) req->processed = SHA3_224_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2873) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2874) req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2875) req->state_sz = SHA3_224_BLOCK_SIZE / 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2876) req->digest_sz = SHA3_224_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2877) req->block_sz = SHA3_224_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2878) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2879) ctx->do_fallback = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2880) ctx->fb_init_done = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2881) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2882) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2883)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2884) static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2885) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2886) if (req->nbytes)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2887) return safexcel_hmac_sha3_224_init(req) ?:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2888) safexcel_ahash_finup(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2889)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2890) /* HW cannot do zero length HMAC, use fallback instead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2891) return safexcel_sha3_digest_fallback(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2892) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2893)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2894) static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2895) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2896) return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2897) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2898)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2899) struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2900) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2901) .algo_mask = SAFEXCEL_ALG_SHA3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2902) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2903) .init = safexcel_hmac_sha3_224_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2904) .update = safexcel_sha3_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2905) .final = safexcel_sha3_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2906) .finup = safexcel_sha3_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2907) .digest = safexcel_hmac_sha3_224_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2908) .setkey = safexcel_hmac_sha3_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2909) .export = safexcel_sha3_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2910) .import = safexcel_sha3_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2911) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2912) .digestsize = SHA3_224_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2913) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2914) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2915) .cra_name = "hmac(sha3-224)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2916) .cra_driver_name = "safexcel-hmac-sha3-224",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2917) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2918) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2919) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2920) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2921) .cra_blocksize = SHA3_224_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2922) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2923) .cra_init = safexcel_hmac_sha3_224_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2924) .cra_exit = safexcel_hmac_sha3_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2925) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2926) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2927) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2928) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2929) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2930)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2931) static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2932) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2933) struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2934) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2935) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2936)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2937) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2938)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2939) /* Copy (half of) the key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2940) memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2941) /* Start of HMAC should have len == processed == blocksize */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2942) req->len = SHA3_256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2943) req->processed = SHA3_256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2944) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2945) req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2946) req->state_sz = SHA3_256_BLOCK_SIZE / 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2947) req->digest_sz = SHA3_256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2948) req->block_sz = SHA3_256_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2949) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2950) ctx->do_fallback = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2951) ctx->fb_init_done = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2952) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2953) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2954)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2955) static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2956) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2957) if (req->nbytes)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2958) return safexcel_hmac_sha3_256_init(req) ?:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2959) safexcel_ahash_finup(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2960)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2961) /* HW cannot do zero length HMAC, use fallback instead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2962) return safexcel_sha3_digest_fallback(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2963) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2964)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2965) static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2966) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2967) return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2968) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2969)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2970) struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2971) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2972) .algo_mask = SAFEXCEL_ALG_SHA3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2973) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2974) .init = safexcel_hmac_sha3_256_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2975) .update = safexcel_sha3_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2976) .final = safexcel_sha3_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2977) .finup = safexcel_sha3_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2978) .digest = safexcel_hmac_sha3_256_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2979) .setkey = safexcel_hmac_sha3_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2980) .export = safexcel_sha3_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2981) .import = safexcel_sha3_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2982) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2983) .digestsize = SHA3_256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2984) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2985) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2986) .cra_name = "hmac(sha3-256)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2987) .cra_driver_name = "safexcel-hmac-sha3-256",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2988) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2989) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2990) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2991) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2992) .cra_blocksize = SHA3_256_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2993) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2994) .cra_init = safexcel_hmac_sha3_256_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2995) .cra_exit = safexcel_hmac_sha3_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2996) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2997) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2998) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2999) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3000) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3001)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3002) static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3003) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3004) struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3005) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3006) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3007)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3008) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3009)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3010) /* Copy (half of) the key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3011) memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3012) /* Start of HMAC should have len == processed == blocksize */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3013) req->len = SHA3_384_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3014) req->processed = SHA3_384_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3015) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3016) req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3017) req->state_sz = SHA3_384_BLOCK_SIZE / 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3018) req->digest_sz = SHA3_384_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3019) req->block_sz = SHA3_384_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3020) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3021) ctx->do_fallback = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3022) ctx->fb_init_done = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3023) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3024) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3025)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3026) static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3027) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3028) if (req->nbytes)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3029) return safexcel_hmac_sha3_384_init(req) ?:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3030) safexcel_ahash_finup(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3031)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3032) /* HW cannot do zero length HMAC, use fallback instead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3033) return safexcel_sha3_digest_fallback(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3034) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3035)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3036) static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3037) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3038) return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3039) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3040)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3041) struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3042) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3043) .algo_mask = SAFEXCEL_ALG_SHA3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3044) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3045) .init = safexcel_hmac_sha3_384_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3046) .update = safexcel_sha3_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3047) .final = safexcel_sha3_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3048) .finup = safexcel_sha3_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3049) .digest = safexcel_hmac_sha3_384_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3050) .setkey = safexcel_hmac_sha3_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3051) .export = safexcel_sha3_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3052) .import = safexcel_sha3_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3053) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3054) .digestsize = SHA3_384_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3055) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3056) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3057) .cra_name = "hmac(sha3-384)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3058) .cra_driver_name = "safexcel-hmac-sha3-384",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3059) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3060) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3061) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3062) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3063) .cra_blocksize = SHA3_384_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3064) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3065) .cra_init = safexcel_hmac_sha3_384_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3066) .cra_exit = safexcel_hmac_sha3_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3067) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3068) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3069) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3070) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3071) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3072)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3073) static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3074) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3075) struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3076) struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3077) struct safexcel_ahash_req *req = ahash_request_ctx(areq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3078)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3079) memset(req, 0, sizeof(*req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3080)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3081) /* Copy (half of) the key */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3082) memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3083) /* Start of HMAC should have len == processed == blocksize */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3084) req->len = SHA3_512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3085) req->processed = SHA3_512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3086) ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3087) req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3088) req->state_sz = SHA3_512_BLOCK_SIZE / 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3089) req->digest_sz = SHA3_512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3090) req->block_sz = SHA3_512_BLOCK_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3091) req->hmac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3092) ctx->do_fallback = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3093) ctx->fb_init_done = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3094) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3095) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3096)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3097) static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3098) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3099) if (req->nbytes)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3100) return safexcel_hmac_sha3_512_init(req) ?:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3101) safexcel_ahash_finup(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3102)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3103) /* HW cannot do zero length HMAC, use fallback instead */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3104) return safexcel_sha3_digest_fallback(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3105) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3106)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3107) static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3108) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3109) return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3110) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3111) struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3112) .type = SAFEXCEL_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3113) .algo_mask = SAFEXCEL_ALG_SHA3,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3114) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3115) .init = safexcel_hmac_sha3_512_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3116) .update = safexcel_sha3_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3117) .final = safexcel_sha3_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3118) .finup = safexcel_sha3_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3119) .digest = safexcel_hmac_sha3_512_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3120) .setkey = safexcel_hmac_sha3_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3121) .export = safexcel_sha3_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3122) .import = safexcel_sha3_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3123) .halg = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3124) .digestsize = SHA3_512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3125) .statesize = sizeof(struct safexcel_ahash_export_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3126) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3127) .cra_name = "hmac(sha3-512)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3128) .cra_driver_name = "safexcel-hmac-sha3-512",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3129) .cra_priority = SAFEXCEL_CRA_PRIORITY,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3130) .cra_flags = CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3131) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3132) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3133) .cra_blocksize = SHA3_512_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3134) .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3135) .cra_init = safexcel_hmac_sha3_512_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3136) .cra_exit = safexcel_hmac_sha3_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3137) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3138) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3139) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3140) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3141) };