// SPDX-License-Identifier: GPL-2.0
/*
 * K3 SA2UL crypto accelerator driver
 *
 * Copyright (C) 2018-2020 Texas Instruments Incorporated - http://www.ti.com
 *
 * Authors:	Keerthy
 *		Vitaly Andrianov
 *		Tero Kristo
 */
#include <linux/clk.h>
#include <linux/dmaengine.h>
#include <linux/dmapool.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha.h>

#include "sa2ul.h"

/* Byte offset for key in encryption security context */
#define SC_ENC_KEY_OFFSET (1 + 27 + 4)
/* Byte offset for Aux-1 in encryption security context */
#define SC_ENC_AUX1_OFFSET (1 + 27 + 4 + 32)

#define SA_CMDL_UPD_ENC 0x0001
#define SA_CMDL_UPD_AUTH 0x0002
#define SA_CMDL_UPD_ENC_IV 0x0004
#define SA_CMDL_UPD_AUTH_IV 0x0008
#define SA_CMDL_UPD_AUX_KEY 0x0010

#define SA_AUTH_SUBKEY_LEN 16
#define SA_CMDL_PAYLOAD_LENGTH_MASK 0xFFFF
#define SA_CMDL_SOP_BYPASS_LEN_MASK 0xFF000000

#define MODE_CONTROL_BYTES 27
#define SA_HASH_PROCESSING 0
#define SA_CRYPTO_PROCESSING 0
#define SA_UPLOAD_HASH_TO_TLR BIT(6)

#define SA_SW0_FLAGS_MASK 0xF0000
#define SA_SW0_CMDL_INFO_MASK 0x1F00000
#define SA_SW0_CMDL_PRESENT BIT(4)
#define SA_SW0_ENG_ID_MASK 0x3E000000
#define SA_SW0_DEST_INFO_PRESENT BIT(30)
#define SA_SW2_EGRESS_LENGTH 0xFF000000
#define SA_BASIC_HASH 0x10

#define SHA256_DIGEST_WORDS 8
/* Make 32-bit word from 4 bytes */
#define SA_MK_U32(b0, b1, b2, b3) (((b0) << 24) | ((b1) << 16) | \
				   ((b2) << 8) | (b3))

/* size of SCCTL structure in bytes */
#define SA_SCCTL_SZ 16

/* Max Authentication tag size */
#define SA_MAX_AUTH_TAG_SZ 64

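/*
 * Priv and Priv-ID values programmed into the security context control
 * words in sa_init_sc() below.
 */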
#define PRIV_ID 0x1
#define PRIV 0x1

static struct device *sa_k3_dev;

/**
 * struct sa_cmdl_cfg - Command label configuration descriptor
 * @aalg: authentication algorithm ID
 * @enc_eng_id: Encryption Engine ID supported by the SA hardware
 * @auth_eng_id: Authentication Engine ID
 * @iv_size: Initialization Vector size
 * @akey: Authentication key
 * @akey_len: Authentication key length
 * @enc: True, if this is an encode request
 */
struct sa_cmdl_cfg {
	int aalg;
	u8 enc_eng_id;
	u8 auth_eng_id;
	u8 iv_size;
	const u8 *akey;
	u16 akey_len;
	bool enc;
};

/**
 * struct algo_data - Crypto algorithm specific data
 * @enc_eng: Encryption engine info structure
 * @auth_eng: Authentication engine info structure
 * @auth_ctrl: Authentication control word
 * @hash_size: Size of digest
 * @iv_idx: iv index in psdata
 * @iv_out_size: iv out size
 * @ealg_id: Encryption Algorithm ID
 * @aalg_id: Authentication algorithm ID
 * @mci_enc: Mode Control Instruction for Encryption algorithm
 * @mci_dec: Mode Control Instruction for Decryption
 * @inv_key: Whether the encryption algorithm demands key inversion
 * @ctx: Pointer to the algorithm context
 * @keyed_mac: Whether the authentication algorithm has key
 * @prep_iopad: Function pointer to generate intermediate ipad/opad
 */
struct algo_data {
	struct sa_eng_info enc_eng;
	struct sa_eng_info auth_eng;
	u8 auth_ctrl;
	u8 hash_size;
	u8 iv_idx;
	u8 iv_out_size;
	u8 ealg_id;
	u8 aalg_id;
	u8 *mci_enc;
	u8 *mci_dec;
	bool inv_key;
	struct sa_tfm_ctx *ctx;
	bool keyed_mac;
	void (*prep_iopad)(struct algo_data *algo, const u8 *key,
			   u16 key_sz, __be32 *ipad, __be32 *opad);
};

/**
 * struct sa_alg_tmpl: A generic template encompassing crypto/aead algorithms
 * @type: Type of the crypto algorithm.
 * @alg: Union of crypto algorithm definitions.
 * @registered: Flag indicating if the crypto algorithm is already registered
 */
struct sa_alg_tmpl {
	u32 type; /* CRYPTO_ALG_TYPE from <linux/crypto.h> */
	union {
		struct skcipher_alg skcipher;
		struct ahash_alg ahash;
		struct aead_alg aead;
	} alg;
	bool registered;
};

/**
 * struct sa_mapped_sg: scatterlist information for tx and rx
 * @mapped: Set to true if the @sgt is mapped
 * @dir: mapping direction used for @sgt
 * @split_sg: Set if the sg is split and needs to be freed up
 * @static_sg: Static scatterlist entry for overriding data
 * @sgt: scatterlist table for DMA API use
 */
struct sa_mapped_sg {
	bool mapped;
	enum dma_data_direction dir;
	struct scatterlist static_sg;
	struct scatterlist *split_sg;
	struct sg_table sgt;
};

/**
 * struct sa_rx_data: RX Packet miscellaneous data place holder
 * @req: crypto request data pointer
 * @ddev: pointer to the DMA device
 * @tx_in: dma_async_tx_descriptor pointer for rx channel
 * @mapped_sg: Information on tx (0) and rx (1) scatterlist DMA mapping
 * @enc: Flag indicating either encryption or decryption
 * @enc_iv_size: Initialisation vector size
 * @iv_idx: Initialisation vector index
 */
struct sa_rx_data {
	void *req;
	struct device *ddev;
	struct dma_async_tx_descriptor *tx_in;
	struct sa_mapped_sg mapped_sg[2];
	u8 enc;
	u8 enc_iv_size;
	u8 iv_idx;
};

/**
 * struct sa_req: SA request definition
 * @dev: device for the request
 * @size: total data to be transmitted via DMA
 * @enc_offset: offset of cipher data
 * @enc_size: data to be passed to cipher engine
 * @enc_iv: cipher IV
 * @auth_offset: offset of the authentication data
 * @auth_size: size of the authentication data
 * @auth_iv: authentication IV
 * @type: algorithm type for the request
 * @cmdl: command label pointer
 * @base: pointer to the base request
 * @ctx: pointer to the algorithm context data
 * @enc: true if this is an encode request
 * @src: source data
 * @dst: destination data
 * @callback: DMA callback for the request
 * @mdata_size: metadata size passed to DMA
 */
struct sa_req {
	struct device *dev;
	u16 size;
	u8 enc_offset;
	u16 enc_size;
	u8 *enc_iv;
	u8 auth_offset;
	u16 auth_size;
	u8 *auth_iv;
	u32 type;
	u32 *cmdl;
	struct crypto_async_request *base;
	struct sa_tfm_ctx *ctx;
	bool enc;
	struct scatterlist *src;
	struct scatterlist *dst;
	dma_async_tx_callback callback;
	u16 mdata_size;
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for encryption
 */
static u8 mci_cbc_enc_array[3][MODE_CONTROL_BYTES] = {
	{ 0x61, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x61, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x61, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for decryption
 */
static u8 mci_cbc_dec_array[3][MODE_CONTROL_BYTES] = {
	{ 0x71, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x71, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x71, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for encryption
 */
static u8 mci_cbc_enc_no_iv_array[3][MODE_CONTROL_BYTES] = {
	{ 0x21, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x21, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x21, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for decryption
 */
static u8 mci_cbc_dec_no_iv_array[3][MODE_CONTROL_BYTES] = {
	{ 0x31, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x31, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x31, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For ECB (Electronic Code Book) mode for encryption
 */
static u8 mci_ecb_enc_array[3][MODE_CONTROL_BYTES] = {
	{ 0x21, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x21, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x21, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For ECB (Electronic Code Book) mode for decryption
 */
static u8 mci_ecb_dec_array[3][MODE_CONTROL_BYTES] = {
	{ 0x31, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x31, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
	{ 0x31, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
};

/*
 * Mode Control Instructions for DES algorithm
 * For CBC (Cipher Block Chaining) mode and ECB mode
 * encryption and for decryption respectively
 */
static u8 mci_cbc_3des_enc_array[MODE_CONTROL_BYTES] = {
	0x60, 0x00, 0x00, 0x18, 0x88, 0x52, 0xaa, 0x4b, 0x7e, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00,
};

static u8 mci_cbc_3des_dec_array[MODE_CONTROL_BYTES] = {
	0x70, 0x00, 0x00, 0x85, 0x0a, 0xca, 0x98, 0xf4, 0x40, 0xc0, 0x00, 0x00,
	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00,
};

static u8 mci_ecb_3des_enc_array[MODE_CONTROL_BYTES] = {
	0x20, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00,
};

static u8 mci_ecb_3des_dec_array[MODE_CONTROL_BYTES] = {
	0x30, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
	0x00, 0x00, 0x00,
};

/*
 * Perform 16-byte (128-bit) swizzling.
 * The SA2UL expects the security context in little-endian format over a
 * 128-bit (16-byte) wide bus, hence swap 16 bytes at a time from higher
 * to lower address.
 */
static void sa_swiz_128(u8 *in, u16 len)
{
	u8 data[16];
	int i, j;

	for (i = 0; i < len; i += 16) {
		memcpy(data, &in[i], 16);
		for (j = 0; j < 16; j++)
			in[i + j] = data[15 - j];
	}
}

/*
 * Prepare the ipad and opad from the key, as per step 1 of the HMAC
 * (RFC 2104) construction.
 */
static void prepare_kiopad(u8 *k_ipad, u8 *k_opad, const u8 *key, u16 key_sz)
{
	int i;

	for (i = 0; i < key_sz; i++) {
		k_ipad[i] = key[i] ^ 0x36;
		k_opad[i] = key[i] ^ 0x5c;
	}

	/* The remaining key bytes are zero, so XOR reduces to the pad byte */
	for (; i < SHA1_BLOCK_SIZE; i++) {
		k_ipad[i] = 0x36;
		k_opad[i] = 0x5c;
	}
}

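/*
 * Export the partial (single block) hash state as big-endian words; this
 * becomes the intermediate ipad/opad digest that is programmed into the
 * security context by sa_set_sc_auth().
 */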
static void sa_export_shash(struct shash_desc *hash, int block_size,
			    int digest_size, __be32 *out)
{
	union {
		struct sha1_state sha1;
		struct sha256_state sha256;
		struct sha512_state sha512;
	} sha;
	void *state;
	u32 *result;
	int i;

	switch (digest_size) {
	case SHA1_DIGEST_SIZE:
		state = &sha.sha1;
		result = sha.sha1.state;
		break;
	case SHA256_DIGEST_SIZE:
		state = &sha.sha256;
		result = sha.sha256.state;
		break;
	default:
		dev_err(sa_k3_dev, "%s: bad digest_size=%d\n", __func__,
			digest_size);
		return;
	}

	crypto_shash_export(hash, state);

	for (i = 0; i < digest_size >> 2; i++)
		out[i] = cpu_to_be32(result[i]);
}

static void sa_prepare_iopads(struct algo_data *data, const u8 *key,
			      u16 key_sz, __be32 *ipad, __be32 *opad)
{
	SHASH_DESC_ON_STACK(shash, data->ctx->shash);
	int block_size = crypto_shash_blocksize(data->ctx->shash);
	int digest_size = crypto_shash_digestsize(data->ctx->shash);
	u8 k_ipad[SHA1_BLOCK_SIZE];
	u8 k_opad[SHA1_BLOCK_SIZE];

	shash->tfm = data->ctx->shash;

	prepare_kiopad(k_ipad, k_opad, key, key_sz);

	memzero_explicit(ipad, block_size);
	memzero_explicit(opad, block_size);

	crypto_shash_init(shash);
	crypto_shash_update(shash, k_ipad, block_size);
	sa_export_shash(shash, block_size, digest_size, ipad);

	crypto_shash_init(shash);
	crypto_shash_update(shash, k_opad, block_size);

	sa_export_shash(shash, block_size, digest_size, opad);
}

/* Derive the inverse key used in AES-CBC decryption operation */
static inline int sa_aes_inv_key(u8 *inv_key, const u8 *key, u16 key_sz)
{
	struct crypto_aes_ctx ctx;
	int key_pos;

	if (aes_expandkey(&ctx, key, key_sz)) {
		dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
		return -EINVAL;
	}

	/* Workaround to get the right inverse key for AES_KEYSIZE_192 keys */
	if (key_sz == AES_KEYSIZE_192) {
		ctx.key_enc[52] = ctx.key_enc[51] ^ ctx.key_enc[46];
		ctx.key_enc[53] = ctx.key_enc[52] ^ ctx.key_enc[47];
	}

	/*
	 * Based on the crypto_aes_expand_key() logic: the inverse key is
	 * the tail of the expanded key schedule.
	 */
	switch (key_sz) {
	case AES_KEYSIZE_128:
	case AES_KEYSIZE_192:
		key_pos = key_sz + 24;
		break;

	case AES_KEYSIZE_256:
		key_pos = key_sz + 24 - 4;
		break;

	default:
		dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
		return -EINVAL;
	}

	memcpy(inv_key, &ctx.key_enc[key_pos], key_sz);
	return 0;
}

/* Set Security context for the encryption engine */
static int sa_set_sc_enc(struct algo_data *ad, const u8 *key, u16 key_sz,
			 u8 enc, u8 *sc_buf)
{
	const u8 *mci = NULL;

	/* Set Encryption mode selector to crypto processing */
	sc_buf[0] = SA_CRYPTO_PROCESSING;

	if (enc)
		mci = ad->mci_enc;
	else
		mci = ad->mci_dec;
	/* Set the mode control instructions in security context */
	if (mci)
		memcpy(&sc_buf[1], mci, MODE_CONTROL_BYTES);

	/* For AES-CBC decryption get the inverse key */
	if (ad->inv_key && !enc) {
		if (sa_aes_inv_key(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz))
			return -EINVAL;
	} else {
		/* For all other cases the key is used as-is */
		memcpy(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz);
	}

	return 0;
}

/* Set Security context for the authentication engine */
static void sa_set_sc_auth(struct algo_data *ad, const u8 *key, u16 key_sz,
			   u8 *sc_buf)
{
	__be32 ipad[64], opad[64];

	/* Set Authentication mode selector to hash processing */
	sc_buf[0] = SA_HASH_PROCESSING;
	/* Auth SW ctrl word: bit[6]=1 (upload computed hash to TLR section) */
	sc_buf[1] = SA_UPLOAD_HASH_TO_TLR;
	sc_buf[1] |= ad->auth_ctrl;

	/* Copy the keys or ipad/opad */
	if (ad->keyed_mac) {
		ad->prep_iopad(ad, key, key_sz, ipad, opad);

		/* Copy ipad to AuthKey */
		memcpy(&sc_buf[32], ipad, ad->hash_size);
		/* Copy opad to Aux-1 */
		memcpy(&sc_buf[64], opad, ad->hash_size);
	} else {
		/* basic hash */
		sc_buf[1] |= SA_BASIC_HASH;
	}
}

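/* Copy an 8- or 16-byte IV as big-endian 32-bit words */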
static inline void sa_copy_iv(__be32 *out, const u8 *iv, bool size16)
{
	int j;

	for (j = 0; j < ((size16) ? 4 : 2); j++) {
		*out = cpu_to_be32(*((u32 *)iv));
		iv += 4;
		out++;
	}
}

/* Format general command label */
static int sa_format_cmdl_gen(struct sa_cmdl_cfg *cfg, u8 *cmdl,
			      struct sa_cmdl_upd_info *upd_info)
{
	u8 enc_offset = 0, auth_offset = 0, total = 0;
	u8 enc_next_eng = SA_ENG_ID_OUTPORT2;
	u8 auth_next_eng = SA_ENG_ID_OUTPORT2;
	u32 *word_ptr = (u32 *)cmdl;
	int i;

	/* Clear the command label */
	memzero_explicit(cmdl, (SA_MAX_CMDL_WORDS * sizeof(u32)));

	/* Initialize the command update structure */
	memzero_explicit(upd_info, sizeof(*upd_info));

	if (cfg->enc_eng_id && cfg->auth_eng_id) {
		if (cfg->enc) {
			auth_offset = SA_CMDL_HEADER_SIZE_BYTES;
			enc_next_eng = cfg->auth_eng_id;

			if (cfg->iv_size)
				auth_offset += cfg->iv_size;
		} else {
			enc_offset = SA_CMDL_HEADER_SIZE_BYTES;
			auth_next_eng = cfg->enc_eng_id;
		}
	}

	if (cfg->enc_eng_id) {
		upd_info->flags |= SA_CMDL_UPD_ENC;
		upd_info->enc_size.index = enc_offset >> 2;
		upd_info->enc_offset.index = upd_info->enc_size.index + 1;
		/* Encryption command label */
		cmdl[enc_offset + SA_CMDL_OFFSET_NESC] = enc_next_eng;

		/* Encryption modes requiring IV */
		if (cfg->iv_size) {
			upd_info->flags |= SA_CMDL_UPD_ENC_IV;
			upd_info->enc_iv.index =
				(enc_offset + SA_CMDL_HEADER_SIZE_BYTES) >> 2;
			upd_info->enc_iv.size = cfg->iv_size;

			cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
				SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;

			cmdl[enc_offset + SA_CMDL_OFFSET_OPTION_CTRL1] =
				(SA_CTX_ENC_AUX2_OFFSET | (cfg->iv_size >> 3));
			total += SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
		} else {
			cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
				SA_CMDL_HEADER_SIZE_BYTES;
			total += SA_CMDL_HEADER_SIZE_BYTES;
		}
	}

	if (cfg->auth_eng_id) {
		upd_info->flags |= SA_CMDL_UPD_AUTH;
		upd_info->auth_size.index = auth_offset >> 2;
		upd_info->auth_offset.index = upd_info->auth_size.index + 1;
		cmdl[auth_offset + SA_CMDL_OFFSET_NESC] = auth_next_eng;
		cmdl[auth_offset + SA_CMDL_OFFSET_LABEL_LEN] =
			SA_CMDL_HEADER_SIZE_BYTES;
		total += SA_CMDL_HEADER_SIZE_BYTES;
	}

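	/*
	 * The label was built byte-wise above; pad it to an 8-byte boundary
	 * and byte-swap each 32-bit word for the hardware.
	 */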
	total = roundup(total, 8);

	for (i = 0; i < total / 4; i++)
		word_ptr[i] = swab32(word_ptr[i]);

	return total;
}

/* Update Command label */
static inline void sa_update_cmdl(struct sa_req *req, u32 *cmdl,
				  struct sa_cmdl_upd_info *upd_info)
{
	int i = 0, j;

	if (likely(upd_info->flags & SA_CMDL_UPD_ENC)) {
		cmdl[upd_info->enc_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
		cmdl[upd_info->enc_size.index] |= req->enc_size;
		cmdl[upd_info->enc_offset.index] &=
			~SA_CMDL_SOP_BYPASS_LEN_MASK;
		cmdl[upd_info->enc_offset.index] |=
			((u32)req->enc_offset <<
			 __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK));

		if (likely(upd_info->flags & SA_CMDL_UPD_ENC_IV)) {
			__be32 *data = (__be32 *)&cmdl[upd_info->enc_iv.index];
			u32 *enc_iv = (u32 *)req->enc_iv;

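			/* Copy the IV into the label as big-endian words */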
			for (j = 0; i < upd_info->enc_iv.size; i += 4, j++) {
				data[j] = cpu_to_be32(*enc_iv);
				enc_iv++;
			}
		}
	}

	if (likely(upd_info->flags & SA_CMDL_UPD_AUTH)) {
		cmdl[upd_info->auth_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
		cmdl[upd_info->auth_size.index] |= req->auth_size;
		cmdl[upd_info->auth_offset.index] &=
			~SA_CMDL_SOP_BYPASS_LEN_MASK;
		cmdl[upd_info->auth_offset.index] |=
			((u32)req->auth_offset <<
			 __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK));
		if (upd_info->flags & SA_CMDL_UPD_AUTH_IV) {
			sa_copy_iv((void *)&cmdl[upd_info->auth_iv.index],
				   req->auth_iv,
				   (upd_info->auth_iv.size > 8));
		}
		if (upd_info->flags & SA_CMDL_UPD_AUX_KEY) {
			int offset = (req->auth_size & 0xF) ? 4 : 0;

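			/*
			 * Select the first or second 16-byte auxiliary subkey
			 * depending on whether the authenticated payload is a
			 * multiple of 16 bytes (CMAC-style subkey selection).
			 */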
			memcpy(&cmdl[upd_info->aux_key_info.index],
			       &upd_info->aux_key[offset], 16);
		}
	}
}

/* Format SWINFO words to be sent to SA */
static
void sa_set_swinfo(u8 eng_id, u16 sc_id, dma_addr_t sc_phys,
		   u8 cmdl_present, u8 cmdl_offset, u8 flags,
		   u8 hash_size, u32 *swinfo)
{
	swinfo[0] = sc_id;
	swinfo[0] |= (flags << __ffs(SA_SW0_FLAGS_MASK));
	if (likely(cmdl_present))
		swinfo[0] |= ((cmdl_offset | SA_SW0_CMDL_PRESENT) <<
			      __ffs(SA_SW0_CMDL_INFO_MASK));
	swinfo[0] |= (eng_id << __ffs(SA_SW0_ENG_ID_MASK));

	swinfo[0] |= SA_SW0_DEST_INFO_PRESENT;
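	/* SW words 1 and 2 carry the security context address and hash size */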
	swinfo[1] = (u32)(sc_phys & 0xFFFFFFFFULL);
	swinfo[2] = (u32)((sc_phys & 0xFFFFFFFF00000000ULL) >> 32);
	swinfo[2] |= (hash_size << __ffs(SA_SW2_EGRESS_LENGTH));
}

/* Dump the security context */
static void sa_dump_sc(u8 *buf, dma_addr_t dma_addr)
{
#ifdef DEBUG
	dev_info(sa_k3_dev, "Security context dump:: 0x%pad\n", &dma_addr);
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
		       16, 1, buf, SA_CTX_MAX_SZ, false);
#endif
}

static
int sa_init_sc(struct sa_ctx_info *ctx, const u8 *enc_key,
	       u16 enc_key_sz, const u8 *auth_key, u16 auth_key_sz,
	       struct algo_data *ad, u8 enc, u32 *swinfo)
{
	int enc_sc_offset = 0;
	int auth_sc_offset = 0;
	u8 *sc_buf = ctx->sc;
	u16 sc_id = ctx->sc_id;
	u8 first_engine = 0;

	memzero_explicit(sc_buf, SA_CTX_MAX_SZ);

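	/*
	 * Select the engine that sees the data first: when both engines are
	 * in use, the cipher engine comes first for encryption and the
	 * authentication engine first for decryption.
	 */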
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 706) if (ad->auth_eng.eng_id) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 707) if (enc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 708) first_engine = ad->enc_eng.eng_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 709) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 710) first_engine = ad->auth_eng.eng_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 711)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 712) enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 713) auth_sc_offset = enc_sc_offset + ad->enc_eng.sc_size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 714) sc_buf[1] = SA_SCCTL_FE_AUTH_ENC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 715) if (!ad->hash_size)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 716) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 717) ad->hash_size = roundup(ad->hash_size, 8);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 718)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 719) } else if (ad->enc_eng.eng_id && !ad->auth_eng.eng_id) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 720) enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 721) first_engine = ad->enc_eng.eng_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 722) sc_buf[1] = SA_SCCTL_FE_ENC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 723) ad->hash_size = ad->iv_out_size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 724) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 725)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 726) /* SCCTL Owner info: 0=host, 1=CP_ACE */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 727) sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 728) memcpy(&sc_buf[2], &sc_id, 2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 729) sc_buf[4] = 0x0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 730) sc_buf[5] = PRIV_ID;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 731) sc_buf[6] = PRIV;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 732) sc_buf[7] = 0x0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 733)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 734) /* Prepare context for encryption engine */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 735) if (ad->enc_eng.sc_size) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 736) if (sa_set_sc_enc(ad, enc_key, enc_key_sz, enc,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 737) &sc_buf[enc_sc_offset]))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 738) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 739) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 740)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 741) /* Prepare context for authentication engine */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 742) if (ad->auth_eng.sc_size)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 743) sa_set_sc_auth(ad, auth_key, auth_key_sz,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 744) &sc_buf[auth_sc_offset]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 745)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 746) /* Set the ownership of context to CP_ACE */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 747) sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0x80;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 748)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 749) /* swizzle the security context */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 750) sa_swiz_128(sc_buf, SA_CTX_MAX_SZ);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 751)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 752) sa_set_swinfo(first_engine, ctx->sc_id, ctx->sc_phys, 1, 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 753) SA_SW_INFO_FLAG_EVICT, ad->hash_size, swinfo);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 754)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 755) sa_dump_sc(sc_buf, ctx->sc_phys);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 756)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 757) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 758) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 759)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 760) /* Free the per direction context memory */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 761) static void sa_free_ctx_info(struct sa_ctx_info *ctx,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 762) struct sa_crypto_data *data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 763) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 764) unsigned long bn;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 765)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 766) bn = ctx->sc_id - data->sc_id_start;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 767) spin_lock(&data->scid_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 768) __clear_bit(bn, data->ctx_bm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 769) data->sc_id--;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 770) spin_unlock(&data->scid_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 771)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 772) if (ctx->sc) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 773) dma_pool_free(data->sc_pool, ctx->sc, ctx->sc_phys);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 774) ctx->sc = NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 775) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 776) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 777)
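/*
 * Allocate a per-direction context: reserve a free security context ID
 * from the bitmap and carve an SC buffer out of the DMA pool.
 */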
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 778) static int sa_init_ctx_info(struct sa_ctx_info *ctx,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 779) struct sa_crypto_data *data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 780) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 781) unsigned long bn;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 782) int err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 783)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 784) spin_lock(&data->scid_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 785) bn = find_first_zero_bit(data->ctx_bm, SA_MAX_NUM_CTX);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 786) __set_bit(bn, data->ctx_bm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 787) data->sc_id++;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 788) spin_unlock(&data->scid_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 789)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 790) ctx->sc_id = (u16)(data->sc_id_start + bn);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 791)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 792) ctx->sc = dma_pool_alloc(data->sc_pool, GFP_KERNEL, &ctx->sc_phys);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 793) if (!ctx->sc) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 794) dev_err(&data->pdev->dev, "Failed to allocate SC memory\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 795) err = -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 796) goto scid_rollback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 797) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 798)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 799) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 800)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 801) scid_rollback:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 802) spin_lock(&data->scid_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 803) __clear_bit(bn, data->ctx_bm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 804) data->sc_id--;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 805) spin_unlock(&data->scid_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 806)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 807) return err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 808) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 809)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 810) static void sa_cipher_cra_exit(struct crypto_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 811) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 812) struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 813) struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 814)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 815) dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 816) __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 817) ctx->dec.sc_id, &ctx->dec.sc_phys);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 818)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 819) sa_free_ctx_info(&ctx->enc, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 820) sa_free_ctx_info(&ctx->dec, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 821)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 822) crypto_free_sync_skcipher(ctx->fallback.skcipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 823) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 824)
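/*
 * skcipher init: allocate encrypt and decrypt context info and a
 * sync-skcipher fallback of the same algorithm name, used for request
 * sizes the hardware cannot handle.
 */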
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 825) static int sa_cipher_cra_init(struct crypto_skcipher *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 826) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 827) struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 828) struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 829) const char *name = crypto_tfm_alg_name(&tfm->base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 830) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 831)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 832) memzero_explicit(ctx, sizeof(*ctx));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 833) ctx->dev_data = data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 834)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 835) ret = sa_init_ctx_info(&ctx->enc, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 836) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 837) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 838) ret = sa_init_ctx_info(&ctx->dec, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 839) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 840) sa_free_ctx_info(&ctx->enc, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 841) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 842) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 843)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 844) ctx->fallback.skcipher =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 845) crypto_alloc_sync_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 846)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 847) if (IS_ERR(ctx->fallback.skcipher)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 848) dev_err(sa_k3_dev, "Error allocating fallback algo %s\n", name);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 849) return PTR_ERR(ctx->fallback.skcipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 850) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 851)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 852) dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 853) __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 854) ctx->dec.sc_id, &ctx->dec.sc_phys);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 855) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 856) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 857)
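/*
 * Common setkey path for the skcipher algorithms: program the fallback
 * transform, then build the encryption and decryption security contexts
 * and their command label templates.
 */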
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 858) static int sa_cipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 859) unsigned int keylen, struct algo_data *ad)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 860) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 861) struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 862) int cmdl_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 863) struct sa_cmdl_cfg cfg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 864) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 865)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 866) if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 867) keylen != AES_KEYSIZE_256)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 868) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 869)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 870) ad->enc_eng.eng_id = SA_ENG_ID_EM1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 871) ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 872)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 873) memzero_explicit(&cfg, sizeof(cfg));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 874) cfg.enc_eng_id = ad->enc_eng.eng_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 875) cfg.iv_size = crypto_skcipher_ivsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 876)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 877) crypto_sync_skcipher_clear_flags(ctx->fallback.skcipher,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 878) CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 879) crypto_sync_skcipher_set_flags(ctx->fallback.skcipher,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 880) tfm->base.crt_flags &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 881) CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 882) ret = crypto_sync_skcipher_setkey(ctx->fallback.skcipher, key, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 883) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 884) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 885)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 886) /* Setup Encryption Security Context & Command label template */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 887) if (sa_init_sc(&ctx->enc, key, keylen, NULL, 0, ad, 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 888) &ctx->enc.epib[1]))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 889) goto badkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 890)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 891) cmdl_len = sa_format_cmdl_gen(&cfg,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 892) (u8 *)ctx->enc.cmdl,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 893) &ctx->enc.cmdl_upd_info);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 894) if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 895) goto badkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 896)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 897) ctx->enc.cmdl_size = cmdl_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 898)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 899) /* Setup Decryption Security Context & Command label template */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 900) if (sa_init_sc(&ctx->dec, key, keylen, NULL, 0, ad, 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 901) &ctx->dec.epib[1]))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 902) goto badkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 903)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 904) cfg.enc_eng_id = ad->enc_eng.eng_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 905) cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 906) &ctx->dec.cmdl_upd_info);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 907)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 908) if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 909) goto badkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 910)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 911) ctx->dec.cmdl_size = cmdl_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 912) ctx->iv_idx = ad->iv_idx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 913)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 914) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 915)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 916) badkey:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 917) dev_err(sa_k3_dev, "%s: badkey\n", __func__);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 918) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 919) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 920)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 921) static int sa_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 922) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 923) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 924) struct algo_data ad = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 925) /* Convert the key size (16/24/32) to the key size index (0/1/2) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 926) int key_idx = (keylen >> 3) - 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 927)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 928) if (key_idx >= 3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 929) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 930)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 931) ad.mci_enc = mci_cbc_enc_array[key_idx];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 932) ad.mci_dec = mci_cbc_dec_array[key_idx];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 933) ad.inv_key = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 934) ad.ealg_id = SA_EALG_ID_AES_CBC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 935) ad.iv_idx = 4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 936) ad.iv_out_size = 16;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 937)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 938) return sa_cipher_setkey(tfm, key, keylen, &ad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 939) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 940)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 941) static int sa_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 942) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 943) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 944) struct algo_data ad = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 945) /* Convert the key size (16/24/32) to the key size index (0/1/2) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 946) int key_idx = (keylen >> 3) - 2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 947)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 948) if (key_idx >= 3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 949) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 950)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 951) ad.mci_enc = mci_ecb_enc_array[key_idx];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 952) ad.mci_dec = mci_ecb_dec_array[key_idx];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 953) ad.inv_key = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 954) ad.ealg_id = SA_EALG_ID_AES_ECB;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 955)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 956) return sa_cipher_setkey(tfm, key, keylen, &ad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 957) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 958)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 959) static int sa_3des_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 960) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 961) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 962) struct algo_data ad = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 963)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 964) ad.mci_enc = mci_cbc_3des_enc_array;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 965) ad.mci_dec = mci_cbc_3des_dec_array;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 966) ad.ealg_id = SA_EALG_ID_3DES_CBC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 967) ad.iv_idx = 6;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 968) ad.iv_out_size = 8;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 969)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 970) return sa_cipher_setkey(tfm, key, keylen, &ad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 971) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 972)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 973) static int sa_3des_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 974) unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 975) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 976) struct algo_data ad = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 977)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 978) ad.mci_enc = mci_ecb_3des_enc_array;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 979) ad.mci_dec = mci_ecb_3des_dec_array;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 980)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 981) return sa_cipher_setkey(tfm, key, keylen, &ad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 982) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 983)
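/* Sync the DMA-mapped destination scatterlist for CPU access to the results */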
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 984) static void sa_sync_from_device(struct sa_rx_data *rxd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 985) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 986) struct sg_table *sgt;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 987)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 988) if (rxd->mapped_sg[0].dir == DMA_BIDIRECTIONAL)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 989) sgt = &rxd->mapped_sg[0].sgt;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 990) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 991) sgt = &rxd->mapped_sg[1].sgt;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 992)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 993) dma_sync_sgtable_for_cpu(rxd->ddev, sgt, DMA_FROM_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 994) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 995)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 996) static void sa_free_sa_rx_data(struct sa_rx_data *rxd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 997) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 998) int i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 999)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1000) for (i = 0; i < ARRAY_SIZE(rxd->mapped_sg); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1001) struct sa_mapped_sg *mapped_sg = &rxd->mapped_sg[i];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1002)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1003) if (mapped_sg->mapped) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1004) dma_unmap_sgtable(rxd->ddev, &mapped_sg->sgt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1005) mapped_sg->dir, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1006) kfree(mapped_sg->split_sg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1007) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1008) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1009)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1010) kfree(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1011) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1012)
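/*
 * RX DMA completion callback for skcipher requests: copy the updated IV
 * out of the descriptor metadata, release the mapped buffers and
 * complete the request.
 */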
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1013) static void sa_aes_dma_in_callback(void *data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1014) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1015) struct sa_rx_data *rxd = (struct sa_rx_data *)data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1016) struct skcipher_request *req;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1017) u32 *result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1018) __be32 *mdptr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1019) size_t ml, pl;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1020) int i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1021)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1022) sa_sync_from_device(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1023) req = container_of(rxd->req, struct skcipher_request, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1024)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1025) if (req->iv) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1026) mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1027) &ml);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1028) result = (u32 *)req->iv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1029)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1030) for (i = 0; i < (rxd->enc_iv_size / 4); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1031) result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1032) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1033)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1034) sa_free_sa_rx_data(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1035)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1036) skcipher_request_complete(req, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1037) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1038)
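/*
 * Populate the TX descriptor metadata: the EPIB words go first, the
 * command label (PS data) starts at word 5.
 */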
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1039) static void
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1040) sa_prepare_tx_desc(u32 *mdptr, u32 pslen, u32 *psdata, u32 epiblen, u32 *epib)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1041) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1042) u32 *out, *in;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1043) int i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1044)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1045) for (out = mdptr, in = epib, i = 0; i < epiblen / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1046) *out++ = *in++;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1047)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1048) mdptr[4] = (0xFFFF << 16);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1049) for (out = &mdptr[5], in = psdata, i = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1050) i < pslen / sizeof(u32); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1051) *out++ = *in++;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1052) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1053)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1054) static int sa_run(struct sa_req *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1055) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1056) struct sa_rx_data *rxd;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1057) gfp_t gfp_flags;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1058) u32 cmdl[SA_MAX_CMDL_WORDS];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1059) struct sa_crypto_data *pdata = dev_get_drvdata(sa_k3_dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1060) struct device *ddev;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1061) struct dma_chan *dma_rx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1062) int sg_nents, src_nents, dst_nents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1063) struct scatterlist *src, *dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1064) size_t pl, ml, split_size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1065) struct sa_ctx_info *sa_ctx = req->enc ? &req->ctx->enc : &req->ctx->dec;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1066) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1067) struct dma_async_tx_descriptor *tx_out;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1068) u32 *mdptr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1069) bool diff_dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1070) enum dma_data_direction dir_src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1071) struct sa_mapped_sg *mapped_sg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1072)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1073) gfp_flags = req->base->flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1074) GFP_KERNEL : GFP_ATOMIC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1075)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1076) rxd = kzalloc(sizeof(*rxd), gfp_flags);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1077) if (!rxd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1078) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1079)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1080) if (req->src != req->dst) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1081) diff_dst = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1082) dir_src = DMA_TO_DEVICE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1083) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1084) diff_dst = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1085) dir_src = DMA_BIDIRECTIONAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1086) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1087)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1088) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1089)  * SA2UL selects the receive DMA channel based on the amount of data
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1090)  * fed to the engine, so wait on dma_rx2 for requests of 256 bytes or
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1091)  * more and on dma_rx1 otherwise. Around the switchover point there is
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1092)  * a range of sizes for which the output channel cannot be predicted;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1093)  * such sizes are never sent to the hardware and are diverted to the
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1094)  * SW fallback by the individual algorithm implementations.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1095)  */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1096) if (req->size >= 256)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1097) dma_rx = pdata->dma_rx2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1098) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1099) dma_rx = pdata->dma_rx1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1100)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1101) ddev = dma_rx->device->dev;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1102) rxd->ddev = ddev;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1103)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1104) memcpy(cmdl, sa_ctx->cmdl, sa_ctx->cmdl_size);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1105)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1106) sa_update_cmdl(req, cmdl, &sa_ctx->cmdl_upd_info);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1107)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1108) if (req->type != CRYPTO_ALG_TYPE_AHASH) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1109) if (req->enc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1110) req->type |=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1111) (SA_REQ_SUBTYPE_ENC << SA_REQ_SUBTYPE_SHIFT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1112) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1113) req->type |=
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1114) (SA_REQ_SUBTYPE_DEC << SA_REQ_SUBTYPE_SHIFT);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1115) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1116)
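	/* Store the request type in the word immediately following the command label */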
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1117) cmdl[sa_ctx->cmdl_size / sizeof(u32)] = req->type;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1118)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1119) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1120)  * Map the packets. First check whether the data fits into a single
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1121)  * SG entry and use it directly if it does. Otherwise map the whole
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1122)  * scatterlist and sg_split() it so that the mapped length matches
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1123)  * the exact amount of data being processed by the crypto engine.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1124)  */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1125) src = req->src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1126) sg_nents = sg_nents_for_len(src, req->size);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1127)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1128) split_size = req->size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1129)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1130) mapped_sg = &rxd->mapped_sg[0];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1131) if (sg_nents == 1 && split_size <= req->src->length) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1132) src = &mapped_sg->static_sg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1133) src_nents = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1134) sg_init_table(src, 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1135) sg_set_page(src, sg_page(req->src), split_size,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1136) req->src->offset);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1137)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1138) mapped_sg->sgt.sgl = src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1139) mapped_sg->sgt.orig_nents = src_nents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1140) ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1141) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1142) kfree(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1143) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1144) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1145)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1146) mapped_sg->dir = dir_src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1147) mapped_sg->mapped = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1148) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1149) mapped_sg->sgt.sgl = req->src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1150) mapped_sg->sgt.orig_nents = sg_nents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1151) ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1152) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1153) kfree(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1154) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1155) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1156)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1157) mapped_sg->dir = dir_src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1158) mapped_sg->mapped = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1159)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1160) ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents, 0, 1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1161) &split_size, &src, &src_nents, gfp_flags);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1162) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1163) src_nents = mapped_sg->sgt.nents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1164) src = mapped_sg->sgt.sgl;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1165) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1166) mapped_sg->split_sg = src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1167) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1168) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1169)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1170) dma_sync_sgtable_for_device(ddev, &mapped_sg->sgt, DMA_TO_DEVICE);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1171)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1172) if (!diff_dst) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1173) dst_nents = src_nents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1174) dst = src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1175) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1176) dst_nents = sg_nents_for_len(req->dst, req->size);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1177) mapped_sg = &rxd->mapped_sg[1];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1178)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1179) if (dst_nents == 1 && split_size <= req->dst->length) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1180) dst = &mapped_sg->static_sg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1181) dst_nents = 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1182) sg_init_table(dst, 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1183) sg_set_page(dst, sg_page(req->dst), split_size,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1184) req->dst->offset);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1185)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1186) mapped_sg->sgt.sgl = dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1187) mapped_sg->sgt.orig_nents = dst_nents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1188) ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1189) DMA_FROM_DEVICE, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1190) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1191) goto err_cleanup;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1192)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1193) mapped_sg->dir = DMA_FROM_DEVICE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1194) mapped_sg->mapped = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1195) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1196) mapped_sg->sgt.sgl = req->dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1197) mapped_sg->sgt.orig_nents = dst_nents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1198) ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1199) DMA_FROM_DEVICE, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1200) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1201) goto err_cleanup;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1202)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1203) mapped_sg->dir = DMA_FROM_DEVICE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1204) mapped_sg->mapped = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1205)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1206) ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1207) 0, 1, &split_size, &dst, &dst_nents,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1208) gfp_flags);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1209) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1210) dst_nents = mapped_sg->sgt.nents;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1211) dst = mapped_sg->sgt.sgl;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1212) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1213) mapped_sg->split_sg = dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1214) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1215) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1216) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1217)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1218) rxd->tx_in = dmaengine_prep_slave_sg(dma_rx, dst, dst_nents,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1219) DMA_DEV_TO_MEM,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1220) DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1221) if (!rxd->tx_in) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1222) dev_err(pdata->dev, "IN prep_slave_sg() failed\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1223) ret = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1224) goto err_cleanup;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1225) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1226)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1227) rxd->req = (void *)req->base;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1228) rxd->enc = req->enc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1229) rxd->iv_idx = req->ctx->iv_idx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1230) rxd->enc_iv_size = sa_ctx->cmdl_upd_info.enc_iv.size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1231) rxd->tx_in->callback = req->callback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1232) rxd->tx_in->callback_param = rxd;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1233)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1234) tx_out = dmaengine_prep_slave_sg(pdata->dma_tx, src,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1235) src_nents, DMA_MEM_TO_DEV,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1236) DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1237)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1238) if (!tx_out) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1239) dev_err(pdata->dev, "OUT prep_slave_sg() failed\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1240) ret = -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1241) goto err_cleanup;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1242) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1243)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1244) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1245)  * Prepare the descriptor metadata (EPIB + command label) that tells
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1246)  * SA2UL which security context, algorithm, keys and data sizes to use.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1247)  */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1248) mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(tx_out, &pl, &ml);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1249)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1250) sa_prepare_tx_desc(mdptr, (sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1251) sizeof(u32))), cmdl, sizeof(sa_ctx->epib),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1252) sa_ctx->epib);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1253)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1254) ml = sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS * sizeof(u32));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1255) dmaengine_desc_set_metadata_len(tx_out, req->mdata_size);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1256)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1257) dmaengine_submit(tx_out);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1258) dmaengine_submit(rxd->tx_in);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1259)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1260) dma_async_issue_pending(dma_rx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1261) dma_async_issue_pending(pdata->dma_tx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1262)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1263) return -EINPROGRESS;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1264)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1265) err_cleanup:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1266) sa_free_sa_rx_data(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1267)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1268) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1269) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1270)
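/*
 * Common skcipher request path: reject lengths that are not a multiple of
 * the block size, divert unsupported sizes to the sync fallback and hand
 * everything else to sa_run().
 */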
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1271) static int sa_cipher_run(struct skcipher_request *req, u8 *iv, int enc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1272) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1273) struct sa_tfm_ctx *ctx =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1274) crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1275) struct crypto_alg *alg = req->base.tfm->__crt_alg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1276) struct sa_req sa_req = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1277) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1278)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1279) if (!req->cryptlen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1280) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1281)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1282) if (req->cryptlen % alg->cra_blocksize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1283) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1284)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1285) /* Use SW fallback if the data size is not supported */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1286) if (req->cryptlen > SA_MAX_DATA_SZ ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1287) (req->cryptlen >= SA_UNSAFE_DATA_SZ_MIN &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1288) req->cryptlen <= SA_UNSAFE_DATA_SZ_MAX)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1289) SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback.skcipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1290)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1291) skcipher_request_set_sync_tfm(subreq, ctx->fallback.skcipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1292) skcipher_request_set_callback(subreq, req->base.flags,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1293) NULL, NULL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1294) skcipher_request_set_crypt(subreq, req->src, req->dst,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1295) req->cryptlen, req->iv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1296) if (enc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1297) ret = crypto_skcipher_encrypt(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1298) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1299) ret = crypto_skcipher_decrypt(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1300)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1301) skcipher_request_zero(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1302) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1303) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1304)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1305) sa_req.size = req->cryptlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1306) sa_req.enc_size = req->cryptlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1307) sa_req.src = req->src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1308) sa_req.dst = req->dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1309) sa_req.enc_iv = iv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1310) sa_req.type = CRYPTO_ALG_TYPE_SKCIPHER;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1311) sa_req.enc = enc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1312) sa_req.callback = sa_aes_dma_in_callback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1313) sa_req.mdata_size = 44;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1314) sa_req.base = &req->base;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1315) sa_req.ctx = ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1316)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1317) return sa_run(&sa_req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1318) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1319)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1320) static int sa_encrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1321) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1322) return sa_cipher_run(req, req->iv, 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1323) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1324)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1325) static int sa_decrypt(struct skcipher_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1326) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1327) return sa_cipher_run(req, req->iv, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1328) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1329)
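/*
 * RX DMA completion callback for hash requests: the digest is returned in
 * the descriptor metadata starting at word 4 and is copied to req->result.
 */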
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1330) static void sa_sha_dma_in_callback(void *data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1331) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1332) struct sa_rx_data *rxd = (struct sa_rx_data *)data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1333) struct ahash_request *req;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1334) struct crypto_ahash *tfm;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1335) unsigned int authsize;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1336) int i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1337) size_t ml, pl;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1338) u32 *result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1339) __be32 *mdptr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1340)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1341) sa_sync_from_device(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1342) req = container_of(rxd->req, struct ahash_request, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1343) tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1344) authsize = crypto_ahash_digestsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1345)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1346) mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1347) result = (u32 *)req->result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1348)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1349) for (i = 0; i < (authsize / 4); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1350) result[i] = be32_to_cpu(mdptr[i + 4]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1351)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1352) sa_free_sa_rx_data(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1353)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1354) ahash_request_complete(req, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1355) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1356)
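/* Return the well-known hash of the empty message without touching the HW */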
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1357) static int zero_message_process(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1358) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1359) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1360) int sa_digest_size = crypto_ahash_digestsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1361)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1362) switch (sa_digest_size) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1363) case SHA1_DIGEST_SIZE:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1364) memcpy(req->result, sha1_zero_message_hash, sa_digest_size);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1365) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1366) case SHA256_DIGEST_SIZE:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1367) memcpy(req->result, sha256_zero_message_hash, sa_digest_size);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1368) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1369) case SHA512_DIGEST_SIZE:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1370) memcpy(req->result, sha512_zero_message_hash, sa_digest_size);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1371) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1372) default:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1373) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1374) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1375)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1376) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1377) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1378)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1379) static int sa_sha_run(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1380) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1381) struct sa_tfm_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1382) struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1383) struct sa_req sa_req = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1384) size_t auth_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1385)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1386) auth_len = req->nbytes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1387)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1388) if (!auth_len)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1389) return zero_message_process(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1390)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1391) if (auth_len > SA_MAX_DATA_SZ ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1392) (auth_len >= SA_UNSAFE_DATA_SZ_MIN &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1393) auth_len <= SA_UNSAFE_DATA_SZ_MAX)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1394) struct ahash_request *subreq = &rctx->fallback_req;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1395) int ret = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1396)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1397) ahash_request_set_tfm(subreq, ctx->fallback.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1398) subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1399)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1400) crypto_ahash_init(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1401)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1402) subreq->nbytes = auth_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1403) subreq->src = req->src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1404) subreq->result = req->result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1405)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1406) ret |= crypto_ahash_update(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1407)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1408) subreq->nbytes = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1409)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1410) ret |= crypto_ahash_final(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1411)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1412) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1413) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1414)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1415) sa_req.size = auth_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1416) sa_req.auth_size = auth_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1417) sa_req.src = req->src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1418) sa_req.dst = req->src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1419) sa_req.enc = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1420) sa_req.type = CRYPTO_ALG_TYPE_AHASH;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1421) sa_req.callback = sa_sha_dma_in_callback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1422) sa_req.mdata_size = 28;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1423) sa_req.ctx = ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1424) sa_req.base = &req->base;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1425)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1426) return sa_run(&sa_req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1427) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1428)
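/*
 * Build the security context and command label template used by the ahash
 * algorithms; no key and no IV are programmed (cfg.akey = NULL,
 * cfg.iv_size = 0).
 */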
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1429) static int sa_sha_setup(struct sa_tfm_ctx *ctx, struct algo_data *ad)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1430) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1431) int bs = crypto_shash_blocksize(ctx->shash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1432) int cmdl_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1433) struct sa_cmdl_cfg cfg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1434)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1435) ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1436) ad->auth_eng.eng_id = SA_ENG_ID_AM1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1437) ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1438)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1439) memset(ctx->authkey, 0, bs);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1440) memset(&cfg, 0, sizeof(cfg));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1441) cfg.aalg = ad->aalg_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1442) cfg.enc_eng_id = ad->enc_eng.eng_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1443) cfg.auth_eng_id = ad->auth_eng.eng_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1444) cfg.iv_size = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1445) cfg.akey = NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1446) cfg.akey_len = 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1447)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1448) /* Setup Encryption Security Context & Command label template */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1449) if (sa_init_sc(&ctx->enc, NULL, 0, NULL, 0, ad, 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1450) &ctx->enc.epib[1]))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1451) goto badkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1452)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1453) cmdl_len = sa_format_cmdl_gen(&cfg,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1454) (u8 *)ctx->enc.cmdl,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1455) &ctx->enc.cmdl_upd_info);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1456) if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1457) goto badkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1458)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1459) ctx->enc.cmdl_size = cmdl_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1460)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1461) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1462)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1463) badkey:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1464) dev_err(sa_k3_dev, "%s: badkey\n", __func__);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1465) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1466) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1467)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1468) static int sa_sha_cra_init_alg(struct crypto_tfm *tfm, const char *alg_base)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1469) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1470) struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1471) struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1472) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1473)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1474) memset(ctx, 0, sizeof(*ctx));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1475) ctx->dev_data = data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1476) ret = sa_init_ctx_info(&ctx->enc, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1477) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1478) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1479)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1480) if (alg_base) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1481) ctx->shash = crypto_alloc_shash(alg_base, 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1482) CRYPTO_ALG_NEED_FALLBACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1483) if (IS_ERR(ctx->shash)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1484) dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1485) alg_base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1486) return PTR_ERR(ctx->shash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1487) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1488) /* ahash fallback for SW-only ops and data sizes the HW cannot handle */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1489) ctx->fallback.ahash =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1490) crypto_alloc_ahash(alg_base, 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1491) CRYPTO_ALG_NEED_FALLBACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1492) if (IS_ERR(ctx->fallback.ahash)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1493) dev_err(ctx->dev_data->dev,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1494) "Could not load fallback driver\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1495) return PTR_ERR(ctx->fallback.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1496) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1497) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1498)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1499) dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1500) __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1501) ctx->dec.sc_id, &ctx->dec.sc_phys);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1502)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1503) crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1504) sizeof(struct sa_sha_req_ctx) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1505) crypto_ahash_reqsize(ctx->fallback.ahash));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1506)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1507) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1508) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1509)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1510) static int sa_sha_digest(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1511) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1512) return sa_sha_run(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1513) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1514)
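/*
 * init/update/final/finup/import/export are delegated to the software
 * fallback ahash; only digest() (via sa_sha_run()) uses the SA2UL
 * hardware path.
 */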
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1515) static int sa_sha_init(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1516) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1517) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1518) struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1519) struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1520)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1521) dev_dbg(sa_k3_dev, "init: digest size: %u, rctx=%p\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1522) crypto_ahash_digestsize(tfm), rctx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1523)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1524) ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1525) rctx->fallback_req.base.flags =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1526) req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1527)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1528) return crypto_ahash_init(&rctx->fallback_req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1529) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1530)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1531) static int sa_sha_update(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1532) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1533) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1534) struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1535) struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1536)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1537) ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1538) rctx->fallback_req.base.flags =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1539) req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1540) rctx->fallback_req.nbytes = req->nbytes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1541) rctx->fallback_req.src = req->src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1542)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1543) return crypto_ahash_update(&rctx->fallback_req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1544) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1545)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1546) static int sa_sha_final(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1547) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1548) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1549) struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1550) struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1551)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1552) ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1553) rctx->fallback_req.base.flags =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1554) req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1555) rctx->fallback_req.result = req->result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1556)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1557) return crypto_ahash_final(&rctx->fallback_req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1558) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1559)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1560) static int sa_sha_finup(struct ahash_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1561) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1562) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1563) struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1564) struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1565)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1566) ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1567) rctx->fallback_req.base.flags =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1568) req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1569)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1570) rctx->fallback_req.nbytes = req->nbytes;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1571) rctx->fallback_req.src = req->src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1572) rctx->fallback_req.result = req->result;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1573)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1574) return crypto_ahash_finup(&rctx->fallback_req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1575) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1576)
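/* ahash .import: restore an exported hash state into the fallback */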
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1577) static int sa_sha_import(struct ahash_request *req, const void *in)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1578) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1579) struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1580) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1581) struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1582)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1583) ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1584) rctx->fallback_req.base.flags = req->base.flags &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1585) CRYPTO_TFM_REQ_MAY_SLEEP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1586)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1587) return crypto_ahash_import(&rctx->fallback_req, in);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1588) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1589)
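/* ahash .export: save the fallback's partial hash state */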
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1590) static int sa_sha_export(struct ahash_request *req, void *out)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1591) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1592) struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1593) struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1594) struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1595) struct ahash_request *subreq = &rctx->fallback_req;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1596)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1597) ahash_request_set_tfm(subreq, ctx->fallback.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1598) subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1599)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1600) return crypto_ahash_export(subreq, out);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1601) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1602)
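/* Initialize a SHA1 ahash transform: fallback allocation and SC setup */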
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1603) static int sa_sha1_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1604) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1605) struct algo_data ad = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1606) struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1607)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1608) sa_sha_cra_init_alg(tfm, "sha1");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1609)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1610) ad.aalg_id = SA_AALG_ID_SHA1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1611) ad.hash_size = SHA1_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1612) ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1613)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1614) sa_sha_setup(ctx, &ad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1615)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1616) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1617) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1618)
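/* Initialize a SHA256 ahash transform */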
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1619) static int sa_sha256_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1620) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1621) struct algo_data ad = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1622) struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1623)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1624) sa_sha_cra_init_alg(tfm, "sha256");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1625)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1626) ad.aalg_id = SA_AALG_ID_SHA2_256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1627) ad.hash_size = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1628) ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1629)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1630) sa_sha_setup(ctx, &ad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1631)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1632) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1633) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1634)
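/* Initialize a SHA512 ahash transform */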
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1635) static int sa_sha512_cra_init(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1636) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1637) struct algo_data ad = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1638) struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1639)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1640) sa_sha_cra_init_alg(tfm, "sha512");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1641)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1642) ad.aalg_id = SA_AALG_ID_SHA2_512;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1643) ad.hash_size = SHA512_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1644) ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA512;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1645)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1646) sa_sha_setup(ctx, &ad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1647)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1648) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1649) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1650)
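/* Free the security context and the shash/ahash fallbacks of a hash tfm */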
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1651) static void sa_sha_cra_exit(struct crypto_tfm *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1652) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1653) struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1654) struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1655)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1656) dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1657) __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1658) ctx->dec.sc_id, &ctx->dec.sc_phys);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1659)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1660) if (crypto_tfm_alg_type(tfm) == CRYPTO_ALG_TYPE_AHASH)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1661) sa_free_ctx_info(&ctx->enc, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1662)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1663) crypto_free_shash(ctx->shash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1664) crypto_free_ahash(ctx->fallback.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1665) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1666)
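/*
 * DMA completion callback for AEAD requests: on encryption the computed
 * tag is copied to the end of the destination buffer, on decryption it
 * is compared against the tag received with the ciphertext.
 */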
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1667) static void sa_aead_dma_in_callback(void *data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1668) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1669) struct sa_rx_data *rxd = (struct sa_rx_data *)data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1670) struct aead_request *req;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1671) struct crypto_aead *tfm;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1672) unsigned int start;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1673) unsigned int authsize;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1674) u8 auth_tag[SA_MAX_AUTH_TAG_SZ];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1675) size_t pl, ml;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1676) int i;
	int err = 0;
	u32 *mdptr;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1680)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1681) sa_sync_from_device(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1682) req = container_of(rxd->req, struct aead_request, base);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1683) tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1684) start = req->assoclen + req->cryptlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1685) authsize = crypto_aead_authsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1686)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1687) mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1688) for (i = 0; i < (authsize / 4); i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1689) mdptr[i + 4] = swab32(mdptr[i + 4]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1690)
	if (rxd->enc) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1694) scatterwalk_map_and_copy(&mdptr[4], req->dst, start, authsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1695) 1);
	} else {
		start -= authsize;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1699) scatterwalk_map_and_copy(auth_tag, req->src, start, authsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1700) 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1701)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1702) err = memcmp(&mdptr[4], auth_tag, authsize) ? -EBADMSG : 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1703) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1704)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1705) sa_free_sa_rx_data(rxd);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1706)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1707) aead_request_complete(req, err);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1708) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1709)
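/*
 * Common AEAD transform init: allocate the hash shash and the fallback
 * AEAD, then reserve encryption and decryption security contexts.
 */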
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1710) static int sa_cra_init_aead(struct crypto_aead *tfm, const char *hash,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1711) const char *fallback)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1712) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1713) struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1714) struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1715) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1716)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1717) memzero_explicit(ctx, sizeof(*ctx));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1718)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1719) ctx->shash = crypto_alloc_shash(hash, 0, CRYPTO_ALG_NEED_FALLBACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1720) if (IS_ERR(ctx->shash)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1721) dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n", hash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1722) return PTR_ERR(ctx->shash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1723) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1724)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1725) ctx->fallback.aead = crypto_alloc_aead(fallback, 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1726) CRYPTO_ALG_NEED_FALLBACK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1727)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1728) if (IS_ERR(ctx->fallback.aead)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1729) dev_err(sa_k3_dev, "fallback driver %s couldn't be loaded\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1730) fallback);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1731) return PTR_ERR(ctx->fallback.aead);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1732) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1733)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1734) crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1735) crypto_aead_reqsize(ctx->fallback.aead));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1736)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1737) ret = sa_init_ctx_info(&ctx->enc, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1738) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1739) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1740)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1741) ret = sa_init_ctx_info(&ctx->dec, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1742) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1743) sa_free_ctx_info(&ctx->enc, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1744) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1745) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1746)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1747) dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1748) __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1749) ctx->dec.sc_id, &ctx->dec.sc_phys);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1750)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1751) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1752) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1753)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1754) static int sa_cra_init_aead_sha1(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1755) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1756) return sa_cra_init_aead(tfm, "sha1",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1757) "authenc(hmac(sha1-ce),cbc(aes-ce))");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1758) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1759)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1760) static int sa_cra_init_aead_sha256(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1761) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1762) return sa_cra_init_aead(tfm, "sha256",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1763) "authenc(hmac(sha256-ce),cbc(aes-ce))");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1764) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1765)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1766) static void sa_exit_tfm_aead(struct crypto_aead *tfm)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1767) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1768) struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1769) struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1770)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1771) crypto_free_shash(ctx->shash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1772) crypto_free_aead(ctx->fallback.aead);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1773)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1774) sa_free_ctx_info(&ctx->enc, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1775) sa_free_ctx_info(&ctx->dec, data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1776) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1777)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1778) /* AEAD algorithm configuration interface function */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1779) static int sa_aead_setkey(struct crypto_aead *authenc,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1780) const u8 *key, unsigned int keylen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1781) struct algo_data *ad)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1782) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1783) struct sa_tfm_ctx *ctx = crypto_aead_ctx(authenc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1784) struct crypto_authenc_keys keys;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1785) int cmdl_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1786) struct sa_cmdl_cfg cfg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1787) int key_idx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1788)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1789) if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1790) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1791)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1792) /* Convert the key size (16/24/32) to the key size index (0/1/2) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1793) key_idx = (keys.enckeylen >> 3) - 2;
	if (key_idx < 0 || key_idx >= 3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1795) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1796)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1797) ad->ctx = ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1798) ad->enc_eng.eng_id = SA_ENG_ID_EM1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1799) ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1800) ad->auth_eng.eng_id = SA_ENG_ID_AM1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1801) ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1802) ad->mci_enc = mci_cbc_enc_no_iv_array[key_idx];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1803) ad->mci_dec = mci_cbc_dec_no_iv_array[key_idx];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1804) ad->inv_key = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1805) ad->keyed_mac = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1806) ad->ealg_id = SA_EALG_ID_AES_CBC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1807) ad->prep_iopad = sa_prepare_iopads;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1808)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1809) memset(&cfg, 0, sizeof(cfg));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1810) cfg.enc = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1811) cfg.aalg = ad->aalg_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1812) cfg.enc_eng_id = ad->enc_eng.eng_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1813) cfg.auth_eng_id = ad->auth_eng.eng_id;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1814) cfg.iv_size = crypto_aead_ivsize(authenc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1815) cfg.akey = keys.authkey;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1816) cfg.akey_len = keys.authkeylen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1817)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1818) /* Setup Encryption Security Context & Command label template */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1819) if (sa_init_sc(&ctx->enc, keys.enckey, keys.enckeylen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1820) keys.authkey, keys.authkeylen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1821) ad, 1, &ctx->enc.epib[1]))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1822) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1823)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1824) cmdl_len = sa_format_cmdl_gen(&cfg,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1825) (u8 *)ctx->enc.cmdl,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1826) &ctx->enc.cmdl_upd_info);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1827) if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1828) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1829)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1830) ctx->enc.cmdl_size = cmdl_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1831)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1832) /* Setup Decryption Security Context & Command label template */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1833) if (sa_init_sc(&ctx->dec, keys.enckey, keys.enckeylen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1834) keys.authkey, keys.authkeylen,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1835) ad, 0, &ctx->dec.epib[1]))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1836) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1837)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1838) cfg.enc = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1839) cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1840) &ctx->dec.cmdl_upd_info);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1841)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1842) if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1843) return -EINVAL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1844)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1845) ctx->dec.cmdl_size = cmdl_len;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1846)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1847) crypto_aead_clear_flags(ctx->fallback.aead, CRYPTO_TFM_REQ_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1848) crypto_aead_set_flags(ctx->fallback.aead,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1849) crypto_aead_get_flags(authenc) &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1850) CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(ctx->fallback.aead, key, keylen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1854) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1855)
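/* Mirror the requested authentication tag size onto the fallback AEAD */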
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1856) static int sa_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1857) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1858) struct sa_tfm_ctx *ctx = crypto_tfm_ctx(crypto_aead_tfm(tfm));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1859)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1860) return crypto_aead_setauthsize(ctx->fallback.aead, authsize);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1861) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1862)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1863) static int sa_aead_cbc_sha1_setkey(struct crypto_aead *authenc,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1864) const u8 *key, unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1865) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1866) struct algo_data ad = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1867)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1868) ad.ealg_id = SA_EALG_ID_AES_CBC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1869) ad.aalg_id = SA_AALG_ID_HMAC_SHA1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1870) ad.hash_size = SHA1_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1871) ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1872)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1873) return sa_aead_setkey(authenc, key, keylen, &ad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1874) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1875)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1876) static int sa_aead_cbc_sha256_setkey(struct crypto_aead *authenc,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1877) const u8 *key, unsigned int keylen)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1878) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1879) struct algo_data ad = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1880)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1881) ad.ealg_id = SA_EALG_ID_AES_CBC;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1882) ad.aalg_id = SA_AALG_ID_HMAC_SHA2_256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1883) ad.hash_size = SHA256_DIGEST_SIZE;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1884) ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1885)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1886) return sa_aead_setkey(authenc, key, keylen, &ad);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1887) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1888)
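/*
 * Common AEAD encrypt/decrypt path: requests the hardware cannot handle
 * (too large or in the unsafe size range) go to the fallback AEAD, the
 * rest are described in a sa_req and queued via sa_run().
 */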
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1889) static int sa_aead_run(struct aead_request *req, u8 *iv, int enc)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1890) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1891) struct crypto_aead *tfm = crypto_aead_reqtfm(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1892) struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1893) struct sa_req sa_req = { 0 };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1894) size_t auth_size, enc_size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1895)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1896) enc_size = req->cryptlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1897) auth_size = req->assoclen + req->cryptlen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1898)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1899) if (!enc) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1900) enc_size -= crypto_aead_authsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1901) auth_size -= crypto_aead_authsize(tfm);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1902) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1903)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1904) if (auth_size > SA_MAX_DATA_SZ ||
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1905) (auth_size >= SA_UNSAFE_DATA_SZ_MIN &&
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1906) auth_size <= SA_UNSAFE_DATA_SZ_MAX)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1907) struct aead_request *subreq = aead_request_ctx(req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1908) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1909)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1910) aead_request_set_tfm(subreq, ctx->fallback.aead);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1911) aead_request_set_callback(subreq, req->base.flags,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1912) req->base.complete, req->base.data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1913) aead_request_set_crypt(subreq, req->src, req->dst,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1914) req->cryptlen, req->iv);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1915) aead_request_set_ad(subreq, req->assoclen);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1916)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1917) ret = enc ? crypto_aead_encrypt(subreq) :
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1918) crypto_aead_decrypt(subreq);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1919) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1920) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1921)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1922) sa_req.enc_offset = req->assoclen;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1923) sa_req.enc_size = enc_size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1924) sa_req.auth_size = auth_size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1925) sa_req.size = auth_size;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1926) sa_req.enc_iv = iv;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1927) sa_req.type = CRYPTO_ALG_TYPE_AEAD;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1928) sa_req.enc = enc;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1929) sa_req.callback = sa_aead_dma_in_callback;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1930) sa_req.mdata_size = 52;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1931) sa_req.base = &req->base;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1932) sa_req.ctx = ctx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1933) sa_req.src = req->src;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1934) sa_req.dst = req->dst;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1935)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1936) return sa_run(&sa_req);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1937) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1938)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1939) /* AEAD algorithm encrypt interface function */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1940) static int sa_aead_encrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1941) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1942) return sa_aead_run(req, req->iv, 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1943) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1944)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1945) /* AEAD algorithm decrypt interface function */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1946) static int sa_aead_decrypt(struct aead_request *req)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1947) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1948) return sa_aead_run(req, req->iv, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1949) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1950)
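/* Algorithm templates advertised to the crypto framework */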
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1951) static struct sa_alg_tmpl sa_algs[] = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1952) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1953) .type = CRYPTO_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1954) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1955) .base.cra_name = "cbc(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1956) .base.cra_driver_name = "cbc-aes-sa2ul",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1957) .base.cra_priority = 30000,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1958) .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1959) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1960) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1961) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1962) .base.cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1963) .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1964) .base.cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1965) .init = sa_cipher_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1966) .exit = sa_cipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1967) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1968) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1969) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1970) .setkey = sa_aes_cbc_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1971) .encrypt = sa_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1972) .decrypt = sa_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1973) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1974) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1975) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1976) .type = CRYPTO_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1977) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1978) .base.cra_name = "ecb(aes)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1979) .base.cra_driver_name = "ecb-aes-sa2ul",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1980) .base.cra_priority = 30000,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1981) .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1982) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1983) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1984) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1985) .base.cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1986) .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1987) .base.cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1988) .init = sa_cipher_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1989) .exit = sa_cipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1990) .min_keysize = AES_MIN_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1991) .max_keysize = AES_MAX_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1992) .setkey = sa_aes_ecb_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1993) .encrypt = sa_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1994) .decrypt = sa_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1995) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1996) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1997) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1998) .type = CRYPTO_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1999) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2000) .base.cra_name = "cbc(des3_ede)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2001) .base.cra_driver_name = "cbc-des3-sa2ul",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2002) .base.cra_priority = 30000,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2003) .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2004) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2005) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2006) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2007) .base.cra_blocksize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2008) .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2009) .base.cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2010) .init = sa_cipher_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2011) .exit = sa_cipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2012) .min_keysize = 3 * DES_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2013) .max_keysize = 3 * DES_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2014) .ivsize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2015) .setkey = sa_3des_cbc_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2016) .encrypt = sa_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2017) .decrypt = sa_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2018) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2019) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2020) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2021) .type = CRYPTO_ALG_TYPE_SKCIPHER,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2022) .alg.skcipher = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2023) .base.cra_name = "ecb(des3_ede)",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2024) .base.cra_driver_name = "ecb-des3-sa2ul",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2025) .base.cra_priority = 30000,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2026) .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2027) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2028) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2029) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2030) .base.cra_blocksize = DES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2031) .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2032) .base.cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2033) .init = sa_cipher_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2034) .exit = sa_cipher_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2035) .min_keysize = 3 * DES_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2036) .max_keysize = 3 * DES_KEY_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2037) .setkey = sa_3des_ecb_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2038) .encrypt = sa_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2039) .decrypt = sa_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2040) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2041) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2042) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2043) .type = CRYPTO_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2044) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2045) .halg.base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2046) .cra_name = "sha1",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2047) .cra_driver_name = "sha1-sa2ul",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2048) .cra_priority = 400,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2049) .cra_flags = CRYPTO_ALG_TYPE_AHASH |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2050) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2051) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2052) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2053) .cra_blocksize = SHA1_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2054) .cra_ctxsize = sizeof(struct sa_tfm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2055) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2056) .cra_init = sa_sha1_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2057) .cra_exit = sa_sha_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2058) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2059) .halg.digestsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2060) .halg.statesize = sizeof(struct sa_sha_req_ctx) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2061) sizeof(struct sha1_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2062) .init = sa_sha_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2063) .update = sa_sha_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2064) .final = sa_sha_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2065) .finup = sa_sha_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2066) .digest = sa_sha_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2067) .export = sa_sha_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2068) .import = sa_sha_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2069) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2070) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2071) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2072) .type = CRYPTO_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2073) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2074) .halg.base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2075) .cra_name = "sha256",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2076) .cra_driver_name = "sha256-sa2ul",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2077) .cra_priority = 400,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2078) .cra_flags = CRYPTO_ALG_TYPE_AHASH |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2079) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2080) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2081) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2082) .cra_blocksize = SHA256_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2083) .cra_ctxsize = sizeof(struct sa_tfm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2084) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2085) .cra_init = sa_sha256_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2086) .cra_exit = sa_sha_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2087) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2088) .halg.digestsize = SHA256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2089) .halg.statesize = sizeof(struct sa_sha_req_ctx) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2090) sizeof(struct sha256_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2091) .init = sa_sha_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2092) .update = sa_sha_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2093) .final = sa_sha_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2094) .finup = sa_sha_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2095) .digest = sa_sha_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2096) .export = sa_sha_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2097) .import = sa_sha_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2098) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2099) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2100) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2101) .type = CRYPTO_ALG_TYPE_AHASH,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2102) .alg.ahash = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2103) .halg.base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2104) .cra_name = "sha512",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2105) .cra_driver_name = "sha512-sa2ul",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2106) .cra_priority = 400,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2107) .cra_flags = CRYPTO_ALG_TYPE_AHASH |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2108) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2109) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2110) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2111) .cra_blocksize = SHA512_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2112) .cra_ctxsize = sizeof(struct sa_tfm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2113) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2114) .cra_init = sa_sha512_cra_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2115) .cra_exit = sa_sha_cra_exit,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2116) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2117) .halg.digestsize = SHA512_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2118) .halg.statesize = sizeof(struct sa_sha_req_ctx) +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2119) sizeof(struct sha512_state),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2120) .init = sa_sha_init,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2121) .update = sa_sha_update,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2122) .final = sa_sha_final,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2123) .finup = sa_sha_finup,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2124) .digest = sa_sha_digest,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2125) .export = sa_sha_export,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2126) .import = sa_sha_import,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2127) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2128) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2129) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2130) .type = CRYPTO_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2131) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2132) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2133) .cra_name = "authenc(hmac(sha1),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2134) .cra_driver_name =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2135) "authenc(hmac(sha1),cbc(aes))-sa2ul",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2136) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2137) .cra_flags = CRYPTO_ALG_TYPE_AEAD |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2138) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2139) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2140) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2141) .cra_ctxsize = sizeof(struct sa_tfm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2142) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2143) .cra_priority = 3000,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2144) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2145) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2146) .maxauthsize = SHA1_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2147)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2148) .init = sa_cra_init_aead_sha1,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2149) .exit = sa_exit_tfm_aead,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2150) .setkey = sa_aead_cbc_sha1_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2151) .setauthsize = sa_aead_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2152) .encrypt = sa_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2153) .decrypt = sa_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2154) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2155) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2156) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2157) .type = CRYPTO_ALG_TYPE_AEAD,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2158) .alg.aead = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2159) .base = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2160) .cra_name = "authenc(hmac(sha256),cbc(aes))",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2161) .cra_driver_name =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2162) "authenc(hmac(sha256),cbc(aes))-sa2ul",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2163) .cra_blocksize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2164) .cra_flags = CRYPTO_ALG_TYPE_AEAD |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2165) CRYPTO_ALG_KERN_DRIVER_ONLY |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2166) CRYPTO_ALG_ASYNC |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2167) CRYPTO_ALG_NEED_FALLBACK,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2168) .cra_ctxsize = sizeof(struct sa_tfm_ctx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2169) .cra_module = THIS_MODULE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2170) .cra_alignmask = 0,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2171) .cra_priority = 3000,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2172) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2173) .ivsize = AES_BLOCK_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2174) .maxauthsize = SHA256_DIGEST_SIZE,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2175)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2176) .init = sa_cra_init_aead_sha256,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2177) .exit = sa_exit_tfm_aead,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2178) .setkey = sa_aead_cbc_sha256_setkey,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2179) .setauthsize = sa_aead_setauthsize,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2180) .encrypt = sa_aead_encrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2181) .decrypt = sa_aead_decrypt,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2182) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2183) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2184) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2185)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2186) /* Register the algorithms in crypto framework */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2187) static void sa_register_algos(const struct device *dev)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2188) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2189) char *alg_name;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2190) u32 type;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2191) int i, err;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2192)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2193) for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2194) type = sa_algs[i].type;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2195) if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2196) alg_name = sa_algs[i].alg.skcipher.base.cra_name;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2197) err = crypto_register_skcipher(&sa_algs[i].alg.skcipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2198) } else if (type == CRYPTO_ALG_TYPE_AHASH) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2199) alg_name = sa_algs[i].alg.ahash.halg.base.cra_name;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2200) err = crypto_register_ahash(&sa_algs[i].alg.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2201) } else if (type == CRYPTO_ALG_TYPE_AEAD) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2202) alg_name = sa_algs[i].alg.aead.base.cra_name;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2203) err = crypto_register_aead(&sa_algs[i].alg.aead);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2204) } else {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2205) dev_err(dev,
				"unsupported crypto algorithm (%d)\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2207) sa_algs[i].type);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2208) continue;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2209) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2210)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2211) if (err)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2212) dev_err(dev, "Failed to register '%s'\n", alg_name);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2213) else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2214) sa_algs[i].registered = true;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2215) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2216) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2217)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2218) /* Unregister the algorithms in crypto framework */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2219) static void sa_unregister_algos(const struct device *dev)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2220) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2221) u32 type;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2222) int i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2223)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2224) for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2225) type = sa_algs[i].type;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2226) if (!sa_algs[i].registered)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2227) continue;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2228) if (type == CRYPTO_ALG_TYPE_SKCIPHER)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2229) crypto_unregister_skcipher(&sa_algs[i].alg.skcipher);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2230) else if (type == CRYPTO_ALG_TYPE_AHASH)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2231) crypto_unregister_ahash(&sa_algs[i].alg.ahash);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2232) else if (type == CRYPTO_ALG_TYPE_AEAD)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2233) crypto_unregister_aead(&sa_algs[i].alg.aead);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2234)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2235) sa_algs[i].registered = false;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2236) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2237) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2238)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2239) static int sa_init_mem(struct sa_crypto_data *dev_data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2240) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2241) struct device *dev = &dev_data->pdev->dev;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2242) /* Setup dma pool for security context buffers */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2243) dev_data->sc_pool = dma_pool_create("keystone-sc", dev,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2244) SA_CTX_MAX_SZ, 64, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2245) if (!dev_data->sc_pool) {
		dev_err(dev, "Failed to create dma pool\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2247) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2248) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2249)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2250) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2251) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2252)
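/* Request the tx/rx1/rx2 DMA channels and apply a common slave config */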
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2253) static int sa_dma_init(struct sa_crypto_data *dd)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2254) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2255) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2256) struct dma_slave_config cfg;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2257)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2258) dd->dma_rx1 = NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2259) dd->dma_tx = NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2260) dd->dma_rx2 = NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2261)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2262) ret = dma_coerce_mask_and_coherent(dd->dev, DMA_BIT_MASK(48));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2263) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2264) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2265)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2266) dd->dma_rx1 = dma_request_chan(dd->dev, "rx1");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2267) if (IS_ERR(dd->dma_rx1))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2268) return dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx1),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2269) "Unable to request rx1 DMA channel\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2270)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2271) dd->dma_rx2 = dma_request_chan(dd->dev, "rx2");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2272) if (IS_ERR(dd->dma_rx2)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2273) ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx2),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2274) "Unable to request rx2 DMA channel\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2275) goto err_dma_rx2;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2276) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2277)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2278) dd->dma_tx = dma_request_chan(dd->dev, "tx");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2279) if (IS_ERR(dd->dma_tx)) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2280) ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_tx),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2281) "Unable to request tx DMA channel\n");
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2282) goto err_dma_tx;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2283) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2284)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2285) memzero_explicit(&cfg, sizeof(cfg));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2286)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2287) cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2288) cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2289) cfg.src_maxburst = 4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2290) cfg.dst_maxburst = 4;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2291)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2292) ret = dmaengine_slave_config(dd->dma_rx1, &cfg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2293) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2294) dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2295) ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2296) goto err_dma_config;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2297) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2298)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2299) ret = dmaengine_slave_config(dd->dma_rx2, &cfg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2300) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2301) dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2302) ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2303) goto err_dma_config;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2304) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2305)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2306) ret = dmaengine_slave_config(dd->dma_tx, &cfg);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2307) if (ret) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2308) dev_err(dd->dev, "can't configure OUT dmaengine slave: %d\n",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2309) ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2310) goto err_dma_config;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2311) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2312)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2313) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2314)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2315) err_dma_config:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2316) dma_release_channel(dd->dma_tx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2317) err_dma_tx:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2318) dma_release_channel(dd->dma_rx2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2319) err_dma_rx2:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2320) dma_release_channel(dd->dma_rx1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2321)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2322) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2323) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2324)
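/* Link each populated child device as a consumer of the SA2UL parent */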
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2325) static int sa_link_child(struct device *dev, void *data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2326) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2327) struct device *parent = data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2328)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2329) device_link_add(dev, parent, DL_FLAG_AUTOPROBE_CONSUMER);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2330)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2331) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2332) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2333)
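/*
 * Probe: enable runtime PM, set up the security context pool and DMA
 * channels, enable the engine sub-blocks and register the algorithms.
 */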
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2334) static int sa_ul_probe(struct platform_device *pdev)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2335) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2336) struct device *dev = &pdev->dev;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2337) struct device_node *node = dev->of_node;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2338) struct resource *res;
	void __iomem *saul_base;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2340) struct sa_crypto_data *dev_data;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2341) u32 val;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2342) int ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2343)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2344) dev_data = devm_kzalloc(dev, sizeof(*dev_data), GFP_KERNEL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2345) if (!dev_data)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2346) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2347)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2348) sa_k3_dev = dev;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2349) dev_data->dev = dev;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2350) dev_data->pdev = pdev;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2351) platform_set_drvdata(pdev, dev_data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2352) dev_set_drvdata(sa_k3_dev, dev_data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2353)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2354) pm_runtime_enable(dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2355) ret = pm_runtime_resume_and_get(dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2356) if (ret < 0) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2357) dev_err(&pdev->dev, "%s: failed to get sync: %d\n", __func__,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2358) ret);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2359) pm_runtime_disable(dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2360) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2361) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2362)
	ret = sa_init_mem(dev_data);
	if (ret)
		goto destroy_dma_pool;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2364) ret = sa_dma_init(dev_data);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2365) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2366) goto destroy_dma_pool;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2367)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2368) spin_lock_init(&dev_data->scid_lock);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2369) res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2370) saul_base = devm_ioremap_resource(dev, res);
	if (IS_ERR(saul_base)) {
		ret = PTR_ERR(saul_base);
		goto release_dma_channels;
	}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2371)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2372) dev_data->base = saul_base;
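
/*
 * Enable the encryption and authentication sub-systems, the security
 * context cache, the ingress and egress CPPI ports and the TRNG.
 */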
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2373) val = SA_EEC_ENCSS_EN | SA_EEC_AUTHSS_EN | SA_EEC_CTXCACH_EN |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2374) SA_EEC_CPPI_PORT_IN_EN | SA_EEC_CPPI_PORT_OUT_EN |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2375) SA_EEC_TRNG_EN;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2376)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2377) writel_relaxed(val, saul_base + SA_ENGINE_ENABLE_CONTROL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2378)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2379) sa_register_algos(dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2380)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2381) ret = of_platform_populate(node, NULL, NULL, &pdev->dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2382) if (ret)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2383) goto release_dma;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2384)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2385) device_for_each_child(&pdev->dev, &pdev->dev, sa_link_child);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2386)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2387) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2388)
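/* Error unwinding mirrors, in reverse, the setup order above. */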
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2389) release_dma:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2390) sa_unregister_algos(&pdev->dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2391)
release_dma_channels:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2392) dma_release_channel(dev_data->dma_rx2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2393) dma_release_channel(dev_data->dma_rx1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2394) dma_release_channel(dev_data->dma_tx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2395)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2396) destroy_dma_pool:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2397) dma_pool_destroy(dev_data->sc_pool);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2398)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2399) pm_runtime_put_sync(&pdev->dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2400) pm_runtime_disable(&pdev->dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2401)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2402) return ret;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2403) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2404)
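/*
 * Unbind: remove the child devices populated in probe, unregister the
 * algorithms, release the DMA channels, destroy the security-context
 * pool and drop the runtime PM reference taken in probe.
 */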
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2405) static int sa_ul_remove(struct platform_device *pdev)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2406) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2407) struct sa_crypto_data *dev_data = platform_get_drvdata(pdev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2408)
	/* Tear down the children created by of_platform_populate() in probe */
	of_platform_depopulate(&pdev->dev);

^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2409) sa_unregister_algos(&pdev->dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2410)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2411) dma_release_channel(dev_data->dma_rx2);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2412) dma_release_channel(dev_data->dma_rx1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2413) dma_release_channel(dev_data->dma_tx);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2414)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2415) dma_pool_destroy(dev_data->sc_pool);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2416)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2417) platform_set_drvdata(pdev, NULL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2418)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2419) pm_runtime_put_sync(&pdev->dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2420) pm_runtime_disable(&pdev->dev);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2421)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2422) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2423) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2424)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2425) static const struct of_device_id of_match[] = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2426) {.compatible = "ti,j721e-sa2ul",},
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2427) {.compatible = "ti,am654-sa2ul",},
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2428) {},
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2429) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2430) MODULE_DEVICE_TABLE(of, of_match);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2431)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2432) static struct platform_driver sa_ul_driver = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2433) .probe = sa_ul_probe,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2434) .remove = sa_ul_remove,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2435) .driver = {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2436) .name = "saul-crypto",
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2437) .of_match_table = of_match,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2438) },
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2439) };
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2440) module_platform_driver(sa_ul_driver);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2441) MODULE_LICENSE("GPL v2");
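/*
 * A module description tag is suggested here, since recent kernels' modpost
 * can warn when a module lacks MODULE_DESCRIPTION(); the wording below is
 * only a proposal derived from the "ti,*-sa2ul" compatible strings above.
 */
MODULE_DESCRIPTION("TI SA2UL crypto accelerator driver");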