// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Support for VIA PadLock hardware crypto engine.
 *
 * Copyright (c) 2006 Michal Ludvig <michal@logix.cz>
 */

#include <crypto/internal/hash.h>
#include <crypto/padlock.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>

struct padlock_sha_desc {
	struct shash_desc fallback;
};

struct padlock_sha_ctx {
	struct crypto_shash *fallback;
};

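/*
 * The PadLock SHA engine (rep xsha1/xsha256) can only produce a
 * finalized digest: the instruction appends the padding and length
 * itself.  Multi-part updates are therefore delegated to a software
 * fallback, whose exported state is handed to the hardware only at
 * finup/final time.  The fallback descriptor lives in our descriptor
 * context; padlock_init_tfm() grows descsize accordingly.
 */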
static int padlock_sha_init(struct shash_desc *desc)
{
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
	struct padlock_sha_ctx *ctx = crypto_shash_ctx(desc->tfm);

	dctx->fallback.tfm = ctx->fallback;
	return crypto_shash_init(&dctx->fallback);
}

static int padlock_sha_update(struct shash_desc *desc,
			      const u8 *data, unsigned int length)
{
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);

	return crypto_shash_update(&dctx->fallback, data, length);
}

static int padlock_sha_export(struct shash_desc *desc, void *out)
{
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);

	return crypto_shash_export(&dctx->fallback, out);
}

static int padlock_sha_import(struct shash_desc *desc, const void *in)
{
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
	struct padlock_sha_ctx *ctx = crypto_shash_ctx(desc->tfm);

	dctx->fallback.tfm = ctx->fallback;
	return crypto_shash_import(&dctx->fallback, in);
}

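/*
 * The engine leaves the digest as 32-bit words in host (little-endian)
 * order; swap each word to produce the big-endian SHA byte order.
 */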
static inline void padlock_output_block(uint32_t *src,
		uint32_t *dst, size_t count)
{
	while (count--)
		*dst++ = swab32(*src++);
}

static int padlock_sha1_finup(struct shash_desc *desc, const u8 *in,
			      unsigned int count, u8 *out)
{
	/*
	 * We can't store directly to *out as it may be unaligned.
	 * Don't shrink this buffer below 128 bytes: the PadLock
	 * microcode requires it to be that large.
	 */
	char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
		((aligned(STACK_ALIGN)));
	char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
	struct sha1_state state;
	unsigned int space;
	unsigned int leftover;
	int err;

	err = crypto_shash_export(&dctx->fallback, &state);
	if (err)
		goto out;

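	/*
	 * state.count is 64-bit, but the engine takes the byte count in a
	 * machine word.  If the total would overflow an unsigned long,
	 * let the software fallback finish the hash instead.
	 */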
	if (state.count + count > ULONG_MAX)
		return crypto_shash_finup(&dctx->fallback, in, count, out);

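	/*
	 * leftover is the number of bytes buffered in the fallback state;
	 * the "((x - 1) & mask) + 1" form counts a block-aligned total as
	 * one full leftover block, so that case skips the copy below and
	 * the new data is fed to the engine directly.
	 */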
	leftover = ((state.count - 1) & (SHA1_BLOCK_SIZE - 1)) + 1;
	space = SHA1_BLOCK_SIZE - leftover;
	if (space) {
		if (count > space) {
			err = crypto_shash_update(&dctx->fallback, in, space) ?:
			      crypto_shash_export(&dctx->fallback, &state);
			if (err)
				goto out;
			count -= space;
			in += space;
		} else {
			memcpy(state.buffer + leftover, in, count);
			in = state.buffer;
			count += leftover;
			state.count &= ~(SHA1_BLOCK_SIZE - 1);
		}
	}

	memcpy(result, &state.state, SHA1_DIGEST_SIZE);

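	/*
	 * rep xsha1 calling convention: ESI = message tail, EDI = 16-byte
	 * aligned buffer holding the intermediate state (the final digest
	 * is written back there), EAX = bytes already hashed, ECX = total
	 * message length.  The engine hashes the remaining ECX - EAX bytes,
	 * then appends the SHA-1 padding and length itself.
	 */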
	asm volatile (".byte 0xf3,0x0f,0xa6,0xc8" /* rep xsha1 */
		      :
		      : "c"((unsigned long)state.count + count),
			"a"((unsigned long)state.count),
			"S"(in), "D"(result));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 5);

out:
	return err;
}

static int padlock_sha1_final(struct shash_desc *desc, u8 *out)
{
	u8 buf[4];

	return padlock_sha1_finup(desc, buf, 0, out);
}

static int padlock_sha256_finup(struct shash_desc *desc, const u8 *in,
				unsigned int count, u8 *out)
{
	/*
	 * We can't store directly to *out as it may be unaligned.
	 * Don't shrink this buffer below 128 bytes: the PadLock
	 * microcode requires it to be that large.
	 */
	char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
		((aligned(STACK_ALIGN)));
	char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);
	struct padlock_sha_desc *dctx = shash_desc_ctx(desc);
	struct sha256_state state;
	unsigned int space;
	unsigned int leftover;
	int err;

	err = crypto_shash_export(&dctx->fallback, &state);
	if (err)
		goto out;

	if (state.count + count > ULONG_MAX)
		return crypto_shash_finup(&dctx->fallback, in, count, out);

	leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;
	space = SHA256_BLOCK_SIZE - leftover;
	if (space) {
		if (count > space) {
			err = crypto_shash_update(&dctx->fallback, in, space) ?:
			      crypto_shash_export(&dctx->fallback, &state);
			if (err)
				goto out;
			count -= space;
			in += space;
		} else {
			memcpy(state.buf + leftover, in, count);
			in = state.buf;
			count += leftover;
			state.count &= ~(SHA256_BLOCK_SIZE - 1);
		}
	}

	memcpy(result, &state.state, SHA256_DIGEST_SIZE);

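	/* rep xsha256 uses the same calling convention as rep xsha1 above. */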
	asm volatile (".byte 0xf3,0x0f,0xa6,0xd0" /* rep xsha256 */
		      :
		      : "c"((unsigned long)state.count + count),
			"a"((unsigned long)state.count),
			"S"(in), "D"(result));

	padlock_output_block((uint32_t *)result, (uint32_t *)out, 8);

out:
	return err;
}

static int padlock_sha256_final(struct shash_desc *desc, u8 *out)
{
	u8 buf[4];

	return padlock_sha256_finup(desc, buf, 0, out);
}

static int padlock_init_tfm(struct crypto_shash *hash)
{
	const char *fallback_driver_name = crypto_shash_alg_name(hash);
	struct padlock_sha_ctx *ctx = crypto_shash_ctx(hash);
	struct crypto_shash *fallback_tfm;

	/* Allocate a fallback and abort if it failed. */
	fallback_tfm = crypto_alloc_shash(fallback_driver_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback_tfm)) {
		printk(KERN_WARNING PFX "Fallback driver '%s' could not be loaded!\n",
		       fallback_driver_name);
		return PTR_ERR(fallback_tfm);
	}

	ctx->fallback = fallback_tfm;
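	/* Make room in our descriptor context for the fallback's own. */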
	hash->descsize += crypto_shash_descsize(fallback_tfm);
	return 0;
}

static void padlock_exit_tfm(struct crypto_shash *hash)
{
	struct padlock_sha_ctx *ctx = crypto_shash_ctx(hash);

	crypto_free_shash(ctx->fallback);
}

static struct shash_alg sha1_alg = {
	.digestsize = SHA1_DIGEST_SIZE,
	.init = padlock_sha_init,
	.update = padlock_sha_update,
	.finup = padlock_sha1_finup,
	.final = padlock_sha1_final,
	.export = padlock_sha_export,
	.import = padlock_sha_import,
	.init_tfm = padlock_init_tfm,
	.exit_tfm = padlock_exit_tfm,
	.descsize = sizeof(struct padlock_sha_desc),
	.statesize = sizeof(struct sha1_state),
	.base = {
		.cra_name = "sha1",
		.cra_driver_name = "sha1-padlock",
		.cra_priority = PADLOCK_CRA_PRIORITY,
		.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = SHA1_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct padlock_sha_ctx),
		.cra_module = THIS_MODULE,
	}
};

static struct shash_alg sha256_alg = {
	.digestsize = SHA256_DIGEST_SIZE,
	.init = padlock_sha_init,
	.update = padlock_sha_update,
	.finup = padlock_sha256_finup,
	.final = padlock_sha256_final,
	.export = padlock_sha_export,
	.import = padlock_sha_import,
	.init_tfm = padlock_init_tfm,
	.exit_tfm = padlock_exit_tfm,
	.descsize = sizeof(struct padlock_sha_desc),
	.statesize = sizeof(struct sha256_state),
	.base = {
		.cra_name = "sha256",
		.cra_driver_name = "sha256-padlock",
		.cra_priority = PADLOCK_CRA_PRIORITY,
		.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = SHA256_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct padlock_sha_ctx),
		.cra_module = THIS_MODULE,
	}
};
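
/*
 * Usage sketch (illustrative only, not part of this driver): once these
 * algorithms are registered, a caller asking the shash API for "sha1"
 * or "sha256" gets the PadLock implementation whenever its cra_priority
 * wins the selection, e.g. (error handling elided):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *
 *	desc->tfm = tfm;
 *	crypto_shash_digest(desc, data, len, digest);
 *	crypto_free_shash(tfm);
 */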

/*
 * Two shash_alg instances for the hardware-implemented multi-part
 * hash (PHE) supported by the VIA Nano processor.
 */
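/*
 * Unlike the older engine above, the Nano's PHE can process a run of
 * complete blocks without finalizing: with EAX = -1 and ECX = number of
 * blocks, rep xsha1/xsha256 update the state at *EDI in place and leave
 * the padding to software.  That makes a true multi-part update
 * possible, so no software fallback is needed here.
 */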
static int padlock_sha1_init_nano(struct shash_desc *desc)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	*sctx = (struct sha1_state){
		.state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
	};

	return 0;
}

static int padlock_sha1_update_nano(struct shash_desc *desc,
				    const u8 *data, unsigned int len)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
	unsigned int partial, done;
	const u8 *src;
	/* PHE requires the output buffer to be 128 bytes long and 16-byte aligned. */
	u8 buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
		((aligned(STACK_ALIGN)));
	u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);

	partial = sctx->count & 0x3f;
	sctx->count += len;
	done = 0;
	src = data;
	memcpy(dst, (u8 *)(sctx->state), SHA1_DIGEST_SIZE);

	if ((partial + len) >= SHA1_BLOCK_SIZE) {

		/* Complete the partial block from the state buffer first. */
		if (partial) {
			done = -partial;
			memcpy(sctx->buffer + partial, data,
				done + SHA1_BLOCK_SIZE);
			src = sctx->buffer;
			asm volatile (".byte 0xf3,0x0f,0xa6,0xc8"
			: "+S"(src), "+D"(dst)
			: "a"((long)-1), "c"((unsigned long)1));
			done += SHA1_BLOCK_SIZE;
			src = data + done;
		}

		/* Process the remaining full blocks of input data. */
		if (len - done >= SHA1_BLOCK_SIZE) {
			asm volatile (".byte 0xf3,0x0f,0xa6,0xc8"
			: "+S"(src), "+D"(dst)
			: "a"((long)-1),
			"c"((unsigned long)((len - done) / SHA1_BLOCK_SIZE)));
			done += ((len - done) - (len - done) % SHA1_BLOCK_SIZE);
			src = data + done;
		}
		partial = 0;
	}
	memcpy((u8 *)(sctx->state), dst, SHA1_DIGEST_SIZE);
	memcpy(sctx->buffer + partial, src, len - done);

	return 0;
}

static int padlock_sha1_final_nano(struct shash_desc *desc, u8 *out)
{
	struct sha1_state *state = (struct sha1_state *)shash_desc_ctx(desc);
	unsigned int partial, padlen;
	__be64 bits;
	static const u8 padding[64] = { 0x80, };

	bits = cpu_to_be64(state->count << 3);

	/* Pad out to 56 mod 64 */
	partial = state->count & 0x3f;
	padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial);
	padlock_sha1_update_nano(desc, padding, padlen);

	/* Append length field bytes */
	padlock_sha1_update_nano(desc, (const u8 *)&bits, sizeof(bits));

	/* Swap to output */
	padlock_output_block((uint32_t *)(state->state), (uint32_t *)out, 5);

	return 0;
}

static int padlock_sha256_init_nano(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	*sctx = (struct sha256_state){
		.state = { SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
			   SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7 },
	};

	return 0;
}

static int padlock_sha256_update_nano(struct shash_desc *desc, const u8 *data,
				      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int partial, done;
	const u8 *src;
	/* PHE requires the output buffer to be 128 bytes long and 16-byte aligned. */
	u8 buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__
		((aligned(STACK_ALIGN)));
	u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);

	partial = sctx->count & 0x3f;
	sctx->count += len;
	done = 0;
	src = data;
	memcpy(dst, (u8 *)(sctx->state), SHA256_DIGEST_SIZE);


	if ((partial + len) >= SHA256_BLOCK_SIZE) {

		/* Complete the partial block from the state buffer first. */
		if (partial) {
			done = -partial;
			memcpy(sctx->buf + partial, data,
				done + SHA256_BLOCK_SIZE);
			src = sctx->buf;
			asm volatile (".byte 0xf3,0x0f,0xa6,0xd0"
			: "+S"(src), "+D"(dst)
			: "a"((long)-1), "c"((unsigned long)1));
			done += SHA256_BLOCK_SIZE;
			src = data + done;
		}

		/* Process the remaining full blocks of input data. */
		if (len - done >= SHA256_BLOCK_SIZE) {
			asm volatile (".byte 0xf3,0x0f,0xa6,0xd0"
			: "+S"(src), "+D"(dst)
			: "a"((long)-1),
			"c"((unsigned long)((len - done) / SHA256_BLOCK_SIZE)));
			done += ((len - done) - (len - done) % SHA256_BLOCK_SIZE);
			src = data + done;
		}
		partial = 0;
	}
	memcpy((u8 *)(sctx->state), dst, SHA256_DIGEST_SIZE);
	memcpy(sctx->buf + partial, src, len - done);

	return 0;
}

static int padlock_sha256_final_nano(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *state =
		(struct sha256_state *)shash_desc_ctx(desc);
	unsigned int partial, padlen;
	__be64 bits;
	static const u8 padding[64] = { 0x80, };

	bits = cpu_to_be64(state->count << 3);

	/* Pad out to 56 mod 64 */
	partial = state->count & 0x3f;
	padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial);
	padlock_sha256_update_nano(desc, padding, padlen);

	/* Append length field bytes */
	padlock_sha256_update_nano(desc, (const u8 *)&bits, sizeof(bits));

	/* Swap to output */
	padlock_output_block((uint32_t *)(state->state), (uint32_t *)out, 8);

	return 0;
}

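/*
 * The nano variants keep a plain sha1/sha256 state as their descriptor
 * context, so export/import are a straight copy of statesize bytes.
 */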
static int padlock_sha_export_nano(struct shash_desc *desc,
				void *out)
{
	int statesize = crypto_shash_statesize(desc->tfm);
	void *sctx = shash_desc_ctx(desc);

	memcpy(out, sctx, statesize);
	return 0;
}

static int padlock_sha_import_nano(struct shash_desc *desc,
				const void *in)
{
	int statesize = crypto_shash_statesize(desc->tfm);
	void *sctx = shash_desc_ctx(desc);

	memcpy(sctx, in, statesize);
	return 0;
}

static struct shash_alg sha1_alg_nano = {
	.digestsize = SHA1_DIGEST_SIZE,
	.init = padlock_sha1_init_nano,
	.update = padlock_sha1_update_nano,
	.final = padlock_sha1_final_nano,
	.export = padlock_sha_export_nano,
	.import = padlock_sha_import_nano,
	.descsize = sizeof(struct sha1_state),
	.statesize = sizeof(struct sha1_state),
	.base = {
		.cra_name = "sha1",
		.cra_driver_name = "sha1-padlock-nano",
		.cra_priority = PADLOCK_CRA_PRIORITY,
		.cra_blocksize = SHA1_BLOCK_SIZE,
		.cra_module = THIS_MODULE,
	}
};

static struct shash_alg sha256_alg_nano = {
	.digestsize = SHA256_DIGEST_SIZE,
	.init = padlock_sha256_init_nano,
	.update = padlock_sha256_update_nano,
	.final = padlock_sha256_final_nano,
	.export = padlock_sha_export_nano,
	.import = padlock_sha_import_nano,
	.descsize = sizeof(struct sha256_state),
	.statesize = sizeof(struct sha256_state),
	.base = {
		.cra_name = "sha256",
		.cra_driver_name = "sha256-padlock-nano",
		.cra_priority = PADLOCK_CRA_PRIORITY,
		.cra_blocksize = SHA256_BLOCK_SIZE,
		.cra_module = THIS_MODULE,
	}
};

static const struct x86_cpu_id padlock_sha_ids[] = {
	X86_MATCH_FEATURE(X86_FEATURE_PHE, NULL),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, padlock_sha_ids);

static int __init padlock_init(void)
{
	int rc;
	struct cpuinfo_x86 *c = &cpu_data(0);
	struct shash_alg *sha1;
	struct shash_alg *sha256;

	if (!x86_match_cpu(padlock_sha_ids) || !boot_cpu_has(X86_FEATURE_PHE_EN))
		return -ENODEV;

	/*
	 * On VIA Nano processors (model >= 0x0f) register the Nano
	 * multi-part implementations; otherwise register the classic
	 * fallback-based ones.
	 */
	if (c->x86_model < 0x0f) {
		sha1 = &sha1_alg;
		sha256 = &sha256_alg;
	} else {
		sha1 = &sha1_alg_nano;
		sha256 = &sha256_alg_nano;
	}

	rc = crypto_register_shash(sha1);
	if (rc)
		goto out;

	rc = crypto_register_shash(sha256);
	if (rc)
		goto out_unreg1;

	printk(KERN_NOTICE PFX "Using VIA PadLock ACE for SHA1/SHA256 algorithms.\n");

	return 0;

out_unreg1:
	crypto_unregister_shash(sha1);

out:
	printk(KERN_ERR PFX "VIA PadLock SHA1/SHA256 initialization failed.\n");
	return rc;
}

static void __exit padlock_fini(void)
{
	struct cpuinfo_x86 *c = &cpu_data(0);

	if (c->x86_model >= 0x0f) {
		crypto_unregister_shash(&sha1_alg_nano);
		crypto_unregister_shash(&sha256_alg_nano);
	} else {
		crypto_unregister_shash(&sha1_alg);
		crypto_unregister_shash(&sha256_alg);
	}
}

module_init(padlock_init);
module_exit(padlock_fini);

MODULE_DESCRIPTION("VIA PadLock SHA1/SHA256 algorithms support.");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michal Ludvig");

MODULE_ALIAS_CRYPTO("sha1-all");
MODULE_ALIAS_CRYPTO("sha256-all");
MODULE_ALIAS_CRYPTO("sha1-padlock");
MODULE_ALIAS_CRYPTO("sha256-padlock");