/*
 * Key Wrapping: RFC3394 / NIST SP800-38F
 *
 * Copyright (C) 2015, Stephan Mueller <smueller@chronox.de>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, and the entire permission notice in its entirety,
 *    including the disclaimer of warranties.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote
 *    products derived from this software without specific prior
 *    written permission.
 *
 * ALTERNATIVELY, this product may be distributed under the terms of
 * the GNU General Public License, in which case the provisions of the GPL2
 * are required INSTEAD OF the above restrictions. (This clause is
 * necessary due to a potential bad interaction between the GPL and
 * the restrictions contained in a BSD-style copyright.)
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF
 * WHICH ARE HEREBY DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
 * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 * USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 */

/*
 * Note for using key wrapping:
 *
 * * The result of the encryption operation is the ciphertext starting
 *   with the 2nd semiblock. The first semiblock is provided as the IV.
 *   The IV used to start the encryption operation is the default IV.
 *
 * * The input for the decryption is the first semiblock handed in as an
 *   IV. The ciphertext is the data starting with the 2nd semiblock. The
 *   return code of the decryption operation will be -EBADMSG in case an
 *   integrity error occurs.
 *
 * To obtain the full result of an encryption as expected by SP800-38F, the
 * caller must allocate a buffer of plaintext + 8 bytes:
 *
 *	unsigned int datalen = ptlen + crypto_skcipher_ivsize(tfm);
 *	u8 data[datalen];
 *	u8 *iv = data;
 *	u8 *pt = data + crypto_skcipher_ivsize(tfm);
 *	<ensure that pt contains the plaintext of size ptlen>
 *	sg_init_one(&sg, pt, ptlen);
 *	skcipher_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *
 *	==> After encryption, data now contains full KW result as per SP800-38F.
 *
 * In case of decryption, ciphertext now already has the expected length
 * and must be segmented appropriately:
 *
 *	unsigned int datalen = CTLEN;
 *	u8 data[datalen];
 *	<ensure that data contains full ciphertext>
 *	u8 *iv = data;
 *	u8 *ct = data + crypto_skcipher_ivsize(tfm);
 *	unsigned int ctlen = datalen - crypto_skcipher_ivsize(tfm);
 *	sg_init_one(&sg, ct, ctlen);
 *	skcipher_request_set_crypt(req, &sg, &sg, ctlen, iv);
 *
 *	==> After decryption (which hopefully does not return -EBADMSG), the ct
 *	    pointer now points to the plaintext of size ctlen.
 *
 * Note 2: KWP is not implemented as this would defeat in-place operation.
 *	   To wrap non-aligned data, the caller should simply pad the input
 *	   with zeros to fill it up to the 8 byte boundary.
 */
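
/*
 * For reference, below is a minimal sketch of a complete synchronous
 * "kw(aes)" encryption. Error handling is omitted, and WRAPKEY/WRAPKEYLEN
 * as well as the iv/pt/sg setup are placeholders of this example, not part
 * of the API. Decryption is analogous with the buffer layout shown above.
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_skcipher("kw(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, WRAPKEY, WRAPKEYLEN);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	<zero-pad the plaintext to a multiple of 8 bytes if needed (Note 2)>
 *	<set up iv, pt and sg as shown in the encryption example above>
 *	skcipher_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	<data now holds IV || ciphertext, i.e. the full SP800-38F result>
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */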

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>

struct crypto_kw_block {
#define SEMIBSIZE 8
	__be64 A;
	__be64 R;
};

/*
 * Fast-forward the SGL to the offset "end" minus SEMIBSIZE, i.e. to the last
 * full semiblock within the first "end" bytes. The resulting start position
 * in the SGL is returned via the walk variable.
 */
static void crypto_kw_scatterlist_ff(struct scatter_walk *walk,
				     struct scatterlist *sg,
				     unsigned int end)
{
	unsigned int skip = 0;

	/* The caller should only operate on full SEMIBLOCKs. */
	BUG_ON(end < SEMIBSIZE);

	skip = end - SEMIBSIZE;
	while (sg) {
		if (sg->length > skip) {
			scatterwalk_start(walk, sg);
			scatterwalk_advance(walk, skip);
			break;
		} else
			skip -= sg->length;

		sg = sg_next(sg);
	}
}

static int crypto_kw_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct crypto_kw_block block;
	struct scatterlist *src, *dst;
	u64 t = 6 * ((req->cryptlen) >> 3);
	unsigned int i;
	int ret = 0;

	/*
	 * Require at least 2 semiblocks (note, the 3rd semiblock that is
	 * required by SP800-38F is the IV).
	 */
	if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)
		return -EINVAL;

	/* Place the IV into block A */
	memcpy(&block.A, req->iv, SEMIBSIZE);

	/*
	 * src scatterlist is read-only. dst scatterlist is r/w. During the
	 * first loop, src points to req->src and dst to req->dst. For any
	 * subsequent round, the code operates on req->dst only.
	 */
	src = req->src;
	dst = req->dst;

	for (i = 0; i < 6; i++) {
		struct scatter_walk src_walk, dst_walk;
		unsigned int nbytes = req->cryptlen;

		while (nbytes) {
			/* fast-forward to the last semiblock within nbytes */
			crypto_kw_scatterlist_ff(&src_walk, src, nbytes);
			/* get the source block */
			scatterwalk_copychunks(&block.R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: modify IV with counter */
			block.A ^= cpu_to_be64(t);
			t--;
			/* perform KW operation: decrypt block */
			crypto_cipher_decrypt_one(cipher, (u8 *)&block,
						  (u8 *)&block);

			/* fast-forward to the last semiblock within nbytes */
			crypto_kw_scatterlist_ff(&dst_walk, dst, nbytes);
			/* Copy block->R into place */
			scatterwalk_copychunks(&block.R, &dst_walk, SEMIBSIZE,
					       true);

			nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		src = req->dst;
		dst = req->dst;
	}

	/* Perform authentication check */
	if (block.A != cpu_to_be64(0xa6a6a6a6a6a6a6a6ULL))
		ret = -EBADMSG;

	memzero_explicit(&block, sizeof(struct crypto_kw_block));

	return ret;
}

static int crypto_kw_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct crypto_kw_block block;
	struct scatterlist *src, *dst;
	u64 t = 1;
	unsigned int i;

	/*
	 * Require at least 2 semiblocks (note, the 3rd semiblock that is
	 * required by SP800-38F is the IV that occupies the first semiblock).
	 * This means that the dst memory must be one semiblock larger than src.
	 * Also ensure that the given data is aligned to the semiblock size.
	 */
	if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)
		return -EINVAL;

	/*
	 * Place the predefined IV into block A -- for encrypt, the caller
	 * does not need to provide an IV, but must fetch the final IV.
	 */
	block.A = cpu_to_be64(0xa6a6a6a6a6a6a6a6ULL);

	/*
	 * src scatterlist is read-only. dst scatterlist is r/w. During the
	 * first loop, src points to req->src and dst to req->dst. For any
	 * subsequent round, the code operates on req->dst only.
	 */
	src = req->src;
	dst = req->dst;

	for (i = 0; i < 6; i++) {
		struct scatter_walk src_walk, dst_walk;
		unsigned int nbytes = req->cryptlen;

		scatterwalk_start(&src_walk, src);
		scatterwalk_start(&dst_walk, dst);

		while (nbytes) {
			/* get the source block */
			scatterwalk_copychunks(&block.R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: encrypt block */
			crypto_cipher_encrypt_one(cipher, (u8 *)&block,
						  (u8 *)&block);
			/* perform KW operation: modify IV with counter */
			block.A ^= cpu_to_be64(t);
			t++;

			/* Copy block->R into place */
			scatterwalk_copychunks(&block.R, &dst_walk, SEMIBSIZE,
					       true);

			nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		src = req->dst;
		dst = req->dst;
	}

	/* establish the IV for the caller to pick up */
	memcpy(req->iv, &block.A, SEMIBSIZE);

	memzero_explicit(&block, sizeof(struct crypto_kw_block));

	return 0;
}

static int crypto_kw_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	err = -EINVAL;
	/* Section 5.1 requirement for KW */
	if (alg->cra_blocksize != sizeof(struct crypto_kw_block))
		goto out_free_inst;

	inst->alg.base.cra_blocksize = SEMIBSIZE;
	inst->alg.base.cra_alignmask = 0;
	inst->alg.ivsize = SEMIBSIZE;

	inst->alg.encrypt = crypto_kw_encrypt;
	inst->alg.decrypt = crypto_kw_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

static struct crypto_template crypto_kw_tmpl = {
	.name = "kw",
	.create = crypto_kw_create,
	.module = THIS_MODULE,
};

static int __init crypto_kw_init(void)
{
	return crypto_register_template(&crypto_kw_tmpl);
}

static void __exit crypto_kw_exit(void)
{
	crypto_unregister_template(&crypto_kw_tmpl);
}

subsys_initcall(crypto_kw_init);
module_exit(crypto_kw_exit);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_AUTHOR("Stephan Mueller <smueller@chronox.de>");
MODULE_DESCRIPTION("Key Wrapping (RFC3394 / NIST SP800-38F)");
MODULE_ALIAS_CRYPTO("kw");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);