// SPDX-License-Identifier: BSD-3-Clause OR GPL-2.0
/* Copyright (c) 2016-2018, NXP Semiconductors
 * Copyright (c) 2018-2019, Vladimir Oltean <olteanv@gmail.com>
 */
#include <linux/packing.h>
#include <linux/module.h>
#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/types.h>

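/* Map a logical byte offset to the position of the same byte when each
 * aligned group of four bytes is stored in reversed (little-endian) byte
 * order, as required by QUIRK_LITTLE_ENDIAN. E.g. offsets 0, 1, 2, 3 map
 * to 3, 2, 1, 0 and offsets 4, 5, 6, 7 map to 7, 6, 5, 4.
 */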
static int get_le_offset(int offset)
{
	int closest_multiple_of_4;

	closest_multiple_of_4 = (offset / 4) * 4;
	offset -= closest_multiple_of_4;
	return closest_multiple_of_4 + (3 - offset);
}

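/* Map a logical byte offset to its position when the 32-bit words of a
 * buffer of length @len are stored least significant word first, as
 * required by QUIRK_LSW32_IS_FIRST. The word order is mirrored while the
 * byte order within each word is preserved; @len is assumed to be a
 * multiple of 4.
 */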
static int get_reverse_lsw32_offset(int offset, size_t len)
{
	int closest_multiple_of_4;
	int word_index;

	word_index = offset / 4;
	closest_multiple_of_4 = word_index * 4;
	offset -= closest_multiple_of_4;
	word_index = (len / 4) - word_index - 1;
	return word_index * 4 + offset;
}

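/* Return @val with its lowest @width bits mirrored: bit 0 is swapped with
 * bit (width - 1), bit 1 with bit (width - 2), and so on. Used to
 * implement QUIRK_MSB_ON_THE_RIGHT.
 */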
static u64 bit_reverse(u64 val, unsigned int width)
{
	u64 new_val = 0;
	unsigned int bit;
	unsigned int i;

	for (i = 0; i < width; i++) {
		/* Use 64-bit arithmetic so that widths above 32 do not
		 * overflow the shifts.
		 */
		bit = (val >> i) & 1;
		new_val |= ((u64)bit << (width - i - 1));
	}
	return new_val;
}

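/* Rework a value and its box bit boundaries for QUIRK_MSB_ON_THE_RIGHT:
 * the bits held between @box_start_bit and @box_end_bit are mirrored, and
 * the boundaries and @box_mask are recomputed accordingly. Note that the
 * recomputation below is only well-defined when the box spans the whole
 * byte (box_start_bit == 7, box_end_bit == 0), i.e. for fields that cover
 * entire bytes of the packed layout.
 */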
static void adjust_for_msb_right_quirk(u64 *to_write, int *box_start_bit,
				       int *box_end_bit, u8 *box_mask)
{
	int box_bit_width = *box_start_bit - *box_end_bit + 1;
	int new_box_start_bit, new_box_end_bit;

	*to_write >>= *box_end_bit;
	*to_write = bit_reverse(*to_write, box_bit_width);
	*to_write <<= *box_end_bit;

	new_box_end_bit = box_bit_width - *box_start_bit - 1;
	new_box_start_bit = box_bit_width - *box_end_bit - 1;
	*box_mask = GENMASK_ULL(new_box_start_bit, new_box_end_bit);
	*box_start_bit = new_box_start_bit;
	*box_end_bit = new_box_end_bit;
}

/**
 * packing - Convert numbers (currently u64) between a packed and an unpacked
 *	     format. Unpacked means laid out in memory in the CPU's native
 *	     understanding of integers, while packed means anything else that
 *	     requires translation.
 *
 * @pbuf: Pointer to a buffer holding the packed value.
 * @uval: Pointer to a u64 holding the unpacked value.
 * @startbit: The index (in logical notation, compensated for quirks) where
 *	      the packed value starts within pbuf. Must be larger than, or
 *	      equal to, endbit.
 * @endbit: The index (in logical notation, compensated for quirks) where
 *	    the packed value ends within pbuf. Must be smaller than, or equal
 *	    to, startbit.
 * @pbuflen: The length in bytes of the packed buffer pointed to by @pbuf.
 * @op: If PACK, then uval will be treated as a const pointer and copied
 *	(packed) into pbuf, between startbit and endbit.
 *	If UNPACK, then pbuf will be treated as a const pointer and the
 *	logical value between startbit and endbit will be copied (unpacked)
 *	to uval.
 * @quirks: A bit mask of QUIRK_LITTLE_ENDIAN, QUIRK_LSW32_IS_FIRST and
 *	    QUIRK_MSB_ON_THE_RIGHT.
 *
 * Return: 0 on success, -EINVAL or -ERANGE if called incorrectly. Assuming
 * correct usage, the return code may be discarded.
 * If op is PACK, pbuf is modified.
 * If op is UNPACK, uval is modified.
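 *
 * Example (an illustrative sketch, not taken from any particular caller):
 * with no quirks, packing the value 5 into bits 19:16 of an 8-byte buffer
 * sets the low nibble of the sixth byte:
 *
 *	u64 uval = 5;
 *	u8 buf[8] = {0};
 *
 *	packing(buf, &uval, 19, 16, sizeof(buf), PACK, 0);
 *
 * after which buf[5] == 0x05 and all other bytes remain zero. The converse
 * call with UNPACK would recover uval == 5 from such a buffer.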
 */
int packing(void *pbuf, u64 *uval, int startbit, int endbit, size_t pbuflen,
	    enum packing_op op, u8 quirks)
{
	/* Number of bits for storing "uval"; this is also the width of
	 * the field to access within the pbuf.
	 */
	u64 value_width;
	/* Logical byte indices corresponding to the
	 * start and end of the field.
	 */
	int plogical_first_u8, plogical_last_u8, box;

	/* startbit is expected to be larger than endbit */
	if (startbit < endbit)
		/* Invalid function call */
		return -EINVAL;

	value_width = startbit - endbit + 1;
	if (value_width > 64)
		return -ERANGE;

	/* Check if "uval" fits in "value_width" bits.
	 * If value_width is 64, the shift below would be undefined
	 * behavior, but any 64-bit uval fits anyway, so the check is
	 * simply skipped in that case.
	 */
	if (op == PACK && value_width < 64 && (*uval >= (1ull << value_width)))
		/* Cannot store "uval" inside "value_width" bits.
		 * Truncating "uval" is most certainly not desirable,
		 * so simply erroring out is appropriate.
		 */
		return -ERANGE;

	/* Initialize parameter */
	if (op == UNPACK)
		*uval = 0;

	/* Iterate through an idealized view of the pbuf as a u64 with
	 * no quirks, u8 by u8 (aligned at u8 boundaries), from high to low
	 * logical bit significance. "box" denotes the current logical u8.
	 */
	plogical_first_u8 = startbit / 8;
	plogical_last_u8 = endbit / 8;

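	/* For example, with startbit = 39 and endbit = 12, the loop below
	 * visits logical bytes 4, 3, 2 and 1 of the buffer, in that order.
	 */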
	for (box = plogical_first_u8; box >= plogical_last_u8; box--) {
		/* Bit indices into the currently accessed 8-bit box */
		int box_start_bit, box_end_bit, box_addr;
		u8 box_mask;
		/* Corresponding bits from the unpacked u64 parameter */
		int proj_start_bit, proj_end_bit;
		u64 proj_mask;

		/* This u8 may need to be accessed in its entirety
		 * (from bit 7 to bit 0), or not, depending on the
		 * input arguments startbit and endbit.
		 */
		if (box == plogical_first_u8)
			box_start_bit = startbit % 8;
		else
			box_start_bit = 7;
		if (box == plogical_last_u8)
			box_end_bit = endbit % 8;
		else
			box_end_bit = 0;

		/* We have determined the box bit start and end.
		 * Now we calculate where this (masked) u8 box would fit
		 * in the unpacked (CPU-readable) u64 - the u8 box's
		 * projection onto the unpacked u64. Though the
		 * box is u8, the projection is u64 because it may fall
		 * anywhere within the unpacked u64.
		 */
		proj_start_bit = ((box * 8) + box_start_bit) - endbit;
		proj_end_bit = ((box * 8) + box_end_bit) - endbit;
		proj_mask = GENMASK_ULL(proj_start_bit, proj_end_bit);
		box_mask = GENMASK_ULL(box_start_bit, box_end_bit);
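		/* For example, with startbit = 19, endbit = 16 and box 2,
		 * box bits 3..0 project onto bits 3..0 of the unpacked u64,
		 * so proj_mask and box_mask are both 0xf.
		 */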

		/* Determine the offset of the u8 box inside the pbuf,
		 * adjusted for quirks. The adjusted box_addr will be used for
		 * effective addressing inside the pbuf (so it's not
		 * logical any longer).
		 */
		box_addr = pbuflen - box - 1;
		if (quirks & QUIRK_LITTLE_ENDIAN)
			box_addr = get_le_offset(box_addr);
		if (quirks & QUIRK_LSW32_IS_FIRST)
			box_addr = get_reverse_lsw32_offset(box_addr,
							    pbuflen);

		if (op == UNPACK) {
			u64 pval;

			/* Read from pbuf, write to uval */
			pval = ((u8 *)pbuf)[box_addr] & box_mask;
			if (quirks & QUIRK_MSB_ON_THE_RIGHT)
				adjust_for_msb_right_quirk(&pval,
							   &box_start_bit,
							   &box_end_bit,
							   &box_mask);

			pval >>= box_end_bit;
			pval <<= proj_end_bit;
			*uval &= ~proj_mask;
			*uval |= pval;
		} else {
			u64 pval;

			/* Write to pbuf, read from uval */
			pval = (*uval) & proj_mask;
			pval >>= proj_end_bit;
			if (quirks & QUIRK_MSB_ON_THE_RIGHT)
				adjust_for_msb_right_quirk(&pval,
							   &box_start_bit,
							   &box_end_bit,
							   &box_mask);

			pval <<= box_end_bit;
			((u8 *)pbuf)[box_addr] &= ~box_mask;
			((u8 *)pbuf)[box_addr] |= pval;
		}
	}
	return 0;
}
EXPORT_SYMBOL(packing);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("Generic bitfield packing and unpacking");