/*
 * arch/xtensa/lib/strncpy_user.S
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License. See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Returns: -EFAULT if an exception occurs before the terminator is
 * reached, N if the entire N-byte buffer is filled without finding a
 * terminator, else strlen (the terminator is copied in that case).
 *
 * Copyright (C) 2002 Tensilica Inc.
 */

#include <linux/errno.h>
#include <linux/linkage.h>
#include <asm/asmmacro.h>
#include <asm/core.h>

/*
 * long __strncpy_user(char *dst, const char *src, size_t len)
 *
 * (The return value is a byte count or -EFAULT, not a pointer, so the
 * C prototype returns long rather than char *.)
 */
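
/*
 * For reference, a hedged C sketch of the semantics implemented below
 * (illustration only; get_byte_may_fault()/put_byte_may_fault() are
 * hypothetical stand-ins for the EX()-protected loads and stores):
 *
 *	long __strncpy_user(char *dst, const char *src, size_t len)
 *	{
 *		size_t i;
 *
 *		for (i = 0; i < len; i++) {
 *			char c;
 *
 *			if (get_byte_may_fault(&c, src + i))
 *				return -EFAULT;		// load fault
 *			if (put_byte_may_fault(dst + i, c))
 *				return -EFAULT;		// store fault
 *			if (c == '\0')
 *				return i;		// strlen
 *		}
 *		return len;	// buffer filled, no terminator found
 *	}
 */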

#ifdef __XTENSA_EB__
# define MASK0 0xff000000
# define MASK1 0x00ff0000
# define MASK2 0x0000ff00
# define MASK3 0x000000ff
#else
# define MASK0 0x000000ff
# define MASK1 0x0000ff00
# define MASK2 0x00ff0000
# define MASK3 0xff000000
#endif
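
/*
 * MASKn selects the byte at address offset n within a loaded word on
 * either endianness. Worked example (little-endian): the bytes
 * 'a','b','c','\0' load as 0x00636261, so the word ANDed with MASK3
 * (0xff000000) is zero and "bnone a9, a8, .Lz3" in the main loop
 * takes the branch -- the terminator sits in byte 3.
 */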

# Register use
#   a0/ return address
#   a1/ stack pointer
#   a2/ return value
#   a3/ src
#   a4/ len
#   a5/ mask0
#   a6/ mask1
#   a7/ mask2
#   a8/ mask3
#   a9/ tmp
#   a10/ tmp
#   a11/ dst
#   a12/ tmp

.text
ENTRY(__strncpy_user)

	abi_entry_default
	# a2/ dst, a3/ src, a4/ len
	mov	a11, a2		# leave dst in return value register
	beqz	a4, .Lret	# if len is zero
	movi	a5, MASK0	# mask for byte 0
	movi	a6, MASK1	# mask for byte 1
	movi	a7, MASK2	# mask for byte 2
	movi	a8, MASK3	# mask for byte 3
	bbsi.l	a3, 0, .Lsrc1mod2	# if only  8-bit aligned
	bbsi.l	a3, 1, .Lsrc2mod4	# if only 16-bit aligned
.Lsrcaligned:	# return here when src is word-aligned
	srli	a12, a4, 2	# number of loop iterations with 4B per loop
	movi	a9, 3
	bnone	a11, a9, .Laligned
	j	.Ldstunaligned

.Lsrc1mod2:	# src address is odd
	EX(11f)	l8ui	a9, a3, 0	# get byte 0
	addi	a3, a3, 1	# advance src pointer
	EX(10f)	s8i	a9, a11, 0	# store byte 0
	beqz	a9, .Lret	# if byte 0 is zero
	addi	a11, a11, 1	# advance dst pointer
	addi	a4, a4, -1	# decrement len
	beqz	a4, .Lret	# if len is zero
	bbci.l	a3, 1, .Lsrcaligned	# if src is now word-aligned

.Lsrc2mod4:	# src address is 2 mod 4
	EX(11f)	l8ui	a9, a3, 0	# get byte 0
	/* 1-cycle interlock */
	EX(10f)	s8i	a9, a11, 0	# store byte 0
	beqz	a9, .Lret	# if byte 0 is zero
	addi	a11, a11, 1	# advance dst pointer
	addi	a4, a4, -1	# decrement len
	beqz	a4, .Lret	# if len is zero
	EX(11f)	l8ui	a9, a3, 1	# get byte 1
	addi	a3, a3, 2	# advance src pointer
	EX(10f)	s8i	a9, a11, 0	# store byte 1
	beqz	a9, .Lret	# if byte 1 is zero
	addi	a11, a11, 1	# advance dst pointer
	addi	a4, a4, -1	# decrement len
	bnez	a4, .Lsrcaligned	# if len is nonzero
.Lret:
	sub	a2, a11, a2	# compute strlen
	abi_ret_default

/*
 * dst is word-aligned, src is word-aligned
 */
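
/*
 * The loop below is the word-at-a-time idiom; in spirit (a hedged C
 * sketch, not the generated code -- maskN stand for the MASKn
 * constants above):
 *
 *	while (words--) {
 *		unsigned long w = *(unsigned long *)src;
 *		src += 4;
 *		if (!(w & mask0) || !(w & mask1) || !(w & mask2))
 *			break;			// NUL in byte 0, 1 or 2
 *		*(unsigned long *)dst = w;	// store all four bytes
 *		if (!(w & mask3))
 *			break;			// NUL in byte 3, already stored
 *		dst += 4;
 *	}
 */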
	.align	4	# 1 mod 4 alignment for LOOPNEZ
	.byte	0	# (0 mod 4 alignment for LBEG)
.Laligned:
#if XCHAL_HAVE_LOOPS
	loopnez	a12, .Loop1done
#else
	beqz	a12, .Loop1done
	slli	a12, a12, 2
	add	a12, a12, a11	# a12 = end of last 4B chunk
#endif
.Loop1:
	EX(11f)	l32i	a9, a3, 0	# get word from src
	addi	a3, a3, 4	# advance src pointer
	bnone	a9, a5, .Lz0	# if byte 0 is zero
	bnone	a9, a6, .Lz1	# if byte 1 is zero
	bnone	a9, a7, .Lz2	# if byte 2 is zero
	EX(10f)	s32i	a9, a11, 0	# store word to dst
	bnone	a9, a8, .Lz3	# if byte 3 is zero
	addi	a11, a11, 4	# advance dst pointer
#if !XCHAL_HAVE_LOOPS
	blt	a11, a12, .Loop1
#endif

.Loop1done:
	bbci.l	a4, 1, .L100
	# copy 2 bytes
	EX(11f)	l16ui	a9, a3, 0
	addi	a3, a3, 2	# advance src pointer
#ifdef __XTENSA_EB__
	bnone	a9, a7, .Lz0	# if byte 2 is zero
	bnone	a9, a8, .Lz1	# if byte 3 is zero
#else
	bnone	a9, a5, .Lz0	# if byte 0 is zero
	bnone	a9, a6, .Lz1	# if byte 1 is zero
#endif
	EX(10f)	s16i	a9, a11, 0
	addi	a11, a11, 2	# advance dst pointer
.L100:
	bbci.l	a4, 0, .Lret
	EX(11f)	l8ui	a9, a3, 0
	/* slot */
	EX(10f)	s8i	a9, a11, 0
	beqz	a9, .Lret	# if byte is zero
	addi	a11, a11, 1-3	# advance dst pointer by 1, pre-cancelling
				# the "addi a11, a11, 3" done at .Lz3
	/* fall through to .Lz3 and return */

.Lz3:	# byte 3 is zero
	addi	a11, a11, 3	# advance dst pointer
	sub	a2, a11, a2	# compute strlen
	abi_ret_default
.Lz0:	# byte 0 is zero
#ifdef __XTENSA_EB__
	movi	a9, 0		# on BE the zero byte is not the low byte
#endif /* __XTENSA_EB__ */
	EX(10f)	s8i	a9, a11, 0
	sub	a2, a11, a2	# compute strlen
	abi_ret_default
.Lz1:	# byte 1 is zero
#ifdef __XTENSA_EB__
	extui	a9, a9, 16, 16	# move bytes 0..1 into the low halfword
#endif /* __XTENSA_EB__ */
	EX(10f)	s16i	a9, a11, 0
	addi	a11, a11, 1	# advance dst pointer
	sub	a2, a11, a2	# compute strlen
	abi_ret_default
.Lz2:	# byte 2 is zero
#ifdef __XTENSA_EB__
	extui	a9, a9, 16, 16	# move bytes 0..1 into the low halfword
#endif /* __XTENSA_EB__ */
	EX(10f)	s16i	a9, a11, 0
	movi	a9, 0
	EX(10f)	s8i	a9, a11, 2	# store the terminator at byte 2
	addi	a11, a11, 2	# advance dst pointer
	sub	a2, a11, a2	# compute strlen
	abi_ret_default
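
/*
 * Note: .Lz0/.Lz1/.Lz2 are reached before the word is stored, so each
 * handler stores the surviving leading bytes plus the terminator
 * itself; .Lz3 is reached after the whole word, terminator included,
 * has already been stored.
 */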

	.align	4	# 1 mod 4 alignment for LOOPNEZ
	.byte	0	# (0 mod 4 alignment for LBEG)
.Ldstunaligned:
/*
 * For now, just use a byte-copy loop.
 */
#if XCHAL_HAVE_LOOPS
	loopnez	a4, .Lunalignedend
#else
	beqz	a4, .Lunalignedend
	add	a12, a11, a4	# a12 = ending address
#endif /* XCHAL_HAVE_LOOPS */
.Lnextbyte:
	EX(11f)	l8ui	a9, a3, 0
	addi	a3, a3, 1
	EX(10f)	s8i	a9, a11, 0
	beqz	a9, .Lunalignedend
	addi	a11, a11, 1
#if !XCHAL_HAVE_LOOPS
	blt	a11, a12, .Lnextbyte
#endif

.Lunalignedend:
	sub	a2, a11, a2	# compute strlen
	abi_ret_default

ENDPROC(__strncpy_user)

	.section .fixup, "ax"
	.align	4

/* For now, just return -EFAULT. Future implementations might want to
 * clear the remaining buffer, like the fixup implementation in
 * memset(). Thus, we differentiate between load and store fixups. */
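
/*
 * In C terms, such a clearing store fixup would amount to (hedged
 * sketch, with "copied" standing for the bytes written so far):
 *
 *	memset(dst + copied, 0, len - copied);
 *	return -EFAULT;
 */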

10:	# store fixup: EX(10f) marks the user-space stores above
11:	# load fixup: EX(11f) marks the user-space loads above
	movi	a2, -EFAULT
	abi_ret_default