Orange Pi5 kernel

Deprecated Linux kernel 5.10.110 for OrangePi 5/5B/5+ boards

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Unified implementation of memcpy, memmove and the __copy_user backend.
 *
 * Copyright (C) 1998, 99, 2000, 01, 2002 Ralf Baechle (ralf@gnu.org)
 * Copyright (C) 1999, 2000, 01, 2002 Silicon Graphics, Inc.
 * Copyright (C) 2002 Broadcom, Inc.
 *   memcpy/copy_user author: Mark Vandevoorde
 *
 * Mnemonic names for arguments to memcpy/__copy_user
 */

#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#define dst a0
#define src a1
#define len a2

/*
 * Spec
 *
 * memcpy copies len bytes from src to dst and sets v0 to dst.
 * It assumes that
 *   - src and dst don't overlap
 *   - src is readable
 *   - dst is writable
 * memcpy uses the standard calling convention
 *
 * __copy_user copies up to len bytes from src to dst and sets a2 (len) to
 * the number of uncopied bytes due to an exception caused by a read or write.
 * __copy_user assumes that src and dst don't overlap, and that the call is
 * implementing one of the following:
 *   copy_to_user
 *     - src is readable  (no exceptions when reading src)
 *   copy_from_user
 *     - dst is writable  (no exceptions when writing dst)
 * __copy_user uses a non-standard calling convention; see
 * arch/mips/include/asm/uaccess.h
 *
 * When an exception happens on a load, the handler must
 * ensure that all of the destination buffer is overwritten to prevent
 * leaking information to user mode programs.
 */
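
/*
 * Caller-side sketch (illustrative C, not part of this file): the generic
 * uaccess helpers built on top of __copy_user report a partial copy through
 * the returned count of uncopied bytes, e.g.
 *
 *	if (copy_from_user(kbuf, ubuf, len))	// non-zero return means some
 *		return -EFAULT;			// bytes were left uncopied
 *
 * kbuf/ubuf are hypothetical buffer names used only for illustration.
 */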

/*
 * Implementation
 */

/*
 * The exception handler for loads requires that:
 *  1- AT contains the address of the byte just past the end of the source
 *     of the copy,
 *  2- src_entry <= src < AT, and
 *  3- (dst - src) == (dst_entry - src_entry),
 * The _entry suffix denotes values when __copy_user was called.
 *
 * (1) is set up by uaccess.h and maintained by not writing AT in copy_user
 * (2) is met by incrementing src by the number of bytes copied
 * (3) is met by not doing loads between a pair of increments of dst and src
 *
 * The exception handlers for stores adjust len (if necessary) and return.
 * These handlers do not need to overwrite any data.
 *
 * For __rmemcpy and memmove an exception is always a kernel bug, therefore
 * they're not protected.
 */
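
/*
 * Worked example of the recovery arithmetic (a sketch derived from rules
 * (1)-(3) above, not additional code): AT holds src_entry + len_entry, and
 * src is only advanced for bytes that have really been copied, so when a
 * load faults the handler can report
 *
 *	uncopied = AT - THREAD_BUADDR
 *
 * where THREAD_BUADDR is just past the last address that could be read.
 * This is exactly the "SUB len, AT, t0" performed by l_exc below.
 */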

#define EXC(inst_reg,addr,handler)		\
9:	inst_reg, addr;				\
	.section __ex_table,"a";		\
	PTR	9b, handler;			\
	.previous
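
/*
 * For reference, EXC(ld t0, 0(src), l_exc) expands to
 *
 *	9:	ld	t0, 0(src);
 *		.section __ex_table,"a";
 *		PTR	9b, l_exc;
 *		.previous
 *
 * i.e. the access gets a local label plus an __ex_table entry pairing that
 * instruction's address with its fixup handler.
 */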

/*
 * Only on the 64-bit kernel can we make use of 64-bit registers.
 */

#define LOAD   ld
#define LOADL  ldl
#define LOADR  ldr
#define STOREL sdl
#define STORER sdr
#define STORE  sd
#define ADD    daddu
#define SUB    dsubu
#define SRL    dsrl
#define SRA    dsra
#define SLL    dsll
#define SLLV   dsllv
#define SRLV   dsrlv
#define NBYTES 8
#define LOG_NBYTES 3

/*
 * As we are sharing the code base with the mips32 tree (which uses the o32
 * ABI register definitions), we need to redefine the register definitions
 * from the n64 ABI register naming to the o32 ABI register naming.
 */
#undef t0
#undef t1
#undef t2
#undef t3
#define t0	$8
#define t1	$9
#define t2	$10
#define t3	$11
#define t4	$12
#define t5	$13
#define t6	$14
#define t7	$15

#ifdef CONFIG_CPU_LITTLE_ENDIAN
#define LDFIRST LOADR
#define LDREST	LOADL
#define STFIRST STORER
#define STREST	STOREL
#define SHIFT_DISCARD SLLV
#else
#define LDFIRST LOADL
#define LDREST	LOADR
#define STFIRST STOREL
#define STREST	STORER
#define SHIFT_DISCARD SRLV
#endif

#define FIRST(unit) ((unit)*NBYTES)
#define REST(unit)  (FIRST(unit)+NBYTES-1)
#define UNIT(unit)  FIRST(unit)

#define ADDRMASK (NBYTES-1)
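
/*
 * Usage sketch for the definitions above: the unaligned-source path below
 * assembles each doubleword with the pair
 *
 *	LDFIRST	t0, FIRST(0)(src)
 *	LDREST	t0, REST(0)(src)
 *
 * which together read bytes src..src+7 into t0; the #ifdef above selects
 * which of ldl/ldr handles which partial word for the configured byte
 * order.  STFIRST/STREST are defined for symmetry but are not used below:
 * stores go through plain STORE, relying on the hardware unaligned-store
 * handling noted further down.
 */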

	.text
	.set	noreorder
	.set	noat

/*
 * A combined memcpy/__copy_user
 * __copy_user sets len to 0 for success; else to an upper bound of
 * the number of uncopied bytes.
 * memcpy sets v0 to dst.
 */
	.align	5
LEAF(memcpy)					/* a0=dst a1=src a2=len */
EXPORT_SYMBOL(memcpy)
	move	v0, dst				/* return value */
__memcpy:
FEXPORT(__copy_user)
EXPORT_SYMBOL(__copy_user)
	/*
	 * Note: dst & src may be unaligned, len may be 0
	 * Temps
	 */
	#
	# Octeon doesn't care if the destination is unaligned. The hardware
	# can fix it faster than we can special case the assembly.
	#
	pref	0, 0(src)
	sltu	t0, len, NBYTES		# Check if < 1 word
	bnez	t0, copy_bytes_checklen
	 and	t0, src, ADDRMASK	# Check if src unaligned
	bnez	t0, src_unaligned
	 sltu	t0, len, 4*NBYTES	# Check if < 4 words
	bnez	t0, less_than_4units
	 sltu	t0, len, 8*NBYTES	# Check if < 8 words
	bnez	t0, less_than_8units
	 sltu	t0, len, 16*NBYTES	# Check if < 16 words
	bnez	t0, cleanup_both_aligned
	 sltu	t0, len, 128+1		# Check if len < 129
	bnez	t0, 1f			# Skip prefetch if len is too short
	 sltu	t0, len, 256+1		# Check if len < 257
	bnez	t0, 1f			# Skip prefetch if len is too short
	 pref	0, 128(src)		# We must not prefetch invalid addresses
	#
	# This is where we loop if there are more than 128 bytes left
2:	pref	0, 256(src)		# We must not prefetch invalid addresses
	#
	# This is where we loop if we can't prefetch anymore
1:
EXC(	LOAD	t0, UNIT(0)(src),	l_exc)
EXC(	LOAD	t1, UNIT(1)(src),	l_exc_copy)
EXC(	LOAD	t2, UNIT(2)(src),	l_exc_copy)
EXC(	LOAD	t3, UNIT(3)(src),	l_exc_copy)
	SUB	len, len, 16*NBYTES
EXC(	STORE	t0, UNIT(0)(dst),	s_exc_p16u)
EXC(	STORE	t1, UNIT(1)(dst),	s_exc_p15u)
EXC(	STORE	t2, UNIT(2)(dst),	s_exc_p14u)
EXC(	STORE	t3, UNIT(3)(dst),	s_exc_p13u)
EXC(	LOAD	t0, UNIT(4)(src),	l_exc_copy)
EXC(	LOAD	t1, UNIT(5)(src),	l_exc_copy)
EXC(	LOAD	t2, UNIT(6)(src),	l_exc_copy)
EXC(	LOAD	t3, UNIT(7)(src),	l_exc_copy)
EXC(	STORE	t0, UNIT(4)(dst),	s_exc_p12u)
EXC(	STORE	t1, UNIT(5)(dst),	s_exc_p11u)
EXC(	STORE	t2, UNIT(6)(dst),	s_exc_p10u)
	ADD	src, src, 16*NBYTES
EXC(	STORE	t3, UNIT(7)(dst),	s_exc_p9u)
	ADD	dst, dst, 16*NBYTES
EXC(	LOAD	t0, UNIT(-8)(src),	l_exc_copy_rewind16)
EXC(	LOAD	t1, UNIT(-7)(src),	l_exc_copy_rewind16)
EXC(	LOAD	t2, UNIT(-6)(src),	l_exc_copy_rewind16)
EXC(	LOAD	t3, UNIT(-5)(src),	l_exc_copy_rewind16)
EXC(	STORE	t0, UNIT(-8)(dst),	s_exc_p8u)
EXC(	STORE	t1, UNIT(-7)(dst),	s_exc_p7u)
EXC(	STORE	t2, UNIT(-6)(dst),	s_exc_p6u)
EXC(	STORE	t3, UNIT(-5)(dst),	s_exc_p5u)
EXC(	LOAD	t0, UNIT(-4)(src),	l_exc_copy_rewind16)
EXC(	LOAD	t1, UNIT(-3)(src),	l_exc_copy_rewind16)
EXC(	LOAD	t2, UNIT(-2)(src),	l_exc_copy_rewind16)
EXC(	LOAD	t3, UNIT(-1)(src),	l_exc_copy_rewind16)
EXC(	STORE	t0, UNIT(-4)(dst),	s_exc_p4u)
EXC(	STORE	t1, UNIT(-3)(dst),	s_exc_p3u)
EXC(	STORE	t2, UNIT(-2)(dst),	s_exc_p2u)
EXC(	STORE	t3, UNIT(-1)(dst),	s_exc_p1u)
	sltu	t0, len, 256+1		# See if we can prefetch more
	beqz	t0, 2b
	 sltu	t0, len, 128		# See if we can loop one more time
	beqz	t0, 1b
	 nop
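	#
	# Steady-state sketch (inferred from the branches above): each pass
	# from label 1 moves 16*NBYTES = 128 bytes in four groups of four
	# doubleword loads/stores.  While more than 256 bytes remain we
	# re-enter at label 2 so the "pref 0, 256(src)" stays inside the
	# source buffer; with 128..256 bytes left we loop at label 1 without
	# issuing further prefetches.
	#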
	#
	# Jump here if there are less than 16*NBYTES left.
	#
cleanup_both_aligned:
	beqz	len, done
	 sltu	t0, len, 8*NBYTES
	bnez	t0, less_than_8units
	 nop
EXC(	LOAD	t0, UNIT(0)(src),	l_exc)
EXC(	LOAD	t1, UNIT(1)(src),	l_exc_copy)
EXC(	LOAD	t2, UNIT(2)(src),	l_exc_copy)
EXC(	LOAD	t3, UNIT(3)(src),	l_exc_copy)
	SUB	len, len, 8*NBYTES
EXC(	STORE	t0, UNIT(0)(dst),	s_exc_p8u)
EXC(	STORE	t1, UNIT(1)(dst),	s_exc_p7u)
EXC(	STORE	t2, UNIT(2)(dst),	s_exc_p6u)
EXC(	STORE	t3, UNIT(3)(dst),	s_exc_p5u)
EXC(	LOAD	t0, UNIT(4)(src),	l_exc_copy)
EXC(	LOAD	t1, UNIT(5)(src),	l_exc_copy)
EXC(	LOAD	t2, UNIT(6)(src),	l_exc_copy)
EXC(	LOAD	t3, UNIT(7)(src),	l_exc_copy)
EXC(	STORE	t0, UNIT(4)(dst),	s_exc_p4u)
EXC(	STORE	t1, UNIT(5)(dst),	s_exc_p3u)
EXC(	STORE	t2, UNIT(6)(dst),	s_exc_p2u)
EXC(	STORE	t3, UNIT(7)(dst),	s_exc_p1u)
	ADD	src, src, 8*NBYTES
	beqz	len, done
	 ADD	dst, dst, 8*NBYTES
	#
	# Jump here if there are less than 8*NBYTES left.
	#
less_than_8units:
	sltu	t0, len, 4*NBYTES
	bnez	t0, less_than_4units
	 nop
EXC(	LOAD	t0, UNIT(0)(src),	l_exc)
EXC(	LOAD	t1, UNIT(1)(src),	l_exc_copy)
EXC(	LOAD	t2, UNIT(2)(src),	l_exc_copy)
EXC(	LOAD	t3, UNIT(3)(src),	l_exc_copy)
	SUB	len, len, 4*NBYTES
EXC(	STORE	t0, UNIT(0)(dst),	s_exc_p4u)
EXC(	STORE	t1, UNIT(1)(dst),	s_exc_p3u)
EXC(	STORE	t2, UNIT(2)(dst),	s_exc_p2u)
EXC(	STORE	t3, UNIT(3)(dst),	s_exc_p1u)
	ADD	src, src, 4*NBYTES
	beqz	len, done
	 ADD	dst, dst, 4*NBYTES
	#
	# Jump here if there are less than 4*NBYTES left. This means
	# we may need to copy up to 3 NBYTES words.
	#
less_than_4units:
	sltu	t0, len, 1*NBYTES
	bnez	t0, copy_bytes_checklen
	 nop
	#
	# 1) Copy NBYTES, then check length again
	#
EXC(	LOAD	t0, 0(src),		l_exc)
	SUB	len, len, NBYTES
	sltu	t1, len, 8
EXC(	STORE	t0, 0(dst),		s_exc_p1u)
	ADD	src, src, NBYTES
	bnez	t1, copy_bytes_checklen
	 ADD	dst, dst, NBYTES
	#
	# 2) Copy NBYTES, then check length again
	#
EXC(	LOAD	t0, 0(src),		l_exc)
	SUB	len, len, NBYTES
	sltu	t1, len, 8
EXC(	STORE	t0, 0(dst),		s_exc_p1u)
	ADD	src, src, NBYTES
	bnez	t1, copy_bytes_checklen
	 ADD	dst, dst, NBYTES
	#
	# 3) Copy NBYTES, then check length again
	#
EXC(	LOAD	t0, 0(src),		l_exc)
	SUB	len, len, NBYTES
	ADD	src, src, NBYTES
	ADD	dst, dst, NBYTES
	b copy_bytes_checklen
EXC(	 STORE	t0, -8(dst),		s_exc_p1u)

src_unaligned:
#define rem t8
	SRL	t0, len, LOG_NBYTES+2	 # +2 for 4 units/iter
	beqz	t0, cleanup_src_unaligned
	 and	rem, len, (4*NBYTES-1)	 # rem = len % 4*NBYTES
1:
/*
 * Avoid consecutive LD*'s to the same register since some mips
 * implementations can't issue them in the same cycle.
 * It's OK to load FIRST(N+1) before REST(N) because the two addresses
 * are to the same unit (unless src is aligned, but it's not).
 */
EXC(	LDFIRST t0, FIRST(0)(src),	l_exc)
EXC(	LDFIRST t1, FIRST(1)(src),	l_exc_copy)
	SUB	len, len, 4*NBYTES
EXC(	LDREST	t0, REST(0)(src),	l_exc_copy)
EXC(	LDREST	t1, REST(1)(src),	l_exc_copy)
EXC(	LDFIRST t2, FIRST(2)(src),	l_exc_copy)
EXC(	LDFIRST t3, FIRST(3)(src),	l_exc_copy)
EXC(	LDREST	t2, REST(2)(src),	l_exc_copy)
EXC(	LDREST	t3, REST(3)(src),	l_exc_copy)
	ADD	src, src, 4*NBYTES
EXC(	STORE	t0, UNIT(0)(dst),	s_exc_p4u)
EXC(	STORE	t1, UNIT(1)(dst),	s_exc_p3u)
EXC(	STORE	t2, UNIT(2)(dst),	s_exc_p2u)
EXC(	STORE	t3, UNIT(3)(dst),	s_exc_p1u)
	bne	len, rem, 1b
	 ADD	dst, dst, 4*NBYTES

cleanup_src_unaligned:
	beqz	len, done
	 and	rem, len, NBYTES-1  # rem = len % NBYTES
	beq	rem, len, copy_bytes
	 nop
1:
EXC(	LDFIRST t0, FIRST(0)(src),	l_exc)
EXC(	LDREST	t0, REST(0)(src),	l_exc_copy)
	SUB	len, len, NBYTES
EXC(	STORE	t0, 0(dst),		s_exc_p1u)
	ADD	src, src, NBYTES
	bne	len, rem, 1b
	 ADD	dst, dst, NBYTES

copy_bytes_checklen:
	beqz	len, done
	 nop
copy_bytes:
	/* 0 < len < NBYTES  */
#define COPY_BYTE(N)			\
EXC(	lb	t0, N(src), l_exc);	\
	SUB	len, len, 1;		\
	beqz	len, done;		\
EXC(	 sb	t0, N(dst), s_exc_p1)
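
/*
 * Each COPY_BYTE(N) expansion loads byte N, decrements len, and returns via
 * "done" once len hits zero, with the store sitting in the branch delay
 * slot so the byte is still written.  Since at most NBYTES-1 = 7 bytes can
 * remain here, COPY_BYTE(0)..COPY_BYTE(5) plus the final inline pair below
 * cover every case.
 */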

	COPY_BYTE(0)
	COPY_BYTE(1)
	COPY_BYTE(2)
	COPY_BYTE(3)
	COPY_BYTE(4)
	COPY_BYTE(5)
EXC(	lb	t0, NBYTES-2(src), l_exc)
	SUB	len, len, 1
	jr	ra
EXC(	 sb	t0, NBYTES-2(dst), s_exc_p1)
done:
	jr	ra
	 nop
	END(memcpy)

l_exc_copy_rewind16:
	/* Rewind src and dst by 16*NBYTES for l_exc_copy */
	SUB	src, src, 16*NBYTES
	SUB	dst, dst, 16*NBYTES
l_exc_copy:
	/*
	 * Copy bytes from src until faulting load address (or until a
	 * lb faults)
	 *
	 * When reached by a faulting LDFIRST/LDREST, THREAD_BUADDR($28)
	 * may be more than a byte beyond the last address.
	 * Hence, the lb below may get an exception.
	 *
	 * Assumes src < THREAD_BUADDR($28)
	 */
	LOAD	t0, TI_TASK($28)
	LOAD	t0, THREAD_BUADDR(t0)
1:
EXC(	lb	t1, 0(src),	l_exc)
	ADD	src, src, 1
	sb	t1, 0(dst)	# can't fault -- we're copy_from_user
	bne	src, t0, 1b
	 ADD	dst, dst, 1
l_exc:
	LOAD	t0, TI_TASK($28)
	LOAD	t0, THREAD_BUADDR(t0)	# t0 is just past last good address
	SUB	len, AT, t0		# len number of uncopied bytes
	jr	ra
	 nop


#define SEXC(n)				\
s_exc_p ## n ## u:			\
	jr	ra;			\
	 ADD	len, len, n*NBYTES
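
/*
 * SEXC(n) emits the store-fault handler s_exc_p<n>u used at the EXC() store
 * sites above: it returns with n*NBYTES added back to len, since at that
 * store n doublewords (the faulting one included) had not yet been written
 * even though len was already decremented for the whole group.
 */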

SEXC(16)
SEXC(15)
SEXC(14)
SEXC(13)
SEXC(12)
SEXC(11)
SEXC(10)
SEXC(9)
SEXC(8)
SEXC(7)
SEXC(6)
SEXC(5)
SEXC(4)
SEXC(3)
SEXC(2)
SEXC(1)

s_exc_p1:
	jr	ra
	 ADD	len, len, 1
s_exc:
	jr	ra
	 nop

	.align	5
LEAF(memmove)
EXPORT_SYMBOL(memmove)
	ADD	t0, a0, a2
	ADD	t1, a1, a2
	sltu	t0, a1, t0			# dst + len <= src -> memcpy
	sltu	t1, a0, t1			# dst >= src + len -> memcpy
	and	t0, t1
	beqz	t0, __memcpy
	 move	v0, a0				/* return value */
	beqz	a2, r_out
	END(memmove)
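
/*
 * Overlap check above, spelled out: t0 = (src < dst + len) and
 * t1 = (dst < src + len); both are true exactly when [src, src + len) and
 * [dst, dst + len) overlap.  Non-overlapping buffers take the __memcpy fast
 * path; overlapping ones fall through to __rmemcpy, which copies backwards
 * whenever src < dst (e.g. dst = src + 4) so that source bytes are read
 * before they are overwritten.
 */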

	/* fall through to __rmemcpy */
LEAF(__rmemcpy)					/* a0=dst a1=src a2=len */
	 sltu	t0, a1, a0
	beqz	t0, r_end_bytes_up		# src >= dst
	 nop
	ADD	a0, a2				# dst = dst + len
	ADD	a1, a2				# src = src + len

r_end_bytes:
	lb	t0, -1(a1)
	SUB	a2, a2, 0x1
	sb	t0, -1(a0)
	SUB	a1, a1, 0x1
	bnez	a2, r_end_bytes
	 SUB	a0, a0, 0x1

r_out:
	jr	ra
	 move	a2, zero

r_end_bytes_up:
	lb	t0, (a1)
	SUB	a2, a2, 0x1
	sb	t0, (a0)
	ADD	a1, a1, 0x1
	bnez	a2, r_end_bytes_up
	 ADD	a0, a0, 0x1

	jr	ra
	 move	a2, zero
	END(__rmemcpy)