/* SPDX-License-Identifier: GPL-2.0 */
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#include <linux/linkage.h>
#include "sysdep.h"

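/*
 * void *memmove(void *dest, const void *src, size_t len)
 * dest arrives in r0, src in r1, len in r2; the original dest is returned.
 *
 * Rough C-level sketch of the flow below (illustrative only):
 *
 *	if ((unsigned long)(dest - src) >= len)
 *		return memcpy(dest, src, len);    forward copy cannot clobber src
 *	otherwise copy backward from dest + len and src + len:
 *		peel bytes until dest is 4-byte aligned,
 *		then move 16-byte chunks, then words, then trailing bytes;
 *	return dest;
 *
 * PRE_BNEZAD/BNEZAD come from sysdep.h and appear to implement a
 * decrement-and-branch-if-nonzero loop on the given counter register
 * (an assumption based on how they are used here, not on their
 * definitions).
 */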
	.weak memmove
ENTRY(__memmove)
ENTRY(memmove)
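	/*
	 * If (dest - src) >= len as an unsigned value, a plain forward
	 * copy never reads a byte that has already been overwritten,
	 * so let memcpy handle it.
	 */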
	subu r3, r0, r1
	cmphs r3, r2
	bt memcpy

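	/*
	 * Here dest lies inside [src, src + len), so the copy must run
	 * backward.  Keep the original dest in r12 for the return value
	 * and point r0/r1 just past the last byte of each buffer.
	 */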
	mov r12, r0
	addu r0, r0, r2
	addu r1, r1, r2

	/* Test if len is less than 4 bytes. */
	cmplti r2, 4
	bt .L_copy_by_byte

	andi r13, r0, 3
	/* Test if dest is not 4-byte aligned. */
	bnez r13, .L_dest_not_aligned
	/* Hardware can handle unaligned access directly. */
.L_dest_aligned:
	/* If dest is aligned, then copy. */
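	/* r18 = len >> 4: the number of 16-byte chunks to move. */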
	zext r18, r2, 31, 4
	/* Test if len is less than 16 bytes. */
	bez r18, .L_len_less_16bytes
	movi r19, 0

	/* len >= 16 bytes */
	LABLE_ALIGN
.L_len_larger_16bytes:
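	/*
	 * Move one 16-byte chunk per iteration, working from high
	 * addresses down; both pointers are pre-decremented.
	 */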
	subi r1, 16
	subi r0, 16
#if defined(__CK860__)
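	/* CK860: reuse a single scratch register and interleave the
	   load/store pairs, highest word first. */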
	ldw r3, (r1, 12)
	stw r3, (r0, 12)
	ldw r3, (r1, 8)
	stw r3, (r0, 8)
	ldw r3, (r1, 4)
	stw r3, (r0, 4)
	ldw r3, (r1, 0)
	stw r3, (r0, 0)
#else
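	/* Other cores: load all four words into r20-r23 first, then
	   store them. */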
	ldw r20, (r1, 0)
	ldw r21, (r1, 4)
	ldw r22, (r1, 8)
	ldw r23, (r1, 12)
	stw r20, (r0, 0)
	stw r21, (r0, 4)
	stw r22, (r0, 8)
	stw r23, (r0, 12)
	PRE_BNEZAD (r18)
#endif
	BNEZAD (r18, .L_len_larger_16bytes)

.L_len_less_16bytes:
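	/* r18 = bits 3..2 of len: the remaining whole words. */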
	zext r18, r2, 3, 2
	bez r18, .L_copy_by_byte
.L_len_less_16bytes_loop:
	subi r1, 4
	subi r0, 4
	ldw r3, (r1, 0)
	PRE_BNEZAD (r18)
	stw r3, (r0, 0)
	BNEZAD (r18, .L_len_less_16bytes_loop)

	/* Copy the trailing len & 3 bytes, if any. */
.L_copy_by_byte:
	zext r18, r2, 1, 0
	bez r18, .L_return
.L_copy_by_byte_loop:
	subi r1, 1
	subi r0, 1
	ldb r3, (r1, 0)
	PRE_BNEZAD (r18)
	stb r3, (r0, 0)
	BNEZAD (r18, .L_copy_by_byte_loop)

.L_return:
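	/* memmove returns the original dest pointer, saved in r12. */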
	mov r0, r12
	rts

	/* If dest is not aligned, copy a few bytes first to make it
	   4-byte aligned. */
.L_dest_not_aligned:
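	/*
	 * r13 (set above) holds the low two bits of the advanced dest
	 * pointer: copy that many bytes backward so dest becomes 4-byte
	 * aligned, and subtract them from len.
	 */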
	sub r2, r13
.L_dest_not_aligned_loop:
	subi r1, 1
	subi r0, 1
	/* Make the dest aligned. */
	ldb r3, (r1, 0)
	PRE_BNEZAD (r13)
	stb r3, (r0, 0)
	BNEZAD (r13, .L_dest_not_aligned_loop)
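	/* The alignment peel may have left fewer than 4 bytes. */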
	cmplti r2, 4
	bt .L_copy_by_byte
	/* Src may still be unaligned; hardware handles unaligned access
	   (see above), so rejoin the aligned-dest path. */
	jbr .L_dest_aligned
ENDPROC(memmove)
ENDPROC(__memmove)