/* SPDX-License-Identifier: GPL-2.0 */
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#include <linux/linkage.h>
#include "sysdep.h"

/*
 * void *memset(void *dest, int c, size_t n)
 *
 * C-SKY ABIv2 optimized memset.  __memset is the strong symbol; memset
 * is declared weak so an arch/board override can replace it.
 *
 * In:     r0 = dest, r1 = fill value (only the low byte is used),
 *         r2 = n (byte count)
 * Out:    r0 = original dest pointer
 * Writes: r0, r1, r2, r3, r12, r13, r18, r19 (as visible below)
 *
 * Strategy: align dest to 4 bytes with single-byte stores, replicate
 * the fill byte across a 32-bit word, blast 16-byte chunks, then mop
 * up remaining words and tail bytes.
 */
.weak memset
ENTRY(__memset)
ENTRY(memset)
	/* Lengths below 8 bytes are not worth the word path. */
	mov	r12, r0			/* save dest for the return value */
	cmplti	r2, 8
	bt	.L_set_by_byte

	andi	r13, r0, 3		/* r13 = dest & 3 (misalignment) */
	movi	r19, 4
	/* Align dest to a 4-byte boundary first if needed. */
	bnez	r13, .L_dest_not_aligned
	/* Hardware can handle unaligned access directly. */
.L_dest_aligned:
	/* Replicate the fill byte across a 32-bit word:
	   r1 = byte repeated in the low 16 bits,
	   r3 = byte repeated in all 4 bytes. */
	zextb	r3, r1
	lsli	r1, 8
	or	r1, r3
	lsli	r3, r1, 16
	or	r3, r1

	/* r18 = number of full 16-byte chunks (n >> 4). */
	zext	r18, r2, 31, 4
	/* Skip the chunk loop when fewer than 16 bytes remain. */
	bez	r18, .L_len_less_16bytes

	LABLE_ALIGN
.L_len_larger_16bytes:
	stw	r3, (r0, 0)
	stw	r3, (r0, 4)
	stw	r3, (r0, 8)
	stw	r3, (r0, 12)
	PRE_BNEZAD (r18)
	addi	r0, 16
	BNEZAD (r18, .L_len_larger_16bytes)

.L_len_less_16bytes:
	/* r18 = remaining full words (bits 3:2 of n); r2 = tail bytes. */
	zext	r18, r2, 3, 2
	andi	r2, 3
	bez	r18, .L_set_by_byte
.L_len_less_16bytes_loop:
	stw	r3, (r0, 0)
	PRE_BNEZAD (r18)
	addi	r0, 4
	BNEZAD (r18, .L_len_less_16bytes_loop)

	/* Store the final 0-3 tail bytes (or the whole buffer when n < 8).
	   stb writes the low byte of r1, which always holds the fill byte. */
.L_set_by_byte:
	zext	r18, r2, 2, 0
	bez	r18, .L_return
.L_set_by_byte_loop:
	stb	r1, (r0, 0)
	PRE_BNEZAD (r18)
	addi	r0, 1
	BNEZAD (r18, .L_set_by_byte_loop)

.L_return:
	mov	r0, r12			/* return the original dest pointer */
	rts

	/* Dest is not word-aligned: store single bytes until it is.
	   Only reached when n >= 8, so at least 5 bytes remain after
	   the 1-3 alignment bytes are accounted for. */

.L_dest_not_aligned:
	sub	r13, r19, r13		/* r13 = 4 - (dest & 3) bytes to align */
	sub	r2, r13			/* charge the alignment bytes to n */
.L_dest_not_aligned_loop:
	/* Makes the dest align. */
	stb	r1, (r0, 0)
	PRE_BNEZAD (r13)
	addi	r0, 1
	BNEZAD (r13, .L_dest_not_aligned_loop)
	cmplti	r2, 8
	bt	.L_set_by_byte
	/* Dest is now word-aligned; take the word-store path. */
	jbr	.L_dest_aligned
ENDPROC(memset)
ENDPROC(__memset)