/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif
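/*
 * On microMIPS the fill code presumably uses the paired store (LONG_SP,
 * i.e. swp/sdp), which writes two GPRs per instruction: STORSIZE doubles
 * and the fill value must live in the t8/t9 pair (FILL64RG = t8), see the
 * "move t8/t9, a1" copies below.  Other CPUs use a plain LONG_S of a1.
 */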

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous
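/*
 * Each EX() expansion tags the store at local label 9 with a __ex_table
 * entry pairing the store's address with a fixup label.  If the store
 * faults (e.g. on an unmapped user page), the fault handler branches to
 * the fixup, which leaves the number of bytes still to be set in a2.  In
 * EVA_MODE the store is rewritten into its user-address counterpart
 * (sbe, swe, ...) via __BUILD_EVA_INSN.
 */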

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
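/*
 * Whatever the configuration, one f_fill64 expansion always stores exactly
 * 64 bytes: 16 words (non-microMIPS, 32-bit), 8 dwords (non-microMIPS,
 * 64-bit), 8 paired word stores (microMIPS, 32-bit) or 4 paired dword
 * stores (microMIPS, 64-bit).
 */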

	.align	5

/*
 * Macro to generate the __bzero{,_user} symbol
 * Arguments:
 * mode: LEGACY_MODE or EVA_MODE
 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set __memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif
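	/*
	 * The assembler symbol __memset is only a flag: it is defined on the
	 * first expansion of this macro, so the END(memset)/.hidden pair at
	 * the bottom is emitted exactly once even when the body is built a
	 * second time for EVA_MODE.
	 */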

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */
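	/*
	 * The single LONG_S_L/LONG_S_R store above fills every byte from a0
	 * up to the next word/dword boundary in one go (swl/sdl on big-endian,
	 * swr/sdr on little-endian).  t0 holds minus that byte count, so the
	 * PTR_SUBU/PTR_ADDU pair advances a0 to the boundary and shrinks a2
	 * by the bytes already written.
	 */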

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	ori		t1, a2, 0x3f		/* bytes in whole 64-byte blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64	a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b
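	/*
	 * Main loop: a0 is bumped by 64 first, then f_fill64 stores the block
	 * at offsets -64..-1 relative to the new a0, until a0 reaches the end
	 * address precomputed in t1.
	 */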

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
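	/*
	 * t0 = bytes of whole longs still to fill (a2 & 0x3f rounded down to
	 * STORSIZE).  t1 is moved backwards from label 2 by one store
	 * instruction for every STORSIZE bytes in t0, so the computed jump
	 * below lands inside the unrolled f_fill64 block and executes exactly
	 * the last t0 bytes worth of stores.  The shifts account for
	 * configurations where a single 4-byte instruction stores more than
	 * 4 bytes.
	 */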
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1

	/* ... but first do longs ... */
	f_fill64	a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
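	/*
	 * a0 now points one past the last byte to fill; the single unaligned
	 * store above writes from the preceding long boundary up to -1(a0),
	 * i.e. exactly the last a2 bytes (sdr/swr on big-endian, sdl/swl on
	 * little-endian).
	 */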
1:	move		a2, zero
	jr		ra

.Lsmall_memset\@:
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero
	jr		ra			/* done */
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
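/*
 * All fixup handlers below follow the same contract: on return a2 holds the
 * number of bytes that were not set, so __bzero's callers (e.g. clear_user())
 * can report how much was left unwritten.
 */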

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr + #bytes - fault_addr
	 *      a2     =        t1          + (a2 & 0x3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr + #bytes - fault_addr
	 *      a2     =        a0        + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
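	/*
	 * a1 now has the fill byte replicated into every byte lane, e.g.
	 * c = 0x5a becomes 0x5a5a5a5a (and 0x5a5a5a5a5a5a5a5a on 64-bit),
	 * so each LONG_S stores STORSIZE identical bytes at once.  The
	 * replication is skipped entirely when c == 0 (the bzero case).
	 */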
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#else
FEXPORT(__bzero_kernel)
EXPORT_SYMBOL(__bzero_kernel)
#endif
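/*
 * Without EVA one body serves both memset() and __bzero(): the __bzero
 * entry point simply skips the fill-word setup above.  With EVA the legacy
 * body becomes __bzero_kernel (kernel addresses), and a separate EVA_MODE
 * expansion below provides __bzero for user addresses via the *e store
 * instructions.
 */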
__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
__BUILD_BZERO EVA_MODE
END(__bzero)
#endif