/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains assembly-language implementations
 * of IP-style 1's complement checksum routines.
 *
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * Severely hacked about by Paul Mackerras (paulus@cs.anu.edu.au).
 */

#include <linux/sys.h>
#include <asm/processor.h>
#include <asm/cache.h>
#include <asm/errno.h>
#include <asm/ppc_asm.h>
#include <asm/export.h>

	.text

/*
 * computes the checksum of a memory block at buff, length len,
 * and adds in "sum" (32-bit)
 *
 * __csum_partial(buff, len, sum)
 */
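/*
 * Register usage (per the 32-bit powerpc calling convention): r3 = buff,
 * r4 = len, r5 = sum on entry.  The running sum is kept in r5 and the
 * result, with the final carry folded in, is returned in r3.
 */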
_GLOBAL(__csum_partial)
	subi	r3,r3,4
	srawi.	r6,r4,2		/* Divide len by 4 and also clear carry */
	beq	3f		/* if we're doing < 4 bytes */
	andi.	r0,r3,2		/* Align buffer to longword boundary */
	beq+	1f
	lhz	r0,4(r3)	/* do 2 bytes to get aligned */
	subi	r4,r4,2
	addi	r3,r3,2
	srwi.	r6,r4,2		/* # words to do */
	adde	r5,r5,r0
	beq	3f
1:	andi.	r6,r6,3		/* Prepare to handle words 4 by 4 */
	beq	21f
	mtctr	r6
2:	lwzu	r0,4(r3)
	adde	r5,r5,r0
	bdnz	2b
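	/*
	 * Main loop: handle the remaining words four at a time.  The loads
	 * are software-pipelined so each lwz is separated from the adde
	 * that consumes it.
	 */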
21:	srwi.	r6,r4,4		/* # blocks of 4 words to do */
	beq	3f
	lwz	r0,4(r3)
	mtctr	r6
	lwz	r6,8(r3)
	adde	r5,r5,r0
	lwz	r7,12(r3)
	adde	r5,r5,r6
	lwzu	r8,16(r3)
	adde	r5,r5,r7
	bdz	23f
22:	lwz	r0,4(r3)
	adde	r5,r5,r8
	lwz	r6,8(r3)
	adde	r5,r5,r0
	lwz	r7,12(r3)
	adde	r5,r5,r6
	lwzu	r8,16(r3)
	adde	r5,r5,r7
	bdnz	22b
23:	adde	r5,r5,r8
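	/* Fold in the trailing halfword and byte, then the final carry. */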
3:	andi.	r0,r4,2
	beq+	4f
	lhz	r0,4(r3)
	addi	r3,r3,2
	adde	r5,r5,r0
4:	andi.	r0,r4,1
	beq+	5f
	lbz	r0,4(r3)
	slwi	r0,r0,8		/* Upper byte of word */
	adde	r5,r5,r0
5:	addze	r3,r5		/* add in final carry */
	blr
EXPORT_SYMBOL(__csum_partial)

/*
 * Computes the checksum of a memory block at src, length len,
 * and adds in 0xffffffff, while copying the block to dst.
 * If an access exception occurs it returns zero.
 *
 * csum_partial_copy_generic(src, dst, len)
 */
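/*
 * CSUM_COPY_16_BYTES_WITHEX(n) copies 16 bytes (four words) while adding
 * them into the running checksum in r12.  Each load and store carries a
 * numeric label 8n0..8n7 so that CSUM_COPY_16_BYTES_EXCODE(n) can register
 * it in the exception table, redirecting faults to "fault".
 */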
#define CSUM_COPY_16_BYTES_WITHEX(n)	\
8 ## n ## 0:			\
	lwz	r7,4(r4);	\
8 ## n ## 1:			\
	lwz	r8,8(r4);	\
8 ## n ## 2:			\
	lwz	r9,12(r4);	\
8 ## n ## 3:			\
	lwzu	r10,16(r4);	\
8 ## n ## 4:			\
	stw	r7,4(r6);	\
	adde	r12,r12,r7;	\
8 ## n ## 5:			\
	stw	r8,8(r6);	\
	adde	r12,r12,r8;	\
8 ## n ## 6:			\
	stw	r9,12(r6);	\
	adde	r12,r12,r9;	\
8 ## n ## 7:			\
	stwu	r10,16(r6);	\
	adde	r12,r12,r10

#define CSUM_COPY_16_BYTES_EXCODE(n)		\
	EX_TABLE(8 ## n ## 0b, fault);		\
	EX_TABLE(8 ## n ## 1b, fault);		\
	EX_TABLE(8 ## n ## 2b, fault);		\
	EX_TABLE(8 ## n ## 3b, fault);		\
	EX_TABLE(8 ## n ## 4b, fault);		\
	EX_TABLE(8 ## n ## 5b, fault);		\
	EX_TABLE(8 ## n ## 6b, fault);		\
	EX_TABLE(8 ## n ## 7b, fault);

	.text
	.stabs	"arch/powerpc/lib/",N_SO,0,0,0f
	.stabs	"checksum_32.S",N_SO,0,0,0f
0:

CACHELINE_BYTES = L1_CACHE_BYTES
LG_CACHELINE_BYTES = L1_CACHE_SHIFT
CACHELINE_MASK = (L1_CACHE_BYTES-1)

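/*
 * Register usage: r3 = src, r4 = dst, r5 = len on entry.  The pointers are
 * rebased to src-4 (r4) and dst-4 (r6) so lwzu/stwu with offset 4 can be
 * used throughout; the checksum accumulates in r12, seeded with 0xffffffff,
 * and the result (or 0 after a fault) is returned in r3.
 */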
_GLOBAL(csum_partial_copy_generic)
	li	r12,-1
	addic	r0,r0,0			/* clear carry */
	addi	r6,r4,-4
	neg	r0,r4
	addi	r4,r3,-4
	andi.	r0,r0,CACHELINE_MASK	/* # bytes to start of cache line */
	crset	4*cr7+eq
	beq	58f

	cmplw	0,r5,r0			/* is this more than total to do? */
	blt	63f			/* if not much to do */
	rlwinm	r7,r6,3,0x8
	rlwnm	r12,r12,r7,0,31	/* odd destination address: rotate one byte */
	cmplwi	cr7,r7,0	/* is destination address even ? */
	andi.	r8,r0,3		/* get it word-aligned first */
	mtctr	r8
	beq+	61f
	li	r3,0
70:	lbz	r9,4(r4)		/* do some bytes */
	addi	r4,r4,1
	slwi	r3,r3,8
	rlwimi	r3,r9,0,24,31
71:	stb	r9,4(r6)
	addi	r6,r6,1
	bdnz	70b
	adde	r12,r12,r3
61:	subf	r5,r0,r5
	srwi.	r0,r0,2
	mtctr	r0
	beq	58f
72:	lwzu	r9,4(r4)		/* do some words */
	adde	r12,r12,r9
73:	stwu	r9,4(r6)
	bdnz	72b

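	/*
	 * Copy whole cachelines: dcbt prefetches the source ahead of the
	 * copy, while dcbz establishes each destination line in the cache
	 * without reading it from memory first.
	 */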
58:	srwi.	r0,r5,LG_CACHELINE_BYTES /* # complete cachelines */
	clrlwi	r5,r5,32-LG_CACHELINE_BYTES
	li	r11,4
	beq	63f

	/* Here we decide how far ahead to prefetch the source */
	li	r3,4
	cmpwi	r0,1
	li	r7,0
	ble	114f
	li	r7,1
#if MAX_COPY_PREFETCH > 1
	/* Heuristically, for large transfers we prefetch
	   MAX_COPY_PREFETCH cachelines ahead.  For small transfers
	   we prefetch 1 cacheline ahead. */
	cmpwi	r0,MAX_COPY_PREFETCH
	ble	112f
	li	r7,MAX_COPY_PREFETCH
112:	mtctr	r7
111:	dcbt	r3,r4
	addi	r3,r3,CACHELINE_BYTES
	bdnz	111b
#else
	dcbt	r3,r4
	addi	r3,r3,CACHELINE_BYTES
#endif /* MAX_COPY_PREFETCH > 1 */

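	/*
	 * Copy r0 - r7 cachelines while prefetching r7 lines ahead, then
	 * loop back once more to finish the final r7 lines.
	 */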
114:	subf	r8,r7,r0
	mr	r0,r7
	mtctr	r8

53:	dcbt	r3,r4
54:	dcbz	r11,r6
/* the main body of the cacheline loop */
	CSUM_COPY_16_BYTES_WITHEX(0)
#if L1_CACHE_BYTES >= 32
	CSUM_COPY_16_BYTES_WITHEX(1)
#if L1_CACHE_BYTES >= 64
	CSUM_COPY_16_BYTES_WITHEX(2)
	CSUM_COPY_16_BYTES_WITHEX(3)
#if L1_CACHE_BYTES >= 128
	CSUM_COPY_16_BYTES_WITHEX(4)
	CSUM_COPY_16_BYTES_WITHEX(5)
	CSUM_COPY_16_BYTES_WITHEX(6)
	CSUM_COPY_16_BYTES_WITHEX(7)
#endif
#endif
#endif
	bdnz	53b
	cmpwi	r0,0
	li	r3,4
	li	r7,0
	bne	114b

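	/* Copy and checksum any remaining words, then the trailing halfword and byte. */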
63:	srwi.	r0,r5,2
	mtctr	r0
	beq	64f
30:	lwzu	r0,4(r4)
	adde	r12,r12,r0
31:	stwu	r0,4(r6)
	bdnz	30b

64:	andi.	r0,r5,2
	beq+	65f
40:	lhz	r0,4(r4)
	addi	r4,r4,2
41:	sth	r0,4(r6)
	adde	r12,r12,r0
	addi	r6,r6,2
65:	andi.	r0,r5,1
	beq+	66f
50:	lbz	r0,4(r4)
51:	stb	r0,4(r6)
	slwi	r0,r0,8
	adde	r12,r12,r0
66:	addze	r3,r12
	beqlr+	cr7
	rlwinm	r3,r3,8,0,31	/* odd destination address: rotate one byte */
	blr

fault:
	li	r3,0
	blr

	EX_TABLE(70b, fault);
	EX_TABLE(71b, fault);
	EX_TABLE(72b, fault);
	EX_TABLE(73b, fault);
	EX_TABLE(54b, fault);

/*
 * This code handles faults in the cacheline loop above: whichever load or
 * store takes the exception, the fixup branches to "fault", which returns 0.
 */
	CSUM_COPY_16_BYTES_EXCODE(0)
#if L1_CACHE_BYTES >= 32
	CSUM_COPY_16_BYTES_EXCODE(1)
#if L1_CACHE_BYTES >= 64
	CSUM_COPY_16_BYTES_EXCODE(2)
	CSUM_COPY_16_BYTES_EXCODE(3)
#if L1_CACHE_BYTES >= 128
	CSUM_COPY_16_BYTES_EXCODE(4)
	CSUM_COPY_16_BYTES_EXCODE(5)
	CSUM_COPY_16_BYTES_EXCODE(6)
	CSUM_COPY_16_BYTES_EXCODE(7)
#endif
#endif
#endif

	EX_TABLE(30b, fault);
	EX_TABLE(31b, fault);
	EX_TABLE(40b, fault);
	EX_TABLE(41b, fault);
	EX_TABLE(50b, fault);
	EX_TABLE(51b, fault);

EXPORT_SYMBOL(csum_partial_copy_generic)

/*
 * __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
 *			   const struct in6_addr *daddr,
 *			   __u32 len, __u8 proto, __wsum sum)
 */

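/*
 * Register usage: r3 = saddr, r4 = daddr, r5 = len, r6 = proto, r7 = sum.
 * The eight 32-bit words of the two IPv6 addresses are summed with carry,
 * len + proto and the incoming sum are folded in, and the 16-bit one's
 * complement result is returned in r3.
 */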
_GLOBAL(csum_ipv6_magic)
	lwz	r8, 0(r3)
	lwz	r9, 4(r3)
	addc	r0, r7, r8
	lwz	r10, 8(r3)
	adde	r0, r0, r9
	lwz	r11, 12(r3)
	adde	r0, r0, r10
	lwz	r8, 0(r4)
	adde	r0, r0, r11
	lwz	r9, 4(r4)
	adde	r0, r0, r8
	lwz	r10, 8(r4)
	adde	r0, r0, r9
	lwz	r11, 12(r4)
	adde	r0, r0, r10
	add	r5, r5, r6	/* assumption: len + proto doesn't carry */
	adde	r0, r0, r11
	adde	r0, r0, r5
	addze	r0, r0
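	/* Fold the 32-bit sum to 16 bits: rotate by 16 and add so both
	   halves (plus any carry) land in the upper halfword, complement,
	   then shift the upper halfword down into the low 16 bits. */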
	rotlwi	r3, r0, 16
	add	r3, r0, r3
	not	r3, r3
	rlwinm	r3, r3, 16, 16, 31
	blr
EXPORT_SYMBOL(csum_ipv6_magic)