^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) /* SPDX-License-Identifier: GPL-2.0-or-later */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) * ChaCha 256-bit cipher algorithm, x64 SSSE3 functions
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) * Copyright (C) 2015 Martin Willi
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8) #include <linux/linkage.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9) #include <asm/frame.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11) .section .rodata.cst16.ROT8, "aM", @progbits, 16
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12) .align 16
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13) ROT8: .octa 0x0e0d0c0f0a09080b0605040702010003
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14) .section .rodata.cst16.ROT16, "aM", @progbits, 16
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 15) .align 16
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 16) ROT16: .octa 0x0d0c0f0e09080b0a0504070601000302
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 17) .section .rodata.cst16.CTRINC, "aM", @progbits, 16
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 18) .align 16
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 19) CTRINC: .octa 0x00000003000000020000000100000000
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 20)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 21) .text
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 22)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 23) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 24) * chacha_permute - permute one block
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 25) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 26) * Permute one 64-byte block where the state matrix is in %xmm0-%xmm3. This
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 27) * function performs matrix operations on four words in parallel, but requires
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 28) * shuffling to rearrange the words after each round. 8/16-bit word rotation is
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 29) * done with the slightly better performing SSSE3 byte shuffling, 7/12-bit word
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 30) * rotation uses traditional shift+OR.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 31) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 32) * The round count is given in %r8d.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 33) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 34) * Clobbers: %r8d, %xmm4-%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 35) */
SYM_FUNC_START_LOCAL(chacha_permute)

	movdqa		ROT8(%rip),%xmm4	# byte-shuffle mask: rotl32 by 8
	movdqa		ROT16(%rip),%xmm5	# byte-shuffle mask: rotl32 by 16

	# Each iteration performs one column round followed by one diagonal
	# round, hence nrounds (%r8d) must be even and > 0 — the loop
	# terminates via `sub $2` reaching zero.
.Ldoubleround:
	# x0 += x1, x3 = rotl32(x3 ^ x0, 16)
	paddd		%xmm1,%xmm0
	pxor		%xmm0,%xmm3
	pshufb		%xmm5,%xmm3

	# x2 += x3, x1 = rotl32(x1 ^ x2, 12)
	paddd		%xmm3,%xmm2
	pxor		%xmm2,%xmm1
	movdqa		%xmm1,%xmm6
	pslld		$12,%xmm6
	psrld		$20,%xmm1
	por		%xmm6,%xmm1

	# x0 += x1, x3 = rotl32(x3 ^ x0, 8)
	paddd		%xmm1,%xmm0
	pxor		%xmm0,%xmm3
	pshufb		%xmm4,%xmm3

	# x2 += x3, x1 = rotl32(x1 ^ x2, 7)
	paddd		%xmm3,%xmm2
	pxor		%xmm2,%xmm1
	movdqa		%xmm1,%xmm7
	pslld		$7,%xmm7
	psrld		$25,%xmm1
	por		%xmm7,%xmm1

	# Rotate rows so the next (diagonal) round can reuse the same
	# column-wise code:
	# x1 = shuffle32(x1, MASK(0, 3, 2, 1))
	pshufd		$0x39,%xmm1,%xmm1
	# x2 = shuffle32(x2, MASK(1, 0, 3, 2))
	pshufd		$0x4e,%xmm2,%xmm2
	# x3 = shuffle32(x3, MASK(2, 1, 0, 3))
	pshufd		$0x93,%xmm3,%xmm3

	# x0 += x1, x3 = rotl32(x3 ^ x0, 16)
	paddd		%xmm1,%xmm0
	pxor		%xmm0,%xmm3
	pshufb		%xmm5,%xmm3

	# x2 += x3, x1 = rotl32(x1 ^ x2, 12)
	paddd		%xmm3,%xmm2
	pxor		%xmm2,%xmm1
	movdqa		%xmm1,%xmm6
	pslld		$12,%xmm6
	psrld		$20,%xmm1
	por		%xmm6,%xmm1

	# x0 += x1, x3 = rotl32(x3 ^ x0, 8)
	paddd		%xmm1,%xmm0
	pxor		%xmm0,%xmm3
	pshufb		%xmm4,%xmm3

	# x2 += x3, x1 = rotl32(x1 ^ x2, 7)
	paddd		%xmm3,%xmm2
	pxor		%xmm2,%xmm1
	movdqa		%xmm1,%xmm7
	pslld		$7,%xmm7
	psrld		$25,%xmm1
	por		%xmm7,%xmm1

	# Undo the row rotation:
	# x1 = shuffle32(x1, MASK(2, 1, 0, 3))
	pshufd		$0x93,%xmm1,%xmm1
	# x2 = shuffle32(x2, MASK(1, 0, 3, 2))
	pshufd		$0x4e,%xmm2,%xmm2
	# x3 = shuffle32(x3, MASK(0, 3, 2, 1))
	pshufd		$0x39,%xmm3,%xmm3

	sub		$2,%r8d
	jnz		.Ldoubleround

	# Use the objtool-visible RET macro (not a bare `ret`) so return
	# thunks / straight-line-speculation mitigations are applied.
	RET
SYM_FUNC_END(chacha_permute)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 113)
SYM_FUNC_START(chacha_block_xor_ssse3)
	# %rdi: Input state matrix, s
	# %rsi: up to 1 data block output, o
	# %rdx: up to 1 data block input, i
	# %rcx: input/output length in bytes
	# %r8d: nrounds
	FRAME_BEGIN

	# x0..3 = s0..3; keep a second copy in %xmm8..11 for the final
	# feed-forward addition after the permutation.
	movdqu		0x00(%rdi),%xmm0
	movdqu		0x10(%rdi),%xmm1
	movdqu		0x20(%rdi),%xmm2
	movdqu		0x30(%rdi),%xmm3
	movdqa		%xmm0,%xmm8
	movdqa		%xmm1,%xmm9
	movdqa		%xmm2,%xmm10
	movdqa		%xmm3,%xmm11

	mov		%rcx,%rax	# stash length; %rcx is reused below
	call		chacha_permute

	# XOR the input with the keystream 16 bytes at a time, bailing out
	# to the partial-register path as soon as fewer than 16 bytes of
	# the requested length remain in the current lane.
	# o0 = i0 ^ (x0 + s0)
	paddd		%xmm8,%xmm0
	cmp		$0x10,%rax
	jl		.Lxorpart
	movdqu		0x00(%rdx),%xmm4
	pxor		%xmm4,%xmm0
	movdqu		%xmm0,0x00(%rsi)
	# o1 = i1 ^ (x1 + s1)
	paddd		%xmm9,%xmm1
	movdqa		%xmm1,%xmm0	# keep keystream in %xmm0 for .Lxorpart
	cmp		$0x20,%rax
	jl		.Lxorpart
	movdqu		0x10(%rdx),%xmm0
	pxor		%xmm1,%xmm0
	movdqu		%xmm0,0x10(%rsi)
	# o2 = i2 ^ (x2 + s2)
	paddd		%xmm10,%xmm2
	movdqa		%xmm2,%xmm0
	cmp		$0x30,%rax
	jl		.Lxorpart
	movdqu		0x20(%rdx),%xmm0
	pxor		%xmm2,%xmm0
	movdqu		%xmm0,0x20(%rsi)
	# o3 = i3 ^ (x3 + s3)
	paddd		%xmm11,%xmm3
	movdqa		%xmm3,%xmm0
	cmp		$0x40,%rax
	jl		.Lxorpart
	movdqu		0x30(%rdx),%xmm0
	pxor		%xmm3,%xmm0
	movdqu		%xmm0,0x30(%rsi)

.Ldone:
	FRAME_END
	# RET (not bare `ret`) so objtool can apply return-thunk /
	# straight-line-speculation mitigations.
	RET

.Lxorpart:
	# xor remaining bytes from partial register into output
	mov		%rax,%r9
	and		$0x0f,%r9	# %r9 = length % 16 (tail bytes)
	jz		.Ldone
	and		$~0x0f,%rax	# %rax = offset of the partial lane

	mov		%rsi,%r11	# preserve output pointer

	# Carve out an aligned bounce buffer; %r10 remembers the original
	# stack so it can be restored below.
	lea		8(%rsp),%r10
	sub		$0x10,%rsp
	and		$~31,%rsp

	# Copy the tail input bytes into the bounce buffer ...
	lea		(%rdx,%rax),%rsi
	mov		%rsp,%rdi
	mov		%r9,%rcx
	rep movsb

	# ... XOR them with the keystream lane left in %xmm0 ...
	pxor		0x00(%rsp),%xmm0
	movdqa		%xmm0,0x00(%rsp)

	# ... and copy the result back out to the caller's buffer.
	mov		%rsp,%rsi
	lea		(%r11,%rax),%rdi
	mov		%r9,%rcx
	rep movsb

	lea		-8(%r10),%rsp	# restore the original stack pointer
	jmp		.Ldone

SYM_FUNC_END(chacha_block_xor_ssse3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 201)
SYM_FUNC_START(hchacha_block_ssse3)
	# %rdi: Input state matrix, s
	# %rsi: output (8 32-bit words)
	# %edx: nrounds
	FRAME_BEGIN

	movdqu		0x00(%rdi),%xmm0
	movdqu		0x10(%rdi),%xmm1
	movdqu		0x20(%rdi),%xmm2
	movdqu		0x30(%rdi),%xmm3

	mov		%edx,%r8d	# chacha_permute takes nrounds in %r8d
	call		chacha_permute

	# HChaCha output is the first and last rows of the permuted state;
	# note there is deliberately no feed-forward addition of the input
	# state here, unlike a full ChaCha block.
	movdqu		%xmm0,0x00(%rsi)
	movdqu		%xmm3,0x10(%rsi)

	FRAME_END
	# RET (not bare `ret`) so objtool can apply return-thunk /
	# straight-line-speculation mitigations.
	RET
SYM_FUNC_END(hchacha_block_ssse3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 222)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 223) SYM_FUNC_START(chacha_4block_xor_ssse3)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 224) # %rdi: Input state matrix, s
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 225) # %rsi: up to 4 data blocks output, o
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 226) # %rdx: up to 4 data blocks input, i
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 227) # %rcx: input/output length in bytes
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 228) # %r8d: nrounds
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 229)
	# This function encrypts four consecutive ChaCha blocks by loading
	# the state matrix in SSE registers four times. As we need some scratch
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 232) # registers, we save the first four registers on the stack. The
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 233) # algorithm performs each operation on the corresponding word of each
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 234) # state matrix, hence requires no word shuffling. For final XORing step
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 235) # we transpose the matrix by interleaving 32- and then 64-bit words,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 236) # which allows us to do XOR in SSE registers. 8/16-bit word rotation is
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 237) # done with the slightly better performing SSSE3 byte shuffling,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 238) # 7/12-bit word rotation uses traditional shift+OR.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 239)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 240) lea 8(%rsp),%r10
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 241) sub $0x80,%rsp
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 242) and $~63,%rsp
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 243) mov %rcx,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 244)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 245) # x0..15[0-3] = s0..3[0..3]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 246) movq 0x00(%rdi),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 247) pshufd $0x00,%xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 248) pshufd $0x55,%xmm1,%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 249) movq 0x08(%rdi),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 250) pshufd $0x00,%xmm3,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 251) pshufd $0x55,%xmm3,%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 252) movq 0x10(%rdi),%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 253) pshufd $0x00,%xmm5,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 254) pshufd $0x55,%xmm5,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 255) movq 0x18(%rdi),%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 256) pshufd $0x00,%xmm7,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 257) pshufd $0x55,%xmm7,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 258) movq 0x20(%rdi),%xmm9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 259) pshufd $0x00,%xmm9,%xmm8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 260) pshufd $0x55,%xmm9,%xmm9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 261) movq 0x28(%rdi),%xmm11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 262) pshufd $0x00,%xmm11,%xmm10
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 263) pshufd $0x55,%xmm11,%xmm11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 264) movq 0x30(%rdi),%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 265) pshufd $0x00,%xmm13,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 266) pshufd $0x55,%xmm13,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 267) movq 0x38(%rdi),%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 268) pshufd $0x00,%xmm15,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 269) pshufd $0x55,%xmm15,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 270) # x0..3 on stack
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 271) movdqa %xmm0,0x00(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 272) movdqa %xmm1,0x10(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 273) movdqa %xmm2,0x20(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 274) movdqa %xmm3,0x30(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 275)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 276) movdqa CTRINC(%rip),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 277) movdqa ROT8(%rip),%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 278) movdqa ROT16(%rip),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 279)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 280) # x12 += counter values 0-3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 281) paddd %xmm1,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 282)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 283) .Ldoubleround4:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 284) # x0 += x4, x12 = rotl32(x12 ^ x0, 16)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 285) movdqa 0x00(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 286) paddd %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 287) movdqa %xmm0,0x00(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 288) pxor %xmm0,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 289) pshufb %xmm3,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 290) # x1 += x5, x13 = rotl32(x13 ^ x1, 16)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 291) movdqa 0x10(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 292) paddd %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 293) movdqa %xmm0,0x10(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 294) pxor %xmm0,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 295) pshufb %xmm3,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 296) # x2 += x6, x14 = rotl32(x14 ^ x2, 16)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 297) movdqa 0x20(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 298) paddd %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 299) movdqa %xmm0,0x20(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 300) pxor %xmm0,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 301) pshufb %xmm3,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 302) # x3 += x7, x15 = rotl32(x15 ^ x3, 16)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 303) movdqa 0x30(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 304) paddd %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 305) movdqa %xmm0,0x30(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 306) pxor %xmm0,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 307) pshufb %xmm3,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 308)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 309) # x8 += x12, x4 = rotl32(x4 ^ x8, 12)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 310) paddd %xmm12,%xmm8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 311) pxor %xmm8,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 312) movdqa %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 313) pslld $12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 314) psrld $20,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 315) por %xmm0,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 316) # x9 += x13, x5 = rotl32(x5 ^ x9, 12)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 317) paddd %xmm13,%xmm9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 318) pxor %xmm9,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 319) movdqa %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 320) pslld $12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 321) psrld $20,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 322) por %xmm0,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 323) # x10 += x14, x6 = rotl32(x6 ^ x10, 12)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 324) paddd %xmm14,%xmm10
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 325) pxor %xmm10,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 326) movdqa %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 327) pslld $12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 328) psrld $20,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 329) por %xmm0,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 330) # x11 += x15, x7 = rotl32(x7 ^ x11, 12)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 331) paddd %xmm15,%xmm11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 332) pxor %xmm11,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 333) movdqa %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 334) pslld $12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 335) psrld $20,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 336) por %xmm0,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 337)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 338) # x0 += x4, x12 = rotl32(x12 ^ x0, 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 339) movdqa 0x00(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 340) paddd %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 341) movdqa %xmm0,0x00(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 342) pxor %xmm0,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 343) pshufb %xmm2,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 344) # x1 += x5, x13 = rotl32(x13 ^ x1, 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 345) movdqa 0x10(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 346) paddd %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 347) movdqa %xmm0,0x10(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 348) pxor %xmm0,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 349) pshufb %xmm2,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 350) # x2 += x6, x14 = rotl32(x14 ^ x2, 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 351) movdqa 0x20(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 352) paddd %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 353) movdqa %xmm0,0x20(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 354) pxor %xmm0,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 355) pshufb %xmm2,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 356) # x3 += x7, x15 = rotl32(x15 ^ x3, 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 357) movdqa 0x30(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 358) paddd %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 359) movdqa %xmm0,0x30(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 360) pxor %xmm0,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 361) pshufb %xmm2,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 362)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 363) # x8 += x12, x4 = rotl32(x4 ^ x8, 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 364) paddd %xmm12,%xmm8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 365) pxor %xmm8,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 366) movdqa %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 367) pslld $7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 368) psrld $25,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 369) por %xmm0,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 370) # x9 += x13, x5 = rotl32(x5 ^ x9, 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 371) paddd %xmm13,%xmm9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 372) pxor %xmm9,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 373) movdqa %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 374) pslld $7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 375) psrld $25,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 376) por %xmm0,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 377) # x10 += x14, x6 = rotl32(x6 ^ x10, 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 378) paddd %xmm14,%xmm10
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 379) pxor %xmm10,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 380) movdqa %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 381) pslld $7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 382) psrld $25,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 383) por %xmm0,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 384) # x11 += x15, x7 = rotl32(x7 ^ x11, 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 385) paddd %xmm15,%xmm11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 386) pxor %xmm11,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 387) movdqa %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 388) pslld $7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 389) psrld $25,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 390) por %xmm0,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 391)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 392) # x0 += x5, x15 = rotl32(x15 ^ x0, 16)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 393) movdqa 0x00(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 394) paddd %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 395) movdqa %xmm0,0x00(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 396) pxor %xmm0,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 397) pshufb %xmm3,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 398) # x1 += x6, x12 = rotl32(x12 ^ x1, 16)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 399) movdqa 0x10(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 400) paddd %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 401) movdqa %xmm0,0x10(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 402) pxor %xmm0,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 403) pshufb %xmm3,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 404) # x2 += x7, x13 = rotl32(x13 ^ x2, 16)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 405) movdqa 0x20(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 406) paddd %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 407) movdqa %xmm0,0x20(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 408) pxor %xmm0,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 409) pshufb %xmm3,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 410) # x3 += x4, x14 = rotl32(x14 ^ x3, 16)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 411) movdqa 0x30(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 412) paddd %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 413) movdqa %xmm0,0x30(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 414) pxor %xmm0,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 415) pshufb %xmm3,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 416)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 417) # x10 += x15, x5 = rotl32(x5 ^ x10, 12)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 418) paddd %xmm15,%xmm10
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 419) pxor %xmm10,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 420) movdqa %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 421) pslld $12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 422) psrld $20,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 423) por %xmm0,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 424) # x11 += x12, x6 = rotl32(x6 ^ x11, 12)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 425) paddd %xmm12,%xmm11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 426) pxor %xmm11,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 427) movdqa %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 428) pslld $12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 429) psrld $20,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 430) por %xmm0,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 431) # x8 += x13, x7 = rotl32(x7 ^ x8, 12)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 432) paddd %xmm13,%xmm8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 433) pxor %xmm8,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 434) movdqa %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 435) pslld $12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 436) psrld $20,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 437) por %xmm0,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 438) # x9 += x14, x4 = rotl32(x4 ^ x9, 12)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 439) paddd %xmm14,%xmm9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 440) pxor %xmm9,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 441) movdqa %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 442) pslld $12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 443) psrld $20,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 444) por %xmm0,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 445)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 446) # x0 += x5, x15 = rotl32(x15 ^ x0, 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 447) movdqa 0x00(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 448) paddd %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 449) movdqa %xmm0,0x00(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 450) pxor %xmm0,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 451) pshufb %xmm2,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 452) # x1 += x6, x12 = rotl32(x12 ^ x1, 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 453) movdqa 0x10(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 454) paddd %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 455) movdqa %xmm0,0x10(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 456) pxor %xmm0,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 457) pshufb %xmm2,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 458) # x2 += x7, x13 = rotl32(x13 ^ x2, 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 459) movdqa 0x20(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 460) paddd %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 461) movdqa %xmm0,0x20(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 462) pxor %xmm0,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 463) pshufb %xmm2,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 464) # x3 += x4, x14 = rotl32(x14 ^ x3, 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 465) movdqa 0x30(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 466) paddd %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 467) movdqa %xmm0,0x30(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 468) pxor %xmm0,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 469) pshufb %xmm2,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 470)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 471) # x10 += x15, x5 = rotl32(x5 ^ x10, 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 472) paddd %xmm15,%xmm10
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 473) pxor %xmm10,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 474) movdqa %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 475) pslld $7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 476) psrld $25,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 477) por %xmm0,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 478) # x11 += x12, x6 = rotl32(x6 ^ x11, 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 479) paddd %xmm12,%xmm11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 480) pxor %xmm11,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 481) movdqa %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 482) pslld $7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 483) psrld $25,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 484) por %xmm0,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 485) # x8 += x13, x7 = rotl32(x7 ^ x8, 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 486) paddd %xmm13,%xmm8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 487) pxor %xmm8,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 488) movdqa %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 489) pslld $7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 490) psrld $25,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 491) por %xmm0,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 492) # x9 += x14, x4 = rotl32(x4 ^ x9, 7)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 493) paddd %xmm14,%xmm9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 494) pxor %xmm9,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 495) movdqa %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 496) pslld $7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 497) psrld $25,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 498) por %xmm0,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 499)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 500) sub $2,%r8d
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 501) jnz .Ldoubleround4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 502)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 503) # x0[0-3] += s0[0]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 504) # x1[0-3] += s0[1]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 505) movq 0x00(%rdi),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 506) pshufd $0x00,%xmm3,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 507) pshufd $0x55,%xmm3,%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 508) paddd 0x00(%rsp),%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 509) movdqa %xmm2,0x00(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 510) paddd 0x10(%rsp),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 511) movdqa %xmm3,0x10(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 512) # x2[0-3] += s0[2]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 513) # x3[0-3] += s0[3]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 514) movq 0x08(%rdi),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 515) pshufd $0x00,%xmm3,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 516) pshufd $0x55,%xmm3,%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 517) paddd 0x20(%rsp),%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 518) movdqa %xmm2,0x20(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 519) paddd 0x30(%rsp),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 520) movdqa %xmm3,0x30(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 521)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 522) # x4[0-3] += s1[0]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 523) # x5[0-3] += s1[1]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 524) movq 0x10(%rdi),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 525) pshufd $0x00,%xmm3,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 526) pshufd $0x55,%xmm3,%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 527) paddd %xmm2,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 528) paddd %xmm3,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 529) # x6[0-3] += s1[2]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 530) # x7[0-3] += s1[3]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 531) movq 0x18(%rdi),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 532) pshufd $0x00,%xmm3,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 533) pshufd $0x55,%xmm3,%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 534) paddd %xmm2,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 535) paddd %xmm3,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 536)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 537) # x8[0-3] += s2[0]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 538) # x9[0-3] += s2[1]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 539) movq 0x20(%rdi),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 540) pshufd $0x00,%xmm3,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 541) pshufd $0x55,%xmm3,%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 542) paddd %xmm2,%xmm8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 543) paddd %xmm3,%xmm9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 544) # x10[0-3] += s2[2]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 545) # x11[0-3] += s2[3]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 546) movq 0x28(%rdi),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 547) pshufd $0x00,%xmm3,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 548) pshufd $0x55,%xmm3,%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 549) paddd %xmm2,%xmm10
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 550) paddd %xmm3,%xmm11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 551)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 552) # x12[0-3] += s3[0]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 553) # x13[0-3] += s3[1]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 554) movq 0x30(%rdi),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 555) pshufd $0x00,%xmm3,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 556) pshufd $0x55,%xmm3,%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 557) paddd %xmm2,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 558) paddd %xmm3,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 559) # x14[0-3] += s3[2]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 560) # x15[0-3] += s3[3]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 561) movq 0x38(%rdi),%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 562) pshufd $0x00,%xmm3,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 563) pshufd $0x55,%xmm3,%xmm3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 564) paddd %xmm2,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 565) paddd %xmm3,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 566)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 567) # x12 += counter values 0-3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 568) paddd %xmm1,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 569)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 570) # interleave 32-bit words in state n, n+1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 571) movdqa 0x00(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 572) movdqa 0x10(%rsp),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 573) movdqa %xmm0,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 574) punpckldq %xmm1,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 575) punpckhdq %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 576) movdqa %xmm2,0x00(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 577) movdqa %xmm0,0x10(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 578) movdqa 0x20(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 579) movdqa 0x30(%rsp),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 580) movdqa %xmm0,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 581) punpckldq %xmm1,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 582) punpckhdq %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 583) movdqa %xmm2,0x20(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 584) movdqa %xmm0,0x30(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 585) movdqa %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 586) punpckldq %xmm5,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 587) punpckhdq %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 588) movdqa %xmm0,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 589) movdqa %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 590) punpckldq %xmm7,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 591) punpckhdq %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 592) movdqa %xmm0,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 593) movdqa %xmm8,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 594) punpckldq %xmm9,%xmm8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 595) punpckhdq %xmm9,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 596) movdqa %xmm0,%xmm9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 597) movdqa %xmm10,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 598) punpckldq %xmm11,%xmm10
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 599) punpckhdq %xmm11,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 600) movdqa %xmm0,%xmm11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 601) movdqa %xmm12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 602) punpckldq %xmm13,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 603) punpckhdq %xmm13,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 604) movdqa %xmm0,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 605) movdqa %xmm14,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 606) punpckldq %xmm15,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 607) punpckhdq %xmm15,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 608) movdqa %xmm0,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 609)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 610) # interleave 64-bit words in state n, n+2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 611) movdqa 0x00(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 612) movdqa 0x20(%rsp),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 613) movdqa %xmm0,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 614) punpcklqdq %xmm1,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 615) punpckhqdq %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 616) movdqa %xmm2,0x00(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 617) movdqa %xmm0,0x20(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 618) movdqa 0x10(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 619) movdqa 0x30(%rsp),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 620) movdqa %xmm0,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 621) punpcklqdq %xmm1,%xmm2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 622) punpckhqdq %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 623) movdqa %xmm2,0x10(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 624) movdqa %xmm0,0x30(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 625) movdqa %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 626) punpcklqdq %xmm6,%xmm4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 627) punpckhqdq %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 628) movdqa %xmm0,%xmm6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 629) movdqa %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 630) punpcklqdq %xmm7,%xmm5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 631) punpckhqdq %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 632) movdqa %xmm0,%xmm7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 633) movdqa %xmm8,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 634) punpcklqdq %xmm10,%xmm8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 635) punpckhqdq %xmm10,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 636) movdqa %xmm0,%xmm10
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 637) movdqa %xmm9,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 638) punpcklqdq %xmm11,%xmm9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 639) punpckhqdq %xmm11,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 640) movdqa %xmm0,%xmm11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 641) movdqa %xmm12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 642) punpcklqdq %xmm14,%xmm12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 643) punpckhqdq %xmm14,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 644) movdqa %xmm0,%xmm14
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 645) movdqa %xmm13,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 646) punpcklqdq %xmm15,%xmm13
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 647) punpckhqdq %xmm15,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 648) movdqa %xmm0,%xmm15
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 649)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 650) # xor with corresponding input, write to output
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 651) movdqa 0x00(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 652) cmp $0x10,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 653) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 654) movdqu 0x00(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 655) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 656) movdqu %xmm0,0x00(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 657)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 658) movdqu %xmm4,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 659) cmp $0x20,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 660) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 661) movdqu 0x10(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 662) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 663) movdqu %xmm0,0x10(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 664)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 665) movdqu %xmm8,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 666) cmp $0x30,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 667) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 668) movdqu 0x20(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 669) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 670) movdqu %xmm0,0x20(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 671)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 672) movdqu %xmm12,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 673) cmp $0x40,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 674) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 675) movdqu 0x30(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 676) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 677) movdqu %xmm0,0x30(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 678)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 679) movdqa 0x20(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 680) cmp $0x50,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 681) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 682) movdqu 0x40(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 683) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 684) movdqu %xmm0,0x40(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 685)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 686) movdqu %xmm6,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 687) cmp $0x60,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 688) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 689) movdqu 0x50(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 690) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 691) movdqu %xmm0,0x50(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 692)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 693) movdqu %xmm10,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 694) cmp $0x70,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 695) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 696) movdqu 0x60(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 697) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 698) movdqu %xmm0,0x60(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 699)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 700) movdqu %xmm14,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 701) cmp $0x80,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 702) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 703) movdqu 0x70(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 704) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 705) movdqu %xmm0,0x70(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 706)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 707) movdqa 0x10(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 708) cmp $0x90,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 709) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 710) movdqu 0x80(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 711) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 712) movdqu %xmm0,0x80(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 713)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 714) movdqu %xmm5,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 715) cmp $0xa0,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 716) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 717) movdqu 0x90(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 718) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 719) movdqu %xmm0,0x90(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 720)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 721) movdqu %xmm9,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 722) cmp $0xb0,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 723) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 724) movdqu 0xa0(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 725) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 726) movdqu %xmm0,0xa0(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 727)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 728) movdqu %xmm13,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 729) cmp $0xc0,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 730) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 731) movdqu 0xb0(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 732) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 733) movdqu %xmm0,0xb0(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 734)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 735) movdqa 0x30(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 736) cmp $0xd0,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 737) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 738) movdqu 0xc0(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 739) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 740) movdqu %xmm0,0xc0(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 741)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 742) movdqu %xmm7,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 743) cmp $0xe0,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 744) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 745) movdqu 0xd0(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 746) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 747) movdqu %xmm0,0xd0(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 748)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 749) movdqu %xmm11,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 750) cmp $0xf0,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 751) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 752) movdqu 0xe0(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 753) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 754) movdqu %xmm0,0xe0(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 755)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 756) movdqu %xmm15,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 757) cmp $0x100,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 758) jl .Lxorpart4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 759) movdqu 0xf0(%rdx),%xmm1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 760) pxor %xmm1,%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 761) movdqu %xmm0,0xf0(%rsi)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 762)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 763) .Ldone4:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 764) lea -8(%r10),%rsp
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 765) ret
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 766)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 767) .Lxorpart4:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 768) # xor remaining bytes from partial register into output
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 769) mov %rax,%r9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 770) and $0x0f,%r9
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 771) jz .Ldone4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 772) and $~0x0f,%rax
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 773)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 774) mov %rsi,%r11
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 775)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 776) lea (%rdx,%rax),%rsi
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 777) mov %rsp,%rdi
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 778) mov %r9,%rcx
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 779) rep movsb
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 780)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 781) pxor 0x00(%rsp),%xmm0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 782) movdqa %xmm0,0x00(%rsp)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 783)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 784) mov %rsp,%rsi
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 785) lea (%r11,%rax),%rdi
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 786) mov %r9,%rcx
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 787) rep movsb
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 788)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 789) jmp .Ldone4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 790)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 791) SYM_FUNC_END(chacha_4block_xor_ssse3)