^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) /* SPDX-License-Identifier: GPL-2.0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) #include <linux/linkage.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) #include <linux/errno.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4) #include <asm/asm.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) #include <asm/msr.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7) #ifdef CONFIG_X86_64
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9) * int {rdmsr,wrmsr}_safe_regs(u32 gprs[8]);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11) * reg layout: u32 gprs[eax, ecx, edx, ebx, esp, ebp, esi, edi]
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13) */
/*
 * 64-bit implementation.
 *
 * In:  %rdi = gprs, a u32[8] indexed [eax, ecx, edx, ebx, esp, ebp, esi, edi].
 * Out: %eax = 0 on success, -EIO if the MSR access faulted (#GP);
 *      gprs[] updated with the register values after \op.
 *
 * Notes:
 *  - gprs[4] (the "esp" slot, offset 16) is deliberately never read or
 *    written; the stack pointer is not exposed through this interface.
 *  - %r10/%r11 are caller-clobbered and untouched by rdmsr/wrmsr, so they
 *    hold the gprs pointer and the return value across the operation.
 *  - %rbx/%r12 are callee-saved, hence the push/pop pairs; %r12 stands in
 *    for the caller's "ebp" slot so we never disturb the real frame.
 *  - A faulting \op is caught via the exception table: 1b -> 3b sets
 *    -EIO and rejoins the register write-back path at 2b.
 */
.macro op_safe_regs op
SYM_FUNC_START(\op\()_safe_regs)
	pushq	%rbx
	pushq	%r12
	movq	%rdi, %r10		/* Save pointer: %edi is loaded below */
	xorl	%r11d, %r11d		/* Return value: 0 = success */
	movl	(%rdi), %eax
	movl	4(%rdi), %ecx
	movl	8(%rdi), %edx
	movl	12(%rdi), %ebx
	movl	20(%rdi), %r12d		/* "ebp" slot lives in %r12d */
	movl	24(%rdi), %esi
	movl	28(%rdi), %edi		/* last: clobbers the arg register */
1:	\op
2:	movl	%eax, (%r10)
	movl	%r11d, %eax		/* Return value */
	movl	%ecx, 4(%r10)
	movl	%edx, 8(%r10)
	movl	%ebx, 12(%r10)
	movl	%r12d, 20(%r10)
	movl	%esi, 24(%r10)
	movl	%edi, 28(%r10)
	popq	%r12
	popq	%rbx
	RET				/* kernel RET macro, not bare ret (SLS) */
3:
	movl	$-EIO, %r11d		/* MSR access faulted */
	jmp	2b

	_ASM_EXTABLE(1b, 3b)
SYM_FUNC_END(\op\()_safe_regs)
.endm
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 46)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 47) #else /* X86_32 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 48)
/*
 * 32-bit implementation (regparm(3): first argument arrives in %eax).
 *
 * In:  %eax = gprs, a u32[8] indexed [eax, ecx, edx, ebx, esp, ebp, esi, edi].
 * Out: %eax = 0 on success, -EIO if the MSR access faulted (#GP);
 *      gprs[] updated with the register values after \op.
 *
 * Stack layout after the prologue (low -> high):
 *      (%esp)  = saved gprs pointer
 *      4(%esp) = return value slot (pre-seeded with 0)
 *      then saved %edi, %esi, %ebp, %ebx
 *
 * gprs[4] (the "esp" slot, offset 16) is deliberately never read or
 * written. %eax is loaded *last* because it carries the gprs pointer.
 * A faulting \op is caught via the exception table: 1b -> 3b writes
 * -EIO into the return-value slot and rejoins the write-back at 2b.
 */
.macro op_safe_regs op
SYM_FUNC_START(\op\()_safe_regs)
	pushl	%ebx
	pushl	%ebp
	pushl	%esi
	pushl	%edi
	pushl	$0			/* Return value: 0 = success */
	pushl	%eax			/* Save gprs pointer */
	movl	4(%eax), %ecx
	movl	8(%eax), %edx
	movl	12(%eax), %ebx
	movl	20(%eax), %ebp
	movl	24(%eax), %esi
	movl	28(%eax), %edi
	movl	(%eax), %eax		/* last: clobbers the pointer */
1:	\op
2:	pushl	%eax			/* Result %eax; pointer now at 4(%esp) */
	movl	4(%esp), %eax		/* Reload gprs pointer */
	popl	(%eax)			/* gprs[0] = result %eax */
	addl	$4, %esp		/* Drop the saved pointer */
	movl	%ecx, 4(%eax)
	movl	%edx, 8(%eax)
	movl	%ebx, 12(%eax)
	movl	%ebp, 20(%eax)
	movl	%esi, 24(%eax)
	movl	%edi, 28(%eax)
	popl	%eax			/* Return value */
	popl	%edi
	popl	%esi
	popl	%ebp
	popl	%ebx
	RET				/* kernel RET macro, not bare ret (SLS) */
3:
	movl	$-EIO, 4(%esp)		/* MSR access faulted: set retval slot */
	jmp	2b

	_ASM_EXTABLE(1b, 3b)
SYM_FUNC_END(\op\()_safe_regs)
.endm
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 88)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 89) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 90)
/* Instantiate rdmsr_safe_regs() and wrmsr_safe_regs() from the macro above. */
op_safe_regs rdmsr
op_safe_regs wrmsr
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 93)