tmp_suning_uos_patched/arch/x86/lib/msr-reg.S
Peter Zijlstra 3c91e22576 x86: Prepare asm files for straight-line-speculation
commit f94909ceb1ed4bfdb2ada72f93236305e6d6951f upstream.

Replace all ret/retq instructions with RET in preparation of making
RET a macro. Since AS is case insensitive it's a big no-op without
RET defined.

  find arch/x86/ -name \*.S | while read file
  do
	sed -i 's/\<ret[q]*\>/RET/' $file
  done

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211204134907.905503893@infradead.org
[bwh: Backported to 5.10: ran the above command]
Signed-off-by: Ben Hutchings <ben@decadent.org.uk>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
2022-07-25 11:26:28 +02:00
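
For context, the RET macro this change prepares for was later defined in
arch/x86/include/asm/linkage.h roughly as follows (a sketch based on the
upstream straight-line-speculation patches, not part of this commit):

  #ifdef CONFIG_SLS
  #define RET	ret; int3
  #else
  #define RET	ret
  #endif

With CONFIG_SLS enabled, every return is followed by an int3 trap, so a CPU
that speculates straight past the ret hits a trap instead of executing
whatever bytes happen to follow.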

/* SPDX-License-Identifier: GPL-2.0 */
#include <linux/linkage.h>
#include <linux/errno.h>
#include <asm/asm.h>
#include <asm/msr.h>

#ifdef CONFIG_X86_64
/*
 * int {rdmsr,wrmsr}_safe_regs(u32 gprs[8]);
 *
 * reg layout: u32 gprs[eax, ecx, edx, ebx, esp, ebp, esi, edi]
 *
 */
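/*
 * Usage from C, as a minimal sketch (rdmsr_safe_regs() is declared in
 * arch/x86/include/asm/msr.h; MSR_IA32_TSC is just an illustrative index):
 *
 *	u32 regs[8] = { 0 };
 *	u64 val;
 *
 *	regs[1] = MSR_IA32_TSC;		// %ecx selects the MSR
 *	if (!rdmsr_safe_regs(regs))
 *		val = ((u64)regs[2] << 32) | regs[0];	// %edx:%eax
 *
 * Note that gprs[4] (the %esp slot) is never read or written by either
 * function.
 */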
.macro op_safe_regs op
SYM_FUNC_START(\op\()_safe_regs)
	pushq %rbx
	pushq %r12
	movq	%rdi, %r10	/* Save pointer */
	xorl	%r11d, %r11d	/* Return value */
	movl	(%rdi), %eax
	movl	4(%rdi), %ecx
	movl	8(%rdi), %edx
	movl	12(%rdi), %ebx
	movl	20(%rdi), %r12d
	movl	24(%rdi), %esi
	movl	28(%rdi), %edi
1:	\op
2:	movl	%eax, (%r10)
	movl	%r11d, %eax	/* Return value */
	movl	%ecx, 4(%r10)
	movl	%edx, 8(%r10)
	movl	%ebx, 12(%r10)
	movl	%r12d, 20(%r10)
	movl	%esi, 24(%r10)
	movl	%edi, 28(%r10)
	popq %r12
	popq %rbx
	RET
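
/*
 * Fixup path, reached via the exception table entry below when the MSR
 * access at 1: faults: flag -EIO as the return value and rejoin the
 * normal register-writeback exit at 2:.
 */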
3:
	movl	$-EIO, %r11d
	jmp	2b
	_ASM_EXTABLE(1b, 3b)
SYM_FUNC_END(\op\()_safe_regs)
.endm
#else /* X86_32 */
.macro op_safe_regs op
SYM_FUNC_START(\op\()_safe_regs)
	pushl %ebx
	pushl %ebp
	pushl %esi
	pushl %edi
	pushl $0	/* Return value */
	pushl %eax
	movl	4(%eax), %ecx
	movl	8(%eax), %edx
	movl	12(%eax), %ebx
	movl	20(%eax), %ebp
	movl	24(%eax), %esi
	movl	28(%eax), %edi
	movl	(%eax), %eax
1:	\op
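
/*
 * Write %eax back to gprs[0] without a spare register: push the result,
 * reload the saved gprs pointer into %eax, pop the result straight into
 * gprs[0], then drop the saved pointer copy from the stack (%eax still
 * holds the pointer for the stores below).
 */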
2:	pushl %eax
	movl	4(%esp), %eax
	popl	(%eax)
	addl	$4, %esp
	movl	%ecx, 4(%eax)
	movl	%edx, 8(%eax)
	movl	%ebx, 12(%eax)
	movl	%ebp, 20(%eax)
	movl	%esi, 24(%eax)
	movl	%edi, 28(%eax)
	popl %eax
	popl %edi
	popl %esi
	popl %ebp
	popl %ebx
	RET
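
/*
 * Fixup path, reached via the exception table entry below when the MSR
 * access at 1: faults: 4(%esp) is the return-value slot pushed in the
 * prologue, so store -EIO there and rejoin the exit path at 2:.
 */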
3:
	movl	$-EIO, 4(%esp)
	jmp	2b
	_ASM_EXTABLE(1b, 3b)
SYM_FUNC_END(\op\()_safe_regs)
.endm
#endif
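
/* Instantiate both entry points: rdmsr_safe_regs() and wrmsr_safe_regs(). */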
op_safe_regs rdmsr
op_safe_regs wrmsr