/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Save registers before calling assembly functions. This avoids
 * disturbance of register allocation in some inline assembly constructs.
 * Copyright 2001,2002 by Andi Kleen, SuSE Labs.
 */
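/*
 * These thunks let C code call a function from inline asm without a
 * clobber list for the caller-saved registers. As a sketch of the
 * intended caller (modelled on arch/x86/include/asm/preempt.h):
 *
 *	#define __preempt_schedule() \
 *		asm volatile ("call preempt_schedule_thunk" : ASM_CALL_CONSTRAINT)
 *
 * Because the thunk itself preserves everything the ABI allows the
 * callee to clobber, such an asm statement leaves the compiler's
 * register allocation in the surrounding code undisturbed.
 */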
#include <linux/linkage.h>
#include "calling.h"
#include <asm/asm.h>
#include <asm/export.h>
#include <asm/irqflags.h>

.code64
.section .noinstr.text, "ax"

	/* rdi:	arg1 ... normal C conventions. rax is saved/restored. */
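	/*
	 * \put_ret_addr_in_rdi: hand the caller's return address to \func
	 * as its first argument (useful when the target wants to know its
	 * call site, e.g. for tracing).
	 * \check_if: return immediately when interrupts are already
	 * disabled instead of calling \func.
	 */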
	.macro THUNK name, func, put_ret_addr_in_rdi=0, check_if=0
SYM_FUNC_START_NOALIGN(\name)

	.if \check_if
	/*
	 * Check for interrupts disabled right here: if they are, there
	 * is no point in going all the way through the register save
	 * and the call, so return immediately instead.
	 */
	pushq	%rax
	SAVE_FLAGS(CLBR_RAX)
	testl	$X86_EFLAGS_IF, %eax
	popq	%rax
	jnz	1f
	ret
1:
	.endif

	pushq %rbp
	movq %rsp, %rbp

	pushq %rdi
	pushq %rsi
	pushq %rdx
	pushq %rcx
	pushq %rax
	pushq %r8
	pushq %r9
	pushq %r10
	pushq %r11
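
	/*
	 * The nine pushes above cover every remaining caller-clobbered
	 * GPR of the SysV AMD64 ABI (rdi, rsi, rdx, rcx, rax, r8-r11);
	 * rbp was saved with the frame setup, and \func must preserve
	 * the callee-saved registers itself.
	 */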

	.if \put_ret_addr_in_rdi
	/* 8(%rbp) is return addr on stack */
	movq 8(%rbp), %rdi
	.endif

	/*
	 * noinstr callers will have interrupts disabled and will thus
	 * not get here. objtool does not know that and would complain
	 * about a call leaving the noinstr section, so annotate the
	 * call as an explicitly instrumentable range.
	 */
1:
	.pushsection .discard.instr_begin
	.long 1b - .
	.popsection

	call \func
2:
	.pushsection .discard.instr_end
	.long 2b - .
	.popsection
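
	/*
	 * The .long 1b - . / 2b - . entries record PC-relative offsets
	 * that objtool uses to treat the bracketed range as explicitly
	 * instrumentable despite living in .noinstr.text.
	 */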

	jmp  .L_restore
SYM_FUNC_END(\name)
	.endm
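
/*
 * For illustration, THUNK preempt_schedule_thunk, preempt_schedule
 * expands to roughly:
 *
 *	SYM_FUNC_START_NOALIGN(preempt_schedule_thunk)
 *		pushq	%rbp
 *		movq	%rsp, %rbp
 *		pushq	%rdi		# ... through %r11
 *		call	preempt_schedule
 *		jmp	.L_restore
 *	SYM_FUNC_END(preempt_schedule_thunk)
 *
 * with the objtool annotations omitted for brevity.
 */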

#ifdef CONFIG_PREEMPTION
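	/*
	 * Exported because preempt_enable() in modules expands to inline
	 * asm that calls these thunks directly.
	 */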
	THUNK preempt_schedule_thunk, preempt_schedule
	EXPORT_SYMBOL(preempt_schedule_thunk)

	THUNK preempt_schedule_notrace_thunk, preempt_schedule_notrace, check_if=1
	EXPORT_SYMBOL(preempt_schedule_notrace_thunk)
#endif

#ifdef CONFIG_PREEMPTION
SYM_CODE_START_LOCAL_NOALIGN(.L_restore)
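	/* Pops must mirror the THUNK push sequence in exact reverse order. */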
	popq %r11
	popq %r10
	popq %r9
	popq %r8
	popq %rax
	popq %rcx
	popq %rdx
	popq %rsi
	popq %rdi
	popq %rbp
	ret
SYM_CODE_END(.L_restore)
#endif