about summary refs log tree commit diff
path: root/arch/x86/power/hibernate_asm_32.S
diff options
context:
space:
mode:
author    Jiri Slaby <[email protected]>    2019-10-11 13:51:05 +0200
committer Borislav Petkov <[email protected]>    2019-10-18 11:58:33 +0200
commit    78762b0e79bc1dd01347be061abdf505202152c9 (patch)
tree      6053a9b9fb0c7d7e02393bae6890c8a591f5e1df    /arch/x86/power/hibernate_asm_32.S
parent    6dcc5627f6aec4cb1d1494d06a48d8061db06a04 (diff)
x86/asm/32: Add ENDs to some functions and relabel with SYM_CODE_*
All these are functions which are invoked from elsewhere but they are not typical C functions. So annotate them using the new SYM_CODE_START. All these were not balanced with any END, so mark their ends by SYM_CODE_END, appropriately.

Signed-off-by: Jiri Slaby <[email protected]>
Signed-off-by: Borislav Petkov <[email protected]>
Reviewed-by: Boris Ostrovsky <[email protected]> [xen bits]
Reviewed-by: Rafael J. Wysocki <[email protected]> [hibernate]
Cc: Andy Lutomirski <[email protected]>
Cc: "H. Peter Anvin" <[email protected]>
Cc: Ingo Molnar <[email protected]>
Cc: Josh Poimboeuf <[email protected]>
Cc: Juergen Gross <[email protected]>
Cc: Len Brown <[email protected]>
Cc: [email protected]
Cc: [email protected]
Cc: Pavel Machek <[email protected]>
Cc: Peter Zijlstra <[email protected]>
Cc: Pingfan Liu <[email protected]>
Cc: Stefano Stabellini <[email protected]>
Cc: "Steven Rostedt (VMware)" <[email protected]>
Cc: Thomas Gleixner <[email protected]>
Cc: x86-ml <[email protected]>
Cc: [email protected]
Link: https://lkml.kernel.org/r/[email protected]
Diffstat (limited to 'arch/x86/power/hibernate_asm_32.S')
-rw-r--r--    arch/x86/power/hibernate_asm_32.S    6
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/arch/x86/power/hibernate_asm_32.S b/arch/x86/power/hibernate_asm_32.S
index 6fe383002125..a19ed3d23185 100644
--- a/arch/x86/power/hibernate_asm_32.S
+++ b/arch/x86/power/hibernate_asm_32.S
@@ -35,7 +35,7 @@ ENTRY(swsusp_arch_suspend)
ret
ENDPROC(swsusp_arch_suspend)
-ENTRY(restore_image)
+SYM_CODE_START(restore_image)
/* prepare to jump to the image kernel */
movl restore_jump_address, %ebx
movl restore_cr3, %ebp
@@ -45,9 +45,10 @@ ENTRY(restore_image)
/* jump to relocated restore code */
movl relocated_restore_code, %eax
jmpl *%eax
+SYM_CODE_END(restore_image)
/* code below has been relocated to a safe page */
-ENTRY(core_restore_code)
+SYM_CODE_START(core_restore_code)
movl temp_pgt, %eax
movl %eax, %cr3
@@ -77,6 +78,7 @@ copy_loop:
done:
jmpl *%ebx
+SYM_CODE_END(core_restore_code)
/* code below belongs to the image kernel */
.align PAGE_SIZE