about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Dan Williams <[email protected]>  2018-05-03 17:06:16 -0700
committer Ingo Molnar <[email protected]>      2018-05-15 08:32:42 +0200
commit  bd131544aa7e318a5735cbcbad46c4a5ee6b9d42 (patch)
tree    48223d5efd96ad0a4dead2ad3ce8657c5185164d
parent  da7bc9c57eb0e91e048d05f7dbe5014a8b81ccfa (diff)
x86/asm/memcpy_mcsafe: Add labels for __memcpy_mcsafe() write fault handling
The memcpy_mcsafe() implementation handles CPU exceptions when reading from the source address. Before it can be used for user copies it needs to grow support for handling write faults. In preparation for adding that exception handling, update the labels for the read cache word X case (.L_cache_rX) and write cache word X case (.L_cache_wX). Reported-by: Tony Luck <[email protected]> Signed-off-by: Dan Williams <[email protected]> Cc: Al Viro <[email protected]> Cc: Andrew Morton <[email protected]> Cc: Andy Lutomirski <[email protected]> Cc: Borislav Petkov <[email protected]> Cc: Linus Torvalds <[email protected]> Cc: Peter Zijlstra <[email protected]> Cc: Thomas Gleixner <[email protected]> Cc: [email protected] Cc: [email protected] Cc: [email protected] Link: http://lkml.kernel.org/r/152539237606.31796.6719743548991782264.stgit@dwillia2-desk3.amr.corp.intel.com Signed-off-by: Ingo Molnar <[email protected]>
-rw-r--r--  arch/x86/lib/memcpy_64.S | 21
1 file changed, 12 insertions(+), 9 deletions(-)
diff --git a/arch/x86/lib/memcpy_64.S b/arch/x86/lib/memcpy_64.S
index 54c971892db5..5709f3ec22a4 100644
--- a/arch/x86/lib/memcpy_64.S
+++ b/arch/x86/lib/memcpy_64.S
@@ -204,13 +204,14 @@ ENTRY(__memcpy_mcsafe)
subl $8, %ecx
negl %ecx
subl %ecx, %edx
-.L_copy_leading_bytes:
+.L_read_leading_bytes:
movb (%rsi), %al
+.L_write_leading_bytes:
movb %al, (%rdi)
incq %rsi
incq %rdi
decl %ecx
- jnz .L_copy_leading_bytes
+ jnz .L_read_leading_bytes
.L_8byte_aligned:
movl %edx, %ecx
@@ -218,13 +219,14 @@ ENTRY(__memcpy_mcsafe)
shrl $3, %ecx
jz .L_no_whole_words
-.L_copy_words:
+.L_read_words:
movq (%rsi), %r8
+.L_write_words:
movq %r8, (%rdi)
addq $8, %rsi
addq $8, %rdi
decl %ecx
- jnz .L_copy_words
+ jnz .L_read_words
/* Any trailing bytes? */
.L_no_whole_words:
@@ -233,13 +235,14 @@ ENTRY(__memcpy_mcsafe)
/* Copy trailing bytes */
movl %edx, %ecx
-.L_copy_trailing_bytes:
+.L_read_trailing_bytes:
movb (%rsi), %al
+.L_write_trailing_bytes:
movb %al, (%rdi)
incq %rsi
incq %rdi
decl %ecx
- jnz .L_copy_trailing_bytes
+ jnz .L_read_trailing_bytes
/* Copy successful. Return zero */
.L_done_memcpy_trap:
@@ -256,7 +259,7 @@ EXPORT_SYMBOL_GPL(__memcpy_mcsafe)
.previous
- _ASM_EXTABLE_FAULT(.L_copy_leading_bytes, .L_memcpy_mcsafe_fail)
- _ASM_EXTABLE_FAULT(.L_copy_words, .L_memcpy_mcsafe_fail)
- _ASM_EXTABLE_FAULT(.L_copy_trailing_bytes, .L_memcpy_mcsafe_fail)
+ _ASM_EXTABLE_FAULT(.L_read_leading_bytes, .L_memcpy_mcsafe_fail)
+ _ASM_EXTABLE_FAULT(.L_read_words, .L_memcpy_mcsafe_fail)
+ _ASM_EXTABLE_FAULT(.L_read_trailing_bytes, .L_memcpy_mcsafe_fail)
#endif