author    Tony Luck <tony.luck@intel.com>    2021-12-16 09:24:31 -0800
committer Borislav Petkov <bp@suse.de>       2021-12-29 13:46:02 +0100
commit    244122b4d2e5221e6abd6e21d6a58170104db781 (patch)
tree      d4e2ba23bdd27e226016e9028591204c737cb8a2 /arch/x86/lib
parent    9c7e2634f647630db4e0719391dd80cd81132a66 (diff)
x86/lib: Add fast-short-rep-movs check to copy_user_enhanced_fast_string()
Commit f444a5ff95dc ("x86/cpufeatures: Add support for fast short REP MOVSB") fixed memmove() with an ALTERNATIVE that will use REP MOVSB for all string lengths.

copy_user_enhanced_fast_string() has a similar run-time check to avoid using REP MOVSB for copies shorter than 64 bytes.

Add an ALTERNATIVE to patch out the short-length check and always use REP MOVSB on X86_FEATURE_FSRM CPUs.

Signed-off-by: Tony Luck <tony.luck@intel.com>
Signed-off-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211216172431.1396371-1-tony.luck@intel.com
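For context, the trade-off behind the check: REP MOVSB carries a startup cost on CPUs without FSRM, so copies shorter than 64 bytes branch to a separate short-copy path, while FSRM CPUs handle short lengths efficiently and the check can be patched away at boot. Below is a minimal C sketch of that dispatch; the helpers cpu_has_fsrm(), rep_movsb() and copy_short() are hypothetical stand-ins for the assembly, not real kernel interfaces.

/*
 * Minimal C sketch (not kernel code) of the dispatch that the new
 * ALTERNATIVE in copy_user_enhanced_fast_string() implements.
 */
#include <stddef.h>
#include <string.h>

/* Stub: pretend the CPU does not advertise X86_FEATURE_FSRM. */
static int cpu_has_fsrm(void)
{
	return 0;
}

/* Stand-in for the REP MOVSB block: bulk copy of len bytes. */
static void rep_movsb(void *dst, const void *src, size_t len)
{
	memcpy(dst, src, len);
}

/* Stand-in for .L_copy_short_string: simple byte-at-a-time copy. */
static void copy_short(void *dst, const void *src, size_t len)
{
	unsigned char *d = dst;
	const unsigned char *s = src;

	while (len--)
		*d++ = *s++;
}

static void copy_user_fast_string_sketch(void *dst, const void *src, size_t len)
{
	/*
	 * Without FSRM, REP MOVSB has a noticeable startup cost, so
	 * copies shorter than 64 bytes take the short-copy path.  On
	 * FSRM CPUs the ALTERNATIVE patches this check out at boot,
	 * so REP MOVSB is used for every length.
	 */
	if (!cpu_has_fsrm() && len < 64) {
		copy_short(dst, src, len);
		return;
	}

	rep_movsb(dst, src, len);
}

int main(void)
{
	char src[16] = "short copy test";
	char dst[16];

	copy_user_fast_string_sketch(dst, src, sizeof(src));
	return 0;
}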
Diffstat (limited to 'arch/x86/lib')
-rw-r--r--  arch/x86/lib/copy_user_64.S | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S
index 2797e630b9b1..1c429f0489dd 100644
--- a/arch/x86/lib/copy_user_64.S
+++ b/arch/x86/lib/copy_user_64.S
@@ -200,8 +200,8 @@ EXPORT_SYMBOL(copy_user_generic_string)
*/
SYM_FUNC_START(copy_user_enhanced_fast_string)
ASM_STAC
- cmpl $64,%edx
- jb .L_copy_short_string /* less then 64 bytes, avoid the costly 'rep' */
+ /* CPUs without FSRM should avoid rep movsb for short copies */
+ ALTERNATIVE "cmpl $64, %edx; jb .L_copy_short_string", "", X86_FEATURE_FSRM
movl %edx,%ecx
1: rep
movsb