x86/bugs: Fix the SRSO mitigation on Zen3/4
The original version of the mitigation would patch in the calls to the untraining routines directly. That is, the alternative() in UNTRAIN_RET will patch in the CALL to srso_alias_untrain_ret() directly.

However, even if commit e7c25c441e ("x86/cpu: Cleanup the untrain mess") meant well in trying to clean up the situation, due to micro-architectural reasons, the untraining routine srso_alias_untrain_ret() must be the target of a CALL instruction and not of a JMP instruction as it is done now.

Reshuffle the alternative macros to accomplish that.

Fixes: e7c25c441e ("x86/cpu: Cleanup the untrain mess")
Signed-off-by: Borislav Petkov (AMD) <bp@alien8.de>
Reviewed-by: Ingo Molnar <mingo@kernel.org>
Cc: stable@kernel.org
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent 091619baac
commit 4535e1a417

3 changed files with 23 additions and 10 deletions
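To make the distinction concrete, here is a rough sketch of what ends up executing at a patched entry point on a part with X86_FEATURE_SRSO_ALIAS set, before and after this change. The entry_point label is a stand-in for illustration only; the actual patching is done by the alternatives in the hunks below.

	/* before: the site calls a wrapper, and the wrapper reaches the
	 * untraining routine with a JMP, so srso_alias_untrain_ret() ends
	 * up being the target of a JMP */
	entry_point:
		call	entry_untrain_ret
		...
	entry_untrain_ret:
		jmp	srso_alias_untrain_ret

	/* after: the alternative patches the CALL in directly at the site,
	 * so srso_alias_untrain_ret() is the target of a CALL */
	entry_point:
		call	srso_alias_untrain_ret
		...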
arch/x86/include/asm/asm-prototypes.h
@@ -14,6 +14,7 @@
 #include <asm/asm.h>
 #include <asm/fred.h>
 #include <asm/gsseg.h>
+#include <asm/nospec-branch.h>
 
 #ifndef CONFIG_X86_CMPXCHG64
 extern void cmpxchg8b_emu(void);
arch/x86/include/asm/nospec-branch.h
@@ -262,11 +262,20 @@
 .Lskip_rsb_\@:
 .endm
 
+/*
+ * The CALL to srso_alias_untrain_ret() must be patched in directly at
+ * the spot where untraining must be done, ie., srso_alias_untrain_ret()
+ * must be the target of a CALL instruction instead of indirectly
+ * jumping to a wrapper which then calls it. Therefore, this macro is
+ * called outside of __UNTRAIN_RET below, for the time being, before the
+ * kernel can support nested alternatives with arbitrary nesting.
+ */
+.macro CALL_UNTRAIN_RET
 #if defined(CONFIG_MITIGATION_UNRET_ENTRY) || defined(CONFIG_MITIGATION_SRSO)
-#define CALL_UNTRAIN_RET "call entry_untrain_ret"
-#else
-#define CALL_UNTRAIN_RET ""
+	ALTERNATIVE_2 "", "call entry_untrain_ret", X86_FEATURE_UNRET, \
+			  "call srso_alias_untrain_ret", X86_FEATURE_SRSO_ALIAS
 #endif
+.endm
 
 /*
  * Mitigate RETBleed for AMD/Hygon Zen uarch. Requires KERNEL CR3 because the
@@ -282,8 +291,8 @@
 .macro __UNTRAIN_RET ibpb_feature, call_depth_insns
 #if defined(CONFIG_MITIGATION_RETHUNK) || defined(CONFIG_MITIGATION_IBPB_ENTRY)
 	VALIDATE_UNRET_END
-	ALTERNATIVE_3 "",						\
-		      CALL_UNTRAIN_RET, X86_FEATURE_UNRET,		\
+	CALL_UNTRAIN_RET
+	ALTERNATIVE_2 "",						\
 		      "call entry_ibpb", \ibpb_feature,			\
 		     __stringify(\call_depth_insns), X86_FEATURE_CALL_DEPTH
 #endif
@@ -342,6 +351,8 @@ extern void retbleed_return_thunk(void);
 static inline void retbleed_return_thunk(void) {}
 #endif
 
+extern void srso_alias_untrain_ret(void);
+
 #ifdef CONFIG_MITIGATION_SRSO
 extern void srso_return_thunk(void);
 extern void srso_alias_return_thunk(void);
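Net effect on the entry macros: where __UNTRAIN_RET used to emit a single ALTERNATIVE_3 site, an entry path now gets two consecutive alternative sites, the untraining CALL followed by the IBPB/call-depth handling. A hand-expanded sketch for illustration; the \ibpb_feature and \call_depth_insns parameters are the ones __UNTRAIN_RET already takes above, not new names.

	VALIDATE_UNRET_END
	/* site 1, from CALL_UNTRAIN_RET: empty by default, patched to a
	 * direct CALL to the required untraining routine */
	ALTERNATIVE_2 "", "call entry_untrain_ret", X86_FEATURE_UNRET, \
			  "call srso_alias_untrain_ret", X86_FEATURE_SRSO_ALIAS
	/* site 2, still inside __UNTRAIN_RET: IBPB on entry or call-depth
	 * accounting, as before */
	ALTERNATIVE_2 "", "call entry_ibpb", \ibpb_feature, \
			  __stringify(\call_depth_insns), X86_FEATURE_CALL_DEPTH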
arch/x86/lib/retpoline.S
@@ -163,6 +163,7 @@ SYM_CODE_START_NOALIGN(srso_alias_untrain_ret)
 	lfence
 	jmp srso_alias_return_thunk
 SYM_FUNC_END(srso_alias_untrain_ret)
+__EXPORT_THUNK(srso_alias_untrain_ret)
 .popsection
 
 .pushsection .text..__x86.rethunk_safe
@@ -224,10 +225,12 @@ SYM_CODE_START(srso_return_thunk)
 SYM_CODE_END(srso_return_thunk)
 
 #define JMP_SRSO_UNTRAIN_RET "jmp srso_untrain_ret"
-#define JMP_SRSO_ALIAS_UNTRAIN_RET "jmp srso_alias_untrain_ret"
 #else /* !CONFIG_MITIGATION_SRSO */
+/* Dummy for the alternative in CALL_UNTRAIN_RET. */
+SYM_CODE_START(srso_alias_untrain_ret)
+	RET
+SYM_FUNC_END(srso_alias_untrain_ret)
 #define JMP_SRSO_UNTRAIN_RET "ud2"
-#define JMP_SRSO_ALIAS_UNTRAIN_RET "ud2"
 #endif /* CONFIG_MITIGATION_SRSO */
 
 #ifdef CONFIG_MITIGATION_UNRET_ENTRY
@@ -319,9 +322,7 @@ SYM_FUNC_END(retbleed_untrain_ret)
 #if defined(CONFIG_MITIGATION_UNRET_ENTRY) || defined(CONFIG_MITIGATION_SRSO)
 
 SYM_FUNC_START(entry_untrain_ret)
-	ALTERNATIVE_2 JMP_RETBLEED_UNTRAIN_RET,				\
-		      JMP_SRSO_UNTRAIN_RET, X86_FEATURE_SRSO,		\
-		      JMP_SRSO_ALIAS_UNTRAIN_RET, X86_FEATURE_SRSO_ALIAS
+	ALTERNATIVE JMP_RETBLEED_UNTRAIN_RET, JMP_SRSO_UNTRAIN_RET, X86_FEATURE_SRSO
 SYM_FUNC_END(entry_untrain_ret)
 __EXPORT_THUNK(entry_untrain_ret)
 
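On the dummy srso_alias_untrain_ret in the !CONFIG_MITIGATION_SRSO branch: the replacement sequences of an alternative are assembled into the kernel at build time no matter which one boot-time patching later selects, so every CALL_UNTRAIN_RET site now carries a reference to srso_alias_untrain_ret even in a CONFIG_MITIGATION_SRSO=n build, and the symbol must therefore always be defined. A sketch of the build-time view of one such site follows; it is illustrative only, not actual assembler or objdump output.

	/* both replacements land in .altinstr_replacement regardless of
	 * which feature bit is set at boot, so the second CALL needs
	 * srso_alias_untrain_ret to resolve in every configuration */
	.section .altinstr_replacement
		call	entry_untrain_ret		/* X86_FEATURE_UNRET */
		call	srso_alias_untrain_ret		/* X86_FEATURE_SRSO_ALIAS */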