@@ -374,7 +374,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
 	 * Catch too early usage of this before alternatives
 	 * have run.
 	 */
-	asm goto("1: jmp %l[t_warn]\n"
+	asm_volatile_goto("1: jmp %l[t_warn]\n"
 		 "2:\n"
 		 ".section .altinstructions,\"a\"\n"
 		 " .long 1b - .\n"
@@ -388,7 +388,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
 
 #endif
 
-	asm goto("1: jmp %l[t_no]\n"
+	asm_volatile_goto("1: jmp %l[t_no]\n"
 		 "2:\n"
 		 ".section .altinstructions,\"a\"\n"
 		 " .long 1b - .\n"
@@ -453,7 +453,7 @@ static __always_inline __pure bool _static_cpu_has_safe(u16 bit)
 	 * have. Thus, we force the jump to the widest, 4-byte, signed relative
 	 * offset even though the last would often fit in less bytes.
 	 */
-	asm goto("1: .byte 0xe9\n .long %l[t_dynamic] - 2f\n"
+	asm_volatile_goto("1: .byte 0xe9\n .long %l[t_dynamic] - 2f\n"
 		 "2:\n"
 		 ".section .altinstructions,\"a\"\n"
 		 " .long 1b - .\n" /* src offset */
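
For context on what the replacement expands to: asm_volatile_goto() is a thin wrapper around "asm goto" used to work around a GCC 4.x miscompilation of "asm goto" statements. A minimal sketch of such a wrapper is shown below; the authoritative definition lives in include/linux/compiler-gcc.h in the kernel tree and may differ in detail from this sketch.

	/*
	 * Sketch of the wrapper (may differ from the kernel's exact
	 * definition): the empty asm statement after the "asm goto"
	 * acts as a barrier that keeps gcc from mis-optimizing the
	 * surrounding code.
	 */
	#define asm_volatile_goto(x...)	do { asm goto(x); asm (""); } while (0)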