Merge tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull objtool updates from Ingo Molnar:
 "There are a lot of objtool changes in this cycle, all across the map:

   - Speed up objtool significantly, especially when there are a large
     number of sections

   - Improve objtool's understanding of special instructions such as
     IRET, to reduce the number of annotations required

   - Implement 'noinstr' validation

   - Do baby steps for non-x86 objtool use

   - Simplify/fix retpoline decoding

   - Add vmlinux validation

   - Improve documentation

   - Fix various bugs and apply smaller cleanups"
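
For the 'noinstr' item above, a hedged sketch of the idea in plain C: code placed in .noinstr.text must not call instrumentable code, and objtool's new vmlinux-wide pass checks exactly that. The attribute spelling below only approximates the kernel's compiler headers.

/* Illustrative only: approximates the kernel's noinstr attribute. */
#define noinstr __attribute__((__noinline__)) \
	__attribute__((__section__(".noinstr.text")))

noinstr void entry_path_sketch(void)
{
	/* objtool's vmlinux validation flags any call from here into
	 * ordinary, instrumentable .text. */
}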

* tag 'objtool-core-2020-06-01' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (54 commits)
  objtool: Enable compilation of objtool for all architectures
  objtool: Move struct objtool_file into arch-independent header
  objtool: Exit successfully when requesting help
  objtool: Add check_kcov_mode() to the uaccess safelist
  samples/ftrace: Fix asm function ELF annotations
  objtool: optimize add_dead_ends for split sections
  objtool: use gelf_getsymshndx to handle >64k sections
  objtool: Allow no-op CFI ops in alternatives
  x86/retpoline: Fix retpoline unwind
  x86: Change {JMP,CALL}_NOSPEC argument
  x86: Simplify retpoline declaration
  x86/speculation: Change FILL_RETURN_BUFFER to work with objtool
  objtool: Add support for intra-function calls
  objtool: Move the IRET hack into the arch decoder
  objtool: Remove INSN_STACK
  objtool: Make handle_insn_ops() unconditional
  objtool: Rework allocating stack_ops on decode
  objtool: UNWIND_HINT_RET_OFFSET should not check registers
  objtool: is_fentry_call() crashes if call has no destination
  x86,smap: Fix smap_{save,restore}() alternatives
  ...
torvalds committed Jun 1, 2020
2 parents 6005606 + 0decf1f commit 69fc06f
Showing 46 changed files with 1,228 additions and 724 deletions.
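
Much of the mechanical churn in the diffs below comes from one change: JMP_NOSPEC and CALL_NOSPEC now take a bare register name (r11) rather than an operand (%r11). The macro itself prepends the '%' for the plain indirect form and pastes the bare name into the __x86_retpoline_<reg> thunk symbol, which an operand spelling cannot do. A stand-alone C sketch of that token-pasting constraint (the gas macros do the same with \reg; names here are illustrative):

#include <stdio.h>

#define INLINE_FORM(reg) "call *%" #reg                /* macro adds the '%' */
#define THUNK_FORM(reg)  "call __x86_retpoline_" #reg  /* needs the bare name */

int main(void)
{
	puts(INLINE_FORM(r11)); /* call *%r11               */
	puts(THUNK_FORM(r11));  /* call __x86_retpoline_r11 */
	return 0;
}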
4 changes: 2 additions & 2 deletions arch/x86/crypto/aesni-intel_asm.S
@@ -2758,7 +2758,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
pxor INC, STATE4
movdqu IV, 0x30(OUTP)

-	CALL_NOSPEC %r11
+	CALL_NOSPEC r11

movdqu 0x00(OUTP), INC
pxor INC, STATE1
@@ -2803,7 +2803,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
_aesni_gf128mul_x_ble()
movups IV, (IVP)

-	CALL_NOSPEC %r11
+	CALL_NOSPEC r11

movdqu 0x40(OUTP), INC
pxor INC, STATE1
2 changes: 1 addition & 1 deletion arch/x86/crypto/camellia-aesni-avx-asm_64.S
@@ -1228,7 +1228,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_16way)
vpxor 14 * 16(%rax), %xmm15, %xmm14;
vpxor 15 * 16(%rax), %xmm15, %xmm15;

-	CALL_NOSPEC %r9;
+	CALL_NOSPEC r9;

addq $(16 * 16), %rsp;

2 changes: 1 addition & 1 deletion arch/x86/crypto/camellia-aesni-avx2-asm_64.S
@@ -1339,7 +1339,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_32way)
vpxor 14 * 32(%rax), %ymm15, %ymm14;
vpxor 15 * 32(%rax), %ymm15, %ymm15;

-	CALL_NOSPEC %r9;
+	CALL_NOSPEC r9;

addq $(16 * 32), %rsp;

26 changes: 13 additions & 13 deletions arch/x86/crypto/crc32c-pcl-intel-asm_64.S
@@ -75,7 +75,7 @@

.text
SYM_FUNC_START(crc_pcl)
-#define bufp %rdi
+#define bufp rdi
#define bufp_dw %edi
#define bufp_w %di
#define bufp_b %dil
@@ -105,9 +105,9 @@ SYM_FUNC_START(crc_pcl)
## 1) ALIGN:
################################################################

-	mov bufp, bufptmp # rdi = *buf
-	neg bufp
-	and $7, bufp # calculate the unalignment amount of
+	mov %bufp, bufptmp # rdi = *buf
+	neg %bufp
+	and $7, %bufp # calculate the unalignment amount of
# the address
je proc_block # Skip if aligned

@@ -123,13 +123,13 @@ SYM_FUNC_START(crc_pcl)
do_align:
#### Calculate CRC of unaligned bytes of the buffer (if any)
movq (bufptmp), tmp # load a quadward from the buffer
-	add bufp, bufptmp # align buffer pointer for quadword
+	add %bufp, bufptmp # align buffer pointer for quadword
# processing
-	sub bufp, len # update buffer length
+	sub %bufp, len # update buffer length
align_loop:
crc32b %bl, crc_init_dw # compute crc32 of 1-byte
shr $8, tmp # get next byte
-	dec bufp
+	dec %bufp
jne align_loop

proc_block:
@@ -169,10 +169,10 @@ continue_block:
xor crc2, crc2

## branch into array
-	lea jump_table(%rip), bufp
-	movzxw (bufp, %rax, 2), len
-	lea crc_array(%rip), bufp
-	lea (bufp, len, 1), bufp
+	lea jump_table(%rip), %bufp
+	movzxw (%bufp, %rax, 2), len
+	lea crc_array(%rip), %bufp
+	lea (%bufp, len, 1), %bufp
JMP_NOSPEC bufp

################################################################
@@ -218,9 +218,9 @@ LABEL crc_ %i
## 4) Combine three results:
################################################################

-	lea (K_table-8)(%rip), bufp # first entry is for idx 1
+	lea (K_table-8)(%rip), %bufp # first entry is for idx 1
shlq $3, %rax # rax *= 8
-	pmovzxdq (bufp,%rax), %xmm0 # 2 consts: K1:K2
+	pmovzxdq (%bufp,%rax), %xmm0 # 2 consts: K1:K2
leal (%eax,%eax,2), %eax # rax *= 3 (total *24)
subq %rax, tmp # tmp -= rax*24

6 changes: 3 additions & 3 deletions arch/x86/entry/entry_32.S
@@ -816,7 +816,7 @@ SYM_CODE_START(ret_from_fork)

/* kernel thread */
1: movl %edi, %eax
-	CALL_NOSPEC %ebx
+	CALL_NOSPEC ebx
/*
* A kernel thread is allowed to return here after successfully
* calling do_execve(). Exit to userspace to complete the execve()
@@ -1501,7 +1501,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception_read_cr2)

TRACE_IRQS_OFF
movl %esp, %eax # pt_regs pointer
-	CALL_NOSPEC %edi
+	CALL_NOSPEC edi
jmp ret_from_exception
SYM_CODE_END(common_exception_read_cr2)

@@ -1522,7 +1522,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception)

TRACE_IRQS_OFF
movl %esp, %eax # pt_regs pointer
-	CALL_NOSPEC %edi
+	CALL_NOSPEC edi
jmp ret_from_exception
SYM_CODE_END(common_exception)

2 changes: 1 addition & 1 deletion arch/x86/entry/entry_64.S
@@ -348,7 +348,7 @@ SYM_CODE_START(ret_from_fork)
/* kernel thread */
UNWIND_HINT_EMPTY
movq %r12, %rdi
-	CALL_NOSPEC %rbx
+	CALL_NOSPEC rbx
/*
* A kernel thread is allowed to return here after successfully
* calling do_execve(). Exit to userspace to complete the execve()
25 changes: 25 additions & 0 deletions arch/x86/include/asm/GEN-for-each-reg.h
@@ -0,0 +1,25 @@
+#ifdef CONFIG_64BIT
+GEN(rax)
+GEN(rbx)
+GEN(rcx)
+GEN(rdx)
+GEN(rsi)
+GEN(rdi)
+GEN(rbp)
+GEN(r8)
+GEN(r9)
+GEN(r10)
+GEN(r11)
+GEN(r12)
+GEN(r13)
+GEN(r14)
+GEN(r15)
+#else
+GEN(eax)
+GEN(ebx)
+GEN(ecx)
+GEN(edx)
+GEN(esi)
+GEN(edi)
+GEN(ebp)
+#endif
35 changes: 15 additions & 20 deletions arch/x86/include/asm/asm-prototypes.h
@@ -17,24 +17,19 @@ extern void cmpxchg8b_emu(void);
#endif

#ifdef CONFIG_RETPOLINE
-#ifdef CONFIG_X86_32
-#define INDIRECT_THUNK(reg) extern asmlinkage void __x86_indirect_thunk_e ## reg(void);
-#else
-#define INDIRECT_THUNK(reg) extern asmlinkage void __x86_indirect_thunk_r ## reg(void);
-INDIRECT_THUNK(8)
-INDIRECT_THUNK(9)
-INDIRECT_THUNK(10)
-INDIRECT_THUNK(11)
-INDIRECT_THUNK(12)
-INDIRECT_THUNK(13)
-INDIRECT_THUNK(14)
-INDIRECT_THUNK(15)
-#endif
-INDIRECT_THUNK(ax)
-INDIRECT_THUNK(bx)
-INDIRECT_THUNK(cx)
-INDIRECT_THUNK(dx)
-INDIRECT_THUNK(si)
-INDIRECT_THUNK(di)
-INDIRECT_THUNK(bp)
+
+#define DECL_INDIRECT_THUNK(reg) \
+	extern asmlinkage void __x86_indirect_thunk_ ## reg (void);
+
+#define DECL_RETPOLINE(reg) \
+	extern asmlinkage void __x86_retpoline_ ## reg (void);
+
+#undef GEN
+#define GEN(reg) DECL_INDIRECT_THUNK(reg)
+#include <asm/GEN-for-each-reg.h>
+
+#undef GEN
+#define GEN(reg) DECL_RETPOLINE(reg)
+#include <asm/GEN-for-each-reg.h>
+
#endif /* CONFIG_RETPOLINE */
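
The new header plus the two GEN redefinitions above form a classic X-macro: one authoritative register list, re-expanded under different meanings of GEN(). A self-contained C sketch of the same mechanics, with the list held in a macro rather than a header and illustrative names throughout:

#include <stdio.h>

/* One authoritative list; GEN is (re)defined before each expansion. */
#define FOR_EACH_REG \
	GEN(rax)     \
	GEN(rbx)     \
	GEN(rcx)

/* Expansion 1: define one stub per register. */
#define GEN(reg) static void thunk_##reg(void) { puts("thunk_" #reg); }
FOR_EACH_REG
#undef GEN

int main(void)
{
	/* Expansion 2: reuse the same list to call each stub. */
#define GEN(reg) thunk_##reg();
	FOR_EACH_REG
#undef GEN
	return 0;
}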
76 changes: 20 additions & 56 deletions arch/x86/include/asm/nospec-branch.h
@@ -4,20 +4,13 @@
#define _ASM_X86_NOSPEC_BRANCH_H_

#include <linux/static_key.h>
+#include <linux/frame.h>

#include <asm/alternative.h>
#include <asm/alternative-asm.h>
#include <asm/cpufeatures.h>
#include <asm/msr-index.h>

-/*
- * This should be used immediately before a retpoline alternative. It tells
- * objtool where the retpolines are so that it can make sense of the control
- * flow by just reading the original instruction(s) and ignoring the
- * alternatives.
- */
-#define ANNOTATE_NOSPEC_ALTERNATIVE \
-	ANNOTATE_IGNORE_ALTERNATIVE
+#include <asm/unwind_hints.h>

/*
* Fill the CPU return stack buffer.
@@ -46,21 +39,25 @@
#define __FILL_RETURN_BUFFER(reg, nr, sp) \
mov $(nr/2), reg; \
771: \
+	ANNOTATE_INTRA_FUNCTION_CALL; \
call 772f; \
773: /* speculation trap */ \
+	UNWIND_HINT_EMPTY; \
pause; \
lfence; \
jmp 773b; \
772: \
+	ANNOTATE_INTRA_FUNCTION_CALL; \
call 774f; \
775: /* speculation trap */ \
+	UNWIND_HINT_EMPTY; \
pause; \
lfence; \
jmp 775b; \
774: \
+	add $(BITS_PER_LONG/8) * 2, sp; \
dec reg; \
-	jnz 771b; \
-	add $(BITS_PER_LONG/8) * nr, sp;
+	jnz 771b;

#ifdef __ASSEMBLY__
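
The rework above moves the stack rewind inside the loop — add $(BITS_PER_LONG/8) * 2 per iteration — instead of a single nr-sized add after it, so the stack is balanced at every loop-back edge, which the annotated intra-function calls let objtool verify. A quick host-side check of that arithmetic (hypothetical C, 64-bit words assumed):

#include <assert.h>
#include <stdio.h>

int main(void)
{
	const int nr = 32, word = 8;   /* RSB entries to fill; bytes per word */
	long sp = 0;

	for (int i = 0; i < nr / 2; i++) {
		sp -= 2 * word;        /* two intra-function calls push */
		sp += 2 * word;        /* per-iteration rewind (new code) */
		assert(sp == 0);       /* balanced at the loop-back edge */
	}
	printf("sp balanced after filling %d RSB entries\n", nr);
	return 0;
}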

@@ -76,58 +73,28 @@
.popsection
.endm

-/*
- * These are the bare retpoline primitives for indirect jmp and call.
- * Do not use these directly; they only exist to make the ALTERNATIVE
- * invocation below less ugly.
- */
-.macro RETPOLINE_JMP reg:req
-	call .Ldo_rop_\@
-.Lspec_trap_\@:
-	pause
-	lfence
-	jmp .Lspec_trap_\@
-.Ldo_rop_\@:
-	mov \reg, (%_ASM_SP)
-	ret
-.endm
-
-/*
- * This is a wrapper around RETPOLINE_JMP so the called function in reg
- * returns to the instruction after the macro.
- */
-.macro RETPOLINE_CALL reg:req
-	jmp .Ldo_call_\@
-.Ldo_retpoline_jmp_\@:
-	RETPOLINE_JMP \reg
-.Ldo_call_\@:
-	call .Ldo_retpoline_jmp_\@
-.endm

/*
* JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
* indirect jmp/call which may be susceptible to the Spectre variant 2
* attack.
*/
.macro JMP_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
-	ANNOTATE_NOSPEC_ALTERNATIVE
-	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *\reg), \
-		__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE, \
-		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg), X86_FEATURE_RETPOLINE_AMD
+	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), \
+		__stringify(jmp __x86_retpoline_\reg), X86_FEATURE_RETPOLINE, \
+		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), X86_FEATURE_RETPOLINE_AMD
#else
-	jmp *\reg
+	jmp *%\reg
#endif
.endm

.macro CALL_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
-	ANNOTATE_NOSPEC_ALTERNATIVE
-	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *\reg), \
-		__stringify(RETPOLINE_CALL \reg), X86_FEATURE_RETPOLINE,\
-		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *\reg), X86_FEATURE_RETPOLINE_AMD
+	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *%\reg), \
+		__stringify(call __x86_retpoline_\reg), X86_FEATURE_RETPOLINE, \
+		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *%\reg), X86_FEATURE_RETPOLINE_AMD
#else
-	call *\reg
+	call *%\reg
#endif
.endm
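
With RETPOLINE_JMP and RETPOLINE_CALL gone, both macros now route through the out-of-line __x86_retpoline_<reg> thunks generated in arch/x86/lib/retpoline.S. A hedged sketch of such a thunk as top-level C asm, mirroring the removed RETPOLINE_JMP body (symbol name illustrative):

/* Retpoline thunk for %rax: the return predictor speculates into the
 * pause/lfence trap, while the architectural path rewrites the return
 * address and 'returns' to the real indirect target. */
asm(
	".pushsection .text\n"
	"retpoline_rax_sketch:\n"
	"	call 1f\n"		/* push a safe return address */
	"2:	pause\n"		/* speculation trap */
	"	lfence\n"
	"	jmp 2b\n"
	"1:	mov %rax, (%rsp)\n"	/* replace return address with target */
	"	ret\n"			/* architecturally jumps to *%rax */
	".popsection\n"
);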

@@ -137,10 +104,8 @@
*/
.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req
#ifdef CONFIG_RETPOLINE
-	ANNOTATE_NOSPEC_ALTERNATIVE
-	ALTERNATIVE "jmp .Lskip_rsb_\@", \
-		__stringify(__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)) \
-		\ftr
+	ALTERNATIVE "jmp .Lskip_rsb_\@", "", \ftr
+	__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)
.Lskip_rsb_\@:
#endif
.endm
@@ -161,16 +126,16 @@
* which is ensured when CONFIG_RETPOLINE is defined.
*/
# define CALL_NOSPEC \
-	ANNOTATE_NOSPEC_ALTERNATIVE \
ALTERNATIVE_2( \
ANNOTATE_RETPOLINE_SAFE \
"call *%[thunk_target]\n", \
"call __x86_indirect_thunk_%V[thunk_target]\n", \
"call __x86_retpoline_%V[thunk_target]\n", \
X86_FEATURE_RETPOLINE, \
"lfence;\n" \
ANNOTATE_RETPOLINE_SAFE \
"call *%[thunk_target]\n", \
X86_FEATURE_RETPOLINE_AMD)

# define THUNK_TARGET(addr) [thunk_target] "r" (addr)

#else /* CONFIG_X86_32 */
@@ -180,7 +145,6 @@
* here, anyway.
*/
# define CALL_NOSPEC \
-	ANNOTATE_NOSPEC_ALTERNATIVE \
ALTERNATIVE_2( \
ANNOTATE_RETPOLINE_SAFE \
"call *%[thunk_target]\n", \
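
On the C side the pattern is unchanged apart from the thunk name: call sites feed the target register to the alternative via THUNK_TARGET. A simplified, hypothetical call site using the macros defined above (real ones must also list the registers the callee may clobber):

static void (*callback)(void);

static void invoke_callback(void)
{
	/* Emits either an annotated "call *%reg" or, with retpolines
	 * enabled, a direct call to the matching thunk. */
	asm volatile(CALL_NOSPEC
		     : /* no outputs */
		     : THUNK_TARGET(callback)
		     : "memory");
}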
3 changes: 1 addition & 2 deletions arch/x86/include/asm/orc_types.h
@@ -58,8 +58,7 @@
#define ORC_TYPE_CALL 0
#define ORC_TYPE_REGS 1
#define ORC_TYPE_REGS_IRET 2
-#define UNWIND_HINT_TYPE_SAVE 3
-#define UNWIND_HINT_TYPE_RESTORE 4
+#define UNWIND_HINT_TYPE_RET_OFFSET 3

#ifndef __ASSEMBLY__
/*
2 changes: 0 additions & 2 deletions arch/x86/include/asm/processor.h
@@ -728,7 +728,6 @@ static inline void sync_core(void)
unsigned int tmp;

asm volatile (
-		UNWIND_HINT_SAVE
"mov %%ss, %0\n\t"
"pushq %q0\n\t"
"pushq %%rsp\n\t"
@@ -738,7 +737,6 @@ static inline void sync_core(void)
"pushq %q0\n\t"
"pushq $1f\n\t"
"iretq\n\t"
-		UNWIND_HINT_RESTORE
"1:"
: "=&r" (tmp), ASM_CALL_CONSTRAINT : : "cc", "memory");
#endif
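
The UNWIND_HINT_SAVE/RESTORE pair can go because objtool now understands the IRET itself ('Move the IRET hack into the arch decoder' above). For reference, the five pushes in sync_core() build the architectural iretq frame; a hedged sketch of its layout, lowest address (pop order) first:

/* What iretq pops, matching the pushes above (built bottom-up). */
struct iretq_frame_sketch {
	unsigned long rip;	/* pushq $1f             */
	unsigned long cs;	/* mov %cs; pushq %q0    */
	unsigned long rflags;	/* pushfq                */
	unsigned long rsp;	/* pushq %rsp; +8 fixup  */
	unsigned long ss;	/* mov %ss; pushq %q0    */
};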