kasan: clean up KASAN_SHADOW_SCALE_SHIFT usage
Right now the fact that KASAN uses a single shadow byte for every 8 bytes of
memory is scattered all over the code.

This change defines KASAN_SHADOW_SCALE_SHIFT early in asm include files
and makes use of this constant where necessary.
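To make the scheme concrete, here is a minimal user-space sketch of the
mapping this constant parameterizes; mem_to_shadow() and the offset value
(x86_64's compiler shadow offset) are used here purely for illustration:

#include <stdint.h>
#include <stdio.h>

#define KASAN_SHADOW_SCALE_SHIFT 3	/* one shadow byte per 8 bytes */
#define KASAN_SHADOW_OFFSET 0xdffffc0000000000UL /* illustrative value */

/* Mirrors the kernel's address-to-shadow translation. */
static uintptr_t mem_to_shadow(uintptr_t addr)
{
	return (addr >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET;
}

int main(void)
{
	uintptr_t addr = 0xffff880000000000UL;

	/* Addresses 8 bytes apart map to adjacent shadow bytes. */
	printf("%#lx -> shadow %#lx\n", (unsigned long)addr,
	       (unsigned long)mem_to_shadow(addr));
	printf("%#lx -> shadow %#lx\n", (unsigned long)(addr + 8),
	       (unsigned long)mem_to_shadow(addr + 8));
	return 0;
}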

[[email protected]: coding-style fixes]
Link: http://lkml.kernel.org/r/34937ca3b90736eaad91b568edf5684091f662e3.1515775666.git.andreyknvl@google.com
Signed-off-by: Andrey Konovalov <[email protected]>
Acked-by: Andrey Ryabinin <[email protected]>
Cc: Dmitry Vyukov <[email protected]>
Signed-off-by: Andrew Morton <[email protected]>
Signed-off-by: Linus Torvalds <[email protected]>
xairy authored and torvalds committed Feb 7, 2018
1 parent 5f21f3a commit 917538e
Showing 5 changed files with 22 additions and 15 deletions.
17 changes: 10 additions & 7 deletions arch/arm64/include/asm/kasan.h
@@ -12,22 +12,25 @@

 /*
  * KASAN_SHADOW_START: beginning of the kernel virtual addresses.
- * KASAN_SHADOW_END: KASAN_SHADOW_START + 1/8 of kernel virtual addresses.
+ * KASAN_SHADOW_END: KASAN_SHADOW_START + 1/N of kernel virtual addresses,
+ * where N = (1 << KASAN_SHADOW_SCALE_SHIFT).
  */
 #define KASAN_SHADOW_START      (VA_START)
 #define KASAN_SHADOW_END        (KASAN_SHADOW_START + KASAN_SHADOW_SIZE)

 /*
  * This value is used to map an address to the corresponding shadow
  * address by the following formula:
- *     shadow_addr = (address >> 3) + KASAN_SHADOW_OFFSET;
+ *     shadow_addr = (address >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET
  *
- * (1 << 61) shadow addresses - [KASAN_SHADOW_OFFSET,KASAN_SHADOW_END]
- * cover all 64-bits of virtual addresses. So KASAN_SHADOW_OFFSET
- * should satisfy the following equation:
- *      KASAN_SHADOW_OFFSET = KASAN_SHADOW_END - (1ULL << 61)
+ * (1 << (64 - KASAN_SHADOW_SCALE_SHIFT)) shadow addresses that lie in range
+ * [KASAN_SHADOW_OFFSET, KASAN_SHADOW_END) cover all 64-bits of virtual
+ * addresses. So KASAN_SHADOW_OFFSET should satisfy the following equation:
+ *      KASAN_SHADOW_OFFSET = KASAN_SHADOW_END -
+ *             (1ULL << (64 - KASAN_SHADOW_SCALE_SHIFT))
  */
-#define KASAN_SHADOW_OFFSET     (KASAN_SHADOW_END - (1ULL << (64 - 3)))
+#define KASAN_SHADOW_OFFSET     (KASAN_SHADOW_END - (1ULL << \
+                                       (64 - KASAN_SHADOW_SCALE_SHIFT)))

 void kasan_init(void);
 void kasan_copy_shadow(pgd_t *pgdir);
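As an aside on the equation in the comment above: the requirement is that
the highest virtual address maps to the last shadow byte. A minimal sketch
that checks the arithmetic with illustrative arm64 values (48-bit VAs,
VA_START = 0xffff000000000000; not taken from a real config):

#include <assert.h>
#include <stdint.h>

#define SCALE_SHIFT 3

int main(void)
{
	/* Illustrative 48-bit-VA arm64 layout: shadow starts at VA_START
	 * and spans 1/8th (2^45 bytes) of the VA range. */
	uint64_t shadow_start  = 0xffff000000000000ULL;
	uint64_t shadow_end    = shadow_start + (1ULL << (48 - SCALE_SHIFT));
	uint64_t shadow_offset = shadow_end - (1ULL << (64 - SCALE_SHIFT));

	/* The very top of the 64-bit address space must map to the last
	 * shadow byte, i.e. shadow_end - 1. */
	assert((UINT64_MAX >> SCALE_SHIFT) + shadow_offset == shadow_end - 1);
	return 0;
}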
3 changes: 2 additions & 1 deletion arch/arm64/include/asm/memory.h
@@ -85,7 +85,8 @@
  * stack size when KASAN is in use.
  */
 #ifdef CONFIG_KASAN
-#define KASAN_SHADOW_SIZE      (UL(1) << (VA_BITS - 3))
+#define KASAN_SHADOW_SCALE_SHIFT 3
+#define KASAN_SHADOW_SIZE      (UL(1) << (VA_BITS - KASAN_SHADOW_SCALE_SHIFT))
 #define KASAN_THREAD_SHIFT     1
 #else
 #define KASAN_SHADOW_SIZE      (0)
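For a sense of scale, a quick sketch of what this definition yields for a
few of the VA_BITS values arm64 supports (computed here for illustration,
not pulled from a config):

#include <stdio.h>

#define KASAN_SHADOW_SCALE_SHIFT 3

int main(void)
{
	int va_bits[] = { 39, 42, 48 };

	for (int i = 0; i < 3; i++) {
		/* Shadow is 1/8th of the kernel VA range. */
		unsigned long long size =
			1ULL << (va_bits[i] - KASAN_SHADOW_SCALE_SHIFT);

		printf("VA_BITS=%d -> KASAN_SHADOW_SIZE = %llu GiB\n",
		       va_bits[i], size >> 30);
	}
	return 0;
}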
3 changes: 2 additions & 1 deletion arch/arm64/mm/kasan_init.c
@@ -135,7 +135,8 @@ static void __init kasan_pgd_populate(unsigned long addr, unsigned long end,
 /* The early shadow maps everything to a single page of zeroes */
 asmlinkage void __init kasan_early_init(void)
 {
-	BUILD_BUG_ON(KASAN_SHADOW_OFFSET != KASAN_SHADOW_END - (1UL << 61));
+	BUILD_BUG_ON(KASAN_SHADOW_OFFSET !=
+		KASAN_SHADOW_END - (1UL << (64 - KASAN_SHADOW_SCALE_SHIFT)));
 	BUILD_BUG_ON(!IS_ALIGNED(KASAN_SHADOW_START, PGDIR_SIZE));
 	BUILD_BUG_ON(!IS_ALIGNED(KASAN_SHADOW_END, PGDIR_SIZE));
 	kasan_pgd_populate(KASAN_SHADOW_START, KASAN_SHADOW_END, NUMA_NO_NODE,
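The point of the BUILD_BUG_ON() above is that an inconsistent
KASAN_SHADOW_OFFSET now fails the build instead of producing a silently
broken shadow map. A simplified stand-in for the kernel macro
(MY_BUILD_BUG_ON and the constants are hypothetical, chosen only to
satisfy the equation):

/* Negative-array-size trick: compiles only if cond is false. */
#define MY_BUILD_BUG_ON(cond) ((void)sizeof(char[1 - 2 * !!(cond)]))

static inline void check_shadow_consts(void)
{
	/* 0xffff200000000000 - 2^61 == 0xdfff200000000000, so this
	 * compiles; change either constant and the build breaks. */
	MY_BUILD_BUG_ON(0xdfff200000000000ULL !=
			0xffff200000000000ULL - (1ULL << (64 - 3)));
}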
12 changes: 8 additions & 4 deletions arch/x86/include/asm/kasan.h
@@ -4,6 +4,7 @@

 #include <linux/const.h>
 #define KASAN_SHADOW_OFFSET _AC(CONFIG_KASAN_SHADOW_OFFSET, UL)
+#define KASAN_SHADOW_SCALE_SHIFT 3

 /*
  * Compiler uses shadow offset assuming that addresses start
@@ -12,12 +13,15 @@
  * 'kernel address space start' >> KASAN_SHADOW_SCALE_SHIFT
  */
 #define KASAN_SHADOW_START     (KASAN_SHADOW_OFFSET + \
-					((-1UL << __VIRTUAL_MASK_SHIFT) >> 3))
+					((-1UL << __VIRTUAL_MASK_SHIFT) >> \
+						KASAN_SHADOW_SCALE_SHIFT))
 /*
- * 47 bits for kernel address -> (47 - 3) bits for shadow
- * 56 bits for kernel address -> (56 - 3) bits for shadow
+ * 47 bits for kernel address -> (47 - KASAN_SHADOW_SCALE_SHIFT) bits for shadow
+ * 56 bits for kernel address -> (56 - KASAN_SHADOW_SCALE_SHIFT) bits for shadow
  */
-#define KASAN_SHADOW_END       (KASAN_SHADOW_START + (1ULL << (__VIRTUAL_MASK_SHIFT - 3)))
+#define KASAN_SHADOW_END       (KASAN_SHADOW_START + \
+					(1ULL << (__VIRTUAL_MASK_SHIFT - \
+						  KASAN_SHADOW_SCALE_SHIFT)))

 #ifndef __ASSEMBLY__

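Plugging in the two __VIRTUAL_MASK_SHIFT values from the comment reproduces
the x86-64 shadow window; a sketch assuming LP64 and the common x86_64
CONFIG_KASAN_SHADOW_OFFSET of 0xdffffc0000000000 (illustrative here):

#include <stdio.h>

#define KASAN_SHADOW_SCALE_SHIFT 3
#define KASAN_SHADOW_OFFSET 0xdffffc0000000000UL /* assumed config value */

int main(void)
{
	int shifts[] = { 47, 56 };	/* 4-level and 5-level paging */

	for (int i = 0; i < 2; i++) {
		unsigned long start = KASAN_SHADOW_OFFSET +
			((-1UL << shifts[i]) >> KASAN_SHADOW_SCALE_SHIFT);
		unsigned long end = start +
			(1UL << (shifts[i] - KASAN_SHADOW_SCALE_SHIFT));

		printf("mask shift %d: shadow [%#lx, %#lx)\n",
		       shifts[i], start, end);
	}
	return 0;
}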
2 changes: 0 additions & 2 deletions include/linux/kasan.h
@@ -11,8 +11,6 @@ struct task_struct;

 #ifdef CONFIG_KASAN

-#define KASAN_SHADOW_SCALE_SHIFT 3
-
 #include <asm/kasan.h>
 #include <asm/pgtable.h>

