Diffstat (limited to 'arch/arm64')
-rw-r--r--  arch/arm64/Kconfig                       |  1
-rw-r--r--  arch/arm64/include/asm/Kbuild            | 17
-rw-r--r--  arch/arm64/include/asm/elf.h             | 12
-rw-r--r--  arch/arm64/include/asm/stackprotector.h  |  1
-rw-r--r--  arch/arm64/include/asm/string.h          |  5
-rw-r--r--  arch/arm64/include/asm/uaccess.h         |  4
-rw-r--r--  arch/arm64/include/uapi/asm/Kbuild       | 16
-rw-r--r--  arch/arm64/mm/kasan_init.c               |  8
-rw-r--r--  arch/arm64/mm/mmap.c                     |  7
9 files changed, 36 insertions, 35 deletions
diff --git a/arch/arm64/Kconfig b/arch/arm64/Kconfig
index 8addb851ab5e..dfd908630631 100644
--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -12,6 +12,7 @@ config ARM64
 	select ARCH_HAS_DEVMEM_IS_ALLOWED
 	select ARCH_HAS_ACPI_TABLE_UPGRADE if ACPI
 	select ARCH_HAS_ELF_RANDOMIZE
+	select ARCH_HAS_FORTIFY_SOURCE
 	select ARCH_HAS_GCOV_PROFILE_ALL
 	select ARCH_HAS_GIGANTIC_PAGE if (MEMORY_ISOLATION && COMPACTION) || CMA
 	select ARCH_HAS_KCOV
diff --git a/arch/arm64/include/asm/Kbuild b/arch/arm64/include/asm/Kbuild
index a7a97a608033..f81c7b685fc6 100644
--- a/arch/arm64/include/asm/Kbuild
+++ b/arch/arm64/include/asm/Kbuild
@@ -6,41 +6,24 @@ generic-y += dma.h
 generic-y += dma-contiguous.h
 generic-y += early_ioremap.h
 generic-y += emergency-restart.h
-generic-y += errno.h
 generic-y += hw_irq.h
-generic-y += ioctl.h
-generic-y += ioctls.h
-generic-y += ipcbuf.h
 generic-y += irq_regs.h
 generic-y += kdebug.h
 generic-y += kmap_types.h
-generic-y += kvm_para.h
 generic-y += local.h
 generic-y += local64.h
 generic-y += mcs_spinlock.h
 generic-y += mm-arch-hooks.h
-generic-y += mman.h
-generic-y += msgbuf.h
 generic-y += msi.h
-generic-y += poll.h
 generic-y += preempt.h
-generic-y += resource.h
 generic-y += rwsem.h
 generic-y += segment.h
-generic-y += sembuf.h
 generic-y += serial.h
 generic-y += set_memory.h
-generic-y += shmbuf.h
 generic-y += simd.h
 generic-y += sizes.h
-generic-y += socket.h
-generic-y += sockios.h
-generic-y += swab.h
 generic-y += switch_to.h
-generic-y += termbits.h
-generic-y += termios.h
 generic-y += trace_clock.h
-generic-y += types.h
 generic-y += unaligned.h
 generic-y += user.h
 generic-y += vga.h
diff --git a/arch/arm64/include/asm/elf.h b/arch/arm64/include/asm/elf.h
index ac3fb7441510..acae781f7359 100644
--- a/arch/arm64/include/asm/elf.h
+++ b/arch/arm64/include/asm/elf.h
@@ -113,12 +113,11 @@
 #define ELF_EXEC_PAGESIZE	PAGE_SIZE
 
 /*
- * This is the location that an ET_DYN program is loaded if exec'ed. Typical
- * use of this is to invoke "./ld.so someprog" to test out a new version of
- * the loader. We need to make sure that it is out of the way of the program
- * that it will "exec", and that there is sufficient room for the brk.
+ * This is the base location for PIE (ET_DYN with INTERP) loads. On
+ * 64-bit, this is raised to 4GB to leave the entire 32-bit address
+ * space open for things that want to use the area for 32-bit pointers.
  */
-#define ELF_ET_DYN_BASE	(2 * TASK_SIZE_64 / 3)
+#define ELF_ET_DYN_BASE		0x100000000UL
 
 #ifndef __ASSEMBLY__
 
@@ -174,7 +173,8 @@ extern int arch_setup_additional_pages(struct linux_binprm *bprm,
 
 #ifdef CONFIG_COMPAT
 
-#define COMPAT_ELF_ET_DYN_BASE		(2 * TASK_SIZE_32 / 3)
+/* PIE load location for compat arm. Must match ARM ELF_ET_DYN_BASE. */
+#define COMPAT_ELF_ET_DYN_BASE		0x000400000UL
 
 /* AArch32 registers. */
 #define COMPAT_ELF_NGREG		18
diff --git a/arch/arm64/include/asm/stackprotector.h b/arch/arm64/include/asm/stackprotector.h
index fe5e287dc56b..b86a0865ddf1 100644
--- a/arch/arm64/include/asm/stackprotector.h
+++ b/arch/arm64/include/asm/stackprotector.h
@@ -30,6 +30,7 @@ static __always_inline void boot_init_stack_canary(void)
 	/* Try to get a semi random initial value. */
 	get_random_bytes(&canary, sizeof(canary));
 	canary ^= LINUX_VERSION_CODE;
+	canary &= CANARY_MASK;
 
 	current->stack_canary = canary;
 	__stack_chk_guard = current->stack_canary;
diff --git a/arch/arm64/include/asm/string.h b/arch/arm64/include/asm/string.h
index 2eb714c4639f..d0aa42907569 100644
--- a/arch/arm64/include/asm/string.h
+++ b/arch/arm64/include/asm/string.h
@@ -63,6 +63,11 @@ extern int memcmp(const void *, const void *, size_t);
 #define memcpy(dst, src, len) __memcpy(dst, src, len)
 #define memmove(dst, src, len) __memmove(dst, src, len)
 #define memset(s, c, n) __memset(s, c, n)
+
+#ifndef __NO_FORTIFY
+#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
+#endif
+
 #endif
 
 #endif
diff --git a/arch/arm64/include/asm/uaccess.h b/arch/arm64/include/asm/uaccess.h
index 59f09e6a6cb8..8f0a1de11e4a 100644
--- a/arch/arm64/include/asm/uaccess.h
+++ b/arch/arm64/include/asm/uaccess.h
@@ -254,8 +254,6 @@ do {								\
 	(void)0;						\
 })
 
-#define __get_user_unaligned __get_user
-
 #define get_user(x, ptr)					\
 ({								\
 	__typeof__(*(ptr)) __user *__p = (ptr);			\
@@ -320,8 +318,6 @@ do {								\
 	(void)0;						\
 })
 
-#define __put_user_unaligned __put_user
-
 #define put_user(x, ptr)					\
 ({								\
 	__typeof__(*(ptr)) __user *__p = (ptr);			\
diff --git a/arch/arm64/include/uapi/asm/Kbuild b/arch/arm64/include/uapi/asm/Kbuild
index 13a97aa2285f..fc28bd95c6d3 100644
--- a/arch/arm64/include/uapi/asm/Kbuild
+++ b/arch/arm64/include/uapi/asm/Kbuild
@@ -1,4 +1,20 @@
 # UAPI Header export list
 include include/uapi/asm-generic/Kbuild.asm
 
+generic-y += errno.h
+generic-y += ioctl.h
+generic-y += ioctls.h
+generic-y += ipcbuf.h
 generic-y += kvm_para.h
+generic-y += mman.h
+generic-y += msgbuf.h
+generic-y += poll.h
+generic-y += resource.h
+generic-y += sembuf.h
+generic-y += shmbuf.h
+generic-y += socket.h
+generic-y += sockios.h
+generic-y += swab.h
+generic-y += termbits.h
+generic-y += termios.h
+generic-y += types.h
diff --git a/arch/arm64/mm/kasan_init.c b/arch/arm64/mm/kasan_init.c
index 687a358a3733..81f03959a4ab 100644
--- a/arch/arm64/mm/kasan_init.c
+++ b/arch/arm64/mm/kasan_init.c
@@ -191,14 +191,8 @@ void __init kasan_init(void)
 		if (start >= end)
 			break;
 
-		/*
-		 * end + 1 here is intentional. We check several shadow bytes in
-		 * advance to slightly speed up fastpath. In some rare cases
-		 * we could cross boundary of mapped shadow, so we just map
-		 * some more here.
-		 */
 		vmemmap_populate((unsigned long)kasan_mem_to_shadow(start),
-				 (unsigned long)kasan_mem_to_shadow(end) + 1,
+				 (unsigned long)kasan_mem_to_shadow(end),
 				 pfn_to_nid(virt_to_pfn(start)));
 	}
 
diff --git a/arch/arm64/mm/mmap.c b/arch/arm64/mm/mmap.c
index adc208c2ae9c..decccffb03ca 100644
--- a/arch/arm64/mm/mmap.c
+++ b/arch/arm64/mm/mmap.c
@@ -35,7 +35,7 @@
  * Leave enough space between the mmap area and the stack to honour ulimit in
  * the face of randomisation.
  */
-#define MIN_GAP (SZ_128M + ((STACK_RND_MASK << PAGE_SHIFT) + 1))
+#define MIN_GAP (SZ_128M)
 #define MAX_GAP	(STACK_TOP/6*5)
 
 static int mmap_is_legacy(void)
@@ -65,6 +65,11 @@ unsigned long arch_mmap_rnd(void)
 static unsigned long mmap_base(unsigned long rnd)
 {
 	unsigned long gap = rlimit(RLIMIT_STACK);
+	unsigned long pad = (STACK_RND_MASK << PAGE_SHIFT) + stack_guard_gap;
+
+	/* Values close to RLIM_INFINITY can overflow. */
+	if (gap + pad > gap)
+		gap += pad;
 
 	if (gap < MIN_GAP)
 		gap = MIN_GAP;
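
A note on the mmap.c hunk above: the padding is applied before the MIN_GAP clamp, and the overflow test is what keeps an RLIMIT_STACK of RLIM_INFINITY from wrapping the gap calculation. The stand-alone C sketch below reproduces that logic; the constant values (PAGE_SHIFT, STACK_RND_MASK, the stack_guard_gap default) are illustrative assumptions for a 4K-page build, not values read from the arch/arm64 headers.

	/*
	 * Stand-alone sketch of the overflow-safe gap padding added to
	 * mmap_base() in the hunk above. The constants are illustrative
	 * assumptions, not values taken from kernel headers.
	 */
	#include <stdio.h>

	#define PAGE_SHIFT	12
	#define STACK_RND_MASK	0x3ffffUL		/* assumed */
	#define SZ_128M		(128UL << 20)
	#define MIN_GAP		(SZ_128M)
	#define RLIM_INFINITY	(~0UL)

	/* assumed default guard gap of 256 pages */
	static unsigned long stack_guard_gap = 256UL << PAGE_SHIFT;

	static unsigned long padded_gap(unsigned long gap)
	{
		unsigned long pad = (STACK_RND_MASK << PAGE_SHIFT) + stack_guard_gap;

		/* Values close to RLIM_INFINITY can overflow. */
		if (gap + pad > gap)
			gap += pad;

		if (gap < MIN_GAP)
			gap = MIN_GAP;
		return gap;
	}

	int main(void)
	{
		printf("8M stack rlimit -> gap %#lx\n", padded_gap(8UL << 20));
		printf("RLIM_INFINITY   -> gap %#lx\n", padded_gap(RLIM_INFINITY));
		return 0;
	}

With an effectively unlimited stack rlimit the unsigned addition would wrap, so the padding is simply skipped rather than clamped, which is the behaviour the kernel change relies on.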