Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
9d211e2
Revert "arm64: vdso: Fix compilation with clang older than 8"
nickdesaulniers Oct 13, 2020
8f1e694
arm64: vdso: remove -nostdlib compiler flag
masahir0y Nov 7, 2021
435b094
arm64: vdso32: drop the test for dmb ishld
nickdesaulniers Oct 19, 2021
0b3f8d3
arm64: vdso32: drop test for -march=armv8-a
nickdesaulniers Oct 19, 2021
8629289
arm64: do not descend to vdso directories twice
masahir0y Sep 25, 2021
becc579
arm64: clean vdso & vdso32 files
Aug 10, 2021
69b55d1
arm64: vdso: add vdso linker script to 'targets' instead of extra-y
masahir0y Aug 31, 2020
54b0855
arm64: vdso32: add CONFIG_THUMB2_COMPAT_VDSO
nickdesaulniers Jun 8, 2020
65867b4
arm64: vdso: Use GFP_KERNEL for allocating compat vdso and signal pages
willdeacon Jun 20, 2021
22e1613
arm64: vdso: remove aarch32_vdso_pages[]
mrutland-arm Apr 28, 2020
bd4b9c4
arm64: vdso: simplify arch_vdso_type ifdeffery
mrutland-arm Apr 28, 2020
d38fc9f
arm64: vdso: use consistent 'abi' nomenclature
mrutland-arm Apr 28, 2020
fbea67b
arm64: vdso: use consistent 'map' nomenclature
mrutland-arm Apr 28, 2020
f57567c
arm64: compat: Allow 32-bit vdso and sigpage to co-exist
willdeacon Sep 25, 2021
b2e004b
arm64: compat: Always use sigpage for sigreturn trampoline
willdeacon Jun 20, 2021
f6ff764
arm64: compat: Remove 32-bit sigreturn code from the vDSO
willdeacon Jun 22, 2020
9cef7a7
arm64: vdso32: Remove a bunch of #ifdef CONFIG_COMPAT_VDSO guards
willdeacon Sep 25, 2021
85b88da
arm64: vdso: Fix unusual formatting in *setup_additional_pages()
willdeacon Sep 25, 2021
26c5be2
arm64: compat: Allow signal page to be remapped
willdeacon Mar 18, 2021
ebe3cad
arm64: vdso: fix makefile dependency on vdso.so
jgouly May 10, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions arch/arm64/Kconfig
Original file line number Diff line number Diff line change
Expand Up @@ -1326,6 +1326,14 @@ config COMPAT_VDSO
You must have a 32-bit build of glibc 2.22 or later for programs
to seamlessly take advantage of this.

# Prompt is only visible with EXPERT; defaults to y, so the 32-bit compat
# vDSO is built as Thumb-2 ('-mthumb -fomit-frame-pointer') unless the user
# explicitly opts for ARM mode ('-marm').
config THUMB2_COMPAT_VDSO
bool "Compile the 32-bit vDSO for Thumb-2 mode" if EXPERT
depends on COMPAT_VDSO
default y
help
Compile the compat vDSO with '-mthumb -fomit-frame-pointer' if y,
otherwise with '-marm'.

menuconfig ARMV8_DEPRECATED
bool "Emulate deprecated/obsolete ARMv8 instructions"
depends on SYSCTL
Expand Down
12 changes: 8 additions & 4 deletions arch/arm64/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -177,6 +177,8 @@ vdso_install:
# We use MRPROPER_FILES and CLEAN_FILES now
archclean:
$(Q)$(MAKE) $(clean)=$(boot)
$(Q)$(MAKE) $(clean)=arch/arm64/kernel/vdso
$(Q)$(MAKE) $(clean)=arch/arm64/kernel/vdso32

ifeq ($(KBUILD_EXTMOD),)
# We need to generate vdso-offsets.h before compiling certain files in kernel/.
Expand All @@ -187,10 +189,12 @@ ifeq ($(KBUILD_EXTMOD),)
# this hack.
prepare: vdso_prepare
vdso_prepare: prepare0
$(Q)$(MAKE) $(build)=arch/arm64/kernel/vdso include/generated/vdso-offsets.h
$(if $(CONFIG_COMPAT_VDSO),$(Q)$(MAKE) \
$(build)=arch/arm64/kernel/vdso32 \
include/generated/vdso32-offsets.h)
$(Q)$(MAKE) $(build)=arch/arm64/kernel/vdso \
include/generated/vdso-offsets.h arch/arm64/kernel/vdso/vdso.so
ifdef CONFIG_COMPAT_VDSO
$(Q)$(MAKE) $(build)=arch/arm64/kernel/vdso32 \
include/generated/vdso32-offsets.h arch/arm64/kernel/vdso32/vdso.so
endif
endif

define archhelp
Expand Down
3 changes: 3 additions & 0 deletions arch/arm64/include/asm/mmu.h
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,9 @@

typedef struct {
/* NOTE(review): presumably the ASID for this address space — confirm */
atomic64_t id;
#ifdef CONFIG_COMPAT
/*
 * Base of the signal page for 32-bit tasks; the compat sigreturn
 * trampoline is addressed relative to this (see compat_setup_return()).
 * Kept separate from 'vdso' so the sigpage and 32-bit vDSO can co-exist.
 */
void *sigpage;
#endif
/* Base address of the vDSO mapping for this mm */
void *vdso;
unsigned long flags;
} mm_context_t;
Expand Down
7 changes: 0 additions & 7 deletions arch/arm64/include/asm/vdso/compat_barrier.h
Original file line number Diff line number Diff line change
Expand Up @@ -20,16 +20,9 @@

#define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")

/*
 * 32-bit (AArch32) SMP barrier helpers built on the dmb() macro above.
 * The two arms differ only in the read barrier: 'dmb ishld' requires
 * assembler support (CONFIG_AS_DMB_ISHLD); without it, fall back to a
 * full 'dmb ish' for aarch32_smp_rmb().
 */
#if __LINUX_ARM_ARCH__ >= 8 && defined(CONFIG_AS_DMB_ISHLD)
#define aarch32_smp_mb() dmb(ish)
#define aarch32_smp_rmb() dmb(ishld)
#define aarch32_smp_wmb() dmb(ishst)
#else
/* Fallback: no 'dmb ishld' support — use the stronger full barrier */
#define aarch32_smp_mb() dmb(ish)
#define aarch32_smp_rmb() aarch32_smp_mb()
#define aarch32_smp_wmb() dmb(ishst)
#endif


#undef smp_mb
#undef smp_rmb
Expand Down
11 changes: 7 additions & 4 deletions arch/arm64/kernel/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,7 @@ $(obj)/%.stub.o: $(obj)/%.o FORCE

obj-$(CONFIG_COMPAT) += sys32.o signal32.o \
sys_compat.o
ifneq ($(CONFIG_COMPAT_VDSO), y)
obj-$(CONFIG_COMPAT) += sigreturn32.o
endif
obj-$(CONFIG_KUSER_HELPERS) += kuser32.o
obj-$(CONFIG_FUNCTION_TRACER) += ftrace.o entry-ftrace.o
obj-$(CONFIG_MODULES) += module.o
Expand Down Expand Up @@ -65,9 +63,14 @@ obj-$(CONFIG_ARM_SDE_INTERFACE) += sdei.o
obj-$(CONFIG_ARM64_SSBD) += ssbd.o
obj-$(CONFIG_ARM64_PTR_AUTH) += pointer_auth.o
obj-$(CONFIG_SHADOW_CALL_STACK) += scs.o
obj-y += vdso-wrap.o
obj-$(CONFIG_COMPAT_VDSO) += vdso32-wrap.o

# Force dependency (vdso*-wrap.S includes vdso.so through incbin)
$(obj)/vdso-wrap.o: $(obj)/vdso/vdso.so
$(obj)/vdso32-wrap.o: $(obj)/vdso32/vdso.so

obj-y += vdso/ probes/
obj-$(CONFIG_COMPAT_VDSO) += vdso32/
obj-y += probes/
head-y := head.o
extra-y += $(head-y) vmlinux.lds

Expand Down
27 changes: 1 addition & 26 deletions arch/arm64/kernel/signal32.c
Original file line number Diff line number Diff line change
Expand Up @@ -342,38 +342,13 @@ static void compat_setup_return(struct pt_regs *regs, struct k_sigaction *ka,
retcode = ptr_to_compat(ka->sa.sa_restorer);
} else {
/* Set up sigreturn pointer */
#ifdef CONFIG_COMPAT_VDSO
void *vdso_base = current->mm->context.vdso;
void *vdso_trampoline;

if (ka->sa.sa_flags & SA_SIGINFO) {
if (thumb) {
vdso_trampoline = VDSO_SYMBOL(vdso_base,
compat_rt_sigreturn_thumb);
} else {
vdso_trampoline = VDSO_SYMBOL(vdso_base,
compat_rt_sigreturn_arm);
}
} else {
if (thumb) {
vdso_trampoline = VDSO_SYMBOL(vdso_base,
compat_sigreturn_thumb);
} else {
vdso_trampoline = VDSO_SYMBOL(vdso_base,
compat_sigreturn_arm);
}
}

retcode = ptr_to_compat(vdso_trampoline) + thumb;
#else
unsigned int idx = thumb << 1;

if (ka->sa.sa_flags & SA_SIGINFO)
idx += 3;

retcode = (unsigned long)current->mm->context.vdso +
retcode = (unsigned long)current->mm->context.sigpage +
(idx << 2) + thumb;
#endif
}

regs->regs[0] = usig;
Expand Down
File renamed without changes.
Loading