author      Catalin Marinas <catalin.marinas@arm.com>   2014-02-28 16:12:25 +0000
committer   Catalin Marinas <catalin.marinas@arm.com>   2014-02-28 16:12:25 +0000
commit      b57fc9e80692043e2a3a74e1d2c047eb700dcd0c (patch)
tree        875a74ad9b9bff024bfc5e3815450005466258f7 /arch/arm64/include
parent      84fe6826c28f69d8708bd575faed7f75e6b6f57f (diff)
arm64: Fix !CONFIG_SMP kernel build
Commit fb4a96029c8a (arm64: kernel: fix per-cpu offset restore on resume) uses
per_cpu_offset() unconditionally during CPU wakeup; however, this is only
defined for the SMP case.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Reported-by: Dave P Martin <Dave.Martin@arm.com>
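For context, the offending call added by fb4a96029c8a restores the per-CPU offset
early in the wakeup path. A minimal sketch of that call site (illustrative only;
the wrapper function name is hypothetical and the surrounding code is simplified,
not taken verbatim from the kernel):

/*
 * Sketch of the resume-path call that breaks the !CONFIG_SMP build:
 * per_cpu_offset() is only provided when CONFIG_SMP=y (asm-generic/percpu.h
 * defines it under #ifdef CONFIG_SMP), so a UP kernel fails to build on
 * this line before the fix below.
 */
static void restore_percpu_offset_sketch(void)	/* hypothetical helper */
{
	set_my_cpu_offset(per_cpu_offset(smp_processor_id()));
}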
Diffstat (limited to 'arch/arm64/include')
-rw-r--r--   arch/arm64/include/asm/percpu.h   8
1 file changed, 8 insertions(+), 0 deletions(-)
diff --git a/arch/arm64/include/asm/percpu.h b/arch/arm64/include/asm/percpu.h
index 13fb0b3efc5f..453a179469a3 100644
--- a/arch/arm64/include/asm/percpu.h
+++ b/arch/arm64/include/asm/percpu.h
@@ -16,6 +16,8 @@
 #ifndef __ASM_PERCPU_H
 #define __ASM_PERCPU_H
 
+#ifdef CONFIG_SMP
+
 static inline void set_my_cpu_offset(unsigned long off)
 {
 	asm volatile("msr tpidr_el1, %0" :: "r" (off) : "memory");
@@ -36,6 +38,12 @@ static inline unsigned long __my_cpu_offset(void)
 }
 #define __my_cpu_offset __my_cpu_offset()
 
+#else	/* !CONFIG_SMP */
+
+#define set_my_cpu_offset(x)	do { } while (0)
+
+#endif /* CONFIG_SMP */
+
 #include <asm-generic/percpu.h>
 
 #endif /* __ASM_PERCPU_H */
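Why the no-op definition is enough: set_my_cpu_offset() is a function-like macro
that never uses its argument, so the per_cpu_offset() reference in the resume
path is discarded by the preprocessor and the missing !SMP definition is never
needed. A standalone sketch of that effect (not kernel code; the names and the
DEMO_SMP switch are made up for illustration):

/* demo.c: build with "cc demo.c" (UP-like) or "cc -DDEMO_SMP demo.c" */
#include <stdio.h>

#ifdef DEMO_SMP
static unsigned long cpu_offset;
#define per_cpu_offset(cpu)	((unsigned long)(cpu) * 64)
#define set_my_cpu_offset(x)	(cpu_offset = (x))
#else
/* Mirrors the !CONFIG_SMP case: per_cpu_offset() is not defined at all. */
#define set_my_cpu_offset(x)	do { } while (0)
#endif

int main(void)
{
	/*
	 * Without DEMO_SMP this still compiles: the macro drops its argument
	 * during preprocessing, so the tokens "per_cpu_offset(0)" never reach
	 * the compiler and the missing definition is harmless.
	 */
	set_my_cpu_offset(per_cpu_offset(0));
	printf("built without per_cpu_offset() being defined\n");
	return 0;
}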