author		Dan Williams <dan.j.williams@intel.com>		2007-02-16 22:16:32 +0100
committer	Russell King <rmk+kernel@arm.linux.org.uk>	2007-02-17 15:04:29 +0000
commit		f80dff9da07d81da16e3b842118d47b9febf9c01 (patch)
tree		ea2da17c5af516c241b3ea3b4dd4fa47d9d86769 /include/asm-arm/arch-iop13xx
parent		588ef7693574cfbcb228f48d5478c2b39a9b0c9f (diff)
[ARM] 4185/2: entry: introduce get_irqnr_preamble and arch_ret_to_user
get_irqnr_preamble allows machines to take some action before entering the
get_irqnr_and_base loop. On iop we enable cp6 access.
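For context, a minimal sketch (not the verbatim kernel source; register choices and the label are illustrative) of how the generic ARM irq_handler macro in arch/arm/kernel/entry-armv.S is expected to use the new hook, running the preamble once before looping over get_irqnr_and_base:

	.macro	irq_handler
	get_irqnr_preamble r5, lr		@ one-time setup, e.g. iop enables cp6 here
1:	get_irqnr_and_base r0, r6, r5, lr
	movne	r1, sp				@ r1 = pt_regs for the C handler
	adrne	lr, 1b				@ return here to poll for further pending IRQs
	bne	asm_do_IRQ			@ r0 = irq number
	.endm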
arch_ret_to_user is added to the userspace return path to allow individual
architectures to take actions, like disabling coprocessor access, before
the final return to userspace.
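A hedged sketch (placement assumed from the commit message, not copied from entry-common.S) of how the return-to-user path can invoke the hook just before user registers are restored:

no_work_pending:
	arch_ret_to_user r1, lr			@ per-machine cleanup, e.g. drop cp6 access
	@ ... restore user registers and return to userspace ...

The two scratch registers let a machine test and clear its per-IRQ state (here, the cp6 enable bit) on the exit path without touching the stack.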
Per Nicolas Pitre's note, there is no need to cp_wait on the return to user
as the latency to return is sufficient.
Signed-off-by: Dan Williams <dan.j.williams@intel.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
Diffstat (limited to 'include/asm-arm/arch-iop13xx')
-rw-r--r-- | include/asm-arm/arch-iop13xx/entry-macro.S | 18
1 file changed, 12 insertions, 6 deletions
diff --git a/include/asm-arm/arch-iop13xx/entry-macro.S b/include/asm-arm/arch-iop13xx/entry-macro.S
index 94c50283dc56..a624a7870c64 100644
--- a/include/asm-arm/arch-iop13xx/entry-macro.S
+++ b/include/asm-arm/arch-iop13xx/entry-macro.S
@@ -19,21 +19,27 @@
 	.macro	disable_fiq
 	.endm
 
+	.macro	get_irqnr_preamble, base, tmp
+	mrc	p15, 0, \tmp, c15, c1, 0
+	orr	\tmp, \tmp, #(1 << 6)
+	mcr	p15, 0, \tmp, c15, c1, 0	@ Enable cp6 access
+	.endm
+
 	/*
 	 * Note: a 1-cycle window exists where iintvec will return the value
 	 * of iintbase, so we explicitly check for "bad zeros"
 	 */
 	.macro	get_irqnr_and_base, irqnr, irqstat, base, tmp
-	mrc	p15, 0, \tmp, c15, c1, 0
-	orr	\tmp, \tmp, #(1 << 6)
-	mcr	p15, 0, \tmp, c15, c1, 0	@ Enable cp6 access
-
 	mrc	p6, 0, \irqnr, c3, c2, 0	@ Read IINTVEC
 	cmp	\irqnr, #0
 	mrceq	p6, 0, \irqnr, c3, c2, 0	@ Re-read on potentially bad zero
 	adds	\irqstat, \irqnr, #1		@ Check for 0xffffffff
 	movne	\irqnr, \irqnr, lsr #2		@ Convert to irqnr
+	.endm
 
-	biceq	\tmp, \tmp, #(1 << 6)
-	mcreq	p15, 0, \tmp, c15, c1, 0	@ Disable cp6 access if no more interrupts
+	.macro	arch_ret_to_user, tmp1, tmp2
+	mrc	p15, 0, \tmp1, c15, c1, 0
+	ands	\tmp2, \tmp1, #(1 << 6)
+	bicne	\tmp1, \tmp1, #(1 << 6)
+	mcrne	p15, 0, \tmp1, c15, c1, 0	@ Disable cp6 access
 	.endm