@@ -1,5 +1,5 @@
 /*
- * Based on arch/arm/include/asm/assembler.h
+ * Based on arch/arm/include/asm/assembler.h, arch/arm/mm/proc-macros.S
  *
  * Copyright (C) 1996-2000 Russell King
  * Copyright (C) 2012 ARM Ltd.
@@ -23,6 +23,8 @@
 #ifndef __ASM_ASSEMBLER_H
 #define __ASM_ASSEMBLER_H
 
+#include <asm/asm-offsets.h>
+#include <asm/pgtable-hwdef.h>
 #include <asm/ptrace.h>
 #include <asm/thread_info.h>
 
@@ -199,6 +201,84 @@ lr	.req	x30		// link register
 	add	\reg, \reg, \tmp
 	.endm
 
+/*
+ * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
+ */
+	.macro	vma_vm_mm, rd, rn
+	ldr	\rd, [\rn, #VMA_VM_MM]
+	.endm
+
+/*
+ * mmid - get context id from mm pointer (mm->context.id)
+ */
+	.macro	mmid, rd, rn
+	ldr	\rd, [\rn, #MM_CONTEXT_ID]
+	.endm
+
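/*
 * A hypothetical usage sketch: the two accessors chain naturally when
 * walking from a vma to its mm's context id. The register choices here
 * (x0 holding the vma pointer, x1 as destination) are assumptions, not
 * part of the macros:
 *
 *	vma_vm_mm x1, x0		// x1 = vma->vm_mm
 *	mmid	x1, x1			// x1 = mm->context.id
 */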
+/*
+ * dcache_line_size - get the minimum D-cache line size from the CTR register.
+ */
+	.macro	dcache_line_size, reg, tmp
+	mrs	\tmp, ctr_el0			// read CTR
+	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
+	mov	\reg, #4			// bytes per word
+	lsl	\reg, \reg, \tmp		// actual cache line size
+	.endm
+
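/*
 * A worked example of the decoding above (registers hypothetical): the
 * CTR_EL0.DminLine field, bits [19:16], encodes log2 of the line size
 * in 4-byte words, so a DminLine of 4 yields 4 << 4 = 64-byte lines.
 *
 *	dcache_line_size x2, x3		// x2 = line size in bytes, x3 corrupted
 */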
+/*
+ * icache_line_size - get the minimum I-cache line size from the CTR register.
+ */
+	.macro	icache_line_size, reg, tmp
+	mrs	\tmp, ctr_el0			// read CTR
+	and	\tmp, \tmp, #0xf		// cache line size encoding
+	mov	\reg, #4			// bytes per word
+	lsl	\reg, \reg, \tmp		// actual cache line size
+	.endm
+
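/*
 * The I-cache variant follows the same pattern; IminLine sits in
 * CTR_EL0 bits [3:0], hence the and with 0xf rather than a ubfm.
 */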
+/*
+ * tcr_set_idmap_t0sz - update TCR.T0SZ so that we can load the ID map
+ */
+	.macro	tcr_set_idmap_t0sz, valreg, tmpreg
+#ifndef CONFIG_ARM64_VA_BITS_48
+	ldr_l	\tmpreg, idmap_t0sz
+	bfi	\valreg, \tmpreg, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
+#endif
+	.endm
+
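/*
 * A usage sketch, assuming x10 holds a TCR_EL1 image being assembled
 * during MMU setup and x9 is free as scratch (both assumptions):
 *
 *	tcr_set_idmap_t0sz x10, x9	// insert idmap_t0sz into T0SZ
 *	msr	tcr_el1, x10
 */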
+/*
+ * Macro to perform data cache maintenance on the interval
+ * [kaddr, kaddr + size)
+ *
+ *	op:		operation passed to dc instruction
+ *	domain:		domain used in dsb instruction
+ *	kaddr:		starting virtual address of the region
+ *	size:		size of the region
+ *	Corrupts:	kaddr, size, tmp1, tmp2
+ */
+	.macro dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
+	dcache_line_size \tmp1, \tmp2
+	add	\size, \kaddr, \size
+	sub	\tmp2, \tmp1, #1
+	bic	\kaddr, \kaddr, \tmp2
+9998:	dc	\op, \kaddr
+	add	\kaddr, \kaddr, \tmp1
+	cmp	\kaddr, \size
+	b.lo	9998b
+	dsb	\domain
+	.endm
+
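/*
 * The bic rounds kaddr down to a line boundary, so a region that starts
 * mid-line is still fully covered; the loop then steps one line at a
 * time until it passes kaddr + size. A hypothetical invocation that
 * cleans and invalidates a buffer to the point of coherency (register
 * choices assumed, not prescribed):
 *
 *	mov	x0, x19			// kaddr: buffer start
 *	mov	x1, x20			// size in bytes
 *	dcache_by_line_op civac, sy, x0, x1, x2, x3
 */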
+/*
+ * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present
+ */
+	.macro	reset_pmuserenr_el0, tmpreg
+	mrs	\tmpreg, id_aa64dfr0_el1	// Check ID_AA64DFR0_EL1 PMUVer
+	sbfx	\tmpreg, \tmpreg, #8, #4
+	cmp	\tmpreg, #1			// Skip if no PMU present
+	b.lt	9000f
+	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
+9000:
+	.endm
+
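/*
 * The sbfx sign-extends the ID_AA64DFR0_EL1.PMUVer field (bits [11:8]),
 * so the IMPLEMENTATION DEFINED value 0xf becomes -1 and, like 0 (no
 * PMU), fails the cmp/b.lt and skips the write. A minimal call, with
 * x0 as an assumed scratch register:
 *
 *	reset_pmuserenr_el0 x0		// corrupts x0
 */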
 /*
  * Annotate a function as position independent, i.e., safe to be called before
  * the kernel virtual mapping is activated.