@@ -202,8 +202,7 @@
 #define ARM_SMMU_CB_S1_TLBIVAL		0x620
 #define ARM_SMMU_CB_S2_TLBIIPAS2	0x630
 #define ARM_SMMU_CB_S2_TLBIIPAS2L	0x638
-#define ARM_SMMU_CB_ATS1PR_LO		0x800
-#define ARM_SMMU_CB_ATS1PR_HI		0x804
+#define ARM_SMMU_CB_ATS1PR		0x800
 #define ARM_SMMU_CB_ATSR		0x8f0
 
 #define SCTLR_S1_ASIDPNE		(1 << 12)
@@ -1229,18 +1228,18 @@ static phys_addr_t arm_smmu_iova_to_phys_hard(struct iommu_domain *domain,
 	void __iomem *cb_base;
 	u32 tmp;
 	u64 phys;
+	unsigned long va;
 
 	cb_base = ARM_SMMU_CB_BASE(smmu) + ARM_SMMU_CB(smmu, cfg->cbndx);
 
-	if (smmu->version == 1) {
-		u32 reg = iova & ~0xfff;
-		writel_relaxed(reg, cb_base + ARM_SMMU_CB_ATS1PR_LO);
-	} else {
-		u32 reg = iova & ~0xfff;
-		writel_relaxed(reg, cb_base + ARM_SMMU_CB_ATS1PR_LO);
-		reg = ((u64)iova & ~0xfff) >> 32;
-		writel_relaxed(reg, cb_base + ARM_SMMU_CB_ATS1PR_HI);
-	}
+	/* ATS1 registers can only be written atomically */
+	va = iova & ~0xfffUL;
+#ifdef CONFIG_64BIT
+	if (smmu->version == ARM_SMMU_V2)
+		writeq_relaxed(va, cb_base + ARM_SMMU_CB_ATS1PR);
+	else
+#endif
+		writel_relaxed(va, cb_base + ARM_SMMU_CB_ATS1PR);
 
 	if (readl_poll_timeout_atomic(cb_base + ARM_SMMU_CB_ATSR, tmp,
 				      !(tmp & ATSR_ACTIVE), 5, 50)) {