@@ -31,6 +31,7 @@
 #include <linux/gfp.h>
 #include <linux/scatterlist.h>
 #include <linux/mem_encrypt.h>
+#include <linux/set_memory.h>
 
 #include <asm/io.h>
 #include <asm/dma.h>
@@ -156,8 +157,6 @@ unsigned long swiotlb_size_or_default(void)
 	return size ? size : (IO_TLB_DEFAULT_SIZE);
 }
 
-void __weak swiotlb_set_mem_attributes(void *vaddr, unsigned long size) { }
-
 /* For swiotlb, clear memory encryption mask from dma addresses */
 static dma_addr_t swiotlb_phys_to_dma(struct device *hwdev,
 				      phys_addr_t address)
@@ -209,12 +208,12 @@ void __init swiotlb_update_mem_attributes(void)
 
 	vaddr = phys_to_virt(io_tlb_start);
 	bytes = PAGE_ALIGN(io_tlb_nslabs << IO_TLB_SHIFT);
-	swiotlb_set_mem_attributes(vaddr, bytes);
+	set_memory_decrypted((unsigned long)vaddr, bytes >> PAGE_SHIFT);
 	memset(vaddr, 0, bytes);
 
 	vaddr = phys_to_virt(io_tlb_overflow_buffer);
 	bytes = PAGE_ALIGN(io_tlb_overflow);
-	swiotlb_set_mem_attributes(vaddr, bytes);
+	set_memory_decrypted((unsigned long)vaddr, bytes >> PAGE_SHIFT);
 	memset(vaddr, 0, bytes);
 }
 
@@ -355,7 +354,7 @@ swiotlb_late_init_with_tbl(char *tlb, unsigned long nslabs)
 	io_tlb_start = virt_to_phys(tlb);
 	io_tlb_end = io_tlb_start + bytes;
 
-	swiotlb_set_mem_attributes(tlb, bytes);
+	set_memory_decrypted((unsigned long)tlb, bytes >> PAGE_SHIFT);
 	memset(tlb, 0, bytes);
 
 	/*
@@ -366,7 +365,8 @@ swiotlb_late_init_with_tbl(char *tlb, unsigned long nslabs)
 	if (!v_overflow_buffer)
 		goto cleanup2;
 
-	swiotlb_set_mem_attributes(v_overflow_buffer, io_tlb_overflow);
+	set_memory_decrypted((unsigned long)v_overflow_buffer,
+			     io_tlb_overflow >> PAGE_SHIFT);
 	memset(v_overflow_buffer, 0, io_tlb_overflow);
 	io_tlb_overflow_buffer = virt_to_phys(v_overflow_buffer);
 