@@ -74,7 +74,7 @@ void * __meminit vmemmap_alloc_block(unsigned long size, int node)
 }
 
 /* need to make sure size is all the same during early stage */
-static void * __meminit alloc_block_buf(unsigned long size, int node)
+void * __meminit vmemmap_alloc_block_buf(unsigned long size, int node)
 {
 	void *ptr;
 
@@ -129,7 +129,7 @@ static unsigned long __meminit vmem_altmap_alloc(struct vmem_altmap *altmap,
 	return pfn + nr_align;
 }
 
-static void * __meminit altmap_alloc_block_buf(unsigned long size,
+void * __meminit altmap_alloc_block_buf(unsigned long size,
 		struct vmem_altmap *altmap)
 {
 	unsigned long pfn, nr_pfns;
@@ -153,15 +153,6 @@ static void * __meminit altmap_alloc_block_buf(unsigned long size,
 	return ptr;
 }
 
-/* need to make sure size is all the same during early stage */
-void * __meminit __vmemmap_alloc_block_buf(unsigned long size, int node,
-		struct vmem_altmap *altmap)
-{
-	if (altmap)
-		return altmap_alloc_block_buf(size, altmap);
-	return alloc_block_buf(size, node);
-}
-
 void __meminit vmemmap_verify(pte_t *pte, int node,
 		unsigned long start, unsigned long end)
 {
@@ -178,7 +169,7 @@ pte_t * __meminit vmemmap_pte_populate(pmd_t *pmd, unsigned long addr, int node)
 	pte_t *pte = pte_offset_kernel(pmd, addr);
 	if (pte_none(*pte)) {
 		pte_t entry;
-		void *p = alloc_block_buf(PAGE_SIZE, node);
+		void *p = vmemmap_alloc_block_buf(PAGE_SIZE, node);
 		if (!p)
 			return NULL;
 		entry = pfn_pte(__pa(p) >> PAGE_SHIFT, PAGE_KERNEL);
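
With the __vmemmap_alloc_block_buf() wrapper deleted above, call sites now pick the
allocator explicitly: altmap_alloc_block_buf() when a struct vmem_altmap backs the
range, vmemmap_alloc_block_buf() otherwise. A minimal caller-side sketch of that
pattern follows; the helper name is hypothetical and not part of this patch, it
simply mirrors the logic of the removed wrapper:

static void * __meminit sketch_vmemmap_buf_alloc(unsigned long size, int node,
						 struct vmem_altmap *altmap)
{
	/* Prefer the device-provided altmap reserve when one was supplied. */
	if (altmap)
		return altmap_alloc_block_buf(size, altmap);
	/* Otherwise use the regular node-local vmemmap buffer allocator. */
	return vmemmap_alloc_block_buf(size, node);
}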