|
@@ -111,13 +111,13 @@ static inline int slob_page_free(struct page *sp)
 
 static void set_slob_page_free(struct page *sp, struct list_head *list)
 {
-	list_add(&sp->list, list);
+	list_add(&sp->lru, list);
 	__SetPageSlobFree(sp);
 }
 
 static inline void clear_slob_page_free(struct page *sp)
 {
-	list_del(&sp->list);
+	list_del(&sp->lru);
 	__ClearPageSlobFree(sp);
 }
@@ -282,7 +282,7 @@ static void *slob_alloc(size_t size, gfp_t gfp, int align, int node)
 
 	spin_lock_irqsave(&slob_lock, flags);
 	/* Iterate through each partially free page, try to find room */
-	list_for_each_entry(sp, slob_list, list) {
+	list_for_each_entry(sp, slob_list, lru) {
 #ifdef CONFIG_NUMA
 		/*
 		 * If there's a node specification, search for a partial
@@ -296,7 +296,7 @@ static void *slob_alloc(size_t size, gfp_t gfp, int align, int node)
 			continue;
 
 		/* Attempt to alloc */
-		prev = sp->list.prev;
+		prev = sp->lru.prev;
 		b = slob_page_alloc(sp, size, align);
 		if (!b)
 			continue;
@@ -322,7 +322,7 @@ static void *slob_alloc(size_t size, gfp_t gfp, int align, int node)
 		spin_lock_irqsave(&slob_lock, flags);
 		sp->units = SLOB_UNITS(PAGE_SIZE);
 		sp->freelist = b;
-		INIT_LIST_HEAD(&sp->list);
+		INIT_LIST_HEAD(&sp->lru);
 		set_slob(b, SLOB_UNITS(PAGE_SIZE), b + SLOB_UNITS(PAGE_SIZE));
 		set_slob_page_free(sp, slob_list);
 		b = slob_page_alloc(sp, size, align);