@@ -136,7 +136,7 @@ static void slice_mask_for_free(struct mm_struct *mm, struct slice_mask *ret)
 	if (mm->task_size <= SLICE_LOW_TOP)
 		return;
 
-	for (i = 0; i < SLICE_NUM_HIGH; i++)
+	for (i = 0; i < GET_HIGH_SLICE_INDEX(mm->context.addr_limit); i++)
 		if (!slice_high_has_vma(mm, i))
 			__set_bit(i, ret->high_slices);
 }
@@ -157,7 +157,7 @@ static void slice_mask_for_size(struct mm_struct *mm, int psize, struct slice_ma
 			ret->low_slices |= 1u << i;
 
 	hpsizes = mm->context.high_slices_psize;
-	for (i = 0; i < SLICE_NUM_HIGH; i++) {
+	for (i = 0; i < GET_HIGH_SLICE_INDEX(mm->context.addr_limit); i++) {
 		mask_index = i & 0x1;
 		index = i >> 1;
 		if (((hpsizes[index] >> (mask_index * 4)) & 0xf) == psize)
@@ -165,15 +165,17 @@ static void slice_mask_for_size(struct mm_struct *mm, int psize, struct slice_ma
 	}
 }
 
-static int slice_check_fit(struct slice_mask mask, struct slice_mask available)
+static int slice_check_fit(struct mm_struct *mm,
+			   struct slice_mask mask, struct slice_mask available)
 {
 	DECLARE_BITMAP(result, SLICE_NUM_HIGH);
+	unsigned long slice_count = GET_HIGH_SLICE_INDEX(mm->context.addr_limit);
 
 	bitmap_and(result, mask.high_slices,
-		   available.high_slices, SLICE_NUM_HIGH);
+		   available.high_slices, slice_count);
 
 	return (mask.low_slices & available.low_slices) == mask.low_slices &&
-	       bitmap_equal(result, mask.high_slices, SLICE_NUM_HIGH);
+	       bitmap_equal(result, mask.high_slices, slice_count);
 }
 
 static void slice_flush_segments(void *parm)
@@ -217,7 +219,7 @@ static void slice_convert(struct mm_struct *mm, struct slice_mask mask, int psiz
 	mm->context.low_slices_psize = lpsizes;
 
 	hpsizes = mm->context.high_slices_psize;
-	for (i = 0; i < SLICE_NUM_HIGH; i++) {
+	for (i = 0; i < GET_HIGH_SLICE_INDEX(mm->context.addr_limit); i++) {
 		mask_index = i & 0x1;
 		index = i >> 1;
 		if (test_bit(i, mask.high_slices))
@@ -484,7 +486,7 @@ unsigned long slice_get_unmapped_area(unsigned long addr, unsigned long len,
 	/* Check if we fit in the good mask. If we do, we just return,
 	 * nothing else to do
 	 */
-	if (slice_check_fit(mask, good_mask)) {
+	if (slice_check_fit(mm, mask, good_mask)) {
 		slice_dbg(" fits good !\n");
 		return addr;
 	}
@@ -509,7 +511,7 @@ unsigned long slice_get_unmapped_area(unsigned long addr, unsigned long len,
 	slice_or_mask(&potential_mask, &good_mask);
 	slice_print_mask(" potential", potential_mask);
 
-	if ((addr != 0 || fixed) && slice_check_fit(mask, potential_mask)) {
+	if ((addr != 0 || fixed) && slice_check_fit(mm, mask, potential_mask)) {
 		slice_dbg(" fits potential !\n");
 		goto convert;
 	}
@@ -734,6 +736,6 @@ int is_hugepage_only_range(struct mm_struct *mm, unsigned long addr,
 	slice_print_mask(" mask", mask);
 	slice_print_mask(" available", available);
 #endif
-	return !slice_check_fit(mask, available);
+	return !slice_check_fit(mm, mask, available);
 }
 #endif
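
Note on the pattern above: every loop that used to run to SLICE_NUM_HIGH (the compile-time maximum number of high slices) is now bounded by GET_HIGH_SLICE_INDEX(mm->context.addr_limit), the first high-slice index past the task's current address limit, so slice scans and bitmap comparisons only touch slices the task can actually address. Below is a minimal user-space sketch of that indexing, assuming the powerpc convention that GET_HIGH_SLICE_INDEX(addr) is addr >> SLICE_HIGH_SHIFT with 1TB high slices (SLICE_HIGH_SHIFT == 40); the 128TB limit is illustrative, not taken from this patch:

/*
 * Standalone illustration of the high-slice indexing used in the
 * patch above.  Assumption (not part of the patch): 1TB high slices,
 * i.e. SLICE_HIGH_SHIFT == 40, as on powerpc Book3S-64.
 */
#include <stdio.h>

#define SLICE_HIGH_SHIFT		40
#define GET_HIGH_SLICE_INDEX(addr)	((unsigned long)(addr) >> SLICE_HIGH_SHIFT)

int main(void)
{
	/* Hypothetical per-task limit: 128TB of address space. */
	unsigned long addr_limit = 128UL << 40;
	unsigned long i, nr = GET_HIGH_SLICE_INDEX(addr_limit);

	/*
	 * Loops like the ones patched above stop at 'nr' instead of the
	 * compile-time SLICE_NUM_HIGH, skipping slices beyond the
	 * task's reach.
	 */
	for (i = 0; i < nr; i++)
		;	/* per-slice work (has_vma check, psize match, ...) */

	printf("high slices to scan: %lu\n", nr);	/* prints 128 */
	return 0;
}

Note also that slice_check_fit() still declares its bitmap with SLICE_NUM_HIGH bits but passes the smaller slice_count to bitmap_and()/bitmap_equal(): the storage layout is unchanged, only the number of bits compared shrinks to the task's limit.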