@@ -2414,6 +2414,19 @@ static void age_active_anon(struct zone *zone, struct scan_control *sc)
 	} while (memcg);
 }
 
+static bool zone_balanced(struct zone *zone, int order,
+			  unsigned long balance_gap, int classzone_idx)
+{
+	if (!zone_watermark_ok_safe(zone, order, high_wmark_pages(zone) +
+				    balance_gap, classzone_idx, 0))
+		return false;
+
+	if (COMPACTION_BUILD && order && !compaction_suitable(zone, order))
+		return false;
+
+	return true;
+}
+
 /*
  * pgdat_balanced is used when checking if a node is balanced for high-order
  * allocations. Only zones that meet watermarks and are in a zone allowed
@@ -2492,8 +2505,7 @@ static bool prepare_kswapd_sleep(pg_data_t *pgdat, int order, long remaining,
 			continue;
 		}
 
-		if (!zone_watermark_ok_safe(zone, order, high_wmark_pages(zone),
-					    i, 0))
+		if (!zone_balanced(zone, order, 0, i))
 			all_zones_ok = false;
 		else
 			balanced += zone->present_pages;
@@ -2602,8 +2614,7 @@ loop_again:
 				break;
 			}
 
-			if (!zone_watermark_ok_safe(zone, order,
-					high_wmark_pages(zone), 0, 0)) {
+			if (!zone_balanced(zone, order, 0, 0)) {
 				end_zone = i;
 				break;
 			} else {
@@ -2679,9 +2690,8 @@ loop_again:
 				testorder = 0;
 
 			if ((buffer_heads_over_limit && is_highmem_idx(i)) ||
-			    !zone_watermark_ok_safe(zone, testorder,
-					high_wmark_pages(zone) + balance_gap,
-					end_zone, 0)) {
+			    !zone_balanced(zone, testorder,
+					   balance_gap, end_zone)) {
 				shrink_zone(zone, &sc);
 
 				reclaim_state->reclaimed_slab = 0;
@@ -2708,8 +2718,7 @@ loop_again:
 				continue;
 			}
 
-			if (!zone_watermark_ok_safe(zone, testorder,
-					high_wmark_pages(zone), end_zone, 0)) {
+			if (!zone_balanced(zone, testorder, 0, end_zone)) {
 				all_zones_ok = 0;
 				/*
 				 * We are still under min water mark. This
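
For readers who want to see the consolidated check in isolation, here is a minimal user-space sketch of the logic the new zone_balanced() helper expresses; it is not kernel code. The struct zone_t and the watermark_ok() predicate are stand-ins invented for this sketch: in the patch they correspond to struct zone, zone_watermark_ok_safe() with high_wmark_pages() plus the balance gap, and compaction_suitable(), with COMPACTION_BUILD reduced to a plain boolean and the classzone_idx argument omitted for simplicity.

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for struct zone; only the fields this sketch needs. */
struct zone_t {
	unsigned long free_pages;
	unsigned long high_wmark;          /* models high_wmark_pages(zone) */
	bool compaction_suitable;          /* models compaction_suitable(zone, order) */
};

static const bool COMPACTION_BUILD = true; /* models CONFIG_COMPACTION being enabled */

/* Models zone_watermark_ok_safe(zone, order, high_wmark + gap, classzone_idx, 0). */
static bool watermark_ok(const struct zone_t *z, unsigned long balance_gap)
{
	return z->free_pages >= z->high_wmark + balance_gap;
}

/*
 * Same shape as the new helper: a zone counts as balanced only if it meets
 * the high watermark (plus any balance gap) and, for order > 0 with
 * compaction built in, is also suitable for compaction.
 */
static bool zone_balanced(const struct zone_t *zone, int order,
			  unsigned long balance_gap)
{
	if (!watermark_ok(zone, balance_gap))
		return false;

	if (COMPACTION_BUILD && order && !zone->compaction_suitable)
		return false;

	return true;
}

int main(void)
{
	struct zone_t z = { .free_pages = 1024, .high_wmark = 512,
			    .compaction_suitable = false };

	/* Order-0 request: only the watermark matters -> balanced. */
	printf("order 0: %d\n", zone_balanced(&z, 0, 0));
	/* Order-3 request: watermark is fine but compaction is not -> not balanced. */
	printf("order 3: %d\n", zone_balanced(&z, 3, 0));
	return 0;
}

Note how the call sites converted in the hunks above all go through the same predicate afterwards, so the watermark test and the compaction-suitability test can no longer disagree between them.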