@@ -1717,7 +1717,8 @@ i915_gem_object_put_pages(struct drm_i915_gem_object *obj)
 }
 
 static long
-i915_gem_purge(struct drm_i915_private *dev_priv, long target)
+__i915_gem_shrink(struct drm_i915_private *dev_priv, long target,
+		  bool purgeable_only)
 {
 	struct drm_i915_gem_object *obj, *next;
 	long count = 0;
@@ -1725,7 +1726,7 @@ i915_gem_purge(struct drm_i915_private *dev_priv, long target)
 	list_for_each_entry_safe(obj, next,
				 &dev_priv->mm.unbound_list,
				 gtt_list) {
-		if (i915_gem_object_is_purgeable(obj) &&
+		if ((i915_gem_object_is_purgeable(obj) || !purgeable_only) &&
		    i915_gem_object_put_pages(obj) == 0) {
			count += obj->base.size >> PAGE_SHIFT;
			if (count >= target)
@@ -1736,7 +1737,7 @@ i915_gem_purge(struct drm_i915_private *dev_priv, long target)
 	list_for_each_entry_safe(obj, next,
				 &dev_priv->mm.inactive_list,
				 mm_list) {
-		if (i915_gem_object_is_purgeable(obj) &&
+		if ((i915_gem_object_is_purgeable(obj) || !purgeable_only) &&
		    i915_gem_object_unbind(obj) == 0 &&
		    i915_gem_object_put_pages(obj) == 0) {
			count += obj->base.size >> PAGE_SHIFT;
@@ -1748,6 +1749,12 @@ i915_gem_purge(struct drm_i915_private *dev_priv, long target)
 	return count;
 }
 
+static long
+i915_gem_purge(struct drm_i915_private *dev_priv, long target)
+{
+	return __i915_gem_shrink(dev_priv, target, true);
+}
+
 static void
 i915_gem_shrink_all(struct drm_i915_private *dev_priv)
 {
@@ -4395,6 +4402,9 @@ i915_gem_inactive_shrink(struct shrinker *shrinker, struct shrink_control *sc)
 
 	if (nr_to_scan) {
 		nr_to_scan -= i915_gem_purge(dev_priv, nr_to_scan);
+		if (nr_to_scan > 0)
+			nr_to_scan -= __i915_gem_shrink(dev_priv, nr_to_scan,
+							false);
 		if (nr_to_scan > 0)
 			i915_gem_shrink_all(dev_priv);
 	}
@@ -4403,7 +4413,7 @@ i915_gem_inactive_shrink(struct shrinker *shrinker, struct shrink_control *sc)
 	list_for_each_entry(obj, &dev_priv->mm.unbound_list, gtt_list)
		if (obj->pages_pin_count == 0)
			cnt += obj->base.size >> PAGE_SHIFT;
-	list_for_each_entry(obj, &dev_priv->mm.bound_list, gtt_list)
+	list_for_each_entry(obj, &dev_priv->mm.inactive_list, gtt_list)
		if (obj->pin_count == 0 && obj->pages_pin_count == 0)
			cnt += obj->base.size >> PAGE_SHIFT;
 
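
Note: as a reading aid only, a rough sketch of the reclaim policy inside i915_gem_inactive_shrink() after this patch, assembled from the hunks above. The comments and the three-step framing are my own interpretation, not part of the patch; anything outside the quoted hunk context is an assumption about the surrounding code.

	if (nr_to_scan) {
		/* Step 1: cheapest - truncate only objects userspace has
		 * already marked purgeable.  i915_gem_purge() is now a thin
		 * wrapper around __i915_gem_shrink(dev_priv, target, true). */
		nr_to_scan -= i915_gem_purge(dev_priv, nr_to_scan);

		/* Step 2: still short of the target - also reap unpinned,
		 * non-purgeable objects by passing purgeable_only = false. */
		if (nr_to_scan > 0)
			nr_to_scan -= __i915_gem_shrink(dev_priv, nr_to_scan,
							false);

		/* Step 3: last resort - drop everything we possibly can. */
		if (nr_to_scan > 0)
			i915_gem_shrink_all(dev_priv);
	}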