@@ -35,6 +35,7 @@
 #include <linux/swap.h>
 #include <linux/pci.h>
 
+static uint32_t i915_gem_get_gtt_alignment(struct drm_gem_object *obj);
 static int i915_gem_object_flush_gpu_write_domain(struct drm_gem_object *obj);
 static void i915_gem_object_flush_gtt_write_domain(struct drm_gem_object *obj);
 static void i915_gem_object_flush_cpu_write_domain(struct drm_gem_object *obj);
@@ -48,7 +49,8 @@ static int i915_gem_object_wait_rendering(struct drm_gem_object *obj);
 static int i915_gem_object_bind_to_gtt(struct drm_gem_object *obj,
				       unsigned alignment);
 static void i915_gem_clear_fence_reg(struct drm_gem_object *obj);
-static int i915_gem_evict_something(struct drm_device *dev, int min_size);
+static int i915_gem_evict_something(struct drm_device *dev, int min_size,
+				    unsigned alignment);
 static int i915_gem_evict_from_inactive_list(struct drm_device *dev);
 static int i915_gem_phys_pwrite(struct drm_device *dev, struct drm_gem_object *obj,
				 struct drm_i915_gem_pwrite *args,
@@ -313,7 +315,8 @@ i915_gem_object_get_pages_or_evict(struct drm_gem_object *obj)
 	if (ret == -ENOMEM) {
 		struct drm_device *dev = obj->dev;
 
-		ret = i915_gem_evict_something(dev, obj->size);
+		ret = i915_gem_evict_something(dev, obj->size,
+					       i915_gem_get_gtt_alignment(obj));
 		if (ret)
 			return ret;
 
@@ -2005,10 +2008,12 @@ i915_gem_object_unbind(struct drm_gem_object *obj)
 	return ret;
 }
 
-static struct drm_gem_object *
-i915_gem_find_inactive_object(struct drm_device *dev, int min_size)
+static int
+i915_gem_scan_inactive_list_and_evict(struct drm_device *dev, int min_size,
+				      unsigned alignment, int *found)
 {
 	drm_i915_private_t *dev_priv = dev->dev_private;
+	struct drm_gem_object *obj;
 	struct drm_i915_gem_object *obj_priv;
 	struct drm_gem_object *best = NULL;
 	struct drm_gem_object *first = NULL;
@@ -2022,14 +2027,31 @@ i915_gem_find_inactive_object(struct drm_device *dev, int min_size)
 			    (!best || obj->size < best->size)) {
 				best = obj;
 				if (best->size == min_size)
-					return best;
+					break;
 			}
 			if (!first)
			    first = obj;
 		}
 	}
 
-	return best ? best : first;
+	obj = best ? best : first;
+
+	if (!obj) {
+		*found = 0;
+		return 0;
+	}
+
+	*found = 1;
+
+#if WATCH_LRU
+	DRM_INFO("%s: evicting %p\n", __func__, obj);
+#endif
+	obj_priv = to_intel_bo(obj);
+	BUG_ON(obj_priv->pin_count != 0);
+	BUG_ON(obj_priv->active);
+
+	/* Wait on the rendering and unbind the buffer. */
+	return i915_gem_object_unbind(obj);
 }
 
 static int
@@ -2115,11 +2137,11 @@ i915_gem_evict_everything(struct drm_device *dev)
 }
 
 static int
-i915_gem_evict_something(struct drm_device *dev, int min_size)
+i915_gem_evict_something(struct drm_device *dev,
+			 int min_size, unsigned alignment)
 {
 	drm_i915_private_t *dev_priv = dev->dev_private;
-	struct drm_gem_object *obj;
-	int ret;
+	int ret, found;
 
 	struct intel_ring_buffer *render_ring = &dev_priv->render_ring;
 	struct intel_ring_buffer *bsd_ring = &dev_priv->bsd_ring;
@@ -2129,20 +2151,11 @@ i915_gem_evict_something(struct drm_device *dev, int min_size)
 		/* If there's an inactive buffer available now, grab it
 		 * and be done.
 		 */
-		obj = i915_gem_find_inactive_object(dev, min_size);
-		if (obj) {
-			struct drm_i915_gem_object *obj_priv;
-
-#if WATCH_LRU
-			DRM_INFO("%s: evicting %p\n", __func__, obj);
-#endif
-			obj_priv = to_intel_bo(obj);
-			BUG_ON(obj_priv->pin_count != 0);
-			BUG_ON(obj_priv->active);
-
-			/* Wait on the rendering and unbind the buffer. */
-			return i915_gem_object_unbind(obj);
-		}
+		ret = i915_gem_scan_inactive_list_and_evict(dev, min_size,
+							    alignment,
+							    &found);
+		if (found)
+			return ret;
 
 		/* If we didn't get anything, but the ring is still processing
 		 * things, wait for the next to finish and hopefully leave us
@@ -2184,6 +2197,7 @@ i915_gem_evict_something(struct drm_device *dev, int min_size)
 		 * will get moved to inactive.
 		 */
 		if (!list_empty(&dev_priv->mm.flushing_list)) {
+			struct drm_gem_object *obj = NULL;
 			struct drm_i915_gem_object *obj_priv;
 
 			/* Find an object that we can immediately reuse */
@@ -2661,7 +2675,7 @@ i915_gem_object_bind_to_gtt(struct drm_gem_object *obj, unsigned alignment)
 #if WATCH_LRU
 		DRM_INFO("%s: GTT full, evicting something\n", __func__);
 #endif
-		ret = i915_gem_evict_something(dev, obj->size);
+		ret = i915_gem_evict_something(dev, obj->size, alignment);
 		if (ret)
 			return ret;
 
@@ -2679,7 +2693,8 @@ i915_gem_object_bind_to_gtt(struct drm_gem_object *obj, unsigned alignment)
 
 		if (ret == -ENOMEM) {
 			/* first try to clear up some space from the GTT */
-			ret = i915_gem_evict_something(dev, obj->size);
+			ret = i915_gem_evict_something(dev, obj->size,
+						       alignment);
 			if (ret) {
 				/* now try to shrink everyone else */
 				if (gfpmask) {
@@ -2709,7 +2724,7 @@ i915_gem_object_bind_to_gtt(struct drm_gem_object *obj, unsigned alignment)
 		drm_mm_put_block(obj_priv->gtt_space);
 		obj_priv->gtt_space = NULL;
 
-		ret = i915_gem_evict_something(dev, obj->size);
+		ret = i915_gem_evict_something(dev, obj->size, alignment);
 		if (ret)
 			return ret;
 