Diffstat (limited to 'drivers/gpu/drm/i915/gt/intel_gtt.c')
-rw-r--r-- | drivers/gpu/drm/i915/gt/intel_gtt.c | 36
1 file changed, 30 insertions, 6 deletions
diff --git a/drivers/gpu/drm/i915/gt/intel_gtt.c b/drivers/gpu/drm/i915/gt/intel_gtt.c
index a94be0306464..a5f5b2dda332 100644
--- a/drivers/gpu/drm/i915/gt/intel_gtt.c
+++ b/drivers/gpu/drm/i915/gt/intel_gtt.c
@@ -10,9 +10,11 @@
 #include <drm/drm_cache.h>
 
+#include "gem/i915_gem_internal.h"
 #include "gem/i915_gem_lmem.h"
 #include "i915_trace.h"
 #include "intel_gt.h"
+#include "intel_gt_regs.h"
 #include "intel_gtt.h"
 
 struct drm_i915_gem_object *alloc_pt_lmem(struct i915_address_space *vm, int sz)
 {
@@ -105,14 +107,19 @@ void __i915_vm_close(struct i915_address_space *vm)
 	list_for_each_entry_safe(vma, vn, &vm->bound_list, vm_link) {
 		struct drm_i915_gem_object *obj = vma->obj;
 
-		/* Keep the obj (and hence the vma) alive as _we_ destroy it */
-		if (!kref_get_unless_zero(&obj->base.refcount))
+		if (!kref_get_unless_zero(&obj->base.refcount)) {
+			/*
+			 * Unbind the dying vma to ensure the bound_list
+			 * is completely drained. We leave the destruction to
+			 * the object destructor.
+			 */
+			atomic_and(~I915_VMA_PIN_MASK, &vma->flags);
+			WARN_ON(__i915_vma_unbind(vma));
 			continue;
+		}
 
-		atomic_and(~I915_VMA_PIN_MASK, &vma->flags);
-		WARN_ON(__i915_vma_unbind(vma));
-		__i915_vma_put(vma);
-
+		/* Keep the obj (and hence the vma) alive as _we_ destroy it */
+		i915_vma_destroy_locked(vma);
 		i915_gem_object_put(obj);
 	}
 	GEM_BUG_ON(!list_empty(&vm->bound_list));
@@ -161,6 +168,9 @@ static void __i915_vm_release(struct work_struct *work)
 	struct i915_address_space *vm =
 		container_of(work, struct i915_address_space, release_work);
 
+	/* Synchronize async unbinds. */
+	i915_vma_resource_bind_dep_sync_all(vm);
+
 	vm->cleanup(vm);
 	i915_address_space_fini(vm);
 
@@ -189,6 +199,7 @@ void i915_address_space_init(struct i915_address_space *vm, int subclass)
 	if (!kref_read(&vm->resv_ref))
 		kref_init(&vm->resv_ref);
 
+	vm->pending_unbind = RB_ROOT_CACHED;
 	INIT_WORK(&vm->release_work, __i915_vm_release);
 	atomic_set(&vm->open, 1);
 
@@ -219,6 +230,19 @@ void i915_address_space_init(struct i915_address_space *vm, int subclass)
 	GEM_BUG_ON(!vm->total);
 	drm_mm_init(&vm->mm, 0, vm->total);
+
+	memset64(vm->min_alignment, I915_GTT_MIN_ALIGNMENT,
+		 ARRAY_SIZE(vm->min_alignment));
+
+	if (HAS_64K_PAGES(vm->i915) && NEEDS_COMPACT_PT(vm->i915) &&
+	    subclass == VM_CLASS_PPGTT) {
+		vm->min_alignment[INTEL_MEMORY_LOCAL] = I915_GTT_PAGE_SIZE_2M;
+		vm->min_alignment[INTEL_MEMORY_STOLEN_LOCAL] = I915_GTT_PAGE_SIZE_2M;
+	} else if (HAS_64K_PAGES(vm->i915)) {
+		vm->min_alignment[INTEL_MEMORY_LOCAL] = I915_GTT_PAGE_SIZE_64K;
+		vm->min_alignment[INTEL_MEMORY_STOLEN_LOCAL] = I915_GTT_PAGE_SIZE_64K;
+	}
+
 	vm->mm.head_node.color = I915_COLOR_UNEVICTABLE;
 
 	INIT_LIST_HEAD(&vm->bound_list);
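
For reference, the vma teardown loop in __i915_vm_close() reads as follows once the hunks above are applied. This is reconstructed purely from the diff context; lines outside the hunk are elided and the surrounding locking (vm->mutex held by the caller) is not shown here.

	list_for_each_entry_safe(vma, vn, &vm->bound_list, vm_link) {
		struct drm_i915_gem_object *obj = vma->obj;

		if (!kref_get_unless_zero(&obj->base.refcount)) {
			/*
			 * Unbind the dying vma to ensure the bound_list
			 * is completely drained. We leave the destruction to
			 * the object destructor.
			 */
			atomic_and(~I915_VMA_PIN_MASK, &vma->flags);
			WARN_ON(__i915_vma_unbind(vma));
			continue;
		}

		/* Keep the obj (and hence the vma) alive as _we_ destroy it */
		i915_vma_destroy_locked(vma);
		i915_gem_object_put(obj);
	}
	GEM_BUG_ON(!list_empty(&vm->bound_list));

The split mirrors the two ownership cases: if the backing object is already dying (its refcount could not be raised), the vma is only unpinned and unbound so the bound_list drains, and its destruction is left to the object destructor; if the object is still live, both object and vma are kept referenced while the vma is destroyed here under the vm lock via i915_vma_destroy_locked(), after which the temporary object reference is dropped.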