Lines Matching refs:obj (i915 GEM execbuffer code)

62 eb_add_object(struct eb_objects *eb, struct drm_i915_gem_object *obj)
78 static inline int use_cpu_reloc(struct drm_i915_gem_object *obj)
80 return (obj->base.write_domain == I915_GEM_DOMAIN_CPU ||
81 !obj->map_and_fenceable ||
82 obj->cache_level != I915_CACHE_NONE);
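
Lines 78-82 give essentially the entire use_cpu_reloc() helper; a sketch of the whole function, with only the braces and the explanatory comment assumed:

static inline int
use_cpu_reloc(struct drm_i915_gem_object *obj)
{
    /* Relocate through the CPU when the object is already being written
     * by the CPU, cannot be mapped and fenced through the GTT aperture,
     * or is cacheable (a GTT write would bypass the CPU cache). */
    return (obj->base.write_domain == I915_GEM_DOMAIN_CPU ||
            !obj->map_and_fenceable ||
            obj->cache_level != I915_CACHE_NONE);
}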
86 i915_gem_execbuffer_relocate_entry(struct drm_i915_gem_object *obj,
90 struct drm_device *dev = obj->base.dev;
119 "obj %p target %d offset %d "
121 obj, reloc->target_handle,
130 "obj %p target %d offset %d "
132 obj, reloc->target_handle,
149 if (reloc->offset > obj->base.size - 4) {
151 "obj %p target %d offset %d size %d.\n",
152 obj, reloc->target_handle,
154 (int) obj->base.size);
159 "obj %p target %d offset %d.\n",
160 obj, reloc->target_handle,
165 ret = i915_gem_object_set_to_gtt_domain(obj, true);
169 ret = i915_gem_object_put_fence(obj);
178 reloc_entry = (uint32_t *)(uintptr_t)(obj->page_list[reloc_base/PAGE_SIZE] + reloc_offset);
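
Lines 86-132 are the argument validation of i915_gem_execbuffer_relocate_entry() (the two "obj %p target %d offset %d" messages reject bad read/write-domain combinations); lines 149-178 are its GTT-side tail: check that the relocation lies inside the object, move the object into the GTT domain, drop any fence, and patch the relocated address through the object's kernel page mapping. A condensed sketch of that tail; the locals (ret, reloc_base, reloc_offset, target_offset), the debug message text, and the final store are assumptions filled in around the matched lines:

    /* The 4-byte relocation must fit entirely inside the object. */
    if (reloc->offset > obj->base.size - 4) {
        DRM_DEBUG("Relocation beyond object bounds: "
                  "obj %p target %d offset %d size %d.\n",
                  obj, reloc->target_handle,
                  (int) reloc->offset, (int) obj->base.size);
        return -EINVAL;
    }

    ret = i915_gem_object_set_to_gtt_domain(obj, true);
    if (ret)
        return ret;

    ret = i915_gem_object_put_fence(obj);
    if (ret)
        return ret;

    /* Locate the dword inside the object's page list and write the
     * target's current GTT address plus the requested delta (assumed). */
    reloc_entry = (uint32_t *)(uintptr_t)
        (obj->page_list[reloc_base/PAGE_SIZE] + reloc_offset);
    *reloc_entry = reloc->delta + target_offset;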
194 i915_gem_execbuffer_relocate_object(struct drm_i915_gem_object *obj,
200 struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
219 ret = i915_gem_execbuffer_relocate_entry(obj, eb, r);
240 i915_gem_execbuffer_relocate_object_slow(struct drm_i915_gem_object *obj,
244 const struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
248 ret = i915_gem_execbuffer_relocate_entry(obj, eb, &relocs[i]);
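
Lines 240-248 give the shape of i915_gem_execbuffer_relocate_object_slow(), which replays relocations from an array copied out of user space up front (the fast-path variant at lines 194-219 reads each relocation from user memory as it goes). A sketch of the slow variant; the loop bounds and return path are assumed:

static int
i915_gem_execbuffer_relocate_object_slow(struct drm_i915_gem_object *obj,
                                         struct eb_objects *eb,
                                         struct drm_i915_gem_relocation_entry *relocs)
{
    const struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
    int i, ret;

    /* Apply every copied relocation entry for this object in turn. */
    for (i = 0; i < entry->relocation_count; i++) {
        ret = i915_gem_execbuffer_relocate_entry(obj, eb, &relocs[i]);
        if (ret)
            return ret;
    }

    return 0;
}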
261 struct drm_i915_gem_object *obj;
264 list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
265 ret = i915_gem_execbuffer_relocate_object(obj, eb);
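
Lines 261-265 are the heart of the top-level relocate pass: walk every object on the execbuffer list and fix up its relocations, stopping at the first error. A sketch; the break-on-error and the final return are assumed:

    struct drm_i915_gem_object *obj;
    int ret = 0;

    list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
        ret = i915_gem_execbuffer_relocate_object(obj, eb);
        if (ret)
            break;
    }

    return ret;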
276 need_reloc_mappable(struct drm_i915_gem_object *obj)
278 struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
279 return entry->relocation_count && !use_cpu_reloc(obj);
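
Lines 276-279 are the whole of need_reloc_mappable(): an object has to sit in the mappable aperture only if it still has relocations to write and those writes cannot go through the CPU. Only the braces and the comment are assumed:

static int
need_reloc_mappable(struct drm_i915_gem_object *obj)
{
    /* GTT relocation writes require a mappable binding; CPU relocations
     * (see use_cpu_reloc) do not. */
    struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
    return entry->relocation_count && !use_cpu_reloc(obj);
}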
283 i915_gem_execbuffer_reserve_object(struct drm_i915_gem_object *obj,
287 struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
288 struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
296 obj->tiling_mode != I915_TILING_NONE;
300 need_fence = obj->tiling_mode != I915_TILING_NONE;
302 need_mappable = need_fence || need_reloc_mappable(obj);
304 ret = i915_gem_object_pin(obj, entry->alignment, need_mappable, false);
312 ret = i915_gem_object_get_fence(obj);
316 if (i915_gem_object_pin_fence(obj))
319 obj->pending_fenced_gpu_access = true;
324 if (dev_priv->mm.aliasing_ppgtt && !obj->has_aliasing_ppgtt_mapping) {
326 obj, obj->cache_level);
328 obj->has_aliasing_ppgtt_mapping = 1;
331 if (entry->offset != obj->gtt_offset) {
332 entry->offset = obj->gtt_offset;
337 obj->base.pending_read_domains = I915_GEM_DOMAIN_RENDER;
338 obj->base.pending_write_domain = I915_GEM_DOMAIN_RENDER;
342 !obj->has_global_gtt_mapping)
343 i915_gem_gtt_bind_object(obj, obj->cache_level);
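
Lines 283-343 span i915_gem_execbuffer_reserve_object(): pin the object (into the mappable aperture when it needs a fence or a non-CPU relocation), take and pin a fence for tiled objects, make sure the aliasing-PPGTT and global-GTT mappings exist, and report the final GTT offset back through the exec entry. The listing shows two need_fence computations (lines 296 and 300), presumably for different configurations; the sketch below uses the simpler one. The EXEC_OBJECT_* / __EXEC_OBJECT_* flag tests, the PPGTT bind call name, and the need_relocs write-back are assumptions around the matched lines:

    struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
    struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
    bool need_fence, need_mappable;
    int ret;

    /* Tiled objects need a fence; fenced or GTT-relocated objects must
     * sit in the mappable part of the aperture. */
    need_fence = obj->tiling_mode != I915_TILING_NONE;
    need_mappable = need_fence || need_reloc_mappable(obj);

    ret = i915_gem_object_pin(obj, entry->alignment, need_mappable, false);
    if (ret)
        return ret;

    if (need_fence) {
        ret = i915_gem_object_get_fence(obj);
        if (ret)
            return ret;

        if (i915_gem_object_pin_fence(obj))
            entry->flags |= __EXEC_OBJECT_HAS_FENCE;    /* assumed flag */

        obj->pending_fenced_gpu_access = true;
    }

    /* Make sure the object is mapped into the aliasing PPGTT
     * (bind call name assumed from the continuation at line 326). */
    if (dev_priv->mm.aliasing_ppgtt && !obj->has_aliasing_ppgtt_mapping) {
        i915_ppgtt_bind_object(dev_priv->mm.aliasing_ppgtt,
                               obj, obj->cache_level);
        obj->has_aliasing_ppgtt_mapping = 1;
    }

    /* Report the offset actually chosen so userspace can refresh its
     * presumed offsets. */
    if (entry->offset != obj->gtt_offset) {
        entry->offset = obj->gtt_offset;
        *need_relocs = true;                            /* assumed */
    }

    if (entry->flags & EXEC_OBJECT_WRITE) {             /* assumed flag */
        obj->base.pending_read_domains = I915_GEM_DOMAIN_RENDER;
        obj->base.pending_write_domain = I915_GEM_DOMAIN_RENDER;
    }

    if (entry->flags & EXEC_OBJECT_NEEDS_GTT &&         /* assumed flag */
        !obj->has_global_gtt_mapping)
        i915_gem_gtt_bind_object(obj, obj->cache_level);

    return 0;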
349 i915_gem_execbuffer_unreserve_object(struct drm_i915_gem_object *obj)
353 if (!obj->gtt_space)
356 entry = obj->exec_entry;
359 i915_gem_object_unpin_fence(obj);
362 i915_gem_object_unpin(obj);
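
Lines 349-362 are the inverse helper, i915_gem_execbuffer_unreserve_object(): if the object is still bound, drop the fence pin and the GTT pin taken during reservation. A sketch; the __EXEC_OBJECT_* flag names guarding the two unpin calls are assumed:

static void
i915_gem_execbuffer_unreserve_object(struct drm_i915_gem_object *obj)
{
    struct drm_i915_gem_exec_object2 *entry;

    if (!obj->gtt_space)
        return;

    entry = obj->exec_entry;

    if (entry->flags & __EXEC_OBJECT_HAS_FENCE)         /* assumed flag */
        i915_gem_object_unpin_fence(obj);

    if (entry->flags & __EXEC_OBJECT_HAS_PIN)           /* assumed flag */
        i915_gem_object_unpin(obj);

    entry->flags &= ~(__EXEC_OBJECT_HAS_FENCE | __EXEC_OBJECT_HAS_PIN);
}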
373 struct drm_i915_gem_object *obj;
388 obj = list_first_entry(objects,
391 entry = obj->exec_entry;
396 obj->tiling_mode != I915_TILING_NONE;
400 need_fence = obj->tiling_mode != I915_TILING_NONE;
401 need_mappable = need_fence || need_reloc_mappable(obj);
404 list_move(&obj->exec_list, &ordered_objects, (caddr_t)obj);
406 list_move_tail(&obj->exec_list, &ordered_objects, (caddr_t)obj);
408 obj->base.pending_read_domains = I915_GEM_GPU_DOMAINS & ~I915_GEM_DOMAIN_COMMAND;
409 obj->base.pending_write_domain = 0;
410 obj->pending_fenced_gpu_access = false;
412 if (IS_GEN5(ring->dev) && (batch_obj != obj) && obj->gtt_offset) {
414 if (obj->tiling_mode != I915_TILING_NONE)
415 err = i915_gem_object_get_fence(obj);
417 err = i915_gem_object_put_fence(obj);
419 DRM_ERROR("failed to get obj 0x%p fence %d", obj, err);
420 obj->pending_fenced_gpu_access = true;
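
Lines 373-420 are the first half of the reserve step: pull each object off the incoming list, decide whether it must be mappable, and sort it into ordered_objects so mappable/fenceable objects are placed first; lines 412-420 add a Gen5-only pass that pre-resolves fence state for already-bound objects other than the batch. A sketch of that ordering loop; the while framing, the branch lines missing from the listing, and the final splice back onto the object list are assumptions:

    while (!list_empty(objects)) {
        struct drm_i915_gem_exec_object2 *entry;
        bool need_fence, need_mappable;

        obj = list_first_entry(objects,
                               struct drm_i915_gem_object,
                               exec_list);
        entry = obj->exec_entry;

        need_fence = obj->tiling_mode != I915_TILING_NONE;
        need_mappable = need_fence || need_reloc_mappable(obj);

        /* Mappable objects go to the front so they get first pick of
         * the aperture. */
        if (need_mappable)
            list_move(&obj->exec_list, &ordered_objects, (caddr_t)obj);
        else
            list_move_tail(&obj->exec_list, &ordered_objects, (caddr_t)obj);

        obj->base.pending_read_domains = I915_GEM_GPU_DOMAINS & ~I915_GEM_DOMAIN_COMMAND;
        obj->base.pending_write_domain = 0;
        obj->pending_fenced_gpu_access = false;

        /* Gen5-only: settle the fence of every already-bound object now
         * (exact nesting of the missing lines is assumed). */
        if (IS_GEN5(ring->dev) && (batch_obj != obj) && obj->gtt_offset) {
            int err;

            if (obj->tiling_mode != I915_TILING_NONE)
                err = i915_gem_object_get_fence(obj);
            else
                err = i915_gem_object_put_fence(obj);
            if (err)
                DRM_ERROR("failed to get obj 0x%p fence %d", obj, err);
            obj->pending_fenced_gpu_access = true;
        }
    }
    list_splice(&ordered_objects, objects);     /* argument order assumed */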
444 list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
445 struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
447 if (!obj->gtt_space)
452 obj->tiling_mode != I915_TILING_NONE;
456 need_fence = obj->tiling_mode != I915_TILING_NONE;
457 need_mappable = need_fence || need_reloc_mappable(obj);
460 if ((entry->alignment && obj->gtt_offset & (entry->alignment - 1)) ||
461 (need_mappable && !obj->map_and_fenceable))
462 ret = i915_gem_object_unbind(obj, 1);
464 ret = i915_gem_execbuffer_reserve_object(obj, ring, need_relocs);
470 list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
471 if (obj->gtt_space)
474 ret = i915_gem_execbuffer_reserve_object(obj, ring, need_relocs);
480 list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list)
481 i915_gem_execbuffer_unreserve_object(obj);
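
Lines 444-481 are the pin/retry half of the reserve step: per pass, unbind any bound object that violates its alignment or mappability constraints, reserve everything else, bind whatever is still unbound, then drop the transient pins again; on -ENOSPC the whole pass is retried after evicting. A sketch assembled around those lines; the do/while and retry/eviction framing (including the i915_gem_evict_everything call) is an assumption, while need_relocs is the caller-provided flag pointer visible at lines 464 and 474:

    struct drm_i915_gem_object *obj;
    int retry = 0;

    do {
        int ret = 0;

        /* 1. Unbind ill-fitting objects; pin the ones that already fit. */
        list_for_each_entry(obj, struct drm_i915_gem_object,
                            objects, exec_list) {
            struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
            bool need_fence, need_mappable;

            if (!obj->gtt_space)
                continue;

            need_fence = obj->tiling_mode != I915_TILING_NONE;
            need_mappable = need_fence || need_reloc_mappable(obj);

            if ((entry->alignment &&
                 obj->gtt_offset & (entry->alignment - 1)) ||
                (need_mappable && !obj->map_and_fenceable))
                ret = i915_gem_object_unbind(obj, 1);
            else
                ret = i915_gem_execbuffer_reserve_object(obj, ring, need_relocs);
            if (ret)
                goto err;
        }

        /* 2. Bind and pin everything not yet in the GTT. */
        list_for_each_entry(obj, struct drm_i915_gem_object,
                            objects, exec_list) {
            if (obj->gtt_space)
                continue;

            ret = i915_gem_execbuffer_reserve_object(obj, ring, need_relocs);
            if (ret)
                goto err;
        }

err:    /* 3. Drop the transient pins and fences; on -ENOSPC evict and
         *    retry once (eviction call assumed). */
        list_for_each_entry(obj, struct drm_i915_gem_object,
                            objects, exec_list)
            i915_gem_execbuffer_unreserve_object(obj);

        if (ret != -ENOSPC || retry++)
            return ret;

        ret = i915_gem_evict_everything(ring->dev);
        if (ret)
            return ret;
    } while (1);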
504 struct drm_i915_gem_object *obj;
511 obj = list_first_entry(objects,
514 list_del_init(&obj->exec_list);
515 drm_gem_object_unreference(&obj->base);
559 obj = to_intel_bo(drm_gem_object_lookup(dev, file,
561 if (&obj->base == NULL) {
568 list_add_tail(&obj->exec_list, objects, (caddr_t)obj);
569 obj->exec_handle = exec[i].handle;
570 obj->exec_entry = &exec[i];
571 eb_add_object(eb, obj);
580 list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
581 int offset = obj->exec_entry - exec;
582 ret = i915_gem_execbuffer_relocate_object_slow(obj, eb,
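
Lines 504-582 belong to the slow relocation path taken after a fault: drop the references held so far (511-515), look every handle up again under the lock (559-571), then replay the relocations from the array copied out of user space (580-582). A sketch of that final loop; reloc and reloc_offset (the flat array of copied relocations and the per-object start index) are assumed from context:

    list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
        int offset = obj->exec_entry - exec;
        ret = i915_gem_execbuffer_relocate_object_slow(obj, eb,
                                                       reloc + reloc_offset[offset]);
        if (ret)
            goto err;
    }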
604 struct drm_i915_gem_object *obj;
608 list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
609 ret = i915_gem_object_sync(obj, ring);
613 if (obj->base.write_domain & I915_GEM_DOMAIN_CPU)
614 i915_gem_clflush_object(obj);
616 flush_domains |= obj->base.write_domain;
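
Lines 604-616 form the per-object body of the move-to-GPU step run just before the batch is dispatched: serialise against rendering queued on other rings, clflush CPU-dirty objects, and accumulate the write domains that still need flushing. A sketch of the loop; the declarations of ret and flush_domains and the error return are assumed:

    list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
        /* Wait for (or emit a semaphore against) work on other rings. */
        ret = i915_gem_object_sync(obj, ring);
        if (ret)
            return ret;

        /* CPU-dirty data must be flushed out of the cache before the
         * GPU reads it. */
        if (obj->base.write_domain & I915_GEM_DOMAIN_CPU)
            i915_gem_clflush_object(obj);

        flush_domains |= obj->base.write_domain;
    }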
682 struct drm_i915_gem_object *obj;
684 list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
685 obj->base.write_domain = obj->base.pending_write_domain;
686 if (obj->base.write_domain == 0)
687 obj->base.pending_read_domains |= obj->base.read_domains;
688 obj->base.read_domains = obj->base.pending_read_domains;
689 obj->fenced_gpu_access = obj->pending_fenced_gpu_access;
691 i915_gem_object_move_to_active(obj, ring);
692 if (obj->base.write_domain) {
693 obj->dirty = 1;
694 obj->last_write_seqno = intel_ring_get_seqno(ring);
695 if (obj->pin_count) /* check for potential scanout */
696 intel_mark_fb_busy(obj, ring);
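
Lines 682-696 are the post-dispatch bookkeeping loop: commit the pending read/write domains chosen during reservation, move each object onto the ring's active list, and mark written objects dirty, with a frontbuffer-busy hint for pinned (potential scanout) objects. Apart from the loop braces this is the matched code verbatim:

    list_for_each_entry(obj, struct drm_i915_gem_object, objects, exec_list) {
        /* Commit the domains staged while reserving the objects. */
        obj->base.write_domain = obj->base.pending_write_domain;
        if (obj->base.write_domain == 0)
            obj->base.pending_read_domains |= obj->base.read_domains;
        obj->base.read_domains = obj->base.pending_read_domains;
        obj->fenced_gpu_access = obj->pending_fenced_gpu_access;

        i915_gem_object_move_to_active(obj, ring);
        if (obj->base.write_domain) {
            obj->dirty = 1;
            obj->last_write_seqno = intel_ring_get_seqno(ring);
            if (obj->pin_count) /* check for potential scanout */
                intel_mark_fb_busy(obj, ring);
        }
    }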
706 struct drm_i915_gem_object *obj)
712 (void)__i915_add_request(ring, file, obj, NULL);
909 struct drm_i915_gem_object *obj;
911 obj = to_intel_bo(drm_gem_object_lookup(dev, file,
913 if (&obj->base == NULL) {
921 if (!list_empty(&obj->exec_list)) {
923 obj, exec[i].handle, i);
928 list_add_tail(&obj->exec_list, &objects, (caddr_t)obj);
929 obj->exec_handle = exec[i].handle;
930 obj->exec_entry = &exec[i];
931 eb_add_object(eb, obj);
934 node->obj_list[i] = (caddr_t)obj;
935 TRACE_GEM_OBJ_HISTORY(obj, "prepare emit");
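
Lines 909-935 are the ioctl's object lookup loop: translate each handle in the exec list into a GEM object, reject duplicates, thread the object onto the local list, record its exec entry, and hand it to eb_add_object() (line 62), which records it in the handle lookup table used later by the relocation code. A sketch of the loop; the loop bounds, the debug message text, and the error codes are assumptions:

    for (i = 0; i < args->buffer_count; i++) {
        obj = to_intel_bo(drm_gem_object_lookup(dev, file,
                                                exec[i].handle));
        if (&obj->base == NULL) {
            DRM_DEBUG("Invalid object handle %d at index %d\n",
                      exec[i].handle, i);
            ret = -ENOENT;
            goto err;
        }

        /* An object may only appear once per execbuffer. */
        if (!list_empty(&obj->exec_list)) {
            DRM_DEBUG("Object %p [handle %d, index %d] appears more than once\n",
                      obj, exec[i].handle, i);
            ret = -EINVAL;
            goto err;
        }

        list_add_tail(&obj->exec_list, &objects, (caddr_t)obj);
        obj->exec_handle = exec[i].handle;
        obj->exec_entry = &exec[i];
        eb_add_object(eb, obj);

        node->obj_list[i] = (caddr_t)obj;   /* illumos-specific batch tracking */
        TRACE_GEM_OBJ_HISTORY(obj, "prepare emit");
    }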
1037 struct drm_i915_gem_object *obj;
1039 obj = list_first_entry(&objects,
1042 TRACE_GEM_OBJ_HISTORY(obj, "finish emit");
1043 list_del_init(&obj->exec_list);
1044 drm_gem_object_unreference(&obj->base);
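
Lines 1037-1044 are the matching cleanup: pop each object off the local list and drop the reference taken during lookup. A sketch; the while condition and the list_first_entry continuation are assumed:

    while (!list_empty(&objects)) {
        struct drm_i915_gem_object *obj;

        obj = list_first_entry(&objects,
                               struct drm_i915_gem_object,
                               exec_list);
        TRACE_GEM_OBJ_HISTORY(obj, "finish emit");
        list_del_init(&obj->exec_list);
        drm_gem_object_unreference(&obj->base);
    }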