Lines Matching refs:base

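Every match in this listing goes through the embedded core GEM object: in this tree, struct drm_i915_gem_object wraps the generic struct drm_gem_object as a member named base, so &obj->base hands the core object to the common GEM helpers (handle creation, reference counting, release) while obj->base.size, obj->base.read_domains, obj->base.kaddr and so on reach the shared fields. The sketch below is a minimal, compile-only illustration of that layout, trimmed to the fields the matches actually touch and with approximate types; the real structures carry many more members (the mmap bookkeeping such as maplist, gtt_map_kaddr and seg_list is elided), and the to_gem() helper is hypothetical, added only to spell out the conversion. A second sketch after the listing covers the read/write-domain bookkeeping that several of these matches implement.

    #include <stddef.h>
    #include <stdint.h>

    struct drm_device;                      /* opaque; owned by the DRM core */

    /* Core GEM object, trimmed to the fields this listing touches (types are
     * approximate; this is an illustrative sketch, not the driver's header). */
    struct drm_gem_object {
            struct drm_device *dev;         /* obj->base.dev */
            size_t size;                    /* obj->base.size */
            uint32_t read_domains;          /* I915_GEM_DOMAIN_* currently readable */
            uint32_t write_domain;          /* the single domain allowed to write */
            void *kaddr;                    /* kernel mapping, obj->base.kaddr */
    };

    /* Driver-private object: embeds the core object as `base`, which is what
     * every "&obj->base" match converts back to for the common GEM calls. */
    struct drm_i915_gem_object {
            struct drm_gem_object base;
            uint32_t gtt_offset;            /* obj->gtt_offset once bound */
            /* ... further driver-private state elided ... */
    };

    static inline struct drm_gem_object *
    to_gem(struct drm_i915_gem_object *obj) /* hypothetical helper, for illustration */
    {
            return &obj->base;              /* the conversion the matches spell out inline */
    }
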
210 ret = drm_gem_handle_create(file, &obj->base, &handle);
212 drm_gem_object_release(&obj->base);
213 i915_gem_info_remove_obj(dev->dev_private, obj->base.size);
219 drm_gem_object_unreference(&obj->base);
326 if (!(obj->base.read_domains & I915_GEM_DOMAIN_CPU)) {
387 obj->base.kaddr + args->offset,
417 if (&obj->base == NULL) {
423 if (args->offset > obj->base.size ||
424 args->size > obj->base.size - args->offset) {
437 drm_gem_object_unreference(&obj->base);
465 ret = DRM_COPY_FROM_USER(obj->base.kaddr + args->offset, user_data, args->size);
506 if (obj->base.write_domain != I915_GEM_DOMAIN_CPU) {
521 if (!(obj->base.read_domains & I915_GEM_DOMAIN_CPU)
570 ret = DRM_COPY_FROM_USER(obj->base.kaddr + args->offset,
605 if (&obj->base == NULL) {
611 if (args->offset > obj->base.size ||
612 args->size > obj->base.size - args->offset) {
635 obj->base.write_domain != I915_GEM_DOMAIN_CPU) {
650 drm_gem_object_unreference(&obj->base);
835 obj->base.write_domain &= ~I915_GEM_GPU_DOMAINS;
870 struct drm_device *dev = obj->base.dev;
934 if (&obj->base == NULL) {
961 drm_gem_object_unreference(&obj->base);
983 if (&obj->base == NULL) {
992 drm_gem_object_unreference(&obj->base);
1109 struct ddi_umem_cookie *umem_cookie = obj->base.maplist.map->umem_cookie;
1112 if (obj->base.gtt_map_kaddr == NULL) {
1113 ret = drm_gem_create_mmap_offset(&obj->base);
1120 umem_cookie->cvaddr = obj->base.gtt_map_kaddr;
1125 obj->mmap_offset = obj->base.maplist.user_token;
1126 obj->base.maplist.map->callback = 1;
1148 struct drm_device *dev = obj->base.dev;
1151 if (obj->base.maplist.map->gtt_mmap) {
1153 if (!list_empty(&obj->base.seg_list)) {
1154 list_for_each_entry_safe(entry, temp, struct gem_map_list, &obj->base.seg_list, head) {
1161 drm_gem_release_mmap(&obj->base);
1162 obj->base.maplist.map->gtt_mmap = 0;
1169 drm_gem_free_mmap_offset(&obj->base);
1236 if (&obj->base == NULL) {
1241 if (obj->base.size > dev_priv->gtt.mappable_end) {
1255 drm_gem_object_unreference(&obj->base);
1296 obj->base.read_domains = obj->base.write_domain = I915_GEM_DOMAIN_CPU;
1304 btop(obj->base.size) * sizeof(caddr_t));
1331 pgcnt_t np = btop(obj->base.size);
1341 for (i = 0, va = obj->base.kaddr; i < np; i++, va += PAGESIZE) {
1361 struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
1382 struct drm_device *dev = obj->base.dev;
1395 drm_gem_object_reference(&obj->base);
1421 struct drm_device *dev = obj->base.dev;
1424 BUG_ON(obj->base.write_domain & ~I915_GEM_GPU_DOMAINS);
1434 obj->base.write_domain = 0;
1441 drm_gem_object_unreference(&obj->base);
1630 acthd < obj->gtt_offset + obj->base.size)
1797 obj->base.read_domains &= ~I915_GEM_GPU_DOMAINS;
1985 if (&obj->base == NULL) {
2011 drm_gem_object_unreference(&obj->base);
2022 drm_gem_object_unreference(&obj->base);
2050 if (to == NULL || !i915_semaphore_is_enabled(obj->base.dev))
2080 if ((obj->base.read_domains & I915_GEM_DOMAIN_GTT) == 0)
2084 obj->base.read_domains &= ~I915_GEM_DOMAIN_GTT;
2085 obj->base.write_domain &= ~I915_GEM_DOMAIN_GTT;
2094 drm_i915_private_t *dev_priv = obj->base.dev->dev_private;
2292 return obj && obj->base.read_domains & I915_GEM_DOMAIN_GTT;
2337 struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
2340 i915_gem_write_fence(obj->base.dev, reg, enable ? obj : NULL);
2372 struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
2440 struct drm_device *dev = obj->base.dev;
2564 struct drm_device *dev = obj->base.dev;
2574 obj->base.size,
2577 obj->base.size,
2581 obj->base.size,
2592 size = map_and_fenceable ? fence_size : obj->base.size;
2597 if (obj->base.size > gtt_max) {
2599 obj->base.size,
2659 obj->gtt_offset + obj->base.size <= dev_priv->gtt.mappable_end;
2696 drm_clflush_pages(obj->page_list, obj->base.size / PAGE_SIZE);
2704 if (obj->base.write_domain != I915_GEM_DOMAIN_GTT)
2717 obj->base.write_domain = 0;
2724 struct drm_device *dev = obj->base.dev;
2726 if (obj->base.write_domain != I915_GEM_DOMAIN_CPU)
2731 obj->base.write_domain = 0;
2743 drm_i915_private_t *dev_priv = obj->base.dev->dev_private;
2750 if (obj->base.write_domain == I915_GEM_DOMAIN_GTT)
2763 if ((obj->base.read_domains & I915_GEM_DOMAIN_GTT) == 0)
2770 // BUG_ON((obj->base.write_domain & ~I915_GEM_DOMAIN_GTT) != 0);
2771 obj->base.read_domains |= I915_GEM_DOMAIN_GTT;
2773 obj->base.read_domains = I915_GEM_DOMAIN_GTT;
2774 obj->base.write_domain = I915_GEM_DOMAIN_GTT;
2788 struct drm_device *dev = obj->base.dev;
2839 WARN_ON(obj->base.write_domain & ~I915_GEM_DOMAIN_CPU);
2840 WARN_ON(obj->base.read_domains & ~I915_GEM_DOMAIN_CPU);
2842 obj->base.read_domains = I915_GEM_DOMAIN_CPU;
2843 obj->base.write_domain = I915_GEM_DOMAIN_CPU;
2862 if (&obj->base == NULL) {
2869 drm_gem_object_unreference(&obj->base);
2898 if (&obj->base == NULL) {
2905 drm_gem_object_unreference(&obj->base);
2954 old_write_domain = obj->base.write_domain;
2955 old_read_domains = obj->base.read_domains;
2960 obj->base.write_domain = 0;
2961 obj->base.read_domains |= I915_GEM_DOMAIN_GTT;
2971 if ((obj->base.read_domains & I915_GEM_GPU_DOMAINS) == 0)
2979 obj->base.read_domains &= ~I915_GEM_GPU_DOMAINS;
2996 if (obj->base.write_domain == I915_GEM_DOMAIN_CPU)
3005 old_write_domain = obj->base.write_domain;
3006 old_read_domains = obj->base.read_domains;
3009 if ((obj->base.read_domains & I915_GEM_DOMAIN_CPU) == 0) {
3012 obj->base.read_domains |= I915_GEM_DOMAIN_CPU;
3018 BUG_ON((obj->base.write_domain & ~I915_GEM_DOMAIN_CPU) != 0);
3024 obj->base.read_domains = I915_GEM_DOMAIN_CPU;
3025 obj->base.write_domain = I915_GEM_DOMAIN_CPU;
3108 struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
3152 if (&obj->base == NULL) {
3178 drm_gem_object_unreference(&obj->base);
3197 if (&obj->base == NULL) {
3215 drm_gem_object_unreference(&obj->base);
3234 if (&obj->base == NULL) {
3251 drm_gem_object_unreference(&obj->base);
3290 i915_gem_info_add_obj(obj->base.dev->dev_private, obj->base.size);
3313 if (drm_gem_object_init(dev, &obj->base, size, gen) != 0) {
3322 obj->base.write_domain = I915_GEM_DOMAIN_CPU;
3323 obj->base.read_domains = I915_GEM_DOMAIN_CPU;
3354 struct drm_device *dev = obj->base.dev;
3384 // if (obj->base.import_attach)
3385 // drm_prime_gem_destroy(&obj->base, NULL);
3387 i915_gem_info_remove_obj(dev_priv, obj->base.size);
3390 kfree(obj->bit_17, sizeof(BITS_TO_LONGS(obj->base.size >> PAGE_SHIFT) * sizeof(long)));
3391 drm_gem_object_release(&obj->base);
3631 (void) memcpy(dev_priv->fbcon_obj->base.kaddr, dev->old_gtt, size);
3639 i915_gem_free_object(&dev_priv->fbcon_obj->base);
3866 page_count = obj->base.size / PAGE_SIZE;
3906 obj->base.size, align);
3908 DRM_ERROR("failed to init phys object %d size: %lu\n", id, obj->base.size);
3925 page_count = obj->base.size / PAGE_SIZE;
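
The matches at source lines 2750-2774 and 3005-3025 are the read/write-domain bookkeeping performed when an object is moved into the GTT or CPU domain: the target domain is added to read_domains, and a write additionally collapses read_domains to that one domain and makes it the sole write_domain (the WARN_ON/BUG_ON matches at 2839-2840 and 3018 assert that state). The following is a self-contained sketch of just that bookkeeping, with the surrounding wait/flush logic omitted and the domain bits reproduced from i915_drm.h; gem_domains and set_read_write_domain are illustrative names, not driver symbols.

    #include <stdint.h>

    /* Domain bits as defined in i915_drm.h, reproduced so the sketch stands alone. */
    #define I915_GEM_DOMAIN_CPU     0x00000001
    #define I915_GEM_DOMAIN_GTT     0x00000040

    struct gem_domains {                    /* stands in for obj->base.{read_domains,write_domain} */
            uint32_t read_domains;
            uint32_t write_domain;
    };

    /* Bookkeeping only: the real set-to-GTT/CPU-domain paths first wait for
     * rendering and flush the old write domain before touching these bits. */
    static void
    set_read_write_domain(struct gem_domains *d, uint32_t domain, int write)
    {
            d->read_domains |= domain;      /* cf. "read_domains |= I915_GEM_DOMAIN_GTT" (2771) */
            if (write) {
                    d->read_domains = domain;   /* a writer invalidates every other view (2773) */
                    d->write_domain = domain;   /* and becomes the only writable domain (2774) */
            }
    }

    int
    main(void)
    {
            struct gem_domains d = { I915_GEM_DOMAIN_CPU, I915_GEM_DOMAIN_CPU };

            set_read_write_domain(&d, I915_GEM_DOMAIN_GTT, 1);
            /* d is now { GTT, GTT }, the state the driver's assertions expect. */
            return (d.read_domains == I915_GEM_DOMAIN_GTT &&
                    d.write_domain == I915_GEM_DOMAIN_GTT) ? 0 : 1;
    }

Read-only transitions take only the |= branch, which is why several matches test membership with a bitwise AND (e.g. source lines 2080 and 2763) rather than comparing for equality.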