Lines Matching refs:dev

77 i915_gem_cleanup_ringbuffer(struct drm_device *dev);
84 drm_i915_private_t *dev_priv = dev->dev_private;
87 if (dev->driver->use_gem != 1)
93 spin_lock(&dev->struct_mutex);
98 spin_unlock(&dev->struct_mutex);
103 dev->gtt_total = (uint32_t) (args.gtt_end - args.gtt_start);
106 (unsigned long) args.gtt_start, dev->gtt_total);
107 DRM_DEBUG("i915_gem_init_ioctl dev->gtt_total %x, dev_priv->mm.gtt_space 0x%x, gtt_start 0x%lx", dev->gtt_total, dev_priv->mm.gtt_space, args.gtt_start);
108 ASSERT(dev->gtt_total != 0);
110 spin_unlock(&dev->struct_mutex);
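
The refs at 77–110 fall inside i915_gem_init_ioctl(), which records the GTT range userland hands the driver. A minimal sketch of that pattern, assuming page-aligned gtt_start/gtt_end fields in the ioctl args (the validation details are illustrative, not the verbatim driver code):

    spin_lock(&dev->struct_mutex);
    if (args.gtt_start >= args.gtt_end ||
        (args.gtt_start & (PAGE_SIZE - 1)) != 0 ||
        (args.gtt_end & (PAGE_SIZE - 1)) != 0) {
            spin_unlock(&dev->struct_mutex);
            return (EINVAL);
    }
    /* the managed range backs dev_priv->mm.gtt_space; its size is
     * cached on the device for later aperture queries */
    dev->gtt_total = (uint32_t) (args.gtt_end - args.gtt_start);
    ASSERT(dev->gtt_total != 0);
    spin_unlock(&dev->struct_mutex);
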
124 if (dev->driver->use_gem != 1)
127 args.aper_size = (uint64_t)dev->gtt_total;
129 atomic_read(&dev->pin_memory));
136 DRM_DEBUG("i915_gem_get_aperture_ioctl called sizeof %d, aper_size 0x%x, aper_available_size 0x%x\n", (int) sizeof(args), dev->gtt_total, args.aper_available_size);
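
Lines 124–136 are the matching aperture query: the reported size is the cached gtt_total, and the available size subtracts memory pinned by the kernel (the subtraction is an assumption; only the two terms appear above):

    args.aper_size = (uint64_t) dev->gtt_total;
    args.aper_available_size = args.aper_size -
        (uint64_t) atomic_read(&dev->pin_memory);
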
154 if (dev->driver->use_gem != 1)
168 obj = drm_gem_object_alloc(dev, args.size);
175 spin_lock(&dev->struct_mutex);
177 spin_unlock(&dev->struct_mutex);
207 if (dev->driver->use_gem != 1)
228 spin_lock(&dev->struct_mutex);
233 spin_unlock(&dev->struct_mutex);
249 spin_unlock(&dev->struct_mutex);
256 i915_gem_gtt_pwrite(struct drm_device *dev, struct drm_gem_object *obj,
265 spin_lock(&dev->struct_mutex);
268 spin_unlock(&dev->struct_mutex);
288 spin_unlock(&dev->struct_mutex);
296 i915_gem_shmem_pwrite(struct drm_device *dev, struct drm_gem_object *obj,
319 if (dev->driver->use_gem != 1)
350 dev->gtt_total != 0)
351 ret = i915_gem_gtt_pwrite(dev, obj, &args, fpriv);
353 ret = i915_gem_shmem_pwrite(dev, obj, &args, fpriv);
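
Lines 256–353 cover the two pwrite back ends. The dispatch at 350–353 takes the GTT path only when the aperture is initialized; the object-domain half of the guard on line 350 is cut off above, so the first condition here is an assumption:

    if (obj->write_domain == I915_GEM_DOMAIN_GTT &&   /* assumed guard */
        dev->gtt_total != 0)
            ret = i915_gem_gtt_pwrite(dev, obj, &args, fpriv);
    else
            ret = i915_gem_shmem_pwrite(dev, obj, &args, fpriv);
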
376 if (dev->driver->use_gem != 1)
406 spin_lock(&dev->struct_mutex);
424 spin_unlock(&dev->struct_mutex);
443 if (dev->driver->use_gem != 1)
449 spin_lock(&dev->struct_mutex);
452 spin_unlock(&dev->struct_mutex);
467 spin_unlock(&dev->struct_mutex);
488 if (dev->driver->use_gem != 1)
499 ret = ddi_devmap_segmap(fpriv->dev, (off_t)obj->map->handle,
505 spin_lock(&dev->struct_mutex);
507 spin_unlock(&dev->struct_mutex);
534 struct drm_device *dev = obj->dev;
535 drm_i915_private_t *dev_priv = dev->dev_private;
552 struct drm_device *dev = obj->dev;
553 drm_i915_private_t *dev_priv = dev->dev_private;
563 struct drm_device *dev = obj->dev;
564 drm_i915_private_t *dev_priv = dev->dev_private;
589 i915_add_request(struct drm_device *dev, uint32_t flush_domains)
591 drm_i915_private_t *dev_priv = dev->dev_private;
610 DRM_DEBUG("add_request seqno = %d dev 0x%lx", seqno, (unsigned long) dev);
654 worktimer_id = timeout(i915_gem_retire_work_handler, (void *) dev, DRM_HZ);
667 i915_retire_commands(struct drm_device *dev)
669 drm_i915_private_t *dev_priv = dev->dev_private;
675 if (IS_I965G(dev))
690 i915_gem_retire_request(struct drm_device *dev,
693 drm_i915_private_t *dev_priv = dev->dev_private;
734 i915_get_gem_seqno(struct drm_device *dev)
736 drm_i915_private_t *dev_priv = dev->dev_private;
745 i915_gem_retire_requests(struct drm_device *dev)
747 drm_i915_private_t *dev_priv = dev->dev_private;
750 seqno = i915_get_gem_seqno(dev);
760 i915_gem_retire_request(dev, request);
772 struct drm_device *dev = (struct drm_device *)device;
773 drm_i915_private_t *dev_priv = dev->dev_private;
775 spin_lock(&dev->struct_mutex);
779 spin_unlock(&dev->struct_mutex);
783 i915_gem_retire_requests(dev);
787 worktimer_id = timeout(i915_gem_retire_work_handler, (void *) dev, DRM_HZ);
789 spin_unlock(&dev->struct_mutex);
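
Lines 772–789 are the periodic retire handler armed by timeout(9F) at 654 and re-armed at 787. Its reconstructed shape, with the suspended check and the list test assumed:

    static void
    i915_gem_retire_work_handler(void *device)
    {
            struct drm_device *dev = (struct drm_device *) device;
            drm_i915_private_t *dev_priv = dev->dev_private;

            spin_lock(&dev->struct_mutex);
            if (dev_priv->mm.suspended) {     /* assumed teardown guard */
                    spin_unlock(&dev->struct_mutex);
                    return;
            }
            i915_gem_retire_requests(dev);
            /* re-arm while requests remain; worktimer_id scoped as in
             * the driver */
            if (!list_empty(&dev_priv->mm.request_list))   /* assumed */
                    worktimer_id = timeout(i915_gem_retire_work_handler,
                        (void *) dev, DRM_HZ);
            spin_unlock(&dev->struct_mutex);
    }
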
794 * @dev: drm device to reset
807 void i965_reset(struct drm_device *dev, u8 flags)
810 drm_i915_private_t *dev_priv = dev->dev_private;
815 i915_save_display(dev);
817 if (pci_config_setup(dev->dip, &conf_hdl) != DDI_SUCCESS) {
869 i915_kernel_lost_context(dev);
871 (void) drm_irq_install(dev);
878 i915_restore_display(dev);
886 i915_wait_request(struct drm_device *dev, uint32_t seqno)
888 drm_i915_private_t *dev_priv = dev->dev_private;
894 if (!i915_seqno_passed(i915_get_gem_seqno(dev), seqno)) {
895 if (IS_IGDNG(dev))
902 (void) i915_driver_irq_preinstall(dev);
903 i915_driver_irq_postinstall(dev);
907 i915_user_irq_on(dev);
909 (i915_seqno_passed(i915_get_gem_seqno(dev), seqno) ||
911 i915_user_irq_off(dev);
919 if (ret == -2 && (seqno > i915_get_gem_seqno(dev))) {
920 if (IS_I965G(dev)) {
921 DRM_ERROR("GPU hang detected, trying to reset ... wait for irq_queue seqno %d, now seqno %d", seqno, i915_get_gem_seqno(dev));
923 i965_reset(dev, GDRST_RENDER);
924 i915_gem_retire_requests(dev);
937 i915_gem_retire_requests(dev);
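
Lines 886–937 show i915_wait_request(): enable the user interrupt, sleep until the seqno passes, and treat a timeout (the -2 return) as a GPU hang, resetting i965-class hardware before retiring what survived. Condensed, with the wait macro and the wedged flag assumed:

    i915_user_irq_on(dev);
    DRM_WAIT_ON(ret, dev_priv->irq_queue, 3 * DRM_HZ,
        (i915_seqno_passed(i915_get_gem_seqno(dev), seqno) ||
        dev_priv->mm.wedged));                /* assumed wait condition */
    i915_user_irq_off(dev);

    if (ret == -2 && (seqno > i915_get_gem_seqno(dev))) {
            if (IS_I965G(dev)) {
                    i965_reset(dev, GDRST_RENDER);
                    i915_gem_retire_requests(dev);
            }
    }
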
943 i915_gem_flush(struct drm_device *dev,
947 drm_i915_private_t *dev_priv = dev->dev_private;
955 drm_agp_chipset_flush(dev);
991 if (!IS_I965G(dev)) {
1018 struct drm_device *dev = obj->dev;
1043 ret = i915_wait_request(dev, seqno);
1045 DRM_ERROR("%s: i915_wait_request request->seqno %d now %d\n", __func__, seqno, i915_get_gem_seqno(dev));
1059 struct drm_device *dev = obj->dev;
1092 (void) drm_agp_unbind_pages(dev, obj->size / PAGE_SIZE,
1102 atomic_dec(&dev->gtt_count);
1103 atomic_sub(obj->size, &dev->gtt_memory);
1116 i915_gem_evict_something(struct drm_device *dev)
1118 drm_i915_private_t *dev_priv = dev->dev_private;
1150 ret = i915_wait_request(dev, request->seqno);
1175 i915_gem_flush(dev,
1178 (void) i915_add_request(dev, obj->write_domain);
1198 i915_gem_evict_everything(struct drm_device *dev)
1203 ret = i915_gem_evict_something(dev);
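
Lines 1116–1203: i915_gem_evict_something() frees one object's GTT binding (waiting on or flushing it first, per 1150–1178), and i915_gem_evict_everything() at 1198 just loops it until nothing evictable remains. Sketch, with the terminating errno an assumption:

    static int
    i915_gem_evict_everything(struct drm_device *dev)
    {
            int ret;

            for (;;) {
                    ret = i915_gem_evict_something(dev);
                    if (ret != 0)
                            break;
            }
            /* assumed: exhausting the candidates counts as success */
            return (ret == ENOMEM ? 0 : ret);
    }
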
1220 struct drm_device *dev = obj->dev;
1221 drm_i915_private_t *dev_priv = dev->dev_private;
1261 ret = i915_gem_evict_something(dev);
1284 obj_priv->agp_mem = drm_agp_bind_pages(dev,
1295 atomic_inc(&dev->gtt_count);
1296 atomic_add(obj->size, &dev->gtt_memory);
1327 struct drm_device *dev = obj->dev;
1334 i915_gem_flush(dev, 0, obj->write_domain);
1335 seqno = i915_add_request(dev, obj->write_domain);
1359 struct drm_device *dev = obj->dev;
1365 drm_agp_chipset_flush(dev);
1422 struct drm_device *dev = obj->dev;
1443 drm_agp_chipset_flush(dev);
1579 struct drm_device *dev = obj->dev;
1622 dev->invalidate_domains |= invalidate_domains;
1623 dev->flush_domains |= flush_domains;
1628 dev->invalidate_domains, dev->flush_domains);
1641 struct drm_device *dev = obj->dev;
1657 drm_agp_chipset_flush(dev);
1900 i915_dispatch_gem_execbuffer(struct drm_device *dev,
1904 drm_i915_private_t *dev_priv = dev->dev_private;
1929 int ret = i915_emit_box(dev, boxes, i,
1936 if (IS_I830(dev) || IS_845G(dev)) {
1945 if (IS_I965G(dev)) {
1971 i915_gem_ring_throttle(struct drm_device *dev, struct drm_file *file_priv)
1977 spin_lock(&dev->struct_mutex);
1982 ret = i915_wait_request(dev, seqno);
1984 DRM_ERROR("%s: i915_wait_request request->seqno %d now %d\n", __func__, seqno, i915_get_gem_seqno(dev));
1986 spin_unlock(&dev->struct_mutex);
1995 drm_i915_private_t *dev_priv = dev->dev_private;
2007 if (dev->driver->use_gem != 1)
2041 spin_lock(&dev->struct_mutex);
2045 spin_unlock(&dev->struct_mutex);
2051 spin_unlock(&dev->struct_mutex);
2111 ret = i915_gem_evict_everything(dev);
2125 dev->invalidate_domains = 0;
2126 dev->flush_domains = 0;
2137 if (dev->invalidate_domains | dev->flush_domains) {
2141 dev->invalidate_domains,
2142 dev->flush_domains);
2143 i915_gem_flush(dev,
2144 dev->invalidate_domains,
2145 dev->flush_domains);
2146 if (dev->flush_domains) {
2147 (void) i915_add_request(dev, dev->flush_domains);
2161 ret = i915_dispatch_gem_execbuffer(dev, &args, exec_offset);
2171 flush_domains = i915_retire_commands(dev);
2180 seqno = i915_add_request(dev, flush_domains);
2202 spin_unlock(&dev->struct_mutex);
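
Lines 2125–2202 are the heart of the execbuffer path: each object's pending domain transition is OR-ed into the device-wide masks (the per-object side shows at 1622–1628), one combined flush is emitted, and a request fences it. Condensed from the lines above:

    dev->invalidate_domains = 0;
    dev->flush_domains = 0;
    /* ... each buffer ORs its needs into the two masks ... */
    if (dev->invalidate_domains | dev->flush_domains) {
            i915_gem_flush(dev, dev->invalidate_domains,
                dev->flush_domains);
            if (dev->flush_domains)
                    (void) i915_add_request(dev, dev->flush_domains);
    }
    ret = i915_dispatch_gem_execbuffer(dev, &args, exec_offset);
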
2228 struct drm_device *dev = obj->dev;
2245 atomic_inc(&dev->pin_count);
2246 atomic_add(obj->size, &dev->pin_memory);
2259 struct drm_device *dev = obj->dev;
2260 drm_i915_private_t *dev_priv = dev->dev_private;
2276 atomic_dec(&dev->pin_count);
2277 atomic_sub(obj->size, &dev->pin_memory);
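
Lines 2228–2277 show the pin/unpin bookkeeping: the first pin of an object charges its size to the device-wide counters that the aperture query at 129 subtracts back out. Sketch; obj_priv->pin_count is an assumed field name:

    if (obj_priv->pin_count++ == 0) {         /* first pin */
            atomic_inc(&dev->pin_count);
            atomic_add(obj->size, &dev->pin_memory);
    }
    ...
    if (--obj_priv->pin_count == 0) {         /* last unpin */
            atomic_dec(&dev->pin_count);
            atomic_sub(obj->size, &dev->pin_memory);
    }
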
2291 if (dev->driver->use_gem != 1)
2297 spin_lock(&dev->struct_mutex);
2303 spin_unlock(&dev->struct_mutex);
2313 spin_unlock(&dev->struct_mutex);
2323 spin_unlock(&dev->struct_mutex);
2339 spin_unlock(&dev->struct_mutex);
2353 if (dev->driver->use_gem != 1)
2359 spin_lock(&dev->struct_mutex);
2365 spin_unlock(&dev->struct_mutex);
2374 spin_unlock(&dev->struct_mutex);
2383 spin_unlock(&dev->struct_mutex);
2397 if (dev->driver->use_gem != 1)
2403 spin_lock(&dev->struct_mutex);
2408 spin_unlock(&dev->struct_mutex);
2428 spin_unlock(&dev->struct_mutex);
2438 if (dev->driver->use_gem != 1)
2441 return i915_gem_ring_throttle(dev, fpriv);
2508 i915_gem_evict_from_list(struct drm_device *dev, struct list_head *head, uint32_t type)
2522 spin_unlock(&dev->struct_mutex);
2530 spin_unlock(&dev->struct_mutex);
2540 i915_gem_idle(struct drm_device *dev, uint32_t type)
2542 drm_i915_private_t *dev_priv = dev->dev_private;
2546 spin_lock(&dev->struct_mutex);
2549 spin_unlock(&dev->struct_mutex);
2565 i915_kernel_lost_context(dev);
2569 i915_gem_flush(dev, ~(I915_GEM_DOMAIN_CPU|I915_GEM_DOMAIN_GTT),
2571 seqno = i915_add_request(dev, ~(I915_GEM_DOMAIN_CPU |
2574 spin_unlock(&dev->struct_mutex);
2582 cur_seqno = i915_get_gem_seqno(dev);
2598 i915_gem_retire_requests(dev);
2626 ret = i915_gem_evict_from_list(dev, &dev_priv->mm.inactive_list, type);
2629 spin_unlock(&dev->struct_mutex);
2633 i915_gem_cleanup_ringbuffer(dev);
2634 spin_unlock(&dev->struct_mutex);
2640 i915_gem_init_hws(struct drm_device *dev)
2642 drm_i915_private_t *dev_priv = dev->dev_private;
2650 if (!I915_NEED_GFX_HWS(dev))
2654 obj = drm_gem_object_alloc(dev, 4096);
2669 dev_priv->hws_map.offset = dev->agp->agp_info.agpi_aperbase + obj_priv->gtt_offset;
2675 drm_core_ioremap(&dev_priv->hws_map, dev);
2696 i915_gem_cleanup_hws(struct drm_device *dev)
2698 drm_i915_private_t *dev_priv = dev->dev_private;
2706 drm_core_ioremapfree(&dev_priv->hws_map, dev);
2719 i915_gem_init_ringbuffer(struct drm_device *dev)
2721 drm_i915_private_t *dev_priv = dev->dev_private;
2727 ret = i915_gem_init_hws(dev);
2730 obj = drm_gem_object_alloc(dev, 128 * 1024);
2733 i915_gem_cleanup_hws(dev);
2741 i915_gem_cleanup_hws(dev);
2749 dev_priv->ring.map.offset = dev->agp->agp_info.agpi_aperbase + obj_priv->gtt_offset;
2755 drm_core_ioremap(&dev_priv->ring.map, dev);
2760 i915_gem_cleanup_hws(dev);
2815 i915_kernel_lost_context(dev);
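
Lines 2640–2815 cover hardware-status-page and ring setup. Ring init allocates a 128 KB GEM object, maps it through the AGP aperture, and unwinds through i915_gem_cleanup_hws() on every failure. Sketch of that unwind structure (the failure checks are assumptions):

    ret = i915_gem_init_hws(dev);
    if (ret != 0)
            return (ret);

    obj = drm_gem_object_alloc(dev, 128 * 1024);
    if (obj == NULL) {
            i915_gem_cleanup_hws(dev);
            return (ENOMEM);
    }

    dev_priv->ring.map.offset = dev->agp->agp_info.agpi_aperbase +
        obj_priv->gtt_offset;
    drm_core_ioremap(&dev_priv->ring.map, dev);
    if (dev_priv->ring.map.handle == NULL) {  /* assumed failure check */
            i915_gem_cleanup_hws(dev);
            return (EIO);
    }
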
2821 i915_gem_cleanup_ringbuffer(struct drm_device *dev)
2823 drm_i915_private_t *dev_priv = dev->dev_private;
2828 drm_core_ioremapfree(&dev_priv->ring.map, dev);
2834 i915_gem_cleanup_hws(dev);
2842 drm_i915_private_t *dev_priv = dev->dev_private;
2845 if (dev->driver->use_gem != 1)
2853 dev_priv->mm.gtt_mapping.offset = dev->agp->agp_info.agpi_aperbase;
2854 dev_priv->mm.gtt_mapping.size = dev->agp->agp_info.agpi_apersize;
2859 drm_core_ioremap(&dev_priv->mm.gtt_mapping, dev);
2861 spin_lock(&dev->struct_mutex);
2863 ret = i915_gem_init_ringbuffer(dev);
2867 spin_unlock(&dev->struct_mutex);
2869 (void) drm_irq_install(dev);
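
Lines 2842–2869 (the entervt path) map the whole AGP aperture once, bring the ring up under struct_mutex, and only then install the IRQ handler; the leavevt path at 2879–2888 reverses the order. Rearranged from the lines above:

    dev_priv->mm.gtt_mapping.offset = dev->agp->agp_info.agpi_aperbase;
    dev_priv->mm.gtt_mapping.size = dev->agp->agp_info.agpi_apersize;
    drm_core_ioremap(&dev_priv->mm.gtt_mapping, dev);

    spin_lock(&dev->struct_mutex);
    ret = i915_gem_init_ringbuffer(dev);
    spin_unlock(&dev->struct_mutex);
    if (ret != 0)
            return (ret);

    (void) drm_irq_install(dev);
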
2879 drm_i915_private_t *dev_priv = dev->dev_private;
2882 if (dev->driver->use_gem != 1)
2885 ret = i915_gem_idle(dev, 0);
2886 (void) drm_irq_uninstall(dev);
2888 drm_core_ioremapfree(&dev_priv->mm.gtt_mapping, dev);
2893 i915_gem_lastclose(struct drm_device *dev)
2895 drm_i915_private_t *dev_priv = dev->dev_private;
2898 ret = i915_gem_idle(dev, 1);
2906 i915_gem_load(struct drm_device *dev)
2908 drm_i915_private_t *dev_priv = dev->dev_private;
2916 i915_gem_detect_bit_6_swizzle(dev);