Lines matching refs:dev_priv
412 struct drm_i915_private *dev_priv = dev->dev_private;
421 dev_priv->pch_type = PCH_NOP;
443 dev_priv->pch_id = (unsigned short) device_id;
445 dev_priv->pch_type = PCH_IBX;
449 dev_priv->pch_type = PCH_CPT;
454 dev_priv->pch_type = PCH_CPT;
458 dev_priv->pch_type = PCH_LPT;
463 dev_priv->pch_type = PCH_LPT;
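
The block above matches the shape of intel_detect_pch(): the ISA bridge's PCI device ID is saved as pch_id and mapped onto a PCH generation. A minimal standalone sketch of that mapping, assuming invented device IDs and an invented high-byte mask rather than the real Intel constants:

    #include <stdio.h>

    enum pch_type { PCH_NONE, PCH_NOP, PCH_IBX, PCH_CPT, PCH_LPT };

    /* Placeholder IDs; the real driver compares the masked bridge
     * ID against Intel-defined PCH device IDs. */
    #define FAKE_IBX_ID 0x3b00
    #define FAKE_CPT_ID 0x1c00
    #define FAKE_LPT_ID 0x8c00

    static enum pch_type detect_pch(unsigned short device_id)
    {
        switch (device_id & 0xff00) {        /* hypothetical mask */
        case FAKE_IBX_ID: return PCH_IBX;    /* Ibex Peak */
        case FAKE_CPT_ID: return PCH_CPT;    /* Cougar Point */
        case FAKE_LPT_ID: return PCH_LPT;    /* Lynx Point */
        default:          return PCH_NONE;
        }
    }

    int main(void)
    {
        printf("pch_type = %d\n", detect_pch(0x1c04)); /* -> PCH_CPT */
        return 0;
    }
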
491 struct drm_i915_private *dev_priv = dev->dev_private;
495 mutex_lock(&dev_priv->modeset_restore_lock);
496 dev_priv->modeset_restore = MODESET_SUSPENDED;
497 mutex_unlock(&dev_priv->modeset_restore_lock);
506 if (drm_core_check_feature(dev, DRIVER_MODESET) && dev_priv->gtt.total != 0) {
511 del_timer_sync(&dev_priv->rps.delayed_resume_timer);
515 dev_priv->enable_hotplug_processing = false;
521 dev_priv->display.crtc_disable(crtc);
526 if (dev_priv->gtt.total != 0)
535 struct drm_i915_private *dev_priv = dev->dev_private;
543 if (!dev || !dev_priv) {
544 DRM_ERROR("dev: %p, dev_priv: %p\n", dev, dev_priv);
559 struct drm_i915_private *dev_priv = dev->dev_private;
562 if (dev_priv->gtt.total != 0)
566 if (drm_core_check_feature(dev, DRIVER_MODESET) && dev_priv->gtt.total != 0) {
570 dev_priv->mm.suspended = 0;
590 dev_priv->enable_hotplug_processing = true;
593 mutex_lock(&dev_priv->modeset_restore_lock);
594 dev_priv->modeset_restore = MODESET_DONE;
595 mutex_unlock(&dev_priv->modeset_restore_lock);
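
Lines 495-497 and 593-595 bracket the freeze and thaw paths with the same take-lock, set-flag, drop-lock sequence around modeset_restore, so concurrent paths always observe a consistent suspend state. A user-space sketch of that handshake, with a pthread mutex standing in for the kernel mutex and all other driver state omitted:

    #include <pthread.h>
    #include <stdio.h>

    enum { MODESET_ON, MODESET_SUSPENDED, MODESET_DONE };

    struct fake_i915_private {
        pthread_mutex_t modeset_restore_lock;
        int modeset_restore;
    };

    /* Freeze side: publish "suspended" before touching hardware. */
    static void freeze(struct fake_i915_private *p)
    {
        pthread_mutex_lock(&p->modeset_restore_lock);
        p->modeset_restore = MODESET_SUSPENDED;
        pthread_mutex_unlock(&p->modeset_restore_lock);
    }

    /* Thaw side: publish "done" only once restore has finished. */
    static void thaw(struct fake_i915_private *p)
    {
        pthread_mutex_lock(&p->modeset_restore_lock);
        p->modeset_restore = MODESET_DONE;
        pthread_mutex_unlock(&p->modeset_restore_lock);
    }

    int main(void)
    {
        struct fake_i915_private p = { PTHREAD_MUTEX_INITIALIZER, MODESET_ON };
        freeze(&p);
        thaw(&p);
        printf("state = %d\n", p.modeset_restore); /* 2: MODESET_DONE */
        return 0;
    }
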
603 struct drm_i915_private *dev_priv = dev->dev_private;
606 if (dev_priv->gtt.total != 0)
609 dev_priv->gtt.total != 0) {
622 struct drm_i915_private *dev_priv = dev->dev_private;
624 if (dev_priv->gtt.total != 0)
627 dev_priv->gtt.total != 0) {
643 struct drm_i915_private *dev_priv = dev->dev_private;
707 struct drm_i915_private *dev_priv = dev->dev_private;
729 struct drm_i915_private *dev_priv = dev->dev_private;
736 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
750 if (dev_priv->forcewake_count)
751 dev_priv->gt.force_wake_get(dev_priv);
753 dev_priv->gt.force_wake_put(dev_priv);
756 dev_priv->gt_fifo_count = I915_READ_NOTRACE(GT_FIFO_FREE_ENTRIES);
758 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
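
Lines 736-758 toggle forcewake and sample the free-FIFO-entry count while gt_lock is held with interrupts saved. The essence is a refcounted wakeref: the first holder powers the GT domain up, the last lets it sleep. A compact sketch of that pattern, with a pthread mutex in place of the irq-safe spinlock and printouts in place of register writes:

    #include <pthread.h>
    #include <stdio.h>

    struct fake_gt {
        pthread_mutex_t gt_lock;   /* stands in for the spinlock */
        int forcewake_count;       /* outstanding wakeref holders */
    };

    static void force_wake_get(struct fake_gt *gt)
    {
        pthread_mutex_lock(&gt->gt_lock);
        if (gt->forcewake_count++ == 0)
            puts("wake hardware"); /* first holder powers the GT up */
        pthread_mutex_unlock(&gt->gt_lock);
    }

    static void force_wake_put(struct fake_gt *gt)
    {
        pthread_mutex_lock(&gt->gt_lock);
        if (--gt->forcewake_count == 0)
            puts("allow sleep");   /* last holder lets the GT sleep */
        pthread_mutex_unlock(&gt->gt_lock);
    }

    int main(void)
    {
        struct fake_gt gt = { PTHREAD_MUTEX_INITIALIZER, 0 };
        force_wake_get(&gt);
        force_wake_put(&gt);
        return 0;
    }
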
791 struct drm_i915_private *dev_priv = dev->dev_private;
803 simulated = dev_priv->gpu_error.stop_rings != 0;
806 if (!simulated && cur_time.tv_sec - dev_priv->gpu_error.last_reset < 5) {
817 dev_priv->gpu_error.stop_rings = 0;
825 dev_priv->gpu_error.last_reset = cur_time.tv_sec;
849 !dev_priv->mm.suspended) {
853 dev_priv->mm.suspended = 0;
857 for_each_ring(ring, dev_priv, i)
861 if (dev_priv->mm.aliasing_ppgtt) {
862 ret = dev_priv->mm.aliasing_ppgtt->enable(dev);
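
Lines 803-825 separate simulated hangs (stop_rings != 0, injected for testing) from real ones, refuse a second real reset within five seconds of the last, and stamp last_reset on the way through. A sketch of that debounce logic, assuming a seconds-resolution clock:

    #include <stdbool.h>
    #include <stdio.h>
    #include <time.h>

    struct fake_gpu_error {
        unsigned int stop_rings; /* nonzero: hang was injected */
        time_t last_reset;       /* second of the last real reset */
    };

    /* Returns true if the reset may proceed. */
    static bool reset_allowed(struct fake_gpu_error *e, time_t now)
    {
        bool simulated = e->stop_rings != 0;

        /* Real hangs arriving back-to-back mean we are wedged. */
        if (!simulated && now - e->last_reset < 5)
            return false;

        if (simulated)
            e->stop_rings = 0;   /* consume the injected hang */
        else
            e->last_reset = now; /* open the 5 s debounce window */
        return true;
    }

    int main(void)
    {
        struct fake_gpu_error e = { 0, 0 };
        time_t now = time(NULL);
        int first = reset_allowed(&e, now);
        int second = reset_allowed(&e, now);
        printf("%d %d\n", first, second); /* 1 0: repeat is debounced */
        return 0;
    }
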
1034 struct drm_i915_private *dev_priv;
1043 dev_priv = dev->dev_private;
1045 if (dev_priv && dev_priv->gtt.total != 0) {
1058 if (dev_priv->fbcon_obj != NULL)
1089 drm_mm_takedown(&dev_priv->mm.stolen);
1094 if (dev_priv->gtt.scratch_page)
1099 list_for_each_entry_safe(r_list, list_temp, &dev_priv->batch_list, head) {
1104 list_del(&dev_priv->batch_list);
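
Lines 1099-1104 drain dev_priv->batch_list with the _safe iterator so each node can be unlinked and freed while the walk continues. A self-contained sketch of that teardown over a minimal doubly linked list (batch_info_list's real fields are not visible in this listing, so only the link member is modeled):

    #include <stddef.h>
    #include <stdlib.h>

    struct list_head { struct list_head *next, *prev; };

    struct batch_node {            /* stand-in for batch_info_list */
        struct list_head head;
    };

    static void list_del(struct list_head *e)
    {
        e->prev->next = e->next;
        e->next->prev = e->prev;
    }

    static void teardown(struct list_head *list)
    {
        struct list_head *pos = list->next, *tmp;

        while (pos != list) {
            tmp = pos->next;       /* _safe: fetch next before freeing */
            list_del(pos);
            free((char *)pos - offsetof(struct batch_node, head));
            pos = tmp;
        }
    }

    int main(void)
    {
        struct list_head list = { &list, &list };
        struct batch_node *n = malloc(sizeof(*n));

        if (!n)
            return 1;
        n->head.next = n->head.prev = &list;   /* one-node list */
        list.next = list.prev = &n->head;
        teardown(&list);
        return 0;
    }
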
1180 #define NEEDS_FORCE_WAKE(dev_priv, reg) \
1181 ((HAS_FORCE_WAKE((dev_priv)->dev)) && \
1186 ilk_dummy_write(struct drm_i915_private *dev_priv)
1195 hsw_unclaimed_reg_clear(struct drm_i915_private *dev_priv, u32 reg)
1197 if (HAS_FPGA_DBG_UNCLAIMED(dev_priv->dev) &&
1206 hsw_unclaimed_reg_check(struct drm_i915_private *dev_priv, u32 reg)
1208 if (HAS_FPGA_DBG_UNCLAIMED(dev_priv->dev) &&
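
NEEDS_FORCE_WAKE on lines 1180-1181 gates forcewake on a platform capability plus (in the continuation not shown in this listing) a test of the register offset, since only registers in the GT power well need it. A sketch of that offset gating, with a placeholder range boundary:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical boundary: registers below it live in the GT
     * power well; the real cutoff is platform-specific. */
    #define FAKE_GT_RANGE_END 0x40000u

    static bool has_force_wake = true;  /* per-device capability */

    static bool needs_force_wake(uint32_t reg)
    {
        return has_force_wake && reg < FAKE_GT_RANGE_END;
    }

    int main(void)
    {
        printf("%d %d\n", needs_force_wake(0x2000),
               needs_force_wake(0x50000)); /* 1 0 */
        return 0;
    }
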
1215 u8 i915_read8(struct drm_i915_private *dev_priv, u32 reg)
1219 if (IS_GEN5(dev_priv->dev))
1220 ilk_dummy_write(dev_priv);
1222 if (NEEDS_FORCE_WAKE(dev_priv, reg)) {
1224 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
1225 if (dev_priv->forcewake_count == 0)
1226 dev_priv->gt.force_wake_get(dev_priv);
1227 val = DRM_READ8(dev_priv->regs, reg);
1228 if (dev_priv->forcewake_count == 0)
1229 dev_priv->gt.force_wake_put(dev_priv);
1230 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
1232 val = DRM_READ8(dev_priv->regs, (reg));
1236 u16 i915_read16(struct drm_i915_private *dev_priv, u32 reg)
1240 if (IS_GEN5(dev_priv->dev))
1241 ilk_dummy_write(dev_priv);
1243 if (NEEDS_FORCE_WAKE(dev_priv, reg)) {
1245 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
1246 if (dev_priv->forcewake_count == 0)
1247 dev_priv->gt.force_wake_get(dev_priv);
1248 val = DRM_READ16(dev_priv->regs, reg);
1249 if (dev_priv->forcewake_count == 0)
1250 dev_priv->gt.force_wake_put(dev_priv);
1251 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
1253 val = DRM_READ16(dev_priv->regs, (reg));
1257 u32 i915_read32(struct drm_i915_private *dev_priv, u32 reg)
1261 if (IS_GEN5(dev_priv->dev))
1262 ilk_dummy_write(dev_priv);
1264 if (NEEDS_FORCE_WAKE(dev_priv, reg)) {
1266 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
1267 if (dev_priv->forcewake_count == 0)
1268 dev_priv->gt.force_wake_get(dev_priv);
1269 val = DRM_READ32(dev_priv->regs, reg);
1270 if (dev_priv->forcewake_count == 0)
1271 dev_priv->gt.force_wake_put(dev_priv);
1272 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
1274 val = DRM_READ32(dev_priv->regs, (reg));
1278 u64 i915_read64(struct drm_i915_private *dev_priv, u32 reg)
1282 if (IS_GEN5(dev_priv->dev))
1283 ilk_dummy_write(dev_priv);
1285 if (NEEDS_FORCE_WAKE(dev_priv, reg)) {
1287 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
1288 if (dev_priv->forcewake_count == 0)
1289 dev_priv->gt.force_wake_get(dev_priv);
1290 val = DRM_READ64(dev_priv->regs, reg);
1291 if (dev_priv->forcewake_count == 0)
1292 dev_priv->gt.force_wake_put(dev_priv);
1293 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
1295 val = DRM_READ64(dev_priv->regs, (reg));
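
Lines 1215-1295 repeat one pattern per access width: a dummy write on Ironlake, then take gt_lock, grab a temporary forcewake reference if no one else holds one, read the register, and drop the reference again (the put that lines 1250, 1271 and 1292 had mistyped as a second get). A width-generic sketch of that corrected read path, with a plain array standing in for the mapped register BAR:

    #include <pthread.h>
    #include <stdint.h>

    struct fake_dev {
        pthread_mutex_t gt_lock;
        int forcewake_count;
        volatile uint32_t *regs;   /* mapped register BAR */
    };

    static void force_wake_get(struct fake_dev *d) { (void)d; /* wake hw */ }
    static void force_wake_put(struct fake_dev *d) { (void)d; /* sleep hw */ }
    static int needs_force_wake(uint32_t reg) { return reg < 0x40000; }

    static uint32_t read32(struct fake_dev *d, uint32_t reg)
    {
        uint32_t val;

        if (needs_force_wake(reg)) {
            pthread_mutex_lock(&d->gt_lock);
            if (d->forcewake_count == 0)
                force_wake_get(d); /* wake just for this read */
            val = d->regs[reg / 4];
            if (d->forcewake_count == 0)
                force_wake_put(d); /* put, not a second get */
            pthread_mutex_unlock(&d->gt_lock);
        } else {
            val = d->regs[reg / 4];
        }
        return val;
    }

    int main(void)
    {
        static uint32_t bar[0x10000];  /* fake 256 KiB register file */
        struct fake_dev d = { PTHREAD_MUTEX_INITIALIZER, 0, bar };

        bar[0x2000 / 4] = 0xdeadbeef;
        return read32(&d, 0x2000) == 0xdeadbeef ? 0 : 1;
    }
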
1299 void i915_write8(struct drm_i915_private *dev_priv, u32 reg,
1305 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
1306 if (NEEDS_FORCE_WAKE(dev_priv, reg))
1307 __fifo_ret = __gen6_gt_wait_for_fifo(dev_priv);
1309 if (IS_GEN5(dev_priv->dev))
1310 ilk_dummy_write(dev_priv);
1312 hsw_unclaimed_reg_clear(dev_priv, reg);
1314 DRM_WRITE8(dev_priv->regs, (reg), (val));
1316 gen6_gt_check_fifodbg(dev_priv);
1317 hsw_unclaimed_reg_check(dev_priv, reg);
1318 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
1321 void i915_write16(struct drm_i915_private *dev_priv, u32 reg,
1327 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
1328 if (NEEDS_FORCE_WAKE(dev_priv, reg))
1329 __fifo_ret = __gen6_gt_wait_for_fifo(dev_priv);
1331 if (IS_GEN5(dev_priv->dev))
1332 ilk_dummy_write(dev_priv);
1334 hsw_unclaimed_reg_clear(dev_priv, reg);
1336 DRM_WRITE16(dev_priv->regs, (reg), (val));
1338 gen6_gt_check_fifodbg(dev_priv);
1339 hsw_unclaimed_reg_check(dev_priv, reg);
1340 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
1343 void i915_write32(struct drm_i915_private *dev_priv, u32 reg,
1349 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
1350 if (NEEDS_FORCE_WAKE(dev_priv, reg))
1351 __fifo_ret = __gen6_gt_wait_for_fifo(dev_priv);
1353 if (IS_GEN5(dev_priv->dev))
1354 ilk_dummy_write(dev_priv);
1356 hsw_unclaimed_reg_clear(dev_priv, reg);
1358 DRM_WRITE32(dev_priv->regs, (reg), (val));
1360 gen6_gt_check_fifodbg(dev_priv);
1361 hsw_unclaimed_reg_check(dev_priv, reg);
1362 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
1365 void i915_write64(struct drm_i915_private *dev_priv, u32 reg,
1371 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
1372 if (NEEDS_FORCE_WAKE(dev_priv, reg))
1373 __fifo_ret = __gen6_gt_wait_for_fifo(dev_priv);
1375 if (IS_GEN5(dev_priv->dev))
1376 ilk_dummy_write(dev_priv);
1378 hsw_unclaimed_reg_clear(dev_priv, reg);
1380 DRM_WRITE64(dev_priv->regs, (reg), (val));
1382 gen6_gt_check_fifodbg(dev_priv);
1383 hsw_unclaimed_reg_check(dev_priv, reg);
1384 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
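
The write side on lines 1299-1384 differs from the reads: rather than taking forcewake, it waits for free GT FIFO entries before posting the write, flags a FIFO timeout through the fifodbg check afterwards, and on Haswell brackets the access with the unclaimed-register clear/check. A sketch of the bounded FIFO wait, with invented depth and reserve values:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical FIFO model: the GT accepts posted writes into
     * a fixed-depth FIFO and software keeps a few entries free. */
    #define FAKE_FIFO_RESERVED 2

    static unsigned int fifo_free = 1; /* pretend it is nearly full */

    static unsigned int read_fifo_free_entries(void)
    {
        /* The driver re-reads a status register here; this mock
         * simply refills once so the wait terminates. */
        return fifo_free = 20;
    }

    static int wait_for_fifo(void)
    {
        int loops = 500;               /* bounded spin, as in the driver */

        while (fifo_free <= FAKE_FIFO_RESERVED && --loops)
            read_fifo_free_entries();
        if (!loops)
            return -1;                 /* timed out: flagged via fifodbg */
        fifo_free--;                   /* our write consumes one slot */
        return 0;
    }

    int main(void)
    {
        int ret = wait_for_fifo();
        printf("wait = %d, free = %u\n", ret, fifo_free); /* 0, 19 */
        return 0;
    }
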
1388 u ## x i915_read ## x(struct drm_i915_private *dev_priv, u32 reg);
1397 void i915_write ## x(struct drm_i915_private *dev_priv, u32 reg, \
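
Lines 1388 and 1397 sit inside token-pasting templates that stamp out the whole i915_read/i915_write family, one correctly typed accessor per width. A condensed, self-contained illustration of the technique (declarations only):

    #include <stdint.h>

    typedef uint8_t  u8;
    typedef uint16_t u16;
    typedef uint32_t u32;
    typedef uint64_t u64;

    struct drm_i915_private;

    /* ## pastes the width into both the type and the name. */
    #define __i915_read(x) \
        u##x i915_read##x(struct drm_i915_private *dev_priv, u32 reg);
    #define __i915_write(x) \
        void i915_write##x(struct drm_i915_private *dev_priv, u32 reg, \
                           u##x val);

    __i915_read(8)  __i915_read(16)  __i915_read(32)  __i915_read(64)
    __i915_write(8) __i915_write(16) __i915_write(32) __i915_write(64)
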
1416 struct drm_i915_private *dev_priv = dev->dev_private;