Lines matching refs:dev_priv (each entry gives the source line number, followed by the matching line)

60 struct drm_i915_private *dev_priv = dev->dev_private;
83 struct drm_i915_private *dev_priv = dev->dev_private;
92 cfb_pitch = dev_priv->cfb_size / FBC_LL_SIZE;
125 struct drm_i915_private *dev_priv = dev->dev_private;
133 struct drm_i915_private *dev_priv = dev->dev_private;
159 struct drm_i915_private *dev_priv = dev->dev_private;
174 struct drm_i915_private *dev_priv = dev->dev_private;
181 struct drm_i915_private *dev_priv = dev->dev_private;
185 gen6_gt_force_wake_get(dev_priv);
196 gen6_gt_force_wake_put(dev_priv);
202 struct drm_i915_private *dev_priv = dev->dev_private;
239 struct drm_i915_private *dev_priv = dev->dev_private;
266 struct drm_i915_private *dev_priv = dev->dev_private;
274 struct drm_i915_private *dev_priv = dev->dev_private;
314 struct drm_i915_private *dev_priv = dev->dev_private;
316 if (!dev_priv->display.fbc_enabled)
319 return dev_priv->display.fbc_enabled(dev);
327 struct drm_i915_private *dev_priv = dev->dev_private;
330 if (work == dev_priv->fbc_work) {
335 dev_priv->display.enable_fbc(work->crtc,
338 dev_priv->cfb_plane = to_intel_crtc(work->crtc)->plane;
339 dev_priv->cfb_fb = work->crtc->fb->base.id;
340 dev_priv->cfb_y = work->crtc->y;
343 dev_priv->fbc_work = NULL;
355 drm_i915_private_t *dev_priv = dev->dev_private;
356 queue_work(dev_priv->other_wq, &work->work);
359 static void intel_cancel_fbc_work(struct drm_i915_private *dev_priv)
361 if (dev_priv->fbc_work == NULL)
367 * dev_priv->fbc_work, so we can perform the cancellation
370 del_timer_sync(&dev_priv->fbc_timer);
371 cancel_delayed_work(dev_priv->other_wq);
373 kfree(dev_priv->fbc_work, sizeof(struct intel_fbc_work));
380 dev_priv->fbc_work = NULL;
387 struct drm_i915_private *dev_priv = dev->dev_private;
389 if (!dev_priv->display.enable_fbc)
392 intel_cancel_fbc_work(dev_priv);
396 dev_priv->display.enable_fbc(crtc, interval);
406 setup_timer(&dev_priv->fbc_timer, intel_fbc_work_timer,
409 dev_priv->fbc_work = work;
424 test_set_timer(&dev_priv->fbc_timer, msecs_to_jiffies(50));
429 struct drm_i915_private *dev_priv = dev->dev_private;
431 intel_cancel_fbc_work(dev_priv);
433 if (!dev_priv->display.disable_fbc)
436 dev_priv->display.disable_fbc(dev);
437 dev_priv->cfb_plane = -1;
461 struct drm_i915_private *dev_priv = dev->dev_private;
490 dev_priv->no_fbc_reason = FBC_MULTIPLE_PIPES;
499 dev_priv->no_fbc_reason = FBC_NO_OUTPUT;
517 dev_priv->no_fbc_reason = FBC_MODULE_PARAM;
524 dev_priv->no_fbc_reason = FBC_UNSUPPORTED_MODE;
538 dev_priv->no_fbc_reason = FBC_MODE_TOO_LARGE;
544 dev_priv->no_fbc_reason = FBC_BAD_PLANE;
554 dev_priv->no_fbc_reason = FBC_NOT_TILED;
565 dev_priv->no_fbc_reason = FBC_STOLEN_TOO_SMALL;
574 if (dev_priv->cfb_plane == intel_crtc->plane &&
575 dev_priv->cfb_fb == fb->base.id &&
576 dev_priv->cfb_y == crtc->y)
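The hits at lines 338-340 and 574-576 above are two halves of one caching pattern: when compression is armed the driver records the plane, framebuffer id, and panning offset (cfb_plane, cfb_fb, cfb_y), and the update path compares that cached triple against the current CRTC state so an unchanged configuration is not torn down and re-enabled. A minimal self-contained sketch of the check follows, with a stand-in struct in place of dev_priv; the names are illustrative, not the driver's API.

#include <stdbool.h>
#include <stdint.h>

/* Stand-in for the cached FBC state (cfb_plane, cfb_fb, cfb_y in the
 * listing); the driver keeps these fields directly in dev_priv. */
struct fbc_cache {
    int plane;
    uint32_t fb_id;
    int y;
};

/* Mirrors the comparison at lines 574-576: true when compression is already
 * configured for exactly this plane/framebuffer/offset, so the caller can
 * return early instead of re-enabling FBC. */
static bool fbc_already_configured(const struct fbc_cache *c,
                                   int plane, uint32_t fb_id, int y)
{
    return c->plane == plane && c->fb_id == fb_id && c->y == y;
}

/* Mirrors lines 338-340: record what was just enabled. */
static void fbc_record_enabled(struct fbc_cache *c,
                               int plane, uint32_t fb_id, int y)
{
    c->plane = plane;
    c->fb_id = fb_id;
    c->y = y;
}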
621 drm_i915_private_t *dev_priv = dev->dev_private;
628 dev_priv->fsb_freq = 533; /* 133*4 */
631 dev_priv->fsb_freq = 800; /* 200*4 */
634 dev_priv->fsb_freq = 667; /* 167*4 */
637 dev_priv->fsb_freq = 400; /* 100*4 */
643 dev_priv->mem_freq = 533;
646 dev_priv->mem_freq = 667;
649 dev_priv->mem_freq = 800;
655 dev_priv->is_ddr3 = (tmp & CSHRDDR3CTL_DDR3) ? 1 : 0;
660 drm_i915_private_t *dev_priv = dev->dev_private;
668 dev_priv->mem_freq = 800;
671 dev_priv->mem_freq = 1066;
674 dev_priv->mem_freq = 1333;
677 dev_priv->mem_freq = 1600;
682 dev_priv->mem_freq = 0;
686 dev_priv->ips.r_t = dev_priv->mem_freq;
690 dev_priv->fsb_freq = 3200;
693 dev_priv->fsb_freq = 3733;
696 dev_priv->fsb_freq = 4266;
699 dev_priv->fsb_freq = 4800;
702 dev_priv->fsb_freq = 5333;
705 dev_priv->fsb_freq = 5866;
708 dev_priv->fsb_freq = 6400;
713 dev_priv->fsb_freq = 0;
717 if (dev_priv->fsb_freq == 3200) {
718 dev_priv->ips.c_m = 0;
719 } else if (dev_priv->fsb_freq > 3200 && dev_priv->fsb_freq <= 4800) {
720 dev_priv->ips.c_m = 1;
722 dev_priv->ips.c_m = 2;
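Lines 717-722 bucket the decoded FSB frequency into the IPS c_m coefficient index. The branch structure is small enough to restate as a standalone helper; this only illustrates the logic visible above, and the function name is hypothetical.

/* c_m selection as shown at lines 717-722: 0 for an FSB value of 3200,
 * 1 for (3200, 4800], 2 for anything faster. */
static int ips_c_m_from_fsb_freq(int fsb_freq)
{
    if (fsb_freq == 3200)
        return 0;
    else if (fsb_freq > 3200 && fsb_freq <= 4800)
        return 1;
    else
        return 2;
}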
790 struct drm_i915_private *dev_priv = dev->dev_private;
814 struct drm_i915_private *dev_priv = dev->dev_private;
830 struct drm_i915_private *dev_priv = dev->dev_private;
847 struct drm_i915_private *dev_priv = dev->dev_private;
863 struct drm_i915_private *dev_priv = dev->dev_private;
1095 struct drm_i915_private *dev_priv = dev->dev_private;
1101 latency = intel_get_cxsr_latency(IS_PINEVIEW_G(dev), dev_priv->is_ddr3,
1102 dev_priv->fsb_freq, dev_priv->mem_freq);
1336 struct drm_i915_private *dev_priv = dev->dev_private;
1374 struct drm_i915_private *dev_priv = dev->dev_private;
1433 struct drm_i915_private *dev_priv = dev->dev_private;
1484 struct drm_i915_private *dev_priv = dev->dev_private;
1549 struct drm_i915_private *dev_priv = dev->dev_private;
1565 fifo_size = dev_priv->display.get_fifo_size(dev, 0);
1579 fifo_size = dev_priv->display.get_fifo_size(dev, 1);
1666 struct drm_i915_private *dev_priv = dev->dev_private;
1676 dev_priv->display.get_fifo_size(dev, 0),
1701 struct drm_i915_private *dev_priv = dev->dev_private;
1796 struct drm_i915_private *dev_priv = dev->dev_private;
1879 struct drm_i915_private *dev_priv = dev->dev_private;
1929 dev_priv->sprite_scaling_enabled)
1981 struct drm_i915_private *dev_priv = dev->dev_private;
2046 dev_priv->sprite_scaling_enabled)
2311 static uint32_t hsw_compute_wm_pipe(struct drm_i915_private *dev_priv,
2339 struct drm_i915_private *dev_priv = dev->dev_private;
2352 intel_ddi_get_cdclk_freq(dev_priv));
2364 struct drm_i915_private *dev_priv = dev->dev_private;
2437 struct drm_i915_private *dev_priv = dev->dev_private;
2476 results->wm_pipe[pipe] = hsw_compute_wm_pipe(dev_priv, wm[0],
2481 crtc = dev_priv->pipe_to_crtc_mapping[pipe];
2516 static void hsw_write_wm_values(struct drm_i915_private *dev_priv,
2611 struct drm_i915_private *dev_priv = dev->dev_private;
2632 hsw_write_wm_values(dev_priv, best_results, partitioning);
2735 struct drm_i915_private *dev_priv = dev->dev_private;
2848 struct drm_i915_private *dev_priv = dev->dev_private;
2850 if (dev_priv->display.update_wm)
2851 dev_priv->display.update_wm(dev);
2858 struct drm_i915_private *dev_priv = dev->dev_private;
2860 if (dev_priv->display.update_sprite_wm)
2861 dev_priv->display.update_sprite_wm(dev, pipe, sprite_width,
2911 struct drm_i915_private *dev_priv = dev->dev_private;
2935 struct drm_i915_private *dev_priv = dev->dev_private;
2964 dev_priv->ips.fmax = fmax; /* IPS callback will increase this */
2965 dev_priv->ips.fstart = fstart;
2967 dev_priv->ips.max_delay = fstart;
2968 dev_priv->ips.min_delay = fmin;
2969 dev_priv->ips.cur_delay = fstart;
2992 dev_priv->ips.last_count1 = I915_READ(0x112e4) + I915_READ(0x112e8) +
2994 dev_priv->ips.last_time1 = jiffies_to_msecs(jiffies);
2995 dev_priv->ips.last_count2 = I915_READ(0x112f4);
2996 dev_priv->ips.last_time2 = jiffies;
3003 struct drm_i915_private *dev_priv = dev->dev_private;
3018 (void) ironlake_set_drps(dev, dev_priv->ips.fstart);
3032 static u32 gen6_rps_limits(struct drm_i915_private *dev_priv, u8 *val)
3038 if (*val >= dev_priv->rps.max_delay)
3039 *val = dev_priv->rps.max_delay;
3040 limits |= dev_priv->rps.max_delay << 24;
3048 if (*val <= dev_priv->rps.min_delay) {
3049 *val = dev_priv->rps.min_delay;
3050 limits |= dev_priv->rps.min_delay << 16;
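The gen6_rps_limits() hits at lines 3032-3050 clamp the requested frequency step to the [min_delay, max_delay] window and build a limits word for the hardware: the maximum is always packed into bits 31:24, while the minimum goes into bits 23:16 only once the request has been clamped to the floor. A self-contained sketch of that clamp-and-pack, with a small stand-in struct in place of dev_priv->rps:

#include <stdint.h>

struct rps_bounds {
    uint8_t min_delay;  /* lowest allowed frequency step */
    uint8_t max_delay;  /* highest allowed frequency step */
};

static uint32_t rps_limits(const struct rps_bounds *rps, uint8_t *val)
{
    uint32_t limits = 0;

    /* Clamp to the ceiling; the upper bound is always advertised. */
    if (*val >= rps->max_delay)
        *val = rps->max_delay;
    limits |= (uint32_t)rps->max_delay << 24;

    /* The lower bound is only advertised once the request hits the floor
     * (compare lines 3048-3050 above). */
    if (*val <= rps->min_delay) {
        *val = rps->min_delay;
        limits |= (uint32_t)rps->min_delay << 16;
    }

    return limits;
}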
3058 struct drm_i915_private *dev_priv = dev->dev_private;
3059 u32 limits = gen6_rps_limits(dev_priv, &val);
3061 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
3062 WARN_ON(val > dev_priv->rps.max_delay);
3063 WARN_ON(val < dev_priv->rps.min_delay);
3065 if (val == dev_priv->rps.cur_delay)
3084 dev_priv->rps.cur_delay = (u8)val;
3092 static void vlv_update_rps_cur_delay(struct drm_i915_private *dev_priv)
3097 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
3100 pval = vlv_punit_read(dev_priv, PUNIT_REG_GPU_FREQ_STS);
3110 if (pval != dev_priv->rps.cur_delay)
3112 vlv_gpu_freq(dev_priv->mem_freq, dev_priv->rps.cur_delay),
3113 dev_priv->rps.cur_delay,
3114 vlv_gpu_freq(dev_priv->mem_freq, pval), pval);
3116 dev_priv->rps.cur_delay = (u8)pval;
3121 struct drm_i915_private *dev_priv = dev->dev_private;
3123 gen6_rps_limits(dev_priv, &val);
3125 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
3126 WARN_ON(val > dev_priv->rps.max_delay);
3127 WARN_ON(val < dev_priv->rps.min_delay);
3129 vlv_update_rps_cur_delay(dev_priv);
3132 vlv_gpu_freq(dev_priv->mem_freq,
3133 dev_priv->rps.cur_delay),
3134 dev_priv->rps.cur_delay,
3135 vlv_gpu_freq(dev_priv->mem_freq, val), val);
3137 if (val == dev_priv->rps.cur_delay)
3140 vlv_punit_write(dev_priv, PUNIT_REG_GPU_FREQ_REQ, val);
3142 dev_priv->rps.cur_delay = val;
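gen6_set_rps() and valleyview_set_rps() share the same guard, visible at lines 3065 and 3137: if the requested step equals the cached rps.cur_delay, the hardware write is skipped; otherwise the new step is written (a Punit register on Valleyview, line 3140) and the cache is updated (lines 3084 and 3142). A compact sketch of that skip-if-current pattern, with a hypothetical write callback standing in for the register access:

#include <stdint.h>

struct rps_cache {
    uint8_t cur_delay;                 /* last step handed to hardware */
    void (*write_freq)(uint8_t val);   /* stand-in for the register/Punit write */
};

static void set_rps(struct rps_cache *rps, uint8_t val)
{
    /* Nothing to do if the hardware is already at this step
     * (mirrors lines 3065 and 3137). */
    if (val == rps->cur_delay)
        return;

    rps->write_freq(val);
    rps->cur_delay = val;   /* mirrors lines 3084 and 3142 */
}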
3148 struct drm_i915_private *dev_priv = dev->dev_private;
3159 spin_lock_irq(&dev_priv->rps.lock);
3160 dev_priv->rps.pm_iir = 0;
3161 spin_unlock_irq(&dev_priv->rps.lock);
3168 struct drm_i915_private *dev_priv = dev->dev_private;
3178 spin_lock_irq(&dev_priv->rps.lock);
3179 dev_priv->rps.pm_iir = 0;
3180 spin_unlock_irq(&dev_priv->rps.lock);
3184 if (dev_priv->vlv_pctx) {
3185 drm_gem_object_unreference(&dev_priv->vlv_pctx->base);
3186 dev_priv->vlv_pctx = NULL;
3217 struct drm_i915_private *dev_priv = dev->dev_private;
3226 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
3242 gen6_gt_force_wake_get(dev_priv);
3248 dev_priv->rps.hw_max = dev_priv->rps.max_delay = rp_state_cap & 0xff;
3249 dev_priv->rps.min_delay = (rp_state_cap & 0xff0000) >> 16;
3250 dev_priv->rps.cur_delay = 0;
3261 for_each_ring(ring, dev_priv, i)
3271 rc6_mode = intel_enable_rc6(dev_priv->dev);
3310 dev_priv->rps.max_delay << 24 |
3311 dev_priv->rps.min_delay << 16);
3327 ret = sandybridge_pcode_write(dev_priv, GEN6_PCODE_WRITE_MIN_FREQ_TABLE, 0);
3330 ret = sandybridge_pcode_read(dev_priv, GEN6_READ_OC_PARAMS, &pcu_mbox);
3333 (dev_priv->rps.max_delay & 0xff) * 50,
3335 dev_priv->rps.hw_max = pcu_mbox & 0xff;
3341 gen6_set_rps(dev_priv->dev, (gt_perf_status & 0xff00) >> 8);
3345 spin_lock_irq(&dev_priv->rps.lock);
3347 * dev_priv->rps.pm_iir really should be 0 here. */
3348 dev_priv->rps.pm_iir = 0;
3351 spin_unlock_irq(&dev_priv->rps.lock);
3356 ret = sandybridge_pcode_read(dev_priv, GEN6_PCODE_READ_RC6VIDS, &rc6vids);
3364 ret = sandybridge_pcode_write(dev_priv, GEN6_PCODE_WRITE_RC6VIDS, rc6vids);
3369 gen6_gt_force_wake_put(dev_priv);
3374 struct drm_i915_private *dev_priv = dev->dev_private;
3380 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
3396 for (gpu_freq = dev_priv->rps.max_delay; gpu_freq >= dev_priv->rps.min_delay;
3398 int diff = dev_priv->rps.max_delay - gpu_freq;
3420 sandybridge_pcode_write(dev_priv,
3428 int valleyview_rps_max_freq(struct drm_i915_private *dev_priv)
3432 val = vlv_nc_read(dev_priv, IOSF_NC_FB_GFX_FREQ_FUSE);
3441 static int valleyview_rps_rpe_freq(struct drm_i915_private *dev_priv)
3445 val = vlv_nc_read(dev_priv, IOSF_NC_FB_GFX_FMAX_FUSE_LO);
3447 val = vlv_nc_read(dev_priv, IOSF_NC_FB_GFX_FMAX_FUSE_HI);
3453 int valleyview_rps_min_freq(struct drm_i915_private *dev_priv)
3455 return vlv_punit_read(dev_priv, PUNIT_REG_GPU_LFM) & 0xff;
3460 drm_i915_private_t *dev_priv = container_of(work, drm_i915_private_t,
3470 mutex_lock(&dev_priv->rps.hw_lock);
3471 if (dev_priv->rps.cur_delay > dev_priv->rps.rpe_delay)
3472 valleyview_set_rps(dev_priv->dev, dev_priv->rps.rpe_delay);
3473 mutex_unlock(&dev_priv->rps.hw_lock);
3478 struct drm_i915_private *dev_priv = dev->dev_private;
3489 pcbr_offset = (pcbr & (~4095)) - dev_priv->mm.stolen_base;
3490 pctx = i915_gem_object_create_stolen_for_preallocated(dev_priv->dev,
3511 pctx_paddr = dev_priv->mm.stolen_base + pctx->stolen->start;
3515 dev_priv->vlv_pctx = pctx;
3520 struct drm_i915_private *dev_priv = dev->dev_private;
3525 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
3534 gen6_gt_force_wake_get(dev_priv);
3555 for_each_ring(ring, dev_priv, i)
3565 val = vlv_punit_read(dev_priv, PUNIT_REG_GPU_FREQ_STS);
3569 dev_priv->mem_freq = 800;
3572 dev_priv->mem_freq = 1066;
3575 dev_priv->mem_freq = 1333;
3578 DRM_DEBUG_DRIVER("DDR speed: %d MHz", dev_priv->mem_freq);
3583 dev_priv->rps.cur_delay = (val >> 8) & 0xff;
3585 vlv_gpu_freq(dev_priv->mem_freq,
3586 dev_priv->rps.cur_delay),
3587 dev_priv->rps.cur_delay);
3589 dev_priv->rps.max_delay = valleyview_rps_max_freq(dev_priv);
3590 dev_priv->rps.hw_max = dev_priv->rps.max_delay;
3592 vlv_gpu_freq(dev_priv->mem_freq,
3593 dev_priv->rps.max_delay),
3594 dev_priv->rps.max_delay);
3596 dev_priv->rps.rpe_delay = valleyview_rps_rpe_freq(dev_priv);
3598 vlv_gpu_freq(dev_priv->mem_freq,
3599 dev_priv->rps.rpe_delay),
3600 dev_priv->rps.rpe_delay);
3602 dev_priv->rps.min_delay = valleyview_rps_min_freq(dev_priv);
3604 vlv_gpu_freq(dev_priv->mem_freq,
3605 dev_priv->rps.min_delay),
3606 dev_priv->rps.min_delay);
3609 vlv_gpu_freq(dev_priv->mem_freq,
3610 dev_priv->rps.rpe_delay),
3611 dev_priv->rps.rpe_delay);
3613 //INIT_DELAYED_WORK(&dev_priv->rps.vlv_work, vlv_rps_timer_work);
3615 valleyview_set_rps(dev_priv->dev, dev_priv->rps.rpe_delay);
3619 spin_lock_irq(&dev_priv->rps.lock);
3620 WARN_ON(dev_priv->rps.pm_iir != 0);
3622 spin_unlock_irq(&dev_priv->rps.lock);
3626 gen6_gt_force_wake_put(dev_priv);
3631 struct drm_i915_private *dev_priv = dev->dev_private;
3633 if (dev_priv->ips.renderctx) {
3634 i915_gem_object_unpin(dev_priv->ips.renderctx);
3635 drm_gem_object_unreference(&dev_priv->ips.renderctx->base);
3636 dev_priv->ips.renderctx = NULL;
3639 if (dev_priv->ips.pwrctx) {
3640 i915_gem_object_unpin(dev_priv->ips.pwrctx);
3641 drm_gem_object_unreference(&dev_priv->ips.pwrctx->base);
3642 dev_priv->ips.pwrctx = NULL;
3648 struct drm_i915_private *dev_priv = dev->dev_private;
3666 struct drm_i915_private *dev_priv = dev->dev_private;
3668 if (dev_priv->ips.renderctx == NULL)
3669 dev_priv->ips.renderctx = intel_alloc_context_page(dev);
3670 if (!dev_priv->ips.renderctx)
3673 if (dev_priv->ips.pwrctx == NULL)
3674 dev_priv->ips.pwrctx = intel_alloc_context_page(dev);
3675 if (!dev_priv->ips.pwrctx) {
3685 struct drm_i915_private *dev_priv = dev->dev_private;
3686 struct intel_ring_buffer *ring = &dev_priv->ring[RCS];
3702 was_interruptible = dev_priv->mm.interruptible;
3703 dev_priv->mm.interruptible = false;
3712 dev_priv->mm.interruptible = was_interruptible;
3718 intel_ring_emit(ring, dev_priv->ips.renderctx->gtt_offset |
3734 dev_priv->mm.interruptible = was_interruptible;
3741 I915_WRITE(PWRCTXA, dev_priv->ips.pwrctx->gtt_offset | PWRCTX_EN);
3776 struct drm_i915_private *dev_priv = dev->dev_private;
3842 dev_priv->ips.corr = (lcfuse & LCFUSE_HIV_MASK);
3847 struct drm_i915_private *dev_priv = dev->dev_private;
3857 del_timer_sync(&dev_priv->rps.delayed_resume_timer);
3859 // cancel_delayed_work_sync(&dev_priv->rps.vlv_work);
3860 mutex_lock(&dev_priv->rps.hw_lock);
3865 mutex_unlock(&dev_priv->rps.hw_lock);
3871 drm_i915_private_t *dev_priv = container_of(work, drm_i915_private_t,
3873 struct drm_device *dev = dev_priv->dev;
3875 mutex_lock(&dev_priv->rps.hw_lock);
3883 mutex_unlock(&dev_priv->rps.hw_lock);
3890 struct drm_i915_private *dev_priv = dev->dev_private;
3891 (void) queue_work(dev_priv->wq, &dev_priv->rps.delayed_resume_work);
3895 struct drm_i915_private *dev_priv = dev->dev_private;
3907 test_set_timer(&dev_priv->rps.delayed_resume_timer, DRM_HZ);
3913 struct drm_i915_private *dev_priv = dev->dev_private;
3925 struct drm_i915_private *dev_priv = dev->dev_private;
3932 intel_flush_display_plane(dev_priv, pipe);
3938 struct drm_i915_private *dev_priv = dev->dev_private;
4006 struct drm_i915_private *dev_priv = dev->dev_private;
4025 if (dev_priv->vbt.fdi_rx_polarity_inverted)
4041 struct drm_i915_private *dev_priv = dev->dev_private;
4054 struct drm_i915_private *dev_priv = dev->dev_private;
4142 static void gen7_setup_fixed_func_scheduler(struct drm_i915_private *dev_priv)
4151 if (IS_HASWELL(dev_priv->dev))
4159 struct drm_i915_private *dev_priv = dev->dev_private;
4165 if (dev_priv->pch_id == INTEL_PCH_LPT_LP_DEVICE_ID_TYPE)
4178 struct drm_i915_private *dev_priv = dev->dev_private;
4180 if (dev_priv->pch_id == INTEL_PCH_LPT_LP_DEVICE_ID_TYPE) {
4190 struct drm_i915_private *dev_priv = dev->dev_private;
4219 gen7_setup_fixed_func_scheduler(dev_priv);
4241 struct drm_i915_private *dev_priv = dev->dev_private;
4317 gen7_setup_fixed_func_scheduler(dev_priv);
4336 struct drm_i915_private *dev_priv = dev->dev_private;
4428 struct drm_i915_private *dev_priv = dev->dev_private;
4452 struct drm_i915_private *dev_priv = dev->dev_private;
4465 struct drm_i915_private *dev_priv = dev->dev_private;
4479 struct drm_i915_private *dev_priv = dev->dev_private;
4495 struct drm_i915_private *dev_priv = dev->dev_private;
4502 struct drm_i915_private *dev_priv = dev->dev_private;
4509 struct drm_i915_private *dev_priv = dev->dev_private;
4511 dev_priv->display.init_clock_gating(dev);
4528 struct drm_i915_private *dev_priv = dev->dev_private;
4555 struct drm_i915_private *dev_priv = dev->dev_private;
4630 struct drm_i915_private *dev_priv = dev->dev_private;
4632 hsw_pwr = &dev_priv->power_well;
4648 struct drm_i915_private *dev_priv = dev->dev_private;
4649 struct i915_power_well *power_well = &dev_priv->power_well;
4678 struct drm_i915_private *dev_priv = dev->dev_private;
4695 struct drm_i915_private *dev_priv = dev->dev_private;
4699 dev_priv->display.fbc_enabled = ironlake_fbc_enabled;
4701 dev_priv->display.enable_fbc =
4704 dev_priv->display.enable_fbc =
4706 dev_priv->display.disable_fbc = ironlake_disable_fbc;
4708 dev_priv->display.fbc_enabled = g4x_fbc_enabled;
4709 dev_priv->display.enable_fbc = g4x_enable_fbc;
4710 dev_priv->display.disable_fbc = g4x_disable_fbc;
4712 dev_priv->display.fbc_enabled = i8xx_fbc_enabled;
4713 dev_priv->display.enable_fbc = i8xx_enable_fbc;
4714 dev_priv->display.disable_fbc = i8xx_disable_fbc;
4729 dev_priv->display.update_wm = ironlake_update_wm;
4733 dev_priv->display.update_wm = NULL;
4735 dev_priv->display.init_clock_gating = ironlake_init_clock_gating;
4738 dev_priv->display.update_wm = sandybridge_update_wm;
4739 dev_priv->display.update_sprite_wm = sandybridge_update_sprite_wm;
4743 dev_priv->display.update_wm = NULL;
4745 dev_priv->display.init_clock_gating = gen6_init_clock_gating;
4749 dev_priv->display.update_wm = ivybridge_update_wm;
4750 dev_priv->display.update_sprite_wm = sandybridge_update_sprite_wm;
4754 dev_priv->display.update_wm = NULL;
4756 dev_priv->display.init_clock_gating = ivybridge_init_clock_gating;
4759 dev_priv->display.update_wm = haswell_update_wm;
4760 dev_priv->display.update_sprite_wm =
4765 dev_priv->display.update_wm = NULL;
4767 dev_priv->display.init_clock_gating = haswell_init_clock_gating;
4769 dev_priv->display.update_wm = NULL;
4771 dev_priv->display.update_wm = valleyview_update_wm;
4772 dev_priv->display.init_clock_gating =
4776 dev_priv->is_ddr3,
4777 dev_priv->fsb_freq,
4778 dev_priv->mem_freq)) {
4782 (dev_priv->is_ddr3 == 1) ? "3" : "2",
4783 dev_priv->fsb_freq, dev_priv->mem_freq);
4786 dev_priv->display.update_wm = NULL;
4788 dev_priv->display.update_wm = pineview_update_wm;
4789 dev_priv->display.init_clock_gating = gen3_init_clock_gating;
4791 dev_priv->display.update_wm = g4x_update_wm;
4792 dev_priv->display.init_clock_gating = g4x_init_clock_gating;
4794 dev_priv->display.update_wm = i965_update_wm;
4796 dev_priv->display.init_clock_gating = crestline_init_clock_gating;
4798 dev_priv->display.init_clock_gating = broadwater_init_clock_gating;
4800 dev_priv->display.update_wm = i9xx_update_wm;
4801 dev_priv->display.get_fifo_size = i9xx_get_fifo_size;
4802 dev_priv->display.init_clock_gating = gen3_init_clock_gating;
4804 dev_priv->display.update_wm = i830_update_wm;
4805 dev_priv->display.init_clock_gating = i85x_init_clock_gating;
4806 dev_priv->display.get_fifo_size = i830_get_fifo_size;
4808 dev_priv->display.update_wm = i9xx_update_wm;
4809 dev_priv->display.get_fifo_size = i85x_get_fifo_size;
4810 dev_priv->display.init_clock_gating = i85x_init_clock_gating;
4812 dev_priv->display.update_wm = i830_update_wm;
4813 dev_priv->display.init_clock_gating = i830_init_clock_gating;
4815 dev_priv->display.get_fifo_size = i845_get_fifo_size;
4817 dev_priv->display.get_fifo_size = i830_get_fifo_size;
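The long run of assignments from line 4699 onward fills a per-generation vtable: the init path picks the FBC, watermark, FIFO-size, and clock-gating implementations once, and callers such as the update_wm wrapper at lines 2850-2851 only test the pointer for NULL before calling through it, with NULL doubling as "no watermark programming on this platform" (lines 4733, 4743, 4754, 4786). A reduced model of that dispatch; the generation checks and function names below are illustrative stand-ins, not the driver's own symbols.

#include <stddef.h>

struct display_ops {
    void (*update_wm)(void *dev);
};

/* Illustrative per-generation implementations. */
static void snb_update_wm(void *dev) { (void)dev; /* program SNB watermarks */ }
static void ivb_update_wm(void *dev) { (void)dev; /* program IVB watermarks */ }

/* Select an implementation once at init time (compare lines 4738-4756). */
static void init_display_ops(struct display_ops *ops, int gen)
{
    if (gen == 6)
        ops->update_wm = snb_update_wm;
    else if (gen == 7)
        ops->update_wm = ivb_update_wm;
    else
        ops->update_wm = NULL;  /* watermark updates unsupported/disabled */
}

/* Caller-side pattern from lines 2850-2851: NULL means "not supported". */
static void update_watermarks(const struct display_ops *ops, void *dev)
{
    if (ops->update_wm)
        ops->update_wm(dev);
}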
4821 static void __gen6_gt_wait_for_thread_c0(struct drm_i915_private *dev_priv)
4825 if (IS_HASWELL(dev_priv->dev))
4837 static void __gen6_gt_force_wake_reset(struct drm_i915_private *dev_priv)
4843 static void __gen6_gt_force_wake_get(struct drm_i915_private *dev_priv)
4856 __gen6_gt_wait_for_thread_c0(dev_priv);
4859 static void __gen6_gt_force_wake_mt_reset(struct drm_i915_private *dev_priv)
4865 static void __gen6_gt_force_wake_mt_get(struct drm_i915_private *dev_priv)
4869 if (IS_HASWELL(dev_priv->dev))
4886 __gen6_gt_wait_for_thread_c0(dev_priv);
4895 void gen6_gt_force_wake_get(struct drm_i915_private *dev_priv)
4899 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
4900 if (dev_priv->forcewake_count++ == 0)
4901 dev_priv->gt.force_wake_get(dev_priv);
4902 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
4905 void gen6_gt_check_fifodbg(struct drm_i915_private *dev_priv)
4915 static void __gen6_gt_force_wake_put(struct drm_i915_private *dev_priv)
4920 gen6_gt_check_fifodbg(dev_priv);
4923 static void __gen6_gt_force_wake_mt_put(struct drm_i915_private *dev_priv)
4928 gen6_gt_check_fifodbg(dev_priv);
4934 void gen6_gt_force_wake_put(struct drm_i915_private *dev_priv)
4938 spin_lock_irqsave(&dev_priv->gt_lock, irqflags);
4939 if (--dev_priv->forcewake_count == 0)
4940 dev_priv->gt.force_wake_put(dev_priv);
4941 spin_unlock_irqrestore(&dev_priv->gt_lock, irqflags);
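gen6_gt_force_wake_get()/put() at lines 4895-4941 show a reference count guarded by a spinlock: only the 0 -> 1 transition wakes the GT (through the per-platform gt.force_wake_get callback installed at lines 5027-5063) and only the 1 -> 0 transition releases it, so nested users such as the FBC path at lines 185/196 stay cheap. A small userspace model of that pattern; the locking is reduced to comments and every name is a stand-in.

#include <stdio.h>

struct gt_model {
    unsigned int forcewake_count;
    void (*force_wake_get)(struct gt_model *gt);   /* per-platform callback */
    void (*force_wake_put)(struct gt_model *gt);
};

static void hw_wake(struct gt_model *gt)    { (void)gt; puts("hw: force wake"); }
static void hw_release(struct gt_model *gt) { (void)gt; puts("hw: release wake"); }

static void gt_force_wake_get(struct gt_model *gt)
{
    /* spin_lock_irqsave(&gt_lock, ...) in the driver */
    if (gt->forcewake_count++ == 0)     /* first user wakes the hardware */
        gt->force_wake_get(gt);
    /* spin_unlock_irqrestore(&gt_lock, ...) */
}

static void gt_force_wake_put(struct gt_model *gt)
{
    /* spin_lock_irqsave(&gt_lock, ...) in the driver */
    if (--gt->forcewake_count == 0)     /* last user releases it */
        gt->force_wake_put(gt);
    /* spin_unlock_irqrestore(&gt_lock, ...) */
}

int main(void)
{
    struct gt_model gt = { 0, hw_wake, hw_release };

    gt_force_wake_get(&gt);   /* "hw: force wake" */
    gt_force_wake_get(&gt);   /* nested: no hardware access */
    gt_force_wake_put(&gt);
    gt_force_wake_put(&gt);   /* "hw: release wake" */
    return 0;
}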
4944 int __gen6_gt_wait_for_fifo(struct drm_i915_private *dev_priv)
4948 if (dev_priv->gt_fifo_count < GT_FIFO_NUM_RESERVED_ENTRIES) {
4959 dev_priv->gt_fifo_count = fifo;
4961 dev_priv->gt_fifo_count--;
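__gen6_gt_wait_for_fifo() at lines 4944-4961 keeps a software count of free GT FIFO entries: each call consumes one (the decrement at line 4961), and only when the cached count has dropped below the reserved threshold does the code refresh it from hardware before continuing (lines 4948 and 4959). A sketch of that accounting, assuming a caller-supplied callback in place of the register read; the threshold value is illustrative and the driver's wait/retry handling is omitted.

/* Assumed name mirroring GT_FIFO_NUM_RESERVED_ENTRIES in the listing;
 * the value here is illustrative. */
#define FIFO_RESERVED_ENTRIES 20

struct gt_fifo {
    unsigned int count;                        /* cached free-entry count */
    unsigned int (*read_free_entries)(void);   /* stand-in for the register read */
};

static void gt_fifo_wait(struct gt_fifo *f)
{
    /* Refresh the cached count from hardware only when it has dropped
     * into the reserved region (compare lines 4948 and 4959). */
    if (f->count < FIFO_RESERVED_ENTRIES)
        f->count = f->read_free_entries();

    /* Each caller consumes one entry (line 4961). */
    f->count--;
}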
4966 static void vlv_force_wake_reset(struct drm_i915_private *dev_priv)
4973 static void vlv_force_wake_get(struct drm_i915_private *dev_priv)
4992 __gen6_gt_wait_for_thread_c0(dev_priv);
4995 static void vlv_force_wake_put(struct drm_i915_private *dev_priv)
5002 gen6_gt_check_fifodbg(dev_priv);
5007 struct drm_i915_private *dev_priv = dev->dev_private;
5010 vlv_force_wake_reset(dev_priv);
5012 __gen6_gt_force_wake_reset(dev_priv);
5014 __gen6_gt_force_wake_mt_reset(dev_priv);
5024 struct drm_i915_private *dev_priv = dev->dev_private;
5027 dev_priv->gt.force_wake_get = vlv_force_wake_get;
5028 dev_priv->gt.force_wake_put = vlv_force_wake_put;
5030 dev_priv->gt.force_wake_get = __gen6_gt_force_wake_mt_get;
5031 dev_priv->gt.force_wake_put = __gen6_gt_force_wake_mt_put;
5045 __gen6_gt_force_wake_mt_get(dev_priv);
5047 __gen6_gt_force_wake_mt_put(dev_priv);
5051 dev_priv->gt.force_wake_get =
5053 dev_priv->gt.force_wake_put =
5058 dev_priv->gt.force_wake_get = __gen6_gt_force_wake_get;
5059 dev_priv->gt.force_wake_put = __gen6_gt_force_wake_put;
5062 dev_priv->gt.force_wake_get = __gen6_gt_force_wake_get;
5063 dev_priv->gt.force_wake_put = __gen6_gt_force_wake_put;
5069 struct drm_i915_private *dev_priv = dev->dev_private;
5071 INIT_WORK(&dev_priv->rps.delayed_resume_work, intel_gen6_powersave_work);
5072 setup_timer(&dev_priv->rps.delayed_resume_timer, intel_gen6_powersave_work_timer,
5076 int sandybridge_pcode_read(struct drm_i915_private *dev_priv, u8 mbox, u32 *val)
5078 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
5100 int sandybridge_pcode_write(struct drm_i915_private *dev_priv, u8 mbox, u32 val)
5102 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));