Lines Matching refs:g7
62 rd %pc, %g7
1849 rdpr %tt, %g7
1859 wrpr %g7, %tt
1892 * only use g5, g6, g7 registers after we have switched to alternate
1898 rdpr %tt, %g7
1899 cmp %g7, FAST_IMMU_MISS_TT
1902 cmp %g7, T_INSTR_MMU_MISS
1906 cmp %g7, FAST_DMMU_MISS_TT
1908 cmp %g7, T_DATA_MMU_MISS
2424 * g3 - g7 = scratch registers
2430 mov %g2, %g7 ! TSB pointer macro clobbers tagacc
2437 GET_TSBE_POINTER(MMU_PAGESHIFT, %g1, %g7, %g3, %g5)
2440 mov %g2, %g7 ! TSB pointer macro clobbers tagacc
2447 GET_TSBE_POINTER(MMU_PAGESHIFT4M, %g3, %g7, %g6, %g5)
2450 CPU_TSBMISS_AREA(%g6, %g7)
2451 HAT_PERCPU_STAT16(%g6, TSBMISS_KPROTS, %g7)
2461 * g4 - g7 = scratch registers
2488 mov %g2, %g7
2489 GET_2ND_TSBE_PTR(%g7, %g1, %g3, %g4, %g5, sfmmu_uprot)
2490 /* %g3 = second TSB entry ptr now, %g7 clobbered */
2491 mov %g1, %g7
2492 GET_1ST_TSBE_PTR(%g7, %g1, %g5, sfmmu_uprot)
2496 CPU_TSBMISS_AREA(%g6, %g7)
2497 HAT_PERCPU_STAT16(%g6, TSBMISS_UPROTS, %g7)
2509 * %g7 = TSB tag to match (used)
2528 cmp %g4, %g7
2547 cmp %g4, %g7
2589 RUNTIME_PATCH_SETX(%g7, %g6)
2590 /* %g7 = contents of ktsb_base or ktsb_pbase */
2600 ldda [%g7 + %g1]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
2604 add %g7, %g1, %g1 /* form tsb ptr */
2637 RUNTIME_PATCH_SETX(%g7, %g6)
2638 /* %g7 = contents of ktsb4m_base or ktsb4m_pbase */
2648 ldda [%g7 + %g3]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
2654 add %g7, %g3, %g3 ! %g3 = kernel second TSB ptr
2697 * g7 = TSB tag to match
2702 PROBE_1ST_ITSB(%g1, %g7, uitlb_fast_8k_probefail)
2716 * g7 = TSB tag to match
2721 PROBE_1ST_DTSB(%g1, %g7, udtlb_fast_8k_probefail)
2740 * g7 = TSB tag to match
2746 PROBE_1ST_ITSB(%g1, %g7, uitlb_8k_probefail)
2751 srlx %g2, TAG_VALO_SHIFT, %g7
2752 PROBE_2ND_ITSB(%g3, %g7)
2779 * g7 = TSB tag to match
2790 PROBE_4TH_ITSB(%g6, %g7, uitlb_4m_scd_probefail)
2796 PROBE_2ND_ITSB(%g3, %g7, uitlb_4m_probefail)
2802 PROBE_3RD_ITSB(%g6, %g7, uitlb_8K_scd_probefail)
2809 PROBE_1ST_ITSB(%g1, %g7, uitlb_8k_probefail)
2811 mov %g3, %g7 /* copy tsb8k reg in %g7 */
2812 GET_2ND_TSBE_PTR(%g6, %g7, %g3, %g4, %g5, sfmmu_uitlb)
2814 srlx %g2, TAG_VALO_SHIFT, %g7
2815 PROBE_2ND_ITSB(%g3, %g7, isynth)
2836 * g7 = TSB tag to match
2856 PROBE_2ND_DTSB(%g3, %g7, udtlb_4m_probefail)
2866 PROBE_4TH_DTSB(%g6, %g7, udtlb_4m_shctx_probefail)
2876 PROBE_3RD_DTSB(%g6, %g7, udtlb_8k_shctx_probefail)
2887 * g7 = TSB tag to match
2904 PROBE_4TH_DTSB(%g6, %g7, udtlb_4m_shctx_probefail2)
2914 PROBE_2ND_DTSB(%g3, %g7, udtlb_4m_probefail2)
2917 PROBE_1ST_DTSB(%g1, %g7, udtlb_8k_first_probefail2)
2926 PROBE_3RD_DTSB(%g6, %g7, udtlb_8k_shctx_probefail2)
2946 * g7 = vpg_4m
2950 PROBE_1ST_DTSB(%g1, %g7, udtlb_first_probefail)
2963 PROBE_1ST_DTSB(%g1, %g7, udtlb_first_probefail)
2984 * g7 = vpg_4m
2991 mov %g3, %g7
2992 GET_2ND_TSBE_PTR(%g2, %g7, %g3, %g4, %g5, sfmmu_udtlb)
2998 srlx %g2, TAG_VALO_SHIFT, %g7
2999 PROBE_2ND_DTSB(%g3, %g7, udtlb_4m_probefail)
3000 /* g4 - g5 = clobbered here; %g7 still vpg_4m at this point */
3016 * g4 - g7 = scratch registers
3026 rdpr %tl, %g7
3027 cmp %g7, 1
3030 rdpr %tpc, %g7
3032 cmp %g7, %g6
3037 add %g7, RUNTIME_PATCH, %g7 /* must match TSTAT_TSBMISS_INSTR */
3038 wrpr %g7, %tpc
3039 add %g7, 4, %g7
3040 wrpr %g7, %tnpc
3042 CPU_TSBMISS_AREA(%g6, %g7)
3048 ldn [%g6 + TSBMISS_KHATID], %g7
3050 ldn [%g6 + TSBMISS_UHATID], %g7 /* g7 = hatid */
3056 stn %g7, [%g6 + (TSBMISS_SCRATCH + TSBMISS_HATID)]
3059 ldub [%g6 + TSBMISS_URTTEFLAGS], %g7 /* clear ctx1 flag set from */
3060 andn %g7, HAT_CHKCTX1_FLAG, %g7 /* the previous tsb miss */
3061 stub %g7, [%g6 + TSBMISS_URTTEFLAGS]
3064 ISM_CHECK(%g2, %g6, %g3, %g4, %g5, %g7, %g1, tsb_l1, tsb_ism)
3068 * %g1, %g3, %g4, %g5, %g7 all clobbered
3073 ldn [%g6 + (TSBMISS_SCRATCH + TSBMISS_HATID)], %g7
3082 GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3111 GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3128 GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3149 GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3167 GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3179 * g7 = hatid
3182 rdpr %tt, %g7
3198 ldx [%g6 + TSBMISS_SHARED_UHATID], %g7 /* g7 = srdp */
3199 brz,pn %g7, tsb_pagefault
3202 GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3217 GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3231 GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3246 GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3261 GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
3269 rdpr %tt, %g7
3275 * g7 = tt
3284 SAVE_CTX1(%g7, %g2, %g1, tsb_shmel)
3292 * g7 = tt
3298 cmp %g7, FAST_PROT_TT
3302 TTE_SET_REFMOD_ML(%g3, %g4, %g6, %g7, %g5, tsb_lset_refmod,
3305 GET_MMU_D_TTARGET(%g2, %g7) /* %g2 = ttarget */
3307 MMU_FAULT_STATUS_AREA(%g7)
3308 ldx [%g7 + MMFSA_D_ADDR], %g5 /* load fault addr for later */
3320 cmp %g7, T_INSTR_MMU_MISS
3323 cmp %g7, FAST_IMMU_MISS_TT
3334 TTE_SET_REF_ML(%g3, %g4, %g6, %g7, %g5, tsb_lset_ref)
3342 rdpr %tt, %g7
3345 cmp %g7, T_INSTR_MMU_MISS
3348 cmp %g7, FAST_IMMU_MISS_TT
3353 ldx [%g2 + MMFSA_CTX_], %g7
3354 sllx %g7, TTARGET_CTX_SHIFT, %g7
3358 or %g2, %g7, %g2
3361 cmp %g7, FAST_IMMU_MISS_TT
3372 srlx %g2, TTARGET_CTX_SHIFT, %g7
3373 brz,pn %g7, tsb_kernel
3375 and %g3, TTE_SZ_BITS, %g7 ! assumes TTE_SZ_SHFT is 0
3377 srlx %g3, TTE_SZ_SHFT, %g7
3382 cmp %g7, TTE4M
3386 cmp %g7, TTESZ_VALID | TTE4M
3388 srlx %g3, TTE_SZ2_SHFT, %g7
3389 andcc %g7, TTE_SZ2_BITS, %g7 ! check 32/256MB
3401 ldub [%g6 + TSBMISS_URTTEFLAGS], %g7
3402 and %g7, HAT_CHKCTX1_FLAG, %g1
3408 GET_3RD_TSBE_PTR(%g5, %g1, %g6, %g7)
3415 mov ASI_N, %g7 ! user TSBs accessed by VA
3416 mov %g7, %asi
3419 TSB_UPDATE_TL(%g1, %g3, %g2, %g4, %g7, %g6, locked_tsb_l3)
3441 ldub [%g6 + TSBMISS_URTTEFLAGS], %g7
3442 and %g7, HAT_CHKCTX1_FLAG, %g1
3448 GET_4TH_TSBE_PTR(%g5, %g1, %g6, %g7)
3458 mov ASI_N, %g7 ! user TSBs accessed by VA
3459 mov %g7, %asi
3462 TSB_UPDATE_TL(%g1, %g3, %g2, %g4, %g7, %g6, locked_tsb_l4)
3511 mov MMU_TAG_ACCESS, %g7
3512 ldxa [%g7]ASI_DMMU, %g6 /* get tag access va */
3513 GET_4M_PFN_OFF(%g3, %g6, %g5, %g7, 1) /* make 4M pfn offset */
3515 mov ASI_N, %g7 /* user TSBs always accessed by VA */
3516 mov %g7, %asi
3517 TSB_UPDATE_TL_PN(%g1, %g5, %g2, %g4, %g7, %g3, locked_tsb_l5) /* update TSB */
3525 mov MMU_TAG_ACCESS, %g7
3526 ldxa [%g7]ASI_IMMU, %g6 /* get tag access va */
3527 GET_4M_PFN_OFF(%g3, %g6, %g5, %g7, 2) /* make 4M pfn offset */
3531 mov ASI_N, %g7 /* user TSBs always accessed by VA */
3532 mov %g7, %asi
3533 TSB_UPDATE_TL_PN(%g1, %g5, %g2, %g4, %g7, %g3, locked_tsb_l6) /* update TSB */
3535 SET_TTE4M_PN(%g5, %g7) /* add TTE4M pagesize to TTE */
3543 cmp %g7, TTE4M
3546 cmp %g7, TTESZ_VALID | TTE4M ! no 32M or 256M support
3563 TSB_UPDATE_TL(%g1, %g3, %g2, %g4, %g7, %g6, locked_tsb_l7)
3592 ldna [%g4]ASI_MEM, %g7 /* g7 = ism hatid */
3593 brz,a,pn %g7, ptl1_panic /* if zero jmp ahead */
3626 * g7 = ISM hatid
3643 GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1, MMU_PAGESHIFT32M,
3650 rdpr %tt, %g7
3662 GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1, MMU_PAGESHIFT256M,
3668 rdpr %tt, %g7
3674 GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1, MMU_PAGESHIFT4M,
3681 rdpr %tt, %g7
3688 GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1, MMU_PAGESHIFT64K,
3695 rdpr %tt, %g7
3698 rdpr %tt, %g7
3699 cmp %g7, FAST_PROT_TT
3717 cmp %g7, FAST_IMMU_MISS_TT
3723 cmp %g7, T_INSTR_MMU_MISS
3730 cmp %g7, T_INSTR_MMU_MISS
4202 CPU_INDEX(%g7, %g6)
4204 sllx %g7, KPMTSBM_SHIFT, %g7
4206 add %g6, %g7, %g6 /* g6 = kpmtsbm ptr */
4215 ldx [%g6 + KPMTSBM_VBASE], %g7
4216 cmp %g2, %g7
4243 * g7 = kpm_vbase
4248 sub %g2, %g7, %g4 /* paddr = vaddr-kpm_vbase */
4260 PAGE_NUM2MEMSEG_NOLOCK_PA(%g2, %g3, %g6, %g4, %g5, %g7, kpmtsbmp2m)
4270 ldxa [%g3 + MEMSEG_KPM_PBASE]%asi, %g7
4273 sub %g4, %g7, %g4
4284 ld [%g6 + KPMTSBM_KPMPTABLESZ], %g7
4286 ld [%g6 + KPMTSBM_KPMPTABLESZ], %g7
4297 * g2=pfn g3=mseg_pa g4=offset g5=kp g7=kpmp_table_sz
4300 sub %g7, 1, %g7 /* mask */
4303 and %g5, %g7, %g5 /* hashinx = y & mask */
4343 GET_MMU_D_TTARGET(%g2, %g7) /* %g2 = ttarget */
4351 KPMLOCK_ENTER(%g3, %g7, kpmtsbmhdlr1, ASI_MEM)
4354 ldsha [%g1 + KPMPAGE_REFCNTC]%asi, %g7 /* kp_refcntc */
4355 cmp %g7, -1
4361 ldsha [%g1 + KPMPAGE_REFCNT]%asi, %g7
4362 brz,pn %g7, 5f /* let C-handler deal with this */
4367 ldub [%g6 + KPMTSBM_FLAGS], %g7
4369 andcc %g7, KPMTSBM_TSBPHYS_FLAG, %g0
4379 TSB_LOCK_ENTRY(%g4, %g1, %g7, locked_tsb_l1)
4382 TSB_INSERT_UNLOCK_ENTRY(%g4, %g5, %g2, %g7)
4395 rdpr %tl, %g7
4396 cmp %g7, 1
4399 rdpr %tpc, %g7
4401 cmp %g7, %g6
4404 add %g7, RUNTIME_PATCH, %g7 /* must match TSTAT_TSBMISS_INSTR */
4405 wrpr %g7, %tpc
4406 add %g7, 4, %g7
4407 wrpr %g7, %tnpc
4425 CPU_INDEX(%g7, %g6)
4427 sllx %g7, KPMTSBM_SHIFT, %g7
4429 add %g6, %g7, %g6 /* g6 = kpmtsbm ptr */
4441 ldx [%g6 + KPMTSBM_VBASE], %g7
4442 cmp %g2, %g7
4469 * g7 = kpm_vbase
4478 sub %g2, %g7, %g4 /* paddr = vaddr-kpm_vbase */
4479 srax %g4, %g3, %g7 /* which alias range (r) */
4480 brz,pt %g7, 2f
4486 sllx %g7, %g3, %g5 /* g5 = r << kpm_size_shift */
4487 cmp %g7, %g1 /* if (r > v) */
4490 sub %g7, %g1, %g5 /* g5 = r - v */
4491 sllx %g5, MMU_PAGESHIFT, %g7 /* (r-v) << MMU_PAGESHIFT */
4492 add %g4, %g7, %g4 /* paddr += (r-v)<<MMU_PAGESHIFT */
4496 sllx %g7, MMU_PAGESHIFT, %g5 /* else */
4507 * g7 = clobbered
4517 * g4 g5 g7 for scratch use.
4520 PAGE_NUM2MEMSEG_NOLOCK_PA(%g2, %g3, %g6, %g4, %g5, %g7, kpmtsbmsp2m)
4529 ldxa [%g3 + MEMSEG_KPM_PBASE]%asi, %g7
4530 sub %g2, %g7, %g4
4540 ld [%g6 + KPMTSBM_KPMPTABLESZ], %g7
4542 ld [%g6 + KPMTSBM_KPMPTABLESZ], %g7
4551 * g6=per-CPU kpm tsbmiss area g7=kpmp_stable_sz
4554 sub %g7, 1, %g7 /* mask */
4557 and %g5, %g7, %g5 /* hashinx = y & mask */
4589 GET_MMU_D_TTARGET(%g2, %g7) /* %g2 = ttarget */
4594 * g6=per-CPU kpm tsbmiss area g7=scratch register
4598 KPMLOCK_ENTER(%g3, %g7, kpmtsbsmlock, ASI_MEM)
4601 ldsba [%g1 + KPMSPAGE_MAPPED]%asi, %g7 /* kp_mapped */
4602 andcc %g7, KPM_MAPPED_GO, %g0 /* go or no go ? */
4605 and %g7, KPM_MAPPED_MASK, %g7 /* go */
4606 cmp %g7, KPM_MAPPEDS /* cacheable ? */
4611 ldub [%g6 + KPMTSBM_FLAGS], %g7
4613 andcc %g7, KPMTSBM_TSBPHYS_FLAG, %g0
4623 TSB_LOCK_ENTRY(%g4, %g1, %g7, locked_tsb_l2)
4626 TSB_INSERT_UNLOCK_ENTRY(%g4, %g5, %g2, %g7)
4639 rdpr %tl, %g7
4640 cmp %g7, 1
4643 rdpr %tpc, %g7
4645 cmp %g7, %g6
4648 add %g7, RUNTIME_PATCH, %g7 /* must match TSTAT_TSBMISS_INSTR */
4649 wrpr %g7, %tpc
4650 add %g7, 4, %g7
4651 wrpr %g7, %tnpc
4799 mov %g2, %g7 ! TSB pointer macro clobbers tagacc
4805 GET_TSBE_POINTER(MMU_PAGESHIFT, %g1, %g7, %g3, %g5)
4808 mov %g2, %g7 ! TSB pointer macro clobbers tagacc
4814 GET_TSBE_POINTER(MMU_PAGESHIFT4M, %g3, %g7, %g6, %g5)
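
Nearly every %g7 use above is one step of the same TSB-probe pattern: form a
TSB entry pointer from the faulting address (the GET_*_TSBE_PTR lines), load
the entry's tag and data as a pair (the "ldda ... ! %g4 = tag, %g5 = data"
lines), and compare the loaded tag against a tag target held in %g7
("cmp %g4, %g7"). The tag target packs the context number above the shifted
VA, as the "sllx %g7, TTARGET_CTX_SHIFT" / "or %g2, %g7, %g2" lines show.
Below is a minimal C sketch of that arithmetic only; the constant values are
illustrative stand-ins rather than the kernel's real definitions, and
tag_target/tsb_probe are hypothetical helper names, not sfmmu functions.

    #include <stdint.h>
    #include <stddef.h>

    /* Illustrative values; the real ones come from the sfmmu headers. */
    #define TAG_VALO_SHIFT      22   /* assumed VA-to-tag shift            */
    #define TTARGET_CTX_SHIFT   48   /* assumed ctx position in tag target */
    #define MMU_PAGESHIFT       13   /* 8K base page                       */

    typedef struct tsbe {
            uint64_t tag;            /* what the ldda loads into %g4 */
            uint64_t data;           /* ...and into %g5 (the TTE)    */
    } tsbe_t;

    /*
     * Build the tag target the way the handlers build it in %g7/%g2:
     * context number shifted above the high bits of the VA.
     */
    static uint64_t
    tag_target(uint64_t ctx, uint64_t va)
    {
            return ((ctx << TTARGET_CTX_SHIFT) | (va >> TAG_VALO_SHIFT));
    }

    /*
     * Probe one TSB: index by the VPN masked with (nentries - 1), where
     * nentries is a power of two, then match the stored tag against the
     * tag target.  Returns the TTE on a hit and 0 on a miss; the real
     * handlers branch on the TTE valid bit and fall through to the next
     * TSB or to the C handler instead.
     */
    static uint64_t
    tsb_probe(const tsbe_t *tsb, size_t nentries, uint64_t ctx, uint64_t va)
    {
            size_t idx = (va >> MMU_PAGESHIFT) & (nentries - 1);
            const tsbe_t *e = &tsb[idx];

            if (e->tag == tag_target(ctx, va))
                    return (e->data);
            return (0);
    }

The real handlers perform the same probe through physical or quadrant ASIs
and wrap updates in the TSB_LOCK_ENTRY/TSB_UPDATE_TL locked-entry protocol;
the sketch covers only the pointer-and-tag arithmetic those macros share.
The (nentries - 1) masking is the same power-of-two "sub ..., 1" / "and"
idiom the kpm hash lines at 4300/4303 and 4554/4557 apply to their
kpmp table sizes.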