Lines Matching refs: g5
676 * %g5 = sfmmu gnum returned
683 SFMMU_MMUID_GNUM_CNUM(%g2, %g5, %g6, %g4)
706 cmp %g5, %o4
729 * %g5 = sfmmu gnum returned
734 SFMMU_MMUID_GNUM_CNUM(%g2, %g5, %g6, %g4)
750 cmp %g5, %o4
1421 MAKE_JMP_INSTR(5, %o1, %o2) ! jmp %g5
1727 USE_ALTERNATE_GLOBALS(%g5)
1728 GET_MMU_BOTH_TAGACC(%g5 /*dtag*/, %g2 /*itag*/, %g6, %g4)
1736 mov %g5, %g2
1768 USE_ALTERNATE_GLOBALS(%g5)
1769 GET_MMU_BOTH_TAGACC(%g5 /*dtag*/, %g2 /*itag*/, %g4, %g6)
1777 mov %g5, %g2
1797 USE_ALTERNATE_GLOBALS(%g5)
1798 GET_MMU_BOTH_TAGACC(%g5 /*dtag*/, %g2 /*itag*/, %g4, %g3)
1803 mov %g5, %g2
1851 rdpr %tl, %g5
1852 sub %g5, 1, %g3
1855 wrpr %g5, %tl
1862 rdpr %tl, %g5
1863 sub %g5, 1, %g3
1866 wrpr %g5, %tl
1888 * g5 = trap type, g6 = tag access reg
1892 * only use g5, g6, g7 registers after we have switched to alternate
1896 USE_ALTERNATE_GLOBALS(%g5)
1897 GET_MMU_D_TAGACC(%g6 /*dtag*/, %g5 /*scratch*/)
1905 mov T_DATA_PROT, %g5
1907 move %icc, T_DATA_MMU_MISS, %g5
1909 move %icc, T_DATA_MMU_MISS, %g5
1950 USE_ALTERNATE_GLOBALS(%g5)
2437 GET_TSBE_POINTER(MMU_PAGESHIFT, %g1, %g7, %g3, %g5)
2447 GET_TSBE_POINTER(MMU_PAGESHIFT4M, %g3, %g7, %g6, %g5)
2465 GET_1ST_TSBE_PTR(%g2, %g1, %g4, %g5)
2472 GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
2482 GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
2489 GET_2ND_TSBE_PTR(%g7, %g1, %g3, %g4, %g5, sfmmu_uprot)
2492 GET_1ST_TSBE_PTR(%g7, %g1, %g5, sfmmu_uprot)
2521 RUNTIME_PATCH_SETX(%g4, %g5)
2527 ldda [%g1]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
2532 andcc %g5, TTE_EXECPRM_INT, %g0 ! check exec bit
2536 ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
2546 ldda [%g3]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
2549 andcc %g5, TTE_EXECPRM_INT, %g0 ! check exec bit
2553 ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
2579 KPM_TLBMISS_STAT_INCR(%g2, %g4, %g5, %g6, kpmtlbm_stat_out)
2600 ldda [%g7 + %g1]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
2606 DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
2607 /* trapstat expects tte in %g5 */
2648 ldda [%g7 + %g3]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
2657 DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
2658 /* trapstat expects tte in %g5 */
2703 /* g4 - g5 = clobbered by PROBE_1ST_ITSB */
2722 /* g4 - g5 = clobbered by PROBE_1ST_DTSB */
2745 GET_1ST_TSBE_PTR(%g2, %g1, %g4, %g5)
2747 /* g4 - g5 = clobbered here */
2749 GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
2789 GET_4TH_TSBE_PTR(%g2, %g6, %g4, %g5)
2795 GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
2801 GET_3RD_TSBE_PTR(%g2, %g6, %g4, %g5)
2808 GET_1ST_TSBE_PTR(%g3, %g1, %g5, sfmmu_uitlb)
2812 GET_2ND_TSBE_PTR(%g6, %g7, %g3, %g4, %g5, sfmmu_uitlb)
2855 GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
2865 GET_4TH_TSBE_PTR(%g2, %g6, %g4, %g5)
2875 GET_3RD_TSBE_PTR(%g2, %g6, %g4, %g5)
2903 GET_4TH_TSBE_PTR(%g2, %g6, %g4, %g5)
2913 GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
2925 GET_3RD_TSBE_PTR(%g2, %g6, %g4, %g5)
2949 GET_1ST_TSBE_PTR(%g2, %g1, %g4, %g5)
2962 GET_1ST_TSBE_PTR(%g4, %g1, %g5, sfmmu_udtlb)
2988 GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
2992 GET_2ND_TSBE_PTR(%g2, %g7, %g3, %g4, %g5, sfmmu_udtlb)
3000 /* g4 - g5 = clobbered here; %g7 still vpg_4m at this point */
3052 HAT_PERCPU_STAT32(%g6, TSBMISS_UTSBMISS, %g5)
3064 ISM_CHECK(%g2, %g6, %g3, %g4, %g5, %g7, %g1, tsb_l1, tsb_ism)
3068 * %g1 %g3, %g4, %g5, %g7 all clobbered
3076 HAT_PERCPU_STAT32(%g6, TSBMISS_KTSBMISS, %g5)
3083 MMU_PAGESHIFT64K, TTE64K, %g5, tsb_l8K, tsb_checktte,
3088 sllx %g2, TAGACC_CTX_LSHIFT, %g5
3089 brz,pn %g5, 3f
3091 and %g4, HAT_512K_FLAG, %g5
3104 brz,pn %g5, tsb_4M
3112 MMU_PAGESHIFT512K, TTE512K, %g5, tsb_l512K, tsb_checktte,
3117 sllx %g2, TAGACC_CTX_LSHIFT, %g5
3118 brz,pn %g5, 4f
3120 and %g4, HAT_4M_FLAG, %g5
3121 brz,pn %g5, tsb_32M
3129 MMU_PAGESHIFT4M, TTE4M, %g5, tsb_l4M, tsb_checktte,
3134 sllx %g2, TAGACC_CTX_LSHIFT, %g5
3136 brz,pn %g5, 6f
3138 brz,pn %g5, tsb_pagefault
3141 and %g4, HAT_32M_FLAG, %g5
3142 brz,pn %g5, tsb_256M
3150 MMU_PAGESHIFT32M, TTE32M, %g5, tsb_l32M, tsb_checktte,
3159 and %g4, HAT_256M_FLAG, %g5
3160 brz,pn %g5, tsb_shme
3168 MMU_PAGESHIFT256M, TTE256M, %g5, tsb_l256M, tsb_checktte,
3195 sllx %g2, TAGACC_CTX_LSHIFT, %g5
3196 brz,pn %g5, tsb_pagefault
3203 MMU_PAGESHIFT64K, TTE64K, %g5, tsb_shme_l8K, tsb_shme_checktte,
3209 and %g4, HAT_512K_FLAG, %g5
3210 brz,pn %g5, tsb_shme_4M
3218 MMU_PAGESHIFT512K, TTE512K, %g5, tsb_shme_l512K, tsb_shme_checktte,
3224 and %g4, HAT_4M_FLAG, %g5
3225 brz,pn %g5, tsb_shme_32M
3232 MMU_PAGESHIFT4M, TTE4M, %g5, tsb_shme_l4M, tsb_shme_checktte,
3238 and %g4, HAT_32M_FLAG, %g5
3239 brz,pn %g5, tsb_shme_256M
3247 MMU_PAGESHIFT32M, TTE32M, %g5, tsb_shme_l32M, tsb_shme_checktte,
3253 and %g4, HAT_256M_FLAG, %g5
3254 brz,pn %g5, tsb_pagefault
3262 MMU_PAGESHIFT256M, TTE256M, %g5, tsb_shme_l256M, tsb_shme_checktte,
3302 TTE_SET_REFMOD_ML(%g3, %g4, %g6, %g7, %g5, tsb_lset_refmod,
3308 ldx [%g7 + MMFSA_D_ADDR], %g5 /* load fault addr for later */
3310 mov MMU_TAG_ACCESS, %g5
3311 ldxa [%g5]ASI_DMMU, %g5
3334 TTE_SET_REF_ML(%g3, %g4, %g6, %g7, %g5, tsb_lset_ref)
3356 mov %g2, %g5 ! load the fault addr for later use
3360 mov MMU_TAG_ACCESS, %g5
3366 ldxa [%g5]ASI_DMMU, %g5
3368 ldxa [%g5]ASI_IMMU, %g5
3408 GET_3RD_TSBE_PTR(%g5, %g1, %g6, %g7)
3421 rdpr %tt, %g5
3423 cmp %g5, T_INSTR_MMU_MISS
3425 mov %g3, %g5
3427 cmp %g5, FAST_IMMU_MISS_TT
3429 mov %g3, %g5
3431 DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
3432 ! trapstat wants TTE in %g5
3435 ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
3436 ! trapstat wants TTE in %g5
3448 GET_4TH_TSBE_PTR(%g5, %g1, %g6, %g7)
3465 rdpr %tt, %g5
3467 cmp %g5, T_INSTR_MMU_MISS
3469 mov %g3, %g5
3471 cmp %g5, FAST_IMMU_MISS_TT
3473 mov %g3, %g5
3475 DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
3476 ! trapstat wants TTE in %g5
3479 ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
3480 ! trapstat wants TTE in %g5
3498 * g5 = tt
3502 rdpr %tt, %g5
3503 cmp %g5, FAST_IMMU_MISS_TT
3509 mov %g3, %g5
3513 GET_4M_PFN_OFF(%g3, %g6, %g5, %g7, 1) /* make 4M pfn offset */
3517 TSB_UPDATE_TL_PN(%g1, %g5, %g2, %g4, %g7, %g3, locked_tsb_l5) /* update TSB */
3519 DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
3527 GET_4M_PFN_OFF(%g3, %g6, %g5, %g7, 2) /* make 4M pfn offset */
3529 or %g5, %g3, %g5 /* add 4M bits to TTE */
3533 TSB_UPDATE_TL_PN(%g1, %g5, %g2, %g4, %g7, %g3, locked_tsb_l6) /* update TSB */
3535 SET_TTE4M_PN(%g5, %g7) /* add TTE4M pagesize to TTE */
3536 ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
3541 rdpr %tt, %g5
3566 cmp %g5, T_INSTR_MMU_MISS
3568 mov %g3, %g5 ! trapstat wants TTE in %g5
3570 cmp %g5, FAST_IMMU_MISS_TT
3572 mov %g3, %g5 ! trapstat wants TTE in %g5
3573 DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
3574 ! trapstat wants TTE in %g5
3577 ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
3578 ! trapstat wants TTE in %g5
3595 /* g5 = pa of imap_vb_shift */
3596 sub %g4, (IMAP_ISMHAT - IMAP_VB_SHIFT), %g5
3597 lduba [%g5]ASI_MEM, %g4 /* g4 = imap_vb_shift */
3605 sub %g5, (IMAP_VB_SHIFT - IMAP_HATFLAGS), %g5
3606 lduha [%g5]ASI_MEM, %g4 /* g5 = pa of imap_hatflags */
3608 and %g4, HAT_CTX1_FLAG, %g5 /* g5 = imap_hatflags & HAT_CTX1_FLAG */
3609 brz,pt %g5, tsb_chk4M_ism
3611 ldub [%g6 + TSBMISS_URTTEFLAGS], %g5
3612 or %g5, HAT_CHKCTX1_FLAG, %g5
3613 stub %g5, [%g6 + TSBMISS_URTTEFLAGS]
3614 rdpr %tt, %g5
3615 SAVE_CTX1(%g5, %g3, %g1, tsb_shctxl)
3630 and %g4, HAT_4M_FLAG, %g5 /* g4 = imap_hatflags */
3631 brnz,pt %g5, tsb_ism_4M /* branch if 4M pages */
3635 and %g4, HAT_32M_FLAG, %g5 /* check default 32M next */
3636 brz,pn %g5, tsb_ism_256M
3644 TTE32M, %g5, tsb_ism_l32M, tsb_ism_32M_found, sfmmu_suspend_tl,
3655 and %g4, HAT_256M_FLAG, %g5 /* 256M is last resort */
3656 brz,a,pn %g5, ptl1_panic
3663 TTE256M, %g5, tsb_ism_l256M, tsb_ism_256M_found, sfmmu_suspend_tl,
3675 TTE4M, %g5, tsb_ism_l4M, tsb_ism_4M_found, sfmmu_suspend_tl,
3689 TTE64K, %g5, tsb_ism_l8K, tsb_ism_8K_found, sfmmu_suspend_tl,
3720 ldx [%g4 + MMFSA_I_CTX], %g5
3722 move %icc, %g5, %g4
3724 move %icc, %g5, %g4
3728 ldxa [%g4]ASI_IMMU, %g5
3729 move %icc, %g5, %g2
3731 move %icc, %g5, %g2
3735 rdpr %tl, %g5
3744 cmp %g5, 1
3778 cmp %g5, 1
3819 CPU_INDEX(%g5, %g6)
3821 sllx %g5, CPU_CORE_SHIFT, %g5
3822 add %g5, %g6, %g5
3823 lduh [%g5 + CPUC_DTRACE_FLAGS], %g6
3827 stuh %g6, [%g5 + CPUC_DTRACE_FLAGS]
3829 stx %g3, [%g5 + CPUC_DTRACE_ILLVAL]
3836 rdpr %tstate, %g5
3837 btst TSTATE_PRIV, %g5
3845 rdpr %tt, %g5
3846 cmp %g5, FAST_IMMU_MISS_TT
3851 cmp %g5, T_INSTR_MMU_MISS
3865 rdpr %tl, %g5
3866 cmp %g5, 1
3920 mov TTE64K, %g5 /* g5 = rehash # */
3938 * g5 = rehash #
3947 GET_TTE(%o0, %o4, %g1, %g2, %o5, %g4, %g6, %g5, %g3,
3981 cmp %g5, MAX_HASHCNT
3983 cmp %g5, DEFAULT_MAX_HASHCNT /* no 32/256M kernel pages */
3989 add %g5, 2, %g5
3990 cmp %g5, 3
3995 inc %g5
3996 cmp %g5, 2
4088 GET_TTE(%o0, %o4, %g3, %g4, %g1, %o5, %g6, %o1, %g5,
4099 TTETOPFN(%g3, %o0, kvaszc2pfn_l2, %g2, %g4, %g5)
4210 and %g4, KPMTSBM_ENABLE_FLAG, %g5
4211 brz,pn %g5, sfmmu_tsb_miss /* if kpm not enabled */
4218 ldx [%g6 + KPMTSBM_VEND], %g5
4219 cmp %g2, %g5
4227 lduw [%g6 + KPMTSBM_TSBMISS], %g5
4230 inc %g5
4232 st %g5, [%g6 + KPMTSBM_TSBMISS]
4234 inc %g5
4235 st %g5, [%g6 + KPMTSBM_TSBMISS]
4260 PAGE_NUM2MEMSEG_NOLOCK_PA(%g2, %g3, %g6, %g4, %g5, %g7, kpmtsbmp2m)
4269 ldub [%g6 + KPMTSBM_KPMP2PSHFT], %g5
4271 srlx %g2, %g5, %g4
4272 sllx %g4, %g5, %g4
4274 srlx %g4, %g5, %g4
4281 ldxa [%g3 + MEMSEG_KPM_NKPMPGS]%asi, %g5
4282 cmp %g4, %g5 /* inx - nkpmpgs */
4292 ldxa [%g3 + MEMSEG_KPM_PAGES]%asi, %g5 /* kpm_pages */
4293 add %g5, %g4, %g5 /* kp */
4297 * g2=pfn g3=mseg_pa g4=offset g5=kp g7=kpmp_table_sz
4301 srlx %g5, %g1, %g1 /* x = kp >> kpmp_shift */
4302 add %g5, %g1, %g5 /* y = kp + x */
4303 and %g5, %g7, %g5 /* hashinx = y & mask */
4307 * g2=pfn g3=mseg_pa g4=offset g5=hashinx
4314 * g1=kp_refcntc_pa g2=pfn g5=hashinx
4317 sllx %g5, KPMHLK_SHIFT, %g5
4318 add %g4, %g5, %g3
4326 sethi %hi(TTE_VALID_INT), %g5 /* upper part */
4327 sllx %g5, 32, %g5
4330 or %g5, %g4, %g5
4333 mov TTE4M, %g5
4334 sllx %g5, TTE_SZ_SHFT_INT, %g5
4335 or %g5, %g4, %g5 /* upper part */
4336 sllx %g5, 32, %g5
4338 or %g5, %g4, %g5
4341 or %g5, %g4, %g5 /* tte */
4347 * g1=kp_pa g2=ttarget g3=hlck_pa g4=kpmtsbp4m g5=tte g6=kpmtsbm_area
4382 TSB_INSERT_UNLOCK_ENTRY(%g4, %g5, %g2, %g7)
4384 DTLB_STUFF(%g5, %g1, %g2, %g4, %g6)
4393 * Note: TTE is expected in %g5 (allows per pagesize reporting).
4433 and %g4, KPMTSBM_ENABLE_FLAG, %g5
4434 brz,pn %g5, sfmmu_tsb_miss /* if kpm not enabled */
4444 ldx [%g6 + KPMTSBM_VEND], %g5
4445 cmp %g2, %g5
4453 lduw [%g6 + KPMTSBM_TSBMISS], %g5
4456 inc %g5
4458 st %g5, [%g6 + KPMTSBM_TSBMISS]
4460 inc %g5
4461 st %g5, [%g6 + KPMTSBM_TSBMISS]
4481 sethi %hi(vac_colors_mask), %g5
4482 ld [%g5 + %lo(vac_colors_mask)], %g5
4485 and %g1, %g5, %g1 /* g1 = v */
4486 sllx %g7, %g3, %g5 /* g5 = r << kpm_size_shift */
4489 sub %g4, %g5, %g4 /* paddr -= r << kpm_size_shift */
4490 sub %g7, %g1, %g5 /* g5 = r - v */
4491 sllx %g5, MMU_PAGESHIFT, %g7 /* (r-v) << MMU_PAGESHIFT */
4496 sllx %g7, MMU_PAGESHIFT, %g5 /* else */
4497 sub %g4, %g5, %g4 /* paddr -= r << MMU_PAGESHIFT */
4505 * g5 = clobbered
4517 * g4 g5 g7 for scratch use.
4520 PAGE_NUM2MEMSEG_NOLOCK_PA(%g2, %g3, %g6, %g4, %g5, %g7, kpmtsbmsp2m)
4537 ldxa [%g3 + MEMSEG_KPM_NKPMPGS]%asi, %g5
4538 cmp %g4, %g5 /* inx - nkpmpgs */
4545 ldxa [%g3 + MEMSEG_KPM_SPAGES]%asi, %g5
4546 add %g5, %g4, %g5 /* ksp */
4550 * g2=pfn g3=mseg_pa g4=inx g5=ksp
4555 sllx %g5, %g1, %g1 /* x = ksp << kpmp_shift */
4556 add %g5, %g1, %g5 /* y = ksp + x */
4557 and %g5, %g7, %g5 /* hashinx = y & mask */
4561 * g2=pfn g3=mseg_pa g4=offset g5=hashinx
4570 * g1=ksp_pa g2=pfn g5=hashinx
4574 sllx %g5, KPMSHLK_SHIFT, %g5
4575 add %g4, %g5, %g3 /* hlck_pa */
4582 sethi %hi(TTE_VALID_INT), %g5 /* upper part */
4583 sllx %g5, 32, %g5
4585 or %g5, %g4, %g5
4587 or %g5, %g4, %g5 /* tte */
4593 * g1=ksp_pa g2=ttarget g3=hlck_pa g4=ktsbp g5=tte (non-cacheable)
4608 or %g5, TTE_CV_INT, %g5 /* cacheable */
4626 TSB_INSERT_UNLOCK_ENTRY(%g4, %g5, %g2, %g7)
4628 DTLB_STUFF(%g5, %g2, %g4, %g5, %g6)
4637 * Note: TTE is expected in %g5 (allows per pagesize reporting).
4805 GET_TSBE_POINTER(MMU_PAGESHIFT, %g1, %g7, %g3, %g5)
4814 GET_TSBE_POINTER(MMU_PAGESHIFT4M, %g3, %g7, %g6, %g5)
4826 GET_1ST_TSBE_PTR(%g2, %g1, %g4, %g5)
4833 GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
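The register comments at lines 4296-4303 spell out the kpm_page hash-index computation step by step: x = kp >> kpmp_shift, y = kp + x, hashinx = y & mask. As a reading aid only, the C sketch below restates that sequence; it is a reconstruction from those comments, not code taken from the source file, the helper name kpmp_hashinx is made up, and it assumes the mask is kpmp_table_sz - 1 (the instruction that builds the mask does not reference %g5 and therefore does not appear in this listing).

#include <stdint.h>

/*
 * Hypothetical helper mirroring the annotated sequence above:
 *   x       = kp >> kpmp_shift   (srlx, line 4301)
 *   y       = kp + x             (add,  line 4302)
 *   hashinx = y & mask           (and,  line 4303)
 * Assumes mask = kpmp_table_sz - 1, i.e. a power-of-two table size.
 */
static inline uint64_t
kpmp_hashinx(uint64_t kp, unsigned int kpmp_shift, uint64_t kpmp_table_sz)
{
	uint64_t x = kp >> kpmp_shift;
	uint64_t y = kp + x;

	return (y & (kpmp_table_sz - 1));
}

The listing then shows the resulting index being shifted by KPMHLK_SHIFT (line 4317) and added to a base to form hlck_pa (line 4318, see the register summary at line 4347).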