Lines Matching refs:t_afsr

359 volatile uint64_t t_afsr;
375 t_afsr = *ue_reg;
378 *ue_reg = t_afsr;
388 offset = ((t_afsr & SB_UE_AFSR_OFF) >> SB_UE_DW_SHIFT) * 8;
392 ecc.flt_stat = t_afsr;
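
The hits at 359-392 are the UE interrupt path: the AFSR is read once into the local t_afsr, written straight back through ue_reg to clear the latched error, and the doubleword offset is computed from the OFF field before the value is stashed in ecc.flt_stat. Below is a minimal standalone sketch of that capture-and-clear sequence; the register is simulated by a plain variable and the SB_UE_* values are placeholder assumptions, not the real SYSIO field layout.

```c
#include <stdint.h>
#include <stdio.h>

/* Placeholder field definitions -- not the real SYSIO AFSR layout. */
#define SB_UE_AFSR_OFF	0x00000000e0000000ULL	/* assumed doubleword field */
#define SB_UE_DW_SHIFT	29			/* assumed shift */

/* Stand-in for the memory-mapped UE AFSR the driver reads via ue_reg. */
static volatile uint64_t fake_ue_reg = 0x8000000060000000ULL;

int
main(void)
{
	volatile uint64_t t_afsr;
	uint64_t offset;

	t_afsr = fake_ue_reg;	/* capture the latched error status */
	fake_ue_reg = t_afsr;	/* write it back to clear the register */

	/* Byte offset of the failing doubleword, as at line 388. */
	offset = ((t_afsr & SB_UE_AFSR_OFF) >> SB_UE_DW_SHIFT) * 8;

	printf("AFSR %08x.%08x, offset %llu\n",
	    (uint32_t)(t_afsr >> 32), (uint32_t)t_afsr,
	    (unsigned long long)offset);
	return (0);
}
```
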
430 uint64_t t_afsr = ecc->flt_stat;
436 if (t_afsr & SB_UE_AFSR_P_PIO) {
439 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
442 if (t_afsr & SB_UE_AFSR_P_DRD) {
445 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
448 if (t_afsr & SB_UE_AFSR_P_DWR) {
451 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
457 if (t_afsr & SB_UE_AFSR_S_PIO) {
460 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
463 if (t_afsr & SB_UE_AFSR_S_DRD) {
466 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
469 if (t_afsr & SB_UE_AFSR_S_DWR) {
472 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
479 (uint32_t)((t_afsr & SB_UE_AFSR_OFF) >> SB_UE_DW_SHIFT),
480 (uint32_t)((t_afsr & SB_UE_AFSR_SIZE) >> SB_UE_SIZE_SHIFT),
481 (uint32_t)((t_afsr & SB_UE_AFSR_MID) >> SB_UE_MID_SHIFT));
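
Lines 430-481 are the UE reporting routine: the saved flt_stat is tested against the primary (P_) and secondary (S_) PIO, DMA-read and DMA-write bits, each message prints the 64-bit AFSR as two 32-bit halves (high.low), and the OFF/SIZE/MID fields are decoded at the end. A hedged sketch of that decode follows, with printf standing in for cmn_err() and assumed bit positions; only the primary bits are shown because the secondary cases repeat the pattern.

```c
#include <stdint.h>
#include <stdio.h>

/* Assumed bit positions, for illustration only. */
#define SB_UE_AFSR_P_PIO	(1ULL << 63)	/* primary PIO UE       */
#define SB_UE_AFSR_P_DRD	(1ULL << 62)	/* primary DMA read UE  */
#define SB_UE_AFSR_P_DWR	(1ULL << 61)	/* primary DMA write UE */
#define SB_UE_AFSR_OFF		0x00000000e0000000ULL
#define SB_UE_DW_SHIFT		29
#define SB_UE_AFSR_SIZE		0x0000000700000000ULL
#define SB_UE_SIZE_SHIFT	32
#define SB_UE_AFSR_MID		0x000000f800000000ULL
#define SB_UE_MID_SHIFT		35

static void
log_ue(int inst, uint64_t t_afsr)
{
	/*
	 * Each error class gets its own message; the 64-bit AFSR is
	 * printed as two 32-bit halves.  The secondary (S_) bits are
	 * handled the same way in the driver and are omitted here.
	 */
	if (t_afsr & SB_UE_AFSR_P_PIO)
		printf("sbus%d: UE on PIO, AFSR %08x.%08x\n",
		    inst, (uint32_t)(t_afsr >> 32), (uint32_t)t_afsr);
	if (t_afsr & SB_UE_AFSR_P_DRD)
		printf("sbus%d: UE on DMA read, AFSR %08x.%08x\n",
		    inst, (uint32_t)(t_afsr >> 32), (uint32_t)t_afsr);
	if (t_afsr & SB_UE_AFSR_P_DWR)
		printf("sbus%d: UE on DMA write, AFSR %08x.%08x\n",
		    inst, (uint32_t)(t_afsr >> 32), (uint32_t)t_afsr);

	/* Field decode, as at lines 479-481. */
	printf("sbus%d: doubleword %x, size %x, MID %x\n", inst,
	    (uint32_t)((t_afsr & SB_UE_AFSR_OFF) >> SB_UE_DW_SHIFT),
	    (uint32_t)((t_afsr & SB_UE_AFSR_SIZE) >> SB_UE_SIZE_SHIFT),
	    (uint32_t)((t_afsr & SB_UE_AFSR_MID) >> SB_UE_MID_SHIFT));
}

int
main(void)
{
	log_ue(0, SB_UE_AFSR_P_DRD | (2ULL << SB_UE_DW_SHIFT) |
	    (3ULL << SB_UE_SIZE_SHIFT) | (9ULL << SB_UE_MID_SHIFT));
	return (0);
}
```
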
492 volatile uint64_t t_afsr;
499 t_afsr = *ce_reg;
502 *ce_reg = t_afsr;
512 offset = ((t_afsr & SB_UE_AFSR_OFF) >> SB_UE_DW_SHIFT) * 8;
516 ecc.flt_stat = t_afsr;
523 ecc.flt_synd = (ushort_t)((t_afsr & SB_CE_AFSR_SYND) >>
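
Lines 492-523 repeat the same capture-and-clear sequence for the correctable-error AFSR and additionally pull the ECC syndrome out of the SYND field into flt_synd; note that the listing shows the CE offset computed with the SB_UE_* offset macros. The sketch below mirrors that, again with assumed field positions and a plain variable standing in for the device register.

```c
#include <stdint.h>
#include <stdio.h>

typedef unsigned short ushort_t;	/* matches the driver's ushort_t use */

/* Assumed field layout, for illustration only. */
#define SB_UE_AFSR_OFF		0x00000000e0000000ULL
#define SB_UE_DW_SHIFT		29
#define SB_CE_AFSR_SYND		0x00ff000000000000ULL
#define SB_CE_SYND_SHIFT	48

/* Simulated CE AFSR: syndrome 0x42, doubleword 5. */
static volatile uint64_t fake_ce_reg =
    (0x42ULL << SB_CE_SYND_SHIFT) | (5ULL << SB_UE_DW_SHIFT);

int
main(void)
{
	volatile uint64_t t_afsr;
	uint64_t offset;
	ushort_t synd;

	t_afsr = fake_ce_reg;	/* capture, as at line 499 */
	fake_ce_reg = t_afsr;	/* write back to clear, as at line 502 */

	offset = ((t_afsr & SB_UE_AFSR_OFF) >> SB_UE_DW_SHIFT) * 8;
	synd = (ushort_t)((t_afsr & SB_CE_AFSR_SYND) >> SB_CE_SYND_SHIFT);

	printf("CE: offset %llu, syndrome 0x%x\n",
	    (unsigned long long)offset, synd);
	return (0);
}
```
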
541 uint64_t t_afsr = ecc->flt_stat;
552 if (t_afsr & SB_CE_AFSR_P_PIO) {
559 cmn_err(CE_CONT, fmtstr, inst, (uint32_t)(t_afsr>>32),
560 (uint32_t)t_afsr, (uint32_t)(t_afar>>32),
563 if (t_afsr & SB_CE_AFSR_P_DRD) {
571 cmn_err(CE_CONT, fmtstr, inst, (uint32_t)(t_afsr>>32),
572 (uint32_t)t_afsr, (uint32_t)(t_afar>>32), (uint32_t)t_afar,
575 if (t_afsr & SB_CE_AFSR_P_DWR) {
582 cmn_err(CE_CONT, fmtstr, inst, (uint32_t)(t_afsr>>32),
583 (uint32_t)t_afsr, (uint32_t)(t_afar>>32), (uint32_t)t_afar,
587 if (t_afsr & SB_CE_AFSR_S_PIO) {
594 cmn_err(CE_CONT, fmtstr, inst, (uint32_t)(t_afsr>>32),
595 (uint32_t)t_afsr, (uint32_t)(t_afar>>32), (uint32_t)t_afar,
598 if (t_afsr & SB_CE_AFSR_S_DRD) {
606 cmn_err(CE_CONT, fmtstr, inst, (uint32_t)(t_afsr>>32),
607 (uint32_t)t_afsr, (uint32_t)(t_afar>>32), (uint32_t)t_afar,
610 if (t_afsr & SB_CE_AFSR_S_DWR) {
619 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
627 (uint32_t)((t_afsr & SB_CE_AFSR_SYND) >> SB_CE_SYND_SHIFT),
628 (uint32_t)((t_afsr & SB_CE_AFSR_OFF) >> SB_CE_OFFSET_SHIFT),
629 (uint32_t)((t_afsr & SB_CE_AFSR_SIZE) >> SB_CE_SIZE_SHIFT),
630 (uint32_t)((t_afsr & SB_CE_AFSR_MID) >> SB_CE_MID_SHIFT));
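
Lines 541-630 are the CE reporting routine. The recurring shape is that a per-error-class format string (fmtstr) is chosen, then both the AFSR and the AFAR are logged as high.low 32-bit pairs, and the SYND/OFF/SIZE/MID fields are decoded at the end. The sketch below shows a single branch of that pattern; the message text, bit value, and function name are illustrative assumptions, not the driver's.

```c
#include <stdint.h>
#include <stdio.h>

#define SB_CE_AFSR_P_DRD	(1ULL << 62)	/* assumed bit position */

static void
report_ce(int inst, uint64_t t_afsr, uint64_t t_afar)
{
	const char *fmtstr;

	if (t_afsr & SB_CE_AFSR_P_DRD) {
		/* pick a message for this error class... */
		fmtstr = "sbus%d: CE on DMA read, "
		    "AFSR %08x.%08x AFAR %08x.%08x\n";
		/* ...then log AFSR and AFAR as high.low 32-bit pairs */
		printf(fmtstr, inst, (uint32_t)(t_afsr >> 32),
		    (uint32_t)t_afsr, (uint32_t)(t_afar >> 32),
		    (uint32_t)t_afar);
	}
}

int
main(void)
{
	report_ce(1, SB_CE_AFSR_P_DRD, 0x000001fff0001040ULL);
	return (0);
}
```
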
636 volatile uint64_t t_afsr;
643 t_afsr = *softsp->sbus_err_reg;
648 sbus_clear_intr(softsp, (uint64_t *)&t_afsr);
659 sbus_clear_intr(softsp, (uint64_t *)&t_afsr);
663 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
668 sbus_log_error(softsp, (uint64_t *)&t_afsr,
672 sbus_clear_intr(softsp, (uint64_t *)&t_afsr);
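
Lines 636-672 are the top-level SBus error interrupt: the error register is snapshotted into the local t_afsr, and that same local is then passed by address, via a (uint64_t *)& cast that drops the volatile qualifier, to sbus_clear_intr() and sbus_log_error(). The helpers in the sketch below are stand-ins for those routines, whose real signatures are not visible in this listing.

```c
#include <stdint.h>
#include <stdio.h>

/* Simulated SBus error register (softsp->sbus_err_reg in the driver). */
static volatile uint64_t fake_sbus_err_reg = 0x0000000000000005ULL;

static void
clear_intr(uint64_t *afsr)
{
	/* The driver acks the interrupt here; we only show the call shape. */
	printf("clear_intr: afsr=%016llx\n", (unsigned long long)*afsr);
}

static void
log_error(uint64_t *afsr, int inst)
{
	printf("sbus%d: error, AFSR %08x.%08x\n", inst,
	    (uint32_t)(*afsr >> 32), (uint32_t)*afsr);
}

int
main(void)
{
	volatile uint64_t t_afsr;
	int inst = 0;

	t_afsr = fake_sbus_err_reg;	/* one snapshot of the status */

	/* The (uint64_t *)& cast mirrors the driver's volatile strip. */
	clear_intr((uint64_t *)&t_afsr);
	log_error((uint64_t *)&t_afsr, inst);
	return (0);
}
```
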
692 uint64_t t_afsr;
696 t_afsr = *pafsr;
698 if (t_afsr & SB_AFSR_P_LE) {
700 sbus_clear_intr(softsp, (uint64_t *)&t_afsr);
703 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
706 if (t_afsr & SB_AFSR_P_TO) {
713 sbus_clear_intr(softsp, (uint64_t *)&t_afsr);
718 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
721 if (t_afsr & SB_AFSR_P_BERR) {
728 sbus_clear_intr(softsp, (uint64_t *)&t_afsr);
733 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
737 if (t_afsr & SB_AFSR_S_LE) {
739 sbus_clear_intr(softsp, (uint64_t *)&t_afsr);
742 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
745 if (t_afsr & SB_AFSR_S_TO) {
748 sbus_clear_intr(softsp, (uint64_t *)&t_afsr);
753 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
756 if (t_afsr & SB_AFSR_S_BERR) {
759 sbus_clear_intr(softsp, (uint64_t *)&t_afsr);
764 inst, (uint32_t)(t_afsr>>32), (uint32_t)t_afsr,
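
Lines 692-764 test the individual SBus AFSR bits: late error (LE), timeout (TO) and bus error (BERR), each in primary and secondary form, clearing the interrupt and logging the AFSR halves per case. The sketch below expresses the same decode as a table walk; the bit positions are assumptions, and the clear-interrupt step is reduced to a comment.

```c
#include <stdint.h>
#include <stdio.h>

/* Assumed bit positions -- not the SYSIO register layout. */
#define SB_AFSR_P_LE	(1ULL << 63)	/* primary late error    */
#define SB_AFSR_P_TO	(1ULL << 62)	/* primary timeout       */
#define SB_AFSR_P_BERR	(1ULL << 61)	/* primary bus error     */
#define SB_AFSR_S_LE	(1ULL << 60)	/* secondary late error  */
#define SB_AFSR_S_TO	(1ULL << 59)	/* secondary timeout     */
#define SB_AFSR_S_BERR	(1ULL << 58)	/* secondary bus error   */

static const struct {
	uint64_t bit;
	const char *what;
} sb_errs[] = {
	{ SB_AFSR_P_LE,   "primary late error" },
	{ SB_AFSR_P_TO,   "primary timeout" },
	{ SB_AFSR_P_BERR, "primary bus error" },
	{ SB_AFSR_S_LE,   "secondary late error" },
	{ SB_AFSR_S_TO,   "secondary timeout" },
	{ SB_AFSR_S_BERR, "secondary bus error" },
};

int
main(void)
{
	uint64_t t_afsr = SB_AFSR_P_TO | SB_AFSR_S_BERR;
	int inst = 0;
	size_t i;

	for (i = 0; i < sizeof (sb_errs) / sizeof (sb_errs[0]); i++) {
		if (t_afsr & sb_errs[i].bit) {
			/* driver: sbus_clear_intr(), then cmn_err() */
			printf("sbus%d: %s, AFSR %08x.%08x\n", inst,
			    sb_errs[i].what, (uint32_t)(t_afsr >> 32),
			    (uint32_t)t_afsr);
		}
	}
	return (0);
}
```
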