Lines Matching defs:sc

252 struct arn_softc *sc = ah->ah_sc;
254 mutex_enter(&sc->sc_serial_rw);
255 ddi_put32(sc->sc_io_handle,
256 (uint32_t *)((uintptr_t)(sc->mem) + (reg_offset)), val);
257 mutex_exit(&sc->sc_serial_rw);
259 ddi_put32(sc->sc_io_handle,
260 (uint32_t *)((uintptr_t)(sc->mem) + (reg_offset)), val);
268 struct arn_softc *sc = ah->ah_sc;
270 mutex_enter(&sc->sc_serial_rw);
271 val = ddi_get32(sc->sc_io_handle,
272 (uint32_t *)((uintptr_t)(sc->mem) + (reg_offset)));
273 mutex_exit(&sc->sc_serial_rw);
275 val = ddi_get32(sc->sc_io_handle,
276 (uint32_t *)((uintptr_t)(sc->mem) + (reg_offset)));
349 struct arn_softc *sc,
365 bf->bf_daddr = sc->sc_desc_dma.cookie.dmac_address +
366 ((uintptr_t)ds - (uintptr_t)sc->sc_desc);
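Lines 365-366 compute a buffer's device-visible descriptor address: all descriptors come from one physically contiguous, cache-coherent DMA allocation (sc_desc_dma), so a descriptor's device address is the cookie's dmac_address plus its byte offset from the virtual base sc_desc. A standalone model of that translation (desc_daddr() is illustrative, not a driver function):

#include <stdint.h>

struct ath_desc;			/* layout not needed here */

uint32_t
desc_daddr(uint32_t dmac_address, const struct ath_desc *base_va,
    const struct ath_desc *ds)
{
	/* device base + byte offset of this descriptor within the block */
	return (dmac_address +
	    (uint32_t)((uintptr_t)ds - (uintptr_t)base_va));
}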
412 arn_desc_free(struct arn_softc *sc)
414 arn_buflist_cleanup(&sc->sc_txbuf_list);
415 arn_buflist_cleanup(&sc->sc_rxbuf_list);
417 arn_buflist_cleanup(&sc->sc_bcbuf_list);
421 arn_free_dma_mem(&sc->sc_desc_dma);
423 kmem_free((void *)sc->sc_vbufptr, sc->sc_vbuflen);
424 sc->sc_vbufptr = NULL;
428 arn_desc_alloc(dev_info_t *devinfo, struct arn_softc *sc)
443 DDI_DMA_RDWR | DDI_DMA_CONSISTENT, &sc->sc_desc_dma);
446 sc->sc_desc = (struct ath_desc *)sc->sc_desc_dma.mem_va;
448 ds = sc->sc_desc;
451 sc->sc_desc, sc->sc_desc_dma.alength,
452 sc->sc_desc_dma.cookie.dmac_address));
456 sc->sc_vbuflen = sizeof (struct ath_buf) * (ATH_TXBUF + ATH_RXBUF +
459 sc->sc_vbuflen = sizeof (struct ath_buf) * (ATH_TXBUF + ATH_RXBUF);
461 bf = (struct ath_buf *)kmem_zalloc(sc->sc_vbuflen, KM_SLEEP);
462 sc->sc_vbufptr = bf;
466 sc->tx_dmabuf_size =
468 min(sc->sc_cachelsz, (uint16_t)64));
470 sc->tx_dmabuf_size =
471 roundup(IEEE80211_MAX_MPDU_LEN, min(sc->sc_cachelsz, (uint16_t)64));
473 sc->rx_dmabuf_size =
474 roundup(IEEE80211_MAX_MPDU_LEN, min(sc->sc_cachelsz, (uint16_t)64));
477 err = arn_buflist_setup(devinfo, sc, &sc->sc_rxbuf_list, &bf, &ds,
478 ATH_RXBUF, DDI_DMA_READ | DDI_DMA_STREAMING, sc->rx_dmabuf_size);
480 arn_desc_free(sc);
485 err = arn_buflist_setup(devinfo, sc, &sc->sc_txbuf_list, &bf, &ds,
486 ATH_TXBUF, DDI_DMA_STREAMING, sc->tx_dmabuf_size);
488 arn_desc_free(sc);
494 err = arn_buflist_setup(devinfo, sc, &sc->sc_bcbuf_list, &bf, &ds,
497 arn_desc_free(sc);
506 arn_setcurmode(struct arn_softc *sc, enum wireless_mode mode)
511 for (i = 0; i < sizeof (sc->asc_rixmap); i++)
512 sc->asc_rixmap[i] = 0xff;
514 rt = sc->hw_rate_table[mode];
518 sc->asc_rixmap[rt->info[i].dot11rate &
521 sc->sc_currates = rt;
522 sc->sc_curmode = mode;
529 sc->sc_protrix = (mode == ATH9K_MODE_11G ? 1 : 0);
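arn_setcurmode() (lines 506-529) rebuilds asc_rixmap, the reverse map from an 802.11 rate code to its index in the current hardware rate table: every slot is first marked invalid (0xff), then each table entry records its own index under its dot11rate with the basic-rate bit stripped. A standalone sketch of that loop; the rate-table struct is abbreviated to the two fields used here, and the IEEE80211_RATE_VAL mask completes line 518, whose continuation is not in the listing.

#include <string.h>

#define IEEE80211_RATE_VAL	0x7f	/* mask off the "basic rate" flag */

struct rate_info { unsigned char dot11rate; };
struct rate_table { int rate_cnt; struct rate_info info[64]; };

void
build_rixmap(unsigned char rixmap[256], const struct rate_table *rt)
{
	int i;

	memset(rixmap, 0xff, sizeof (unsigned char) * 256);	/* all invalid */
	for (i = 0; i < rt->rate_cnt; i++)
		rixmap[rt->info[i].dot11rate & IEEE80211_RATE_VAL] =
		    (unsigned char)i;
}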
558 arn_update_txpow(struct arn_softc *sc)
560 struct ath_hal *ah = sc->sc_ah;
563 if (sc->sc_curtxpow != sc->sc_config.txpowlimit) {
564 (void) ath9k_hw_set_txpowerlimit(ah, sc->sc_config.txpowlimit);
567 sc->sc_curtxpow = (uint32_t)txpow;
610 arn_setup_rates(struct arn_softc *sc, uint32_t mode)
615 ieee80211com_t *ic = (ieee80211com_t *)sc;
617 /* rate_table = arn_get_ratetable(sc, mode); */
620 rate_table = sc->hw_rate_table[ATH9K_MODE_11A];
623 rate_table = sc->hw_rate_table[ATH9K_MODE_11B];
626 rate_table = sc->hw_rate_table[ATH9K_MODE_11G];
630 rate_table = sc->hw_rate_table[ATH9K_MODE_11NA_HT20];
633 rate_table = sc->hw_rate_table[ATH9K_MODE_11NG_HT20];
636 rate_table = sc->hw_rate_table[ATH9K_MODE_11NA_HT40PLUS];
639 rate_table = sc->hw_rate_table[ATH9K_MODE_11NA_HT40MINUS];
642 rate_table = sc->hw_rate_table[ATH9K_MODE_11NG_HT40PLUS];
645 rate_table = sc->hw_rate_table[ATH9K_MODE_11NG_HT40MINUS];
676 arn_setup_channels(struct arn_softc *sc)
678 struct ath_hal *ah = sc->sc_ah;
679 ieee80211com_t *ic = (ieee80211com_t *)sc;
743 sc->sc_have11g = 1;
801 arn_chan_change(struct arn_softc *sc, struct ieee80211_channel *chan)
803 struct ieee80211com *ic = &sc->sc_isc;
825 if (wlmode != sc->sc_curmode)
826 arn_setcurmode(sc, wlmode);
836 arn_set_channel(struct arn_softc *sc, struct ath9k_channel *hchan)
838 struct ath_hal *ah = sc->sc_ah;
839 ieee80211com_t *ic = &sc->sc_isc;
845 if (sc->sc_flags & SC_OP_INVALID)
848 if (hchan->channel != sc->sc_ah->ah_curchan->channel ||
849 hchan->channelFlags != sc->sc_ah->ah_curchan->channelFlags ||
850 (sc->sc_flags & SC_OP_CHAINMASK_UPDATE) ||
851 (sc->sc_flags & SC_OP_FULL_RESET)) {
864 arn_draintxq(sc, B_FALSE); /* clear pending tx frames */
865 stopped = arn_stoprecv(sc); /* turn off frame recv */
873 if (!stopped || (sc->sc_flags & SC_OP_FULL_RESET))
878 sc->sc_ah->ah_curchan->channel,
879 hchan->channel, hchan->channelFlags, sc->tx_chan_width));
881 if (!ath9k_hw_reset(ah, hchan, sc->tx_chan_width,
882 sc->sc_tx_chainmask, sc->sc_rx_chainmask,
883 sc->sc_ht_extprotspacing, fastcc, &status)) {
893 sc->sc_curchan = *hchan;
895 sc->sc_flags &= ~SC_OP_CHAINMASK_UPDATE;
896 sc->sc_flags &= ~SC_OP_FULL_RESET;
898 if (arn_startrecv(sc) != 0) {
913 if (curmode != sc->sc_curmode)
914 arn_setcurmode(sc, arn_chan2mode(hchan));
916 arn_update_txpow(sc);
918 (void) ath9k_hw_set_interrupts(ah, sc->sc_imask);
935 struct arn_softc *sc = (struct arn_softc *)ic;
936 struct ath_hal *ah = sc->sc_ah;
951 if ((timestamp - sc->sc_ani.sc_longcal_timer) >= ATH_LONG_CALINTERVAL) {
955 sc->sc_ani.sc_longcal_timer = timestamp;
959 if (!sc->sc_ani.sc_caldone) {
960 if ((timestamp - sc->sc_ani.sc_shortcal_timer) >=
966 sc->sc_ani.sc_shortcal_timer = timestamp;
967 sc->sc_ani.sc_resetcal_timer = timestamp;
970 if ((timestamp - sc->sc_ani.sc_resetcal_timer) >=
973 &sc->sc_ani.sc_caldone);
974 if (sc->sc_ani.sc_caldone)
975 sc->sc_ani.sc_resetcal_timer = timestamp;
980 if ((timestamp - sc->sc_ani.sc_checkani_timer) >=
983 sc->sc_ani.sc_checkani_timer = timestamp;
990 ath9k_hw_ani_monitor(ah, &sc->sc_halstats,
998 sc->sc_rx_chainmask, longcal, &iscaldone)) {
1000 sc->sc_ani.sc_noise_floor =
1009 sc->sc_ani.sc_noise_floor));
1017 sc->sc_ani.sc_caldone = iscaldone;
1028 if (sc->sc_ah->ah_config.enable_ani)
1032 if (!sc->sc_ani.sc_caldone)
1036 sc->sc_scan_timer = 0;
1037 sc->sc_scan_timer = timeout(arn_ani_calibrate, (void *)sc,
1042 arn_stop_caltimer(struct arn_softc *sc)
1046 while ((sc->sc_cal_timer != 0) && (tmp_id != sc->sc_cal_timer)) {
1047 tmp_id = sc->sc_cal_timer;
1050 sc->sc_cal_timer = 0;
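arn_stop_caltimer() (lines 1042-1050) cancels a self-rearming timeout(9F) callback: because the callout rearms itself, the stored id can change between the test and the cancel, so the loop keeps calling untimeout() until the id it cancelled is still the one recorded, then clears it. A sketch of the helper; the untimeout() call inside the loop is assumed, since the listing omits the lines between 1047 and 1050.

static void
arn_stop_caltimer(struct arn_softc *sc)
{
	timeout_id_t tmp_id = 0;

	while ((sc->sc_cal_timer != 0) && (tmp_id != sc->sc_cal_timer)) {
		tmp_id = sc->sc_cal_timer;
		(void) untimeout(tmp_id);	/* assumed loop body */
	}
	sc->sc_cal_timer = 0;
}

The scan-timer variant at lines 1377-1385 is identical apart from the field it clears.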
1057 struct arn_softc *sc = (struct arn_softc *)arg;
1058 struct ath_hal *ah = sc->sc_ah;
1060 ieee80211com_t *ic = (ieee80211com_t *)sc;
1062 ARN_LOCK(sc);
1064 if (sc->sc_flags & SC_OP_INVALID) {
1070 ARN_UNLOCK(sc);
1074 ARN_UNLOCK(sc);
1086 status &= sc->sc_imask; /* discard unasked-for bits */
1093 ARN_UNLOCK(sc);
1097 sc->sc_intrstatus = status;
1118 sc->sc_rxlink = NULL;
1130 sc->sc_rx_pend = 1;
1131 ddi_trigger_softintr(sc->sc_softint_id);
1136 if (ddi_taskq_dispatch(sc->sc_tq,
1137 arn_tx_int_proc, sc, DDI_NOSLEEP) !=
1156 ath9k_hw_procmibevent(ah, &sc->sc_halstats);
1157 (void) ath9k_hw_set_interrupts(ah, sc->sc_imask);
1185 if (ddi_taskq_dispatch(sc->sc_tq, arn_bmiss_proc,
1186 sc, DDI_NOSLEEP) != DDI_SUCCESS) {
1196 ARN_UNLOCK(sc);
1219 ARN_UNLOCK(sc);
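The interrupt-handler fragments around lines 1118-1186 show how arn_isr() defers its work: receive completions set sc_rx_pend and trigger the soft interrupt, while transmit completions and beacon-miss handling are pushed onto the driver taskq with DDI_NOSLEEP. A condensed sketch of those two dispatch points; the ATH9K_INT_* tests and the failure comment are assumptions, as the listing shows only the dispatch calls.

	if (status & ATH9K_INT_RX) {
		sc->sc_rx_pend = 1;
		ddi_trigger_softintr(sc->sc_softint_id);
	}

	if (status & ATH9K_INT_TX) {
		if (ddi_taskq_dispatch(sc->sc_tq, arn_tx_int_proc, sc,
		    DDI_NOSLEEP) != DDI_SUCCESS) {
			/* dispatch failed (taskq busy); error handling
			 * omitted in this sketch */
		}
	}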
1224 arn_get_channel(struct arn_softc *sc, struct ieee80211_channel *chan)
1228 for (i = 0; i < sc->sc_ah->ah_nchan; i++) {
1229 if (sc->sc_ah->ah_channels[i].channel == chan->ich_freq)
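arn_get_channel() (lines 1224-1229) resolves an ieee80211_channel to the HAL's channel index by a linear scan over ah_channels, matching on frequency. Reconstructed as a sketch; the return statements are inferred from the callers, which treat a negative result as "not found".

static int
arn_get_channel(struct arn_softc *sc, struct ieee80211_channel *chan)
{
	int i;

	for (i = 0; i < sc->sc_ah->ah_nchan; i++) {
		if (sc->sc_ah->ah_channels[i].channel == chan->ich_freq)
			return (i);
	}

	return (-1);
}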
1239 struct arn_softc *sc = (struct arn_softc *)ic;
1240 struct ath_hal *ah = sc->sc_ah;
1245 arn_draintxq(sc, 0);
1246 (void) arn_stoprecv(sc);
1248 if (!ath9k_hw_reset(ah, sc->sc_ah->ah_curchan, sc->tx_chan_width,
1249 sc->sc_tx_chainmask, sc->sc_rx_chainmask,
1250 sc->sc_ht_extprotspacing, B_FALSE, &status)) {
1256 if (arn_startrecv(sc) != 0)
1265 arn_setcurmode(sc, arn_chan2mode(sc->sc_ah->ah_curchan));
1267 arn_update_txpow(sc);
1269 if (sc->sc_flags & SC_OP_BEACONS)
1270 arn_beacon_config(sc); /* restart beacons */
1272 (void) ath9k_hw_set_interrupts(ah, sc->sc_imask);
1278 arn_get_hal_qnum(uint16_t queue, struct arn_softc *sc)
1284 qnum = sc->sc_haltype2q[ATH9K_WME_AC_VO];
1287 qnum = sc->sc_haltype2q[ATH9K_WME_AC_VI];
1290 qnum = sc->sc_haltype2q[ATH9K_WME_AC_BE];
1293 qnum = sc->sc_haltype2q[ATH9K_WME_AC_BK];
1296 qnum = sc->sc_haltype2q[ATH9K_WME_AC_BE];
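arn_get_hal_qnum() (lines 1278-1296) maps a WME access category onto the hardware queue number recorded in sc_haltype2q[] at attach time, falling back to best-effort for anything unrecognized. A sketch of the switch; the WME_AC_* case labels are the standard net80211 values and are not visible in the listing.

static int
arn_get_hal_qnum(uint16_t queue, struct arn_softc *sc)
{
	int qnum;

	switch (queue) {
	case WME_AC_VO:
		qnum = sc->sc_haltype2q[ATH9K_WME_AC_VO];
		break;
	case WME_AC_VI:
		qnum = sc->sc_haltype2q[ATH9K_WME_AC_VI];
		break;
	case WME_AC_BE:
		qnum = sc->sc_haltype2q[ATH9K_WME_AC_BE];
		break;
	case WME_AC_BK:
		qnum = sc->sc_haltype2q[ATH9K_WME_AC_BK];
		break;
	default:
		qnum = sc->sc_haltype2q[ATH9K_WME_AC_BE];
		break;
	}

	return (qnum);
}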
1366 struct arn_softc *sc = (struct arn_softc *)ic;
1368 sc->sc_scan_timer = 0;
1370 sc->sc_scan_timer = timeout(arn_next_scan, (void *)sc,
1377 arn_stop_scantimer(struct arn_softc *sc)
1381 while ((sc->sc_scan_timer != 0) && (tmp_id != sc->sc_scan_timer)) {
1382 tmp_id = sc->sc_scan_timer;
1385 sc->sc_scan_timer = 0;
1391 struct arn_softc *sc = (struct arn_softc *)ic;
1392 struct ath_hal *ah = sc->sc_ah;
1403 if (sc->sc_flags & SC_OP_INVALID)
1410 ARN_LOCK(sc);
1413 arn_stop_scantimer(sc);
1415 arn_stop_caltimer(sc);
1420 sc->sc_imask &= ~(ATH9K_INT_SWBA | ATH9K_INT_BMISS);
1425 (ah, sc->sc_imask &~ ATH9K_INT_GLOBAL);
1429 (void) ath9k_hw_stoptxdma(ah, sc->sc_beaconq);
1430 arn_beacon_return(sc);
1433 ARN_UNLOCK(sc);
1439 pos = arn_get_channel(sc, ic->ic_curchan);
1445 ARN_UNLOCK(sc);
1450 arn_update_chainmask(sc);
1451 sc->tx_chan_width = ATH9K_HT_MACMODE_2040;
1453 sc->tx_chan_width = ATH9K_HT_MACMODE_20;
1455 sc->sc_ah->ah_channels[pos].chanmode =
1457 channel = &sc->sc_ah->ah_channels[pos];
1460 ARN_UNLOCK(sc);
1463 error = arn_set_channel(sc, channel);
1466 ARN_UNLOCK(sc);
1476 rfilt = arn_calcrxfilter(sc);
1507 (void) ath9k_hw_stoptxdma(ah, sc->sc_beaconq);
1508 arn_beacon_return(sc);
1509 error = arn_beacon_alloc(sc, in);
1511 ARN_UNLOCK(sc);
1522 sc->sc_bsync = 1;
1524 arn_beacon_config(sc);
1536 sc->sc_bsync = 1;
1539 arn_beacon_config(sc);
1541 sc->sc_halstats.ns_avgbrssi =
1543 sc->sc_halstats.ns_avgrssi =
1545 sc->sc_halstats.ns_avgtxrssi =
1547 sc->sc_halstats.ns_avgtxrate =
1558 sc->sc_imask &= ~(ATH9K_INT_SWBA | ATH9K_INT_BMISS);
1559 (void) ath9k_hw_set_interrupts(ah, sc->sc_imask);
1565 arn_rate_ctl_reset(sc, nstate);
1567 ARN_UNLOCK(sc);
1572 error = sc->sc_newstate(ic, nstate, arg);
1579 ASSERT(sc->sc_cal_timer == 0);
1580 sc->sc_cal_timer = timeout(arn_ani_calibrate, (void *)sc,
1584 /* ASSERT(sc->sc_scan_timer == 0); */
1585 if (sc->sc_scan_timer != 0) {
1586 (void) untimeout(sc->sc_scan_timer);
1587 sc->sc_scan_timer = 0;
1589 sc->sc_scan_timer = timeout(arn_next_scan, (void *)sc,
1600 struct arn_softc *sc = arg;
1601 ieee80211com_t *ic = &sc->sc_isc;
1604 ARN_LOCK(sc);
1606 if (sc->sc_flags & SC_OP_INVALID) {
1607 ARN_UNLOCK(sc);
1618 sc->sc_stats.ast_rate_calls ++;
1623 arn_rate_ctl, sc);
1635 ARN_UNLOCK(sc);
1643 ARN_UNLOCK(sc);
1656 struct arn_softc *sc = (struct arn_softc *)ic;
1663 arn_rate_update(sc, &an->an_node, 0);
1667 if (sc->sc_flags & SC_OP_TXAGGR) {
1668 arn_tx_node_init(sc, an);
1681 struct arn_softc *sc = (struct arn_softc *)ic;
1687 if (sc->sc_flags & SC_OP_TXAGGR)
1688 arn_tx_node_cleanup(sc, in);
1692 if (ARN_TXQ_SETUP(sc, i)) {
1693 txq = &sc->sc_txq[i];
1725 arn_key_alloc_pair(struct arn_softc *sc, ieee80211_keyix *txkeyix,
1730 ASSERT(!sc->sc_splitmic);
1731 for (i = 0; i < ARRAY_SIZE(sc->sc_keymap)/4; i++) {
1732 uint8_t b = sc->sc_keymap[i];
1737 if ((b & 1) || is_set(keyix+64, sc->sc_keymap)) {
1741 set_bit(keyix, sc->sc_keymap);
1742 set_bit(keyix+64, sc->sc_keymap);
1761 arn_key_alloc_2pair(struct arn_softc *sc, ieee80211_keyix *txkeyix,
1766 ASSERT(sc->sc_splitmic);
1767 for (i = 0; i < ARRAY_SIZE(sc->sc_keymap)/4; i++) {
1768 uint8_t b = sc->sc_keymap[i];
1780 if (is_set(keyix+32, sc->sc_keymap) ||
1781 is_set(keyix+64, sc->sc_keymap) ||
1782 is_set(keyix+32+64, sc->sc_keymap)) {
1790 set_bit(keyix, sc->sc_keymap);
1791 set_bit(keyix+64, sc->sc_keymap);
1792 set_bit(keyix+32, sc->sc_keymap);
1793 set_bit(keyix+32+64, sc->sc_keymap);
1811 arn_key_alloc_single(struct arn_softc *sc, ieee80211_keyix *txkeyix,
1817 for (i = 0; i < ARRAY_SIZE(sc->sc_keymap); i++) {
1818 uint8_t b = sc->sc_keymap[i];
1827 set_bit(keyix, sc->sc_keymap);
1851 struct arn_softc *sc = (struct arn_softc *)ic;
1862 return (arn_key_alloc_single(sc, keyix, rxkeyix));
1865 if (sc->sc_splitmic)
1866 return (arn_key_alloc_2pair(sc, keyix, rxkeyix));
1868 return (arn_key_alloc_pair(sc, keyix, rxkeyix));
1870 return (arn_key_alloc_single(sc, keyix, rxkeyix));
1880 struct arn_softc *sc = (struct arn_softc *)ic;
1881 struct ath_hal *ah = sc->sc_ah;
1893 (k->wk_flags & IEEE80211_KEY_SWMIC) == 0 && sc->sc_splitmic)
1901 clr_bit(keyix, sc->sc_keymap);
1908 clr_bit(keyix+64, sc->sc_keymap);
1909 if (sc->sc_splitmic) {
1911 clr_bit(keyix+32, sc->sc_keymap);
1913 clr_bit(keyix+32+64, sc->sc_keymap);
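The key-cache code (lines 1725-1913) manages hardware key slots through the sc_keymap bitmap: a pair allocation reserves slot k plus its MIC slot k+64, and on split-MIC hardware (sc_splitmic) a TKIP key claims four slots, k, k+32, k+64 and k+32+64; deletion clears the same bits. The set_bit()/clr_bit()/is_set() helpers are presumably plain byte-array bit operations along these lines (illustrative, not the driver's own definitions):

#include <stdint.h>

static void
set_bit(int i, uint8_t *map)
{
	map[i >> 3] |= (uint8_t)(1 << (i & 7));
}

static void
clr_bit(int i, uint8_t *map)
{
	map[i >> 3] &= (uint8_t)~(1 << (i & 7));
}

static int
is_set(int i, const uint8_t *map)
{
	return ((map[i >> 3] >> (i & 7)) & 1);
}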
1926 arn_keyset_tkip(struct arn_softc *sc, const struct ieee80211_key *k,
1932 struct ath_hal *ah = sc->sc_ah;
1943 if (!sc->sc_splitmic) {
1977 struct arn_softc *sc = (struct arn_softc *)ic;
2009 return (arn_keyset_tkip(sc, k, &hk, mac));
2011 return (ath9k_hw_set_keycache_entry(sc->sc_ah,
2031 arn_open(struct arn_softc *sc)
2033 ieee80211com_t *ic = (ieee80211com_t *)sc;
2038 ARN_LOCK_ASSERT(sc);
2040 pos = arn_get_channel(sc, curchan);
2048 sc->tx_chan_width = ATH9K_HT_MACMODE_20;
2050 if (sc->sc_curmode == ATH9K_MODE_11A) {
2051 sc->sc_ah->ah_channels[pos].chanmode = CHANNEL_A;
2053 sc->sc_ah->ah_channels[pos].chanmode = CHANNEL_G;
2056 init_channel = &sc->sc_ah->ah_channels[pos];
2059 ath9k_hw_configpcipowersave(sc->sc_ah, 0);
2068 if (!ath9k_hw_reset(sc->sc_ah, init_channel,
2069 sc->tx_chan_width, sc->sc_tx_chainmask,
2070 sc->sc_rx_chainmask, sc->sc_ht_extprotspacing,
2085 arn_update_txpow(sc);
2094 if (arn_startrecv(sc) != 0) {
2102 sc->sc_imask = ATH9K_INT_RX | ATH9K_INT_TX |
2106 if (sc->sc_ah->ah_caps.hw_caps & ATH9K_HW_CAP_GTT)
2107 sc->sc_imask |= ATH9K_INT_GTT;
2111 if (sc->sc_ah->ah_caps.hw_caps & ATH9K_HW_CAP_HT)
2112 sc->sc_imask |= ATH9K_INT_CST;
2120 if (ath9k_hw_phycounters(sc->sc_ah) &&
2121 ((sc->sc_ah->ah_opmode == ATH9K_M_STA) ||
2122 (sc->sc_ah->ah_opmode == ATH9K_M_IBSS)))
2123 sc->sc_imask |= ATH9K_INT_MIB;
2132 if ((sc->sc_ah->ah_caps.hw_caps & ATH9K_HW_CAP_ENHANCEDPM) &&
2133 (sc->sc_ah->ah_opmode == ATH9K_M_STA) &&
2134 !sc->sc_config.swBeaconProcess)
2135 sc->sc_imask |= ATH9K_INT_TIM;
2137 if (arn_chan2mode(init_channel) != sc->sc_curmode)
2138 arn_setcurmode(sc, arn_chan2mode(init_channel));
2141 __func__, sc->sc_curmode));
2143 sc->sc_isrunning = 1;
2146 sc->sc_imask &= ~(ATH9K_INT_SWBA | ATH9K_INT_BMISS);
2147 (void) ath9k_hw_set_interrupts(sc->sc_ah, sc->sc_imask);
2156 arn_close(struct arn_softc *sc)
2158 ieee80211com_t *ic = (ieee80211com_t *)sc;
2159 struct ath_hal *ah = sc->sc_ah;
2161 ARN_LOCK_ASSERT(sc);
2163 if (!sc->sc_isrunning)
2171 ARN_UNLOCK(sc);
2174 ARN_LOCK(sc);
2182 if (!(sc->sc_flags & SC_OP_INVALID)) {
2183 arn_draintxq(sc, 0);
2184 (void) arn_stoprecv(sc);
2187 sc->sc_rxlink = NULL;
2190 sc->sc_isrunning = 0;
2199 struct arn_softc *sc = arg;
2200 ieee80211com_t *ic = (ieee80211com_t *)sc;
2204 ARN_LOCK(sc);
2213 *val = sc->sc_stats.ast_tx_nobuf +
2214 sc->sc_stats.ast_tx_nobufmgt;
2217 *val = sc->sc_stats.ast_rx_tooshort;
2233 *val = sc->sc_stats.ast_tx_fifoerr +
2234 sc->sc_stats.ast_tx_xretries +
2235 sc->sc_stats.ast_tx_discard;
2238 *val = sc->sc_stats.ast_tx_xretries;
2241 *val = sc->sc_stats.ast_rx_crcerr;
2244 *val = sc->sc_stats.ast_rx_badcrypt;
2254 ARN_UNLOCK(sc);
2257 ARN_UNLOCK(sc);
2260 ARN_UNLOCK(sc);
2268 struct arn_softc *sc = arg;
2271 ARN_LOCK(sc);
2278 arn_close(sc);
2280 if ((err = arn_open(sc)) != 0) {
2281 ARN_UNLOCK(sc);
2286 sc->sc_flags &= ~SC_OP_INVALID;
2288 ARN_UNLOCK(sc);
2296 struct arn_softc *sc = arg;
2298 ARN_LOCK(sc);
2299 arn_close(sc);
2302 (void) ath9k_hw_disable(sc->sc_ah);
2303 ath9k_hw_configpcipowersave(sc->sc_ah, 1);
2306 sc->sc_flags |= SC_OP_INVALID;
2307 ARN_UNLOCK(sc);
2313 struct arn_softc *sc = arg;
2314 struct ath_hal *ah = sc->sc_ah;
2317 ARN_LOCK(sc);
2324 sc->sc_promisc = on;
2327 ARN_UNLOCK(sc);
2335 struct arn_softc *sc = arg;
2336 struct ath_hal *ah = sc->sc_ah;
2339 uint32_t *mfilt = sc->sc_mcast_hash;
2341 ARN_LOCK(sc);
2353 sc->sc_mcast_refs[pos]++;
2356 if (--sc->sc_mcast_refs[pos] == 0)
2361 ARN_UNLOCK(sc);
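arn_m_multicst() (lines 2335-2361) keeps a 64-bit multicast hash filter with per-bucket reference counts: joining a group increments sc_mcast_refs[pos] and sets the corresponding bit in sc_mcast_hash, leaving a group clears the bit only when the count drops to zero, and the two 32-bit filter words are then handed to the HAL. A sketch of the update around lines 2353-2356; the hash position pos (0..63), the bit layout, and the HAL call are assumptions, not in the listing.

	if (add) {
		sc->sc_mcast_refs[pos]++;
		mfilt[pos / 32] |= (1U << (pos % 32));
	} else {
		if (--sc->sc_mcast_refs[pos] == 0)
			mfilt[pos / 32] &= ~(1U << (pos % 32));
	}
	/* Presumably followed by ath9k_hw_setmcastfilter(ah, mfilt[0],
	 * mfilt[1]) to program the hardware filter. */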
2368 struct arn_softc *sc = arg;
2369 struct ath_hal *ah = sc->sc_ah;
2370 ieee80211com_t *ic = (ieee80211com_t *)sc;
2377 ARN_LOCK(sc);
2378 IEEE80211_ADDR_COPY(sc->sc_isc.ic_macaddr, macaddr);
2379 (void) ath9k_hw_setmac(ah, sc->sc_isc.ic_macaddr);
2381 ARN_UNLOCK(sc);
2388 struct arn_softc *sc = arg;
2391 ieee80211com_t *ic = (ieee80211com_t *)sc;
2401 sc->sc_stats.ast_tx_discard++;
2428 struct arn_softc *sc = arg;
2431 err = ieee80211_ioctl(&sc->sc_isc, wq, mp);
2433 ARN_LOCK(sc);
2435 if (!(sc->sc_flags & SC_OP_INVALID)) {
2436 ARN_UNLOCK(sc);
2438 (void) arn_m_start(sc);
2440 (void) ieee80211_new_state(&sc->sc_isc,
2442 ARN_LOCK(sc);
2445 ARN_UNLOCK(sc);
2452 struct arn_softc *sc = arg;
2455 err = ieee80211_setprop(&sc->sc_isc, pr_name, wldp_pr_num,
2458 ARN_LOCK(sc);
2461 if (!(sc->sc_flags & SC_OP_INVALID)) {
2462 ARN_UNLOCK(sc);
2463 (void) arn_m_start(sc);
2464 (void) ieee80211_new_state(&sc->sc_isc,
2466 ARN_LOCK(sc);
2471 ARN_UNLOCK(sc);
2481 struct arn_softc *sc = arg;
2484 err = ieee80211_getprop(&sc->sc_isc, pr_name, wldp_pr_num,
2494 struct arn_softc *sc = arg;
2496 ieee80211_propinfo(&sc->sc_isc, pr_name, wldp_pr_num, prh);
2501 arn_pci_config_cachesize(struct arn_softc *sc)
2509 csz = pci_config_get8(sc->sc_cfg_handle, PCI_CONF_CACHE_LINESZ);
2517 pci_config_put8(sc->sc_cfg_handle, PCI_CONF_CACHE_LINESZ,
2520 sc->sc_cachelsz = csz << 2;
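arn_pci_config_cachesize() (lines 2501-2520) reads the PCI cache line size register (a count of 32-bit words), programs a default if firmware left it zero, and stores the byte value in sc_cachelsz (csz << 2 converts DWORDs to bytes), which later sizes the DMA buffers at lines 466-474. A sketch under those assumptions; the fallback of 16 DWORDs (64 bytes) is assumed, not shown in the listing.

static void
arn_pci_config_cachesize(struct arn_softc *sc)
{
	uint8_t csz;

	csz = pci_config_get8(sc->sc_cfg_handle, PCI_CONF_CACHE_LINESZ);
	if (csz == 0) {
		csz = 16;	/* assumed default: 16 DWORDs = 64 bytes */
		pci_config_put8(sc->sc_cfg_handle, PCI_CONF_CACHE_LINESZ,
		    csz);
	}
	sc->sc_cachelsz = csz << 2;	/* DWORDs -> bytes */
}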
2524 arn_pci_setup(struct arn_softc *sc)
2531 ASSERT(sc != NULL);
2532 command = pci_config_get16(sc->sc_cfg_handle, PCI_CONF_COMM);
2534 pci_config_put16(sc->sc_cfg_handle, PCI_CONF_COMM, command);
2535 command = pci_config_get16(sc->sc_cfg_handle, PCI_CONF_COMM);
2553 arn_get_hw_encap(struct arn_softc *sc)
2558 ic = (ieee80211com_t *)sc;
2559 ah = sc->sc_ah;
2579 arn_setup_ht_cap(struct arn_softc *sc)
2586 arn_ht_conf *ht_info = &sc->sc_ht_conf;
2600 rx_streams = ISP2(sc->sc_ah->ah_caps.rx_chainmask) ? 1 : 2;
2609 arn_overwrite_11n_rateset(struct arn_softc *sc)
2611 uint8_t *ht_rs = sc->sc_ht_conf.rx_mcs_mask;
2648 arn_tx_queue_update(struct arn_softc *sc, int ac)
2652 ieee80211com_t *ic = (ieee80211com_t *)sc;
2655 struct ath_hal *ah = sc->sc_ah;
2658 txq = &sc->sc_txq[arn_get_hal_qnum(ac, sc)];
2722 struct arn_softc *sc = (struct arn_softc *)ic;
2725 return (!arn_tx_queue_update(sc, WME_AC_BE) ||
2726 !arn_tx_queue_update(sc, WME_AC_BK) ||
2727 !arn_tx_queue_update(sc, WME_AC_VI) ||
2728 !arn_tx_queue_update(sc, WME_AC_VO) ? EIO : 0);
2737 arn_update_chainmask(struct arn_softc *sc)
2740 sc->sc_flags |= SC_OP_CHAINMASK_UPDATE;
2742 is_ht = sc->sc_ht_conf.ht_supported;
2744 sc->sc_tx_chainmask = sc->sc_ah->ah_caps.tx_chainmask;
2745 sc->sc_rx_chainmask = sc->sc_ah->ah_caps.rx_chainmask;
2747 sc->sc_tx_chainmask = 1;
2748 sc->sc_rx_chainmask = 1;
2753 sc->sc_tx_chainmask, sc->sc_rx_chainmask));
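arn_update_chainmask() (lines 2737-2753) picks the TX/RX chain masks: with HT support they come straight from the HAL capabilities, otherwise both are pinned to a single chain, and SC_OP_CHAINMASK_UPDATE is raised so the next channel set (lines 848-851) takes the full-reset path. Condensed from the listed lines; only the debug print and any additional bookkeeping are omitted, as a sketch.

static void
arn_update_chainmask(struct arn_softc *sc)
{
	boolean_t is_ht = sc->sc_ht_conf.ht_supported;

	sc->sc_flags |= SC_OP_CHAINMASK_UPDATE;

	if (is_ht) {
		sc->sc_tx_chainmask = sc->sc_ah->ah_caps.tx_chainmask;
		sc->sc_rx_chainmask = sc->sc_ah->ah_caps.rx_chainmask;
	} else {
		sc->sc_tx_chainmask = 1;
		sc->sc_rx_chainmask = 1;
	}
}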
2759 struct arn_softc *sc;
2762 sc = ddi_get_soft_state(arn_soft_state_p, ddi_get_instance(devinfo));
2763 if (sc == NULL) {
2769 ARN_LOCK(sc);
2774 if (arn_pci_setup(sc) != 0) {
2777 ARN_UNLOCK(sc);
2781 if (!(sc->sc_flags & SC_OP_INVALID))
2782 ret = arn_open(sc);
2783 ARN_UNLOCK(sc);
2791 struct arn_softc *sc;
2821 sc = ddi_get_soft_state(arn_soft_state_p, ddi_get_instance(devinfo));
2822 ic = (ieee80211com_t *)sc;
2823 sc->sc_dev = devinfo;
2825 mutex_init(&sc->sc_genlock, NULL, MUTEX_DRIVER, NULL);
2826 mutex_init(&sc->sc_serial_rw, NULL, MUTEX_DRIVER, NULL);
2827 mutex_init(&sc->sc_txbuflock, NULL, MUTEX_DRIVER, NULL);
2828 mutex_init(&sc->sc_rxbuflock, NULL, MUTEX_DRIVER, NULL);
2829 mutex_init(&sc->sc_resched_lock, NULL, MUTEX_DRIVER, NULL);
2831 mutex_init(&sc->sc_bcbuflock, NULL, MUTEX_DRIVER, NULL);
2834 sc->sc_flags |= SC_OP_INVALID;
2836 err = pci_config_setup(devinfo, &sc->sc_cfg_handle);
2843 if (arn_pci_setup(sc) != 0)
2847 arn_pci_config_cachesize(sc);
2849 vendor_id = pci_config_get16(sc->sc_cfg_handle, PCI_CONF_VENID);
2850 device_id = pci_config_get16(sc->sc_cfg_handle, PCI_CONF_DEVID);
2854 pci_config_get8(sc->sc_cfg_handle, PCI_CONF_CACHE_LINESZ)));
2856 pci_config_put8(sc->sc_cfg_handle, PCI_CONF_LATENCY_TIMER, 0xa8);
2857 val = pci_config_get32(sc->sc_cfg_handle, 0x40);
2859 pci_config_put32(sc->sc_cfg_handle, 0x40, val & 0xffff00ff);
2862 &sc->mem, 0, 0, &arn_reg_accattr, &sc->sc_io_handle);
2864 "regs map1 = %x err=%d\n", sc->mem, err));
2871 ah = ath9k_hw_attach(device_id, sc, sc->mem, &status);
2878 sc->sc_ah = ah;
2883 sc->sc_keymax = ah->ah_caps.keycache_size;
2884 if (sc->sc_keymax > ATH_KEYMAX) {
2887 ATH_KEYMAX, sc->sc_keymax));
2888 sc->sc_keymax = ATH_KEYMAX;
2895 for (i = 0; i < sc->sc_keymax; i++)
2904 set_bit(i, sc->sc_keymap);
2905 set_bit(i + 32, sc->sc_keymap);
2906 set_bit(i + 64, sc->sc_keymap);
2907 set_bit(i + 32 + 64, sc->sc_keymap);
2911 err = arn_setup_channels(sc);
2919 sc->sc_ah->ah_opmode = ATH9K_M_STA;
2922 arn_rate_attach(sc);
2923 arn_setup_rates(sc, IEEE80211_MODE_11A);
2924 arn_setup_rates(sc, IEEE80211_MODE_11B);
2925 arn_setup_rates(sc, IEEE80211_MODE_11G);
2928 arn_setcurmode(sc, ATH9K_MODE_11G);
2931 if (sc->sc_have11g)
2936 sc->sc_mrretry = 1;
2937 sc->sc_config.ath_aggr_prot = 0;
2940 err = arn_desc_alloc(devinfo, sc);
2947 if ((sc->sc_tq = ddi_taskq_create(devinfo, "ath_taskq", 1,
2961 sc->sc_beaconq = arn_beaconq_setup(ah);
2962 if (sc->sc_beaconq == (-1)) {
2969 sc->sc_cabq = arn_txq_setup(sc, ATH9K_TX_QUEUE_CAB, 0);
2970 if (sc->sc_cabq == NULL) {
2976 sc->sc_config.cabqReadytime = ATH_CABQ_READY_TIME;
2977 ath_cabq_update(sc);
2980 for (i = 0; i < ARRAY_SIZE(sc->sc_haltype2q); i++)
2981 sc->sc_haltype2q[i] = -1;
2985 if (!arn_tx_setup(sc, ATH9K_WME_AC_BK)) {
2990 if (!arn_tx_setup(sc, ATH9K_WME_AC_BE)) {
2995 if (!arn_tx_setup(sc, ATH9K_WME_AC_VI)) {
3000 if (!arn_tx_setup(sc, ATH9K_WME_AC_VO)) {
3011 sc->sc_ani.sc_noise_floor = ATH_DEFAULT_NOISE_FLOOR;
3022 (void) ath9k_hw_setcapability(sc->sc_ah, ATH9K_CAP_TKIP_MIC,
3027 arn_get_hw_encap(sc);
3041 sc->sc_splitmic = 1;
3048 sc->sc_config.txpowlimit = ATH_TXPOWER_MAX;
3049 sc->sc_config.txpowlimit_override = 0;
3053 sc->sc_flags |= SC_OP_TXAGGR;
3054 sc->sc_flags |= SC_OP_RXAGGR;
3055 arn_setup_ht_cap(sc);
3056 arn_overwrite_11n_rateset(sc);
3059 sc->sc_tx_chainmask = 1;
3060 sc->sc_rx_chainmask = 1;
3063 sc->sc_tx_chainmask, sc->sc_rx_chainmask));
3065 /* arn_update_chainmask(sc); */
3068 sc->sc_defant = ath9k_hw_getdefantenna(ah);
3070 ath9k_hw_getmac(ah, sc->sc_myaddr);
3072 ath9k_hw_getbssidmask(ah, sc->sc_bssidmask);
3073 ATH_SET_VAP_BSSID_MASK(sc->sc_bssidmask);
3074 (void) ath9k_hw_setbssidmask(ah, sc->sc_bssidmask);
3078 sc->sc_slottime = ATH9K_SLOT_TIME_9;
3082 for (i = 0; i < ARRAY_SIZE(sc->sc_bslot); i++)
3083 sc->sc_bslot[i] = ATH_IF_ID_ANY;
3086 sc->sc_config.swBeaconProcess = 1;
3093 if (sc->sc_ht_conf.ht_supported) {
3132 if (sc->sc_ht_conf.ht_supported) {
3133 sc->sc_recv_action = ic->ic_recv_action;
3135 // sc->sc_send_action = ic->ic_send_action;
3138 ic->ic_ampdu_rxmax = sc->sc_ht_conf.ampdu_factor;
3139 ic->ic_ampdu_density = sc->sc_ht_conf.ampdu_density;
3144 sc->sc_newstate = ic->ic_newstate;
3147 sc->sc_recv_mgmt = ic->ic_recv_mgmt;
3164 sc->sc_rx_pend = 0;
3165 (void) ath9k_hw_set_interrupts(sc->sc_ah, 0);
3167 &sc->sc_softint_id, NULL, 0, arn_softint_handler, (caddr_t)sc);
3174 if (ddi_get_iblock_cookie(devinfo, 0, &sc->sc_iblock)
3182 (caddr_t)sc) != DDI_SUCCESS) {
3209 macp->m_driver = sc;
3238 sc->sc_promisc = B_FALSE;
3239 bzero(sc->sc_mcast_refs, sizeof (sc->sc_mcast_refs));
3240 bzero(sc->sc_mcast_hash, sizeof (sc->sc_mcast_hash));
3249 (unsigned long)sc->mem));
3252 sc->sc_flags |= SC_OP_INVALID;
3253 sc->sc_isrunning = 0;
3258 ddi_remove_intr(devinfo, 0, sc->sc_iblock);
3260 ddi_remove_softintr(sc->sc_softint_id);
3264 arn_desc_free(sc);
3265 if (sc->sc_tq)
3266 ddi_taskq_destroy(sc->sc_tq);
3270 ddi_regs_map_free(&sc->sc_io_handle);
3272 pci_config_teardown(&sc->sc_cfg_handle);
3274 sc->sc_flags |= SC_OP_INVALID;
3276 mutex_destroy(&sc->sc_txbuflock);
3278 if (ARN_TXQ_SETUP(sc, i)) {
3280 mutex_destroy(&((&sc->sc_txq[i])->axq_lock));
3283 mutex_destroy(&sc->sc_rxbuflock);
3284 mutex_destroy(&sc->sc_serial_rw);
3285 mutex_destroy(&sc->sc_genlock);
3286 mutex_destroy(&sc->sc_resched_lock);
3288 mutex_destroy(&sc->sc_bcbuflock);
3301 arn_suspend(struct arn_softc *sc)
3303 ARN_LOCK(sc);
3304 arn_close(sc);
3305 ARN_UNLOCK(sc);
3313 struct arn_softc *sc;
3316 sc = ddi_get_soft_state(arn_soft_state_p, ddi_get_instance(devinfo));
3317 ASSERT(sc != NULL);
3324 return (arn_suspend(sc));
3330 if (mac_disable(sc->sc_isc.ic_mach) != 0)
3333 arn_stop_scantimer(sc);
3334 arn_stop_caltimer(sc);
3337 (void) ath9k_hw_set_interrupts(sc->sc_ah, 0);
3342 (void) mac_unregister(sc->sc_isc.ic_mach);
3345 ddi_remove_intr(devinfo, 0, sc->sc_iblock);
3346 ddi_remove_softintr(sc->sc_softint_id);
3359 ieee80211_detach(&sc->sc_isc);
3361 arn_desc_free(sc);
3363 ddi_taskq_destroy(sc->sc_tq);
3365 if (!(sc->sc_flags & SC_OP_INVALID))
3366 (void) ath9k_hw_setpower(sc->sc_ah, ATH9K_PM_AWAKE);
3369 mutex_destroy(&sc->sc_txbuflock);
3371 if (ARN_TXQ_SETUP(sc, i)) {
3372 arn_tx_cleanupq(sc, &sc->sc_txq[i]);
3373 mutex_destroy(&((&sc->sc_txq[i])->axq_lock));
3377 ath9k_hw_detach(sc->sc_ah);
3380 ddi_regs_map_free(&sc->sc_io_handle);
3381 pci_config_teardown(&sc->sc_cfg_handle);
3384 mutex_destroy(&sc->sc_genlock);
3385 mutex_destroy(&sc->sc_serial_rw);
3386 mutex_destroy(&sc->sc_rxbuflock);
3387 mutex_destroy(&sc->sc_resched_lock);
3389 mutex_destroy(&sc->sc_bcbuflock);
3411 struct arn_softc *sc;
3415 sc = ddi_get_soft_state(arn_soft_state_p, ddi_get_instance(devinfo));
3417 if (sc == NULL || (ah = sc->sc_ah) == NULL)
3429 if (ARN_TXQ_SETUP(sc, i))
3430 (void) ath9k_hw_stoptxdma(ah, sc->sc_txq[i].axq_qnum);
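Lines 3429-3430 are from the stop/quiesce path: every hardware transmit queue that was set up has its DMA halted by queue number before the device is considered quiesced. The enclosing loop presumably runs over the HAL's queue array, e.g.:

	for (i = 0; i < ATH9K_NUM_TX_QUEUES; i++) {
		if (ARN_TXQ_SETUP(sc, i))
			(void) ath9k_hw_stoptxdma(ah, sc->sc_txq[i].axq_qnum);
	}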