txq              1376 dev/ic/re.c    	struct rl_txq	*txq;
txq              1383 dev/ic/re.c    		txq = &sc->rl_ldata.rl_txq[idx];
txq              1385 dev/ic/re.c    		if (txq->txq_mbuf == NULL) {
txq              1390 dev/ic/re.c    		descidx = txq->txq_descidx;
txq              1400 dev/ic/re.c    		sc->rl_ldata.rl_tx_free += txq->txq_nsegs;
txq              1402 dev/ic/re.c    		bus_dmamap_sync(sc->sc_dmat, txq->txq_dmamap,
txq              1403 dev/ic/re.c    		    0, txq->txq_dmamap->dm_mapsize, BUS_DMASYNC_POSTWRITE);
txq              1404 dev/ic/re.c    		bus_dmamap_unload(sc->sc_dmat, txq->txq_dmamap);
txq              1405 dev/ic/re.c    		m_freem(txq->txq_mbuf);
txq              1406 dev/ic/re.c    		txq->txq_mbuf = NULL;
txq              1534 dev/ic/re.c    	struct rl_txq	*txq;
txq              1568 dev/ic/re.c    	txq = &sc->rl_ldata.rl_txq[*idx];
txq              1569 dev/ic/re.c    	map = txq->txq_dmamap;
txq              1691 dev/ic/re.c    	txq->txq_mbuf = m;
txq              1692 dev/ic/re.c    	txq->txq_descidx = lastidx;
txq              1693 dev/ic/re.c    	txq->txq_nsegs = nsegs;
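
The re(4) hits above show both halves of per-packet TX bookkeeping: the encap side records the mbuf, its last descriptor index, and its segment count in a struct rl_txq slot, and the completion side returns txq_nsegs descriptors to the free pool, syncs the DMA map POSTWRITE, unloads it, and frees the mbuf. A minimal userland sketch of that reclaim loop follows; pkt_slot and tx_ring are hypothetical stand-ins for struct rl_txq and the rl_ldata bookkeeping, and the bus_dma steps are reduced to comments.

	/*
	 * Sketch only: re(4)-style TX reclaim without bus_dma or mbufs.
	 */
	#include <stddef.h>
	#include <stdlib.h>

	#define TX_DESC_CNT	256		/* assumed ring size */

	struct pkt_slot {
		void	*slot_pkt;		/* packet owned by this slot */
		int	 slot_nsegs;		/* descriptors it consumed */
	};

	struct tx_ring {
		struct pkt_slot	ring_slot[TX_DESC_CNT];
		int		ring_free;	/* free descriptor count */
		int		ring_considx;	/* next slot to reclaim */
	};

	static void
	tx_reclaim(struct tx_ring *r)
	{
		struct pkt_slot *s;

		for (;;) {
			s = &r->ring_slot[r->ring_considx];
			if (s->slot_pkt == NULL)
				break;		/* ring fully drained */
			/*
			 * The real driver first checks descriptor ownership,
			 * then bus_dmamap_sync(POSTWRITE) and
			 * bus_dmamap_unload() before freeing.
			 */
			r->ring_free += s->slot_nsegs;
			free(s->slot_pkt);	/* m_freem() in the kernel */
			s->slot_pkt = NULL;
			r->ring_considx = (r->ring_considx + 1) % TX_DESC_CNT;
		}
	}
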
txq               202 dev/ic/rt2560.c 	error = rt2560_alloc_tx_ring(sc, &sc->txq, RT2560_TX_RING_COUNT);
txq               311 dev/ic/rt2560.c fail2:	rt2560_free_tx_ring(sc, &sc->txq);
txq               332 dev/ic/rt2560.c 	rt2560_free_tx_ring(sc, &sc->txq);
txq               886 dev/ic/rt2560.c 	hw = (RAL_READ(sc, RT2560_SECCSR1) - sc->txq.physaddr) /
txq               889 dev/ic/rt2560.c 	for (; sc->txq.next_encrypt != hw;) {
txq               891 dev/ic/rt2560.c 		    &sc->txq.desc[sc->txq.next_encrypt];
txq               893 dev/ic/rt2560.c 		bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               894 dev/ic/rt2560.c 		    sc->txq.next_encrypt * RT2560_TX_DESC_SIZE,
txq               909 dev/ic/rt2560.c 		bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               910 dev/ic/rt2560.c 		    sc->txq.next_encrypt * RT2560_TX_DESC_SIZE,
txq               914 dev/ic/rt2560.c 		    sc->txq.next_encrypt));
txq               916 dev/ic/rt2560.c 		sc->txq.next_encrypt =
txq               917 dev/ic/rt2560.c 		    (sc->txq.next_encrypt + 1) % RT2560_TX_RING_COUNT;
txq               931 dev/ic/rt2560.c 		struct rt2560_tx_desc *desc = &sc->txq.desc[sc->txq.next];
txq               932 dev/ic/rt2560.c 		struct rt2560_tx_data *data = &sc->txq.data[sc->txq.next];
txq               935 dev/ic/rt2560.c 		bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               936 dev/ic/rt2560.c 		    sc->txq.next * RT2560_TX_DESC_SIZE, RT2560_TX_DESC_SIZE,
txq               988 dev/ic/rt2560.c 		bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               989 dev/ic/rt2560.c 		    sc->txq.next * RT2560_TX_DESC_SIZE, RT2560_TX_DESC_SIZE,
txq               992 dev/ic/rt2560.c 		DPRINTFN(15, ("tx done idx=%u\n", sc->txq.next));
txq               994 dev/ic/rt2560.c 		sc->txq.queued--;
txq               995 dev/ic/rt2560.c 		sc->txq.next = (sc->txq.next + 1) % RT2560_TX_RING_COUNT;
txq              1699 dev/ic/rt2560.c 	struct rt2560_tx_ring *txq = &sc->txq;
txq              1782 dev/ic/rt2560.c 		desc = &txq->desc[txq->cur_encrypt];
txq              1783 dev/ic/rt2560.c 		data = &txq->data[txq->cur_encrypt];
txq              1808 dev/ic/rt2560.c 		bus_dmamap_sync(sc->sc_dmat, txq->map,
txq              1809 dev/ic/rt2560.c 		    txq->cur_encrypt * RT2560_TX_DESC_SIZE,
txq              1812 dev/ic/rt2560.c 		txq->queued++;
txq              1813 dev/ic/rt2560.c 		if (++txq->cur_encrypt >= txq->count)
txq              1814 dev/ic/rt2560.c 			txq->cur_encrypt = 0;
txq              1819 dev/ic/rt2560.c 	data = &txq->data[txq->cur_encrypt];
txq              1820 dev/ic/rt2560.c 	desc = &txq->desc[txq->cur_encrypt];
txq              1903 dev/ic/rt2560.c 	bus_dmamap_sync(sc->sc_dmat, txq->map,
txq              1904 dev/ic/rt2560.c 	    txq->cur_encrypt * RT2560_TX_DESC_SIZE, RT2560_TX_DESC_SIZE,
txq              1908 dev/ic/rt2560.c 	    m0->m_pkthdr.len, txq->cur_encrypt, rate));
txq              1911 dev/ic/rt2560.c 	txq->queued++;
txq              1912 dev/ic/rt2560.c 	if (++txq->cur_encrypt >= txq->count)
txq              1913 dev/ic/rt2560.c 		txq->cur_encrypt = 0;
txq              1958 dev/ic/rt2560.c 			if (sc->txq.queued >= RT2560_TX_RING_COUNT - 1) {
txq              2612 dev/ic/rt2560.c 	RAL_WRITE(sc, RT2560_TXCSR3, sc->txq.physaddr);
txq              2713 dev/ic/rt2560.c 	rt2560_reset_tx_ring(sc, &sc->txq);
txq               126 dev/ic/rt2560var.h 	struct rt2560_tx_ring	txq;
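
rt2560(4) drives one data ring with split consumer indices: next_encrypt chases the on-chip crypto engine, whose position the driver derives by subtracting the ring's DMA base address from the RT2560_SECCSR1 register, while next chases actual TX completion; all indices wrap modulo RT2560_TX_RING_COUNT. A sketch of the chase-the-hardware loop, with read_hw_reg() and process_desc() as hypothetical stand-ins for RAL_READ() and the per-descriptor work, and both sizes assumed for the sketch:

	#include <stdint.h>

	#define TX_RING_COUNT	48	/* assumed RT2560_TX_RING_COUNT */
	#define TX_DESC_SIZE	32	/* assumed descriptor size */

	extern uint32_t	read_hw_reg(void);	/* RAL_READ(sc, SECCSR1) */
	extern void	process_desc(unsigned int idx);

	static uint32_t		ring_physaddr;	/* sc->txq.physaddr */
	static unsigned int	next_encrypt;	/* sc->txq.next_encrypt */

	static void
	encryption_intr(void)
	{
		unsigned int hw;

		/* the register holds a bus address; turn it into an index */
		hw = (read_hw_reg() - ring_physaddr) / TX_DESC_SIZE;

		while (next_encrypt != hw) {
			/* driver brackets this with POSTREAD/PREWRITE syncs */
			process_desc(next_encrypt);
			next_encrypt = (next_encrypt + 1) % TX_RING_COUNT;
		}
	}
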
txq               221 dev/ic/rt2661.c 		error = rt2661_alloc_tx_ring(sc, &sc->txq[ac],
txq               345 dev/ic/rt2661.c 		rt2661_free_tx_ring(sc, &sc->txq[ac]);
txq               368 dev/ic/rt2661.c 		rt2661_free_tx_ring(sc, &sc->txq[ac]);
txq               897 dev/ic/rt2661.c 	struct rt2661_tx_ring *txq;
txq               909 dev/ic/rt2661.c 		txq = (qid <= 3) ? &sc->txq[qid] : &sc->mgtq;
txq               912 dev/ic/rt2661.c 		data = &txq->data[txq->stat];
txq               949 dev/ic/rt2661.c 		DPRINTFN(15, ("tx done q=%d idx=%u\n", qid, txq->stat));
txq               951 dev/ic/rt2661.c 		txq->queued--;
txq               952 dev/ic/rt2661.c 		if (++txq->stat >= txq->count)	/* faster than % count */
txq               953 dev/ic/rt2661.c 			txq->stat = 0;
txq               962 dev/ic/rt2661.c rt2661_tx_dma_intr(struct rt2661_softc *sc, struct rt2661_tx_ring *txq)
txq               965 dev/ic/rt2661.c 		struct rt2661_tx_desc *desc = &txq->desc[txq->next];
txq               966 dev/ic/rt2661.c 		struct rt2661_tx_data *data = &txq->data[txq->next];
txq               968 dev/ic/rt2661.c 		bus_dmamap_sync(sc->sc_dmat, txq->map,
txq               969 dev/ic/rt2661.c 		    txq->next * RT2661_TX_DESC_SIZE, RT2661_TX_DESC_SIZE,
txq               986 dev/ic/rt2661.c 		bus_dmamap_sync(sc->sc_dmat, txq->map,
txq               987 dev/ic/rt2661.c 		    txq->next * RT2661_TX_DESC_SIZE, RT2661_TX_DESC_SIZE,
txq               990 dev/ic/rt2661.c 		DPRINTFN(15, ("tx dma done q=%p idx=%u\n", txq, txq->next));
txq               992 dev/ic/rt2661.c 		if (++txq->next >= txq->count)	/* faster than % count */
txq               993 dev/ic/rt2661.c 			txq->next = 0;
txq              1230 dev/ic/rt2661.c 		rt2661_tx_dma_intr(sc, &sc->txq[0]);
txq              1233 dev/ic/rt2661.c 		rt2661_tx_dma_intr(sc, &sc->txq[1]);
txq              1236 dev/ic/rt2661.c 		rt2661_tx_dma_intr(sc, &sc->txq[2]);
txq              1239 dev/ic/rt2661.c 		rt2661_tx_dma_intr(sc, &sc->txq[3]);
txq              1547 dev/ic/rt2661.c 	struct rt2661_tx_ring *txq = &sc->txq[ac];
txq              1628 dev/ic/rt2661.c 		desc = &txq->desc[txq->cur];
txq              1629 dev/ic/rt2661.c 		data = &txq->data[txq->cur];
txq              1654 dev/ic/rt2661.c 		bus_dmamap_sync(sc->sc_dmat, txq->map,
txq              1655 dev/ic/rt2661.c 		    txq->cur * RT2661_TX_DESC_SIZE, RT2661_TX_DESC_SIZE,
txq              1658 dev/ic/rt2661.c 		txq->queued++;
txq              1659 dev/ic/rt2661.c 		txq->cur = (txq->cur + 1) % RT2661_TX_RING_COUNT;
txq              1664 dev/ic/rt2661.c 	data = &txq->data[txq->cur];
txq              1665 dev/ic/rt2661.c 	desc = &txq->desc[txq->cur];
txq              1747 dev/ic/rt2661.c 	bus_dmamap_sync(sc->sc_dmat, txq->map, txq->cur * RT2661_TX_DESC_SIZE,
txq              1751 dev/ic/rt2661.c 	    m0->m_pkthdr.len, txq->cur, rate));
txq              1754 dev/ic/rt2661.c 	txq->queued++;
txq              1755 dev/ic/rt2661.c 	txq->cur = (txq->cur + 1) % RT2661_TX_RING_COUNT;
txq              1800 dev/ic/rt2661.c 			if (sc->txq[0].queued >= RT2661_TX_RING_COUNT - 1) {
txq              2479 dev/ic/rt2661.c 	RAL_WRITE(sc, RT2661_AC1_BASE_CSR, sc->txq[1].physaddr);
txq              2480 dev/ic/rt2661.c 	RAL_WRITE(sc, RT2661_AC0_BASE_CSR, sc->txq[0].physaddr);
txq              2481 dev/ic/rt2661.c 	RAL_WRITE(sc, RT2661_AC2_BASE_CSR, sc->txq[2].physaddr);
txq              2482 dev/ic/rt2661.c 	RAL_WRITE(sc, RT2661_AC3_BASE_CSR, sc->txq[3].physaddr);
txq              2629 dev/ic/rt2661.c 		rt2661_reset_tx_ring(sc, &sc->txq[ac]);
txq               125 dev/ic/rt2661var.h 	struct rt2661_tx_ring		txq[5];
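
rt2661(4) scales the same scheme to one ring per access category (plus the management ring, selected by qid in the completion handler) and advances its indices with a compare-and-reset, which the driver itself annotates as "faster than % count". The two wrap idioms from the listing, side by side:

	/* Two equivalent ring-index advances, as quoted above. */

	#define RING_COUNT	32	/* assumed; each ring carries its own count */

	static inline unsigned int
	advance_mod(unsigned int idx)
	{
		/* rt2560 style: one integer division per advance */
		return (idx + 1) % RING_COUNT;
	}

	static inline unsigned int
	advance_cmp(unsigned int idx)
	{
		/* rt2661 style: a predictable branch instead of a division */
		if (++idx >= RING_COUNT)
			idx = 0;
		return idx;
	}
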
txq               232 dev/pci/if_iwi.c 	error = iwi_alloc_tx_ring(sc, &sc->txq[0], IWI_CSR_TX1_RIDX,
txq               239 dev/pci/if_iwi.c 	error = iwi_alloc_tx_ring(sc, &sc->txq[1], IWI_CSR_TX2_RIDX,
txq               246 dev/pci/if_iwi.c 	error = iwi_alloc_tx_ring(sc, &sc->txq[2], IWI_CSR_TX3_RIDX,
txq               253 dev/pci/if_iwi.c 	error = iwi_alloc_tx_ring(sc, &sc->txq[3], IWI_CSR_TX4_RIDX,
txq               359 dev/pci/if_iwi.c fail5:	iwi_free_tx_ring(sc, &sc->txq[3]);
txq               360 dev/pci/if_iwi.c fail4:	iwi_free_tx_ring(sc, &sc->txq[2]);
txq               361 dev/pci/if_iwi.c fail3:	iwi_free_tx_ring(sc, &sc->txq[1]);
txq               362 dev/pci/if_iwi.c fail2:	iwi_free_tx_ring(sc, &sc->txq[0]);
txq              1134 dev/pci/if_iwi.c iwi_tx_intr(struct iwi_softc *sc, struct iwi_tx_ring *txq)
txq              1141 dev/pci/if_iwi.c 	hw = CSR_READ_4(sc, txq->csr_ridx);
txq              1143 dev/pci/if_iwi.c 	for (; txq->next != hw;) {
txq              1144 dev/pci/if_iwi.c 		data = &txq->data[txq->next];
txq              1154 dev/pci/if_iwi.c 		txq->queued--;
txq              1155 dev/pci/if_iwi.c 		txq->next = (txq->next + 1) % IWI_TX_RING_COUNT;
txq              1206 dev/pci/if_iwi.c 		iwi_tx_intr(sc, &sc->txq[0]);
txq              1209 dev/pci/if_iwi.c 		iwi_tx_intr(sc, &sc->txq[1]);
txq              1212 dev/pci/if_iwi.c 		iwi_tx_intr(sc, &sc->txq[2]);
txq              1215 dev/pci/if_iwi.c 		iwi_tx_intr(sc, &sc->txq[3]);
txq              1263 dev/pci/if_iwi.c 	struct iwi_tx_ring *txq = &sc->txq[0];
txq              1286 dev/pci/if_iwi.c 	data = &txq->data[txq->cur];
txq              1287 dev/pci/if_iwi.c 	desc = &txq->desc[txq->cur];
txq              1382 dev/pci/if_iwi.c 	bus_dmamap_sync(sc->sc_dmat, txq->map,
txq              1383 dev/pci/if_iwi.c 	    txq->cur * sizeof (struct iwi_tx_desc),
txq              1386 dev/pci/if_iwi.c 	DPRINTFN(5, ("sending data frame idx=%u len=%u nseg=%u\n", txq->cur,
txq              1389 dev/pci/if_iwi.c 	txq->queued++;
txq              1390 dev/pci/if_iwi.c 	txq->cur = (txq->cur + 1) % IWI_TX_RING_COUNT;
txq              1391 dev/pci/if_iwi.c 	CSR_WRITE_4(sc, txq->csr_widx, txq->cur);
txq              1412 dev/pci/if_iwi.c 		if (sc->txq[0].queued >= IWI_TX_RING_COUNT - 8) {
txq              2204 dev/pci/if_iwi.c 	CSR_WRITE_4(sc, IWI_CSR_TX1_BASE, sc->txq[0].map->dm_segs[0].ds_addr);
txq              2206 dev/pci/if_iwi.c 	CSR_WRITE_4(sc, IWI_CSR_TX1_WIDX, sc->txq[0].cur);
txq              2208 dev/pci/if_iwi.c 	CSR_WRITE_4(sc, IWI_CSR_TX2_BASE, sc->txq[1].map->dm_segs[0].ds_addr);
txq              2210 dev/pci/if_iwi.c 	CSR_WRITE_4(sc, IWI_CSR_TX2_WIDX, sc->txq[1].cur);
txq              2212 dev/pci/if_iwi.c 	CSR_WRITE_4(sc, IWI_CSR_TX3_BASE, sc->txq[2].map->dm_segs[0].ds_addr);
txq              2214 dev/pci/if_iwi.c 	CSR_WRITE_4(sc, IWI_CSR_TX3_WIDX, sc->txq[2].cur);
txq              2216 dev/pci/if_iwi.c 	CSR_WRITE_4(sc, IWI_CSR_TX4_BASE, sc->txq[3].map->dm_segs[0].ds_addr);
txq              2218 dev/pci/if_iwi.c 	CSR_WRITE_4(sc, IWI_CSR_TX4_WIDX, sc->txq[3].cur);
txq              2279 dev/pci/if_iwi.c 		iwi_reset_tx_ring(sc, &sc->txq[i]);
txq               110 dev/pci/if_iwivar.h 	struct iwi_tx_ring	txq[4];
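
iwi(4) binds each of its four rings to a pair of chip registers at allocation time: a read-index CSR the completion handler polls, and a write-index CSR the transmit path writes as a doorbell. A sketch of that producer/consumer protocol, with mmio_read()/mmio_write() as hypothetical stand-ins for CSR_READ_4()/CSR_WRITE_4() and the ring size assumed:

	#include <stdint.h>

	#define RING_COUNT	64	/* assumed IWI_TX_RING_COUNT */

	extern uint32_t	mmio_read(uint32_t reg);
	extern void	mmio_write(uint32_t reg, uint32_t val);

	struct tx_ring {
		uint32_t	csr_ridx;	/* hw read-index register */
		uint32_t	csr_widx;	/* hw write-index register */
		unsigned int	cur;		/* producer index */
		unsigned int	next;		/* consumer index */
		unsigned int	queued;
	};

	/* producer: one frame queued, tell the hardware about it */
	static void
	tx_kick(struct tx_ring *q)
	{
		/* descriptor filled and synced before this point */
		q->queued++;
		q->cur = (q->cur + 1) % RING_COUNT;
		mmio_write(q->csr_widx, q->cur);
	}

	/* consumer: reclaim everything the hardware has sent */
	static void
	tx_intr(struct tx_ring *q)
	{
		uint32_t hw = mmio_read(q->csr_ridx);

		while (q->next != hw) {
			/* unload the DMA map and free the mbuf here */
			q->queued--;
			q->next = (q->next + 1) % RING_COUNT;
		}
	}
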
txq               257 dev/pci/if_nfe.c 	if (nfe_alloc_tx_ring(sc, &sc->txq) != 0) {
txq               266 dev/pci/if_nfe.c 		nfe_free_tx_ring(sc, &sc->txq);
txq               561 dev/pci/if_nfe.c 	bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               562 dev/pci/if_nfe.c 	    (caddr_t)desc32 - (caddr_t)sc->txq.desc32,
txq               569 dev/pci/if_nfe.c 	bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               570 dev/pci/if_nfe.c 	    (caddr_t)desc64 - (caddr_t)sc->txq.desc64,
txq               578 dev/pci/if_nfe.c 		bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               579 dev/pci/if_nfe.c 		    (caddr_t)&sc->txq.desc32[start] - (caddr_t)sc->txq.desc32,
txq               580 dev/pci/if_nfe.c 		    (caddr_t)&sc->txq.desc32[end] -
txq               581 dev/pci/if_nfe.c 		    (caddr_t)&sc->txq.desc32[start], ops);
txq               585 dev/pci/if_nfe.c 	bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               586 dev/pci/if_nfe.c 	    (caddr_t)&sc->txq.desc32[start] - (caddr_t)sc->txq.desc32,
txq               587 dev/pci/if_nfe.c 	    (caddr_t)&sc->txq.desc32[NFE_TX_RING_COUNT] -
txq               588 dev/pci/if_nfe.c 	    (caddr_t)&sc->txq.desc32[start], ops);
txq               591 dev/pci/if_nfe.c 	bus_dmamap_sync(sc->sc_dmat, sc->txq.map, 0,
txq               592 dev/pci/if_nfe.c 	    (caddr_t)&sc->txq.desc32[end] - (caddr_t)sc->txq.desc32, ops);
txq               599 dev/pci/if_nfe.c 		bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               600 dev/pci/if_nfe.c 		    (caddr_t)&sc->txq.desc64[start] - (caddr_t)sc->txq.desc64,
txq               601 dev/pci/if_nfe.c 		    (caddr_t)&sc->txq.desc64[end] -
txq               602 dev/pci/if_nfe.c 		    (caddr_t)&sc->txq.desc64[start], ops);
txq               606 dev/pci/if_nfe.c 	bus_dmamap_sync(sc->sc_dmat, sc->txq.map,
txq               607 dev/pci/if_nfe.c 	    (caddr_t)&sc->txq.desc64[start] - (caddr_t)sc->txq.desc64,
txq               608 dev/pci/if_nfe.c 	    (caddr_t)&sc->txq.desc64[NFE_TX_RING_COUNT] -
txq               609 dev/pci/if_nfe.c 	    (caddr_t)&sc->txq.desc64[start], ops);
txq               612 dev/pci/if_nfe.c 	bus_dmamap_sync(sc->sc_dmat, sc->txq.map, 0,
txq               613 dev/pci/if_nfe.c 	    (caddr_t)&sc->txq.desc64[end] - (caddr_t)sc->txq.desc64, ops);
txq               809 dev/pci/if_nfe.c 	while (sc->txq.next != sc->txq.cur) {
txq               811 dev/pci/if_nfe.c 			desc64 = &sc->txq.desc64[sc->txq.next];
txq               816 dev/pci/if_nfe.c 			desc32 = &sc->txq.desc32[sc->txq.next];
txq               825 dev/pci/if_nfe.c 		data = &sc->txq.data[sc->txq.next];
txq               864 dev/pci/if_nfe.c skip:		sc->txq.queued--;
txq               865 dev/pci/if_nfe.c 		sc->txq.next = (sc->txq.next + 1) % NFE_TX_RING_COUNT;
txq               885 dev/pci/if_nfe.c 	int error, i, first = sc->txq.cur;
txq               887 dev/pci/if_nfe.c 	map = sc->txq.data[first].map;
txq               896 dev/pci/if_nfe.c 	if (sc->txq.queued + map->dm_nsegs >= NFE_TX_RING_COUNT - 1) {
txq               915 dev/pci/if_nfe.c 		data = &sc->txq.data[sc->txq.cur];
txq               918 dev/pci/if_nfe.c 			desc64 = &sc->txq.desc64[sc->txq.cur];
txq               931 dev/pci/if_nfe.c 			desc32 = &sc->txq.desc32[sc->txq.cur];
txq               954 dev/pci/if_nfe.c 		sc->txq.queued++;
txq               955 dev/pci/if_nfe.c 		sc->txq.cur = (sc->txq.cur + 1) % NFE_TX_RING_COUNT;
txq               965 dev/pci/if_nfe.c 		sc->txq.desc64[first].flags |= htole16(NFE_TX_VALID);
txq               975 dev/pci/if_nfe.c 		sc->txq.desc32[first].flags |= htole16(NFE_TX_VALID);
txq               991 dev/pci/if_nfe.c 	int old = sc->txq.cur;
txq              1012 dev/pci/if_nfe.c 	if (sc->txq.cur == old)	/* nothing sent */
txq              1016 dev/pci/if_nfe.c 		nfe_txdesc64_rsync(sc, old, sc->txq.cur, BUS_DMASYNC_PREWRITE);
txq              1018 dev/pci/if_nfe.c 		nfe_txdesc32_rsync(sc, old, sc->txq.cur, BUS_DMASYNC_PREWRITE);
txq              1088 dev/pci/if_nfe.c 	NFE_WRITE(sc, NFE_TX_RING_ADDR_HI, sc->txq.physaddr >> 32);
txq              1090 dev/pci/if_nfe.c 	NFE_WRITE(sc, NFE_TX_RING_ADDR_LO, sc->txq.physaddr & 0xffffffff);
txq              1174 dev/pci/if_nfe.c 	nfe_reset_tx_ring(sc, &sc->txq);
txq                89 dev/pci/if_nfevar.h 	struct nfe_tx_ring	txq;
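
nfe(4)'s rsync helpers sync only the descriptors one encap touched, in either the 32-bit or 64-bit layout; because the range can wrap past the end of the ring, end < start is split into two bus_dmamap_sync() calls, [start, COUNT) followed by [0, end). The same range splitting over a flat buffer, with sync_bytes() a hypothetical stand-in for bus_dmamap_sync() and the descriptor layout reduced to a placeholder:

	#include <stddef.h>

	#define RING_COUNT	256	/* assumed NFE_TX_RING_COUNT */

	struct desc32 { unsigned char d[8]; };	/* placeholder layout */
	#define DESC_SIZE	sizeof(struct desc32)

	/* hypothetical: flush [off, off + len) of the descriptor map */
	extern void sync_bytes(size_t off, size_t len);

	/* sync descriptors in [start, end), handling ring wrap-around */
	static void
	ring_rsync(unsigned int start, unsigned int end)
	{
		if (end > start) {
			sync_bytes(start * DESC_SIZE,
			    (end - start) * DESC_SIZE);
			return;
		}
		/* wrapped: tail of the ring first, then the head */
		sync_bytes(start * DESC_SIZE,
		    (RING_COUNT - start) * DESC_SIZE);
		if (end > 0)
			sync_bytes(0, end * DESC_SIZE);
	}
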
txq               251 dev/pci/if_wpi.c 		error = wpi_alloc_tx_ring(sc, &sc->txq[ac], WPI_TX_RING_COUNT,
txq               350 dev/pci/if_wpi.c 		wpi_free_tx_ring(sc, &sc->txq[ac]);
txq              1322 dev/pci/if_wpi.c 	struct wpi_tx_ring *ring = &sc->txq[desc->qid & 0x3];
txq              1560 dev/pci/if_wpi.c 	struct wpi_tx_ring *ring = &sc->txq[ac];
txq              1781 dev/pci/if_wpi.c 			if (sc->txq[0].queued >= sc->txq[0].count - 8) {
txq              1802 dev/pci/if_wpi.c 			if (sc->txq[0].queued >= sc->txq[0].count - 8) {
txq              2911 dev/pci/if_wpi.c 		wpi_reset_tx_ring(sc, &sc->txq[ac]);
txq               144 dev/pci/if_wpivar.h 	struct wpi_tx_ring	txq[4];
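
wpi(4) closes out the pattern with four AC rings and the same back-pressure test seen in iwi(4): stop feeding the start routine while fewer than eight slots remain, so one multi-segment frame cannot overrun the ring. A sketch of that watermark, with ifq_stop()/ifq_start() as hypothetical names for setting and clearing IFF_OACTIVE; the resume threshold is an assumption, since the listing only shows the stop side:

	#define RING_COUNT	256			/* assumed ring size */
	#define TX_HIWAT	(RING_COUNT - 8)	/* stop threshold */

	struct tx_ring { unsigned int queued; };

	extern void ifq_stop(void);	/* set IFF_OACTIVE (assumed helper) */
	extern void ifq_start(void);	/* clear IFF_OACTIVE (assumed helper) */

	/* start routine: called before dequeuing each frame */
	static int
	tx_ring_full(struct tx_ring *q)
	{
		if (q->queued >= TX_HIWAT) {
			ifq_stop();	/* hold frames until completions drain */
			return 1;
		}
		return 0;
	}

	/* completion interrupt: called after q->queued-- */
	static void
	tx_ring_drained(struct tx_ring *q)
	{
		if (q->queued < TX_HIWAT)
			ifq_start();	/* resume the send queue */
	}
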