rxq               226 dev/ic/rt2560.c 	error = rt2560_alloc_rx_ring(sc, &sc->rxq, RT2560_RX_RING_COUNT);
rxq               336 dev/ic/rt2560.c 	rt2560_free_rx_ring(sc, &sc->rxq);
rxq               548 dev/ic/rt2560.c 		struct rt2560_rx_desc *desc = &sc->rxq.desc[i];
rxq               549 dev/ic/rt2560.c 		struct rt2560_rx_data *data = &sc->rxq.data[i];
rxq              1084 dev/ic/rt2560.c 	hw = (RAL_READ(sc, RT2560_SECCSR0) - sc->rxq.physaddr) /
rxq              1087 dev/ic/rt2560.c 	for (; sc->rxq.cur_decrypt != hw;) {
rxq              1089 dev/ic/rt2560.c 		    &sc->rxq.desc[sc->rxq.cur_decrypt];
rxq              1091 dev/ic/rt2560.c 		    &sc->rxq.data[sc->rxq.cur_decrypt];
rxq              1093 dev/ic/rt2560.c 		bus_dmamap_sync(sc->sc_dmat, sc->rxq.map,
rxq              1094 dev/ic/rt2560.c 		    sc->rxq.cur_decrypt * RT2560_TX_DESC_SIZE,
rxq              1206 dev/ic/rt2560.c 		bus_dmamap_sync(sc->sc_dmat, sc->rxq.map,
rxq              1207 dev/ic/rt2560.c 		    sc->rxq.cur_decrypt * RT2560_TX_DESC_SIZE,
rxq              1210 dev/ic/rt2560.c 		DPRINTFN(15, ("decryption done idx=%u\n", sc->rxq.cur_decrypt));
rxq              1212 dev/ic/rt2560.c 		sc->rxq.cur_decrypt =
rxq              1213 dev/ic/rt2560.c 		    (sc->rxq.cur_decrypt + 1) % RT2560_RX_RING_COUNT;
rxq              1232 dev/ic/rt2560.c 		struct rt2560_rx_desc *desc = &sc->rxq.desc[sc->rxq.cur];
rxq              1233 dev/ic/rt2560.c 		struct rt2560_rx_data *data = &sc->rxq.data[sc->rxq.cur];
rxq              1235 dev/ic/rt2560.c 		bus_dmamap_sync(sc->sc_dmat, sc->rxq.map,
rxq              1236 dev/ic/rt2560.c 		    sc->rxq.cur * RT2560_RX_DESC_SIZE, RT2560_RX_DESC_SIZE,
rxq              1264 dev/ic/rt2560.c 		bus_dmamap_sync(sc->sc_dmat, sc->rxq.map,
rxq              1265 dev/ic/rt2560.c 		    sc->rxq.cur * RT2560_RX_DESC_SIZE, RT2560_RX_DESC_SIZE,
rxq              1268 dev/ic/rt2560.c 		DPRINTFN(15, ("rx done idx=%u\n", sc->rxq.cur));
rxq              1270 dev/ic/rt2560.c 		sc->rxq.cur = (sc->rxq.cur + 1) % RT2560_RX_RING_COUNT;
rxq              2621 dev/ic/rt2560.c 	RAL_WRITE(sc, RT2560_RXCSR2, sc->rxq.physaddr);
rxq              2717 dev/ic/rt2560.c 	rt2560_reset_rx_ring(sc, &sc->rxq);
rxq               130 dev/ic/rt2560var.h 	struct rt2560_rx_ring	rxq;
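
The rt2560 entries above trace one RX ring consumed by two indices: rt2560_rx_intr advances sc->rxq.cur, while rt2560_decryption_intr advances sc->rxq.cur_decrypt up to a hardware position recovered from the RT2560_SECCSR0 register (register value minus the ring's physaddr, divided by the descriptor size, line 1084). Below is a minimal userland sketch of that index arithmetic; the names and sizes are illustrative stand-ins, not the driver's.

#include <stdio.h>
#include <stdint.h>

#define RX_RING_COUNT	8	/* stands in for RT2560_RX_RING_COUNT */
#define RX_DESC_SIZE	48	/* stands in for RT2560_RX_DESC_SIZE */

/*
 * The device reports its progress as a DMA address inside the descriptor
 * ring (RT2560_SECCSR0 in the driver); the driver turns it back into a
 * ring index the same way as line 1084 above.
 */
static unsigned int
reg_to_index(uint64_t reg, uint64_t ring_physaddr)
{
	return (reg - ring_physaddr) / RX_DESC_SIZE;
}

int
main(void)
{
	uint64_t physaddr = 0x10000;	/* illustrative ring base address */
	unsigned int cur_decrypt = 6;	/* software consumer index */
	/* pretend the crypto engine stopped at slot 2 after wrapping */
	unsigned int hw = reg_to_index(physaddr + 2 * RX_DESC_SIZE, physaddr);

	while (cur_decrypt != hw) {
		printf("decryption done idx=%u\n", cur_decrypt);
		cur_decrypt = (cur_decrypt + 1) % RX_RING_COUNT;
	}
	return 0;
}
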
rxq               237 dev/ic/rt2661.c 	error = rt2661_alloc_rx_ring(sc, &sc->rxq, RT2661_RX_RING_COUNT);
rxq               370 dev/ic/rt2661.c 	rt2661_free_rx_ring(sc, &sc->rxq);
rxq               578 dev/ic/rt2661.c 		struct rt2661_rx_desc *desc = &sc->rxq.desc[i];
rxq               579 dev/ic/rt2661.c 		struct rt2661_rx_data *data = &sc->rxq.data[i];
rxq              1008 dev/ic/rt2661.c 		struct rt2661_rx_desc *desc = &sc->rxq.desc[sc->rxq.cur];
rxq              1009 dev/ic/rt2661.c 		struct rt2661_rx_data *data = &sc->rxq.data[sc->rxq.cur];
rxq              1011 dev/ic/rt2661.c 		bus_dmamap_sync(sc->sc_dmat, sc->rxq.map,
rxq              1012 dev/ic/rt2661.c 		    sc->rxq.cur * RT2661_RX_DESC_SIZE, RT2661_RX_DESC_SIZE,
rxq              1136 dev/ic/rt2661.c 		bus_dmamap_sync(sc->sc_dmat, sc->rxq.map,
rxq              1137 dev/ic/rt2661.c 		    sc->rxq.cur * RT2661_RX_DESC_SIZE, RT2661_RX_DESC_SIZE,
rxq              1140 dev/ic/rt2661.c 		DPRINTFN(15, ("rx intr idx=%u\n", sc->rxq.cur));
rxq              1142 dev/ic/rt2661.c 		sc->rxq.cur = (sc->rxq.cur + 1) % RT2661_RX_RING_COUNT;
rxq              2488 dev/ic/rt2661.c 	RAL_WRITE(sc, RT2661_RX_BASE_CSR, sc->rxq.physaddr);
rxq              2631 dev/ic/rt2661.c 	rt2661_reset_rx_ring(sc, &sc->rxq);
rxq               127 dev/ic/rt2661var.h 	struct rt2661_rx_ring		rxq;
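
The rt2661 entries walk the same kind of ring with a single index, and every bus_dmamap_sync covers just one slot: the whole ring sits in one DMA map (sc->rxq.map), so the sync window starts at cur * RT2661_RX_DESC_SIZE and is RT2661_RX_DESC_SIZE bytes long (lines 1011-1012, 1136-1137). A small sketch of that windowing follows, with a plain memcpy standing in for the sync and hypothetical names throughout.

#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define RX_RING_COUNT	4	/* stands in for RT2661_RX_RING_COUNT */

/* Toy descriptor; RT2661_RX_DESC_SIZE is really the size of the hw layout. */
struct rx_desc {
	uint32_t	flags;
	uint32_t	physaddr;
};
#define RX_DESC_SIZE	sizeof(struct rx_desc)

/*
 * Stand-in for the partial bus_dmamap_sync() calls above: only the byte
 * window covering slot "cur" is copied between the pretend device view
 * and the driver view.
 */
static void
sync_slot(void *driver_view, const void *device_view, unsigned int cur)
{
	memcpy((char *)driver_view + cur * RX_DESC_SIZE,
	    (const char *)device_view + cur * RX_DESC_SIZE, RX_DESC_SIZE);
}

int
main(void)
{
	struct rx_desc device_ring[RX_RING_COUNT] = {
		{ 1, 0x1000 }, { 1, 0x2000 }, { 0, 0x3000 }, { 0, 0x4000 }
	};
	struct rx_desc driver_ring[RX_RING_COUNT];
	unsigned int cur = 1;

	memset(driver_ring, 0, sizeof driver_ring);
	sync_slot(driver_ring, device_ring, cur);	/* the "POSTREAD" half */
	printf("slot %u: flags=%u physaddr=%#lx\n", cur,
	    driver_ring[cur].flags, (unsigned long)driver_ring[cur].physaddr);
	return 0;
}
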
rxq               260 dev/pci/if_iwi.c 	error = iwi_alloc_rx_ring(sc, &sc->rxq);
rxq               587 dev/pci/if_iwi.c 		data = &sc->rxq.data[i];
rxq               645 dev/pci/if_iwi.c 		data = &sc->rxq.data[i];
rxq              1101 dev/pci/if_iwi.c 	for (; sc->rxq.cur != hw;) {
rxq              1102 dev/pci/if_iwi.c 		data = &sc->rxq.data[sc->rxq.cur];
rxq              1125 dev/pci/if_iwi.c 		sc->rxq.cur = (sc->rxq.cur + 1) % IWI_RX_RING_COUNT;
rxq              2221 dev/pci/if_iwi.c 		struct iwi_rx_data *data = &sc->rxq.data[i];
rxq              2280 dev/pci/if_iwi.c 	iwi_reset_rx_ring(sc, &sc->rxq);
rxq               111 dev/pci/if_iwivar.h 	struct iwi_rx_ring	rxq;
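
The iwi entries reduce the pattern to its simplest form: a ring structure holding a data[] array and one cur index, consumed until cur catches up with a hardware-supplied index (line 1101) and reset by rewinding cur (iwi_reset_rx_ring). A compact sketch of that structure, using made-up names:

#include <stdio.h>

#define RX_RING_COUNT	8	/* stands in for IWI_RX_RING_COUNT */

struct rx_data {
	int	frame;			/* stands in for the slot's mbuf */
};

struct rx_ring {
	unsigned int	cur;			/* next slot to look at */
	struct rx_data	data[RX_RING_COUNT];	/* one entry per slot */
};

/* Like iwi_reset_rx_ring(): forget progress, keep the buffers. */
static void
reset_rx_ring(struct rx_ring *ring)
{
	ring->cur = 0;
}

/* Like the loop at line 1101: consume until we catch up with "hw". */
static void
rx_intr(struct rx_ring *ring, unsigned int hw)
{
	while (ring->cur != hw) {
		printf("rx frame %d at idx=%u\n",
		    ring->data[ring->cur].frame, ring->cur);
		ring->cur = (ring->cur + 1) % RX_RING_COUNT;
	}
}

int
main(void)
{
	struct rx_ring ring = { 0, {{1},{2},{3},{4},{5},{6},{7},{8}} };

	rx_intr(&ring, 3);	/* device produced slots 0..2 */
	reset_rx_ring(&ring);
	return 0;
}
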
rxq               263 dev/pci/if_nfe.c 	if (nfe_alloc_rx_ring(sc, &sc->rxq) != 0) {
rxq               619 dev/pci/if_nfe.c 	bus_dmamap_sync(sc->sc_dmat, sc->rxq.map,
rxq               620 dev/pci/if_nfe.c 	    (caddr_t)desc32 - (caddr_t)sc->rxq.desc32,
rxq               627 dev/pci/if_nfe.c 	bus_dmamap_sync(sc->sc_dmat, sc->rxq.map,
rxq               628 dev/pci/if_nfe.c 	    (caddr_t)desc64 - (caddr_t)sc->rxq.desc64,
rxq               646 dev/pci/if_nfe.c 		data = &sc->rxq.data[sc->rxq.cur];
rxq               649 dev/pci/if_nfe.c 			desc64 = &sc->rxq.desc64[sc->rxq.cur];
rxq               655 dev/pci/if_nfe.c 			desc32 = &sc->rxq.desc32[sc->rxq.cur];
rxq               709 dev/pci/if_nfe.c 			bus_dmamap_sync(sc->sc_dmat, sc->rxq.jmap,
rxq               710 dev/pci/if_nfe.c 			    mtod(data->m, caddr_t) - sc->rxq.jpool, NFE_JBYTES,
rxq               785 dev/pci/if_nfe.c 			desc64->length = htole16(sc->rxq.bufsz);
rxq               790 dev/pci/if_nfe.c 			desc32->length = htole16(sc->rxq.bufsz);
rxq               796 dev/pci/if_nfe.c 		sc->rxq.cur = (sc->rxq.cur + 1) % NFE_RX_RING_COUNT;
rxq              1084 dev/pci/if_nfe.c 	NFE_WRITE(sc, NFE_RX_RING_ADDR_HI, sc->rxq.physaddr >> 32);
rxq              1086 dev/pci/if_nfe.c 	NFE_WRITE(sc, NFE_RX_RING_ADDR_LO, sc->rxq.physaddr & 0xffffffff);
rxq              1096 dev/pci/if_nfe.c 	NFE_WRITE(sc, NFE_RXBUFSZ, sc->rxq.bufsz);
rxq              1175 dev/pci/if_nfe.c 	nfe_reset_rx_ring(sc, &sc->rxq);
rxq              1248 dev/pci/if_nfe.c 		data = &sc->rxq.data[i];
rxq              1296 dev/pci/if_nfe.c 			desc64 = &sc->rxq.desc64[i];
rxq              1301 dev/pci/if_nfe.c 			desc64->length = htole16(sc->rxq.bufsz);
rxq              1304 dev/pci/if_nfe.c 			desc32 = &sc->rxq.desc32[i];
rxq              1306 dev/pci/if_nfe.c 			desc32->length = htole16(sc->rxq.bufsz);
rxq              1384 dev/pci/if_nfe.c 	jbuf = SLIST_FIRST(&sc->rxq.jfreelist);
rxq              1387 dev/pci/if_nfe.c 	SLIST_REMOVE_HEAD(&sc->rxq.jfreelist, jnext);
rxq              1404 dev/pci/if_nfe.c 	i = (buf - sc->rxq.jpool) / NFE_JBYTES;
rxq              1410 dev/pci/if_nfe.c 	jbuf = &sc->rxq.jbuf[i];
rxq              1413 dev/pci/if_nfe.c 	SLIST_INSERT_HEAD(&sc->rxq.jfreelist, jbuf, jnext);
rxq              1419 dev/pci/if_nfe.c 	struct nfe_rx_ring *ring = &sc->rxq;
rxq              1486 dev/pci/if_nfe.c 	struct nfe_rx_ring *ring = &sc->rxq;
rxq                90 dev/pci/if_nfevar.h 	struct nfe_rx_ring	rxq;
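
Beyond the ring itself, the nfe entries show a jumbo-buffer pool: buffers are carved from one contiguous allocation (sc->rxq.jpool) in NFE_JBYTES steps, kept on the SLIST free list sc->rxq.jfreelist, and on free the buffer's index is recovered by pointer arithmetic (line 1404). A self-contained sketch of such a pool follows; the constants and helper names are placeholders, not the driver's.

#include <sys/queue.h>
#include <stdio.h>
#include <stdlib.h>

#define JPOOL_COUNT	4
#define JBYTES		9216	/* stands in for NFE_JBYTES */

struct jbuf {
	char			*buf;	/* this entry's slice of the pool */
	SLIST_ENTRY(jbuf)	jnext;
};

static char *jpool;			/* one contiguous allocation */
static struct jbuf jbuf_tab[JPOOL_COUNT];
static SLIST_HEAD(, jbuf) jfreelist = SLIST_HEAD_INITIALIZER(jfreelist);

/* Carve the pool into JBYTES chunks and put each one on the free list. */
static int
jpool_init(void)
{
	int i;

	jpool = malloc(JPOOL_COUNT * JBYTES);
	if (jpool == NULL)
		return -1;
	for (i = 0; i < JPOOL_COUNT; i++) {
		jbuf_tab[i].buf = jpool + i * JBYTES;
		SLIST_INSERT_HEAD(&jfreelist, &jbuf_tab[i], jnext);
	}
	return 0;
}

/* Like nfe_jalloc(): pop the first free jumbo buffer, if any. */
static struct jbuf *
jalloc(void)
{
	struct jbuf *jbuf = SLIST_FIRST(&jfreelist);

	if (jbuf != NULL)
		SLIST_REMOVE_HEAD(&jfreelist, jnext);
	return jbuf;
}

/* Like nfe_jfree(): recover the index from the pointer, push it back. */
static void
jfree(char *buf)
{
	int i = (buf - jpool) / JBYTES;	/* same arithmetic as line 1404 */

	SLIST_INSERT_HEAD(&jfreelist, &jbuf_tab[i], jnext);
}

int
main(void)
{
	struct jbuf *j;

	if (jpool_init() != 0)
		return 1;
	j = jalloc();
	if (j != NULL) {
		printf("got jumbo buffer index %ld\n",
		    (long)((j->buf - jpool) / JBYTES));
		jfree(j->buf);
	}
	free(jpool);
	return 0;
}
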
rxq               265 dev/pci/if_wpi.c 	error = wpi_alloc_rx_ring(sc, &sc->rxq);
rxq               476 dev/pci/if_wpi.c 	rbuf = SLIST_FIRST(&sc->rxq.freelist);
rxq               479 dev/pci/if_wpi.c 	SLIST_REMOVE_HEAD(&sc->rxq.freelist, next);
rxq               494 dev/pci/if_wpi.c 	SLIST_INSERT_HEAD(&sc->rxq.freelist, rbuf, next);
rxq               500 dev/pci/if_wpi.c 	struct wpi_rx_ring *ring = &sc->rxq;
rxq               529 dev/pci/if_wpi.c 	wpi_dma_contig_free(&sc->rxq.buf_dma);
rxq              1200 dev/pci/if_wpi.c 	struct wpi_rx_ring *ring = &sc->rxq;
rxq              1390 dev/pci/if_wpi.c 	while (sc->rxq.cur != hw) {
rxq              1391 dev/pci/if_wpi.c 		struct wpi_rx_data *data = &sc->rxq.data[sc->rxq.cur];
rxq              1479 dev/pci/if_wpi.c 		sc->rxq.cur = (sc->rxq.cur + 1) % WPI_RX_RING_COUNT;
rxq              2805 dev/pci/if_wpi.c 	WPI_WRITE(sc, WPI_RX_BASE, sc->rxq.desc_dma.paddr);
rxq              2915 dev/pci/if_wpi.c 	wpi_reset_rx_ring(sc, &sc->rxq);
rxq               146 dev/pci/if_wpivar.h 	struct wpi_rx_ring	rxq;
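
The wpi entries pair the ring with an SLIST free list of receive buffers (sc->rxq.freelist, lines 476-494): when a frame is handed up, the slot needs a replacement buffer from the free list, and if none is available the frame is dropped and the old buffer stays in place. Below is a simplified sketch of that replace-or-drop loop, with invented names; for simplicity the consumed buffer goes straight back on the free list here, rather than via the real driver's mbuf external-free path.

#include <sys/queue.h>
#include <stdio.h>

#define RX_RING_COUNT	4	/* stands in for WPI_RX_RING_COUNT */
#define RBUF_COUNT	6

struct rbuf {
	int			id;
	SLIST_ENTRY(rbuf)	next;
};

static struct rbuf rbuf_tab[RBUF_COUNT];
static SLIST_HEAD(, rbuf) freelist = SLIST_HEAD_INITIALIZER(freelist);

/* Like the alloc path at lines 476-479: pop the first free buffer. */
static struct rbuf *
alloc_rbuf(void)
{
	struct rbuf *rbuf = SLIST_FIRST(&freelist);

	if (rbuf != NULL)
		SLIST_REMOVE_HEAD(&freelist, next);
	return rbuf;
}

/* Like the free path at line 494: push the buffer back. */
static void
free_rbuf(struct rbuf *rbuf)
{
	SLIST_INSERT_HEAD(&freelist, rbuf, next);
}

/*
 * Consumer loop in the shape of wpi_rx_intr(): before handing slot "cur"'s
 * buffer up, grab a replacement from the free list; if none is available,
 * drop the frame and keep the old buffer in the slot.
 */
static void
rx_intr(struct rbuf *slot[], unsigned int *cur, unsigned int hw)
{
	while (*cur != hw) {
		struct rbuf *fresh = alloc_rbuf();

		if (fresh == NULL) {
			printf("idx=%u: no free rbuf, frame dropped\n", *cur);
		} else {
			printf("idx=%u: frame from rbuf %d passed up\n",
			    *cur, slot[*cur]->id);
			free_rbuf(slot[*cur]);	/* pretend the stack is done with it */
			slot[*cur] = fresh;
		}
		*cur = (*cur + 1) % RX_RING_COUNT;
	}
}

int
main(void)
{
	struct rbuf *slot[RX_RING_COUNT];
	unsigned int cur = 0;
	int i;

	for (i = 0; i < RBUF_COUNT; i++) {
		rbuf_tab[i].id = i;
		free_rbuf(&rbuf_tab[i]);
	}
	for (i = 0; i < RX_RING_COUNT; i++)
		slot[i] = alloc_rbuf();

	rx_intr(slot, &cur, 3);	/* device produced slots 0..2 */
	return 0;
}
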