cur_rx 2063 dev/ic/dc.c struct dc_desc *cur_rx;
cur_rx 2071 dev/ic/dc.c cur_rx = &sc->dc_ldata->dc_rx_list[idx];
cur_rx 2112 dev/ic/dc.c cur_rx->dc_status = htole32(rxstat | DC_RXSTAT_FIRSTFRAG);
cur_rx 2168 dev/ic/dc.c struct dc_desc *cur_rx;
cur_rx 2183 dev/ic/dc.c cur_rx = &sc->dc_ldata->dc_rx_list[i];
cur_rx 2184 dev/ic/dc.c rxstat = letoh32(cur_rx->dc_status);
cur_rx 2204 dev/ic/dc.c rxstat = letoh32(cur_rx->dc_status);
cur_rx 906 dev/ic/mtd8xx.c struct mtd_rx_desc *cur_rx;
cur_rx 921 dev/ic/mtd8xx.c cur_rx = &sc->mtd_ldata->mtd_rx_list[i];
cur_rx 922 dev/ic/mtd8xx.c rxstat = letoh32(cur_rx->rd_rsr);
cur_rx 993 dev/ic/mtd8xx.c struct mtd_rx_desc *cur_rx;
cur_rx 1003 dev/ic/mtd8xx.c cur_rx = &sc->mtd_ldata->mtd_rx_list[pos];
cur_rx 1004 dev/ic/mtd8xx.c if (!(cur_rx->rd_rsr & htole32(RSR_OWN)))
cur_rx 642 dev/ic/re.c struct rl_desc *cur_rx;
cur_rx 746 dev/ic/re.c cur_rx = &sc->rl_ldata.rl_rx_list[0];
cur_rx 747 dev/ic/re.c rxstat = letoh32(cur_rx->rl_cmdstat);
cur_rx 1228 dev/ic/re.c struct rl_desc *cur_rx;
cur_rx 1235 dev/ic/re.c cur_rx = &sc->rl_ldata.rl_rx_list[i];
cur_rx 1238 dev/ic/re.c rxstat = letoh32(cur_rx->rl_cmdstat);
cur_rx 597 dev/ic/rtl81x9.c u_int16_t cur_rx;
cur_rx 603 dev/ic/rtl81x9.c cur_rx = (CSR_READ_2(sc, RL_CURRXADDR) + 16) % RL_RXBUFLEN;
cur_rx 608 dev/ic/rtl81x9.c if (limit < cur_rx)
cur_rx 609 dev/ic/rtl81x9.c max_bytes = (RL_RXBUFLEN - cur_rx) + limit;
cur_rx 611 dev/ic/rtl81x9.c max_bytes = limit - cur_rx;
cur_rx 616 dev/ic/rtl81x9.c rxbufpos = sc->rl_cdata.rl_rx_buf + cur_rx;
cur_rx 671 dev/ic/rtl81x9.c ((cur_rx + sizeof(u_int32_t)) % RL_RXBUFLEN);
cur_rx 692 dev/ic/rtl81x9.c cur_rx = (total_len - wrap + ETHER_CRC_LEN);
cur_rx 700 dev/ic/rtl81x9.c cur_rx += total_len + 4 + ETHER_CRC_LEN;
cur_rx 706 dev/ic/rtl81x9.c cur_rx = (cur_rx + 3) & ~3;
cur_rx 707 dev/ic/rtl81x9.c CSR_WRITE_2(sc, RL_CURRXADDR, cur_rx - 16);
cur_rx 464 dev/ic/rtl81x9reg.h u_int16_t cur_rx;
cur_rx 1186 dev/ic/xl.c struct xl_chain_onefrag *cur_rx;
cur_rx 1196 dev/ic/xl.c cur_rx = sc->xl_cdata.xl_rx_head;
cur_rx 1197 dev/ic/xl.c sc->xl_cdata.xl_rx_head = cur_rx->xl_next;
cur_rx 1201 dev/ic/xl.c ((caddr_t)cur_rx->xl_ptr - sc->sc_listkva),
cur_rx 1222 dev/ic/xl.c cur_rx->xl_ptr->xl_status = htole32(0);
cur_rx 1235 dev/ic/xl.c cur_rx->xl_ptr->xl_status = htole32(0);
cur_rx 1240 dev/ic/xl.c m = cur_rx->xl_mbuf;
cur_rx 1249 dev/ic/xl.c if (xl_newbuf(sc, cur_rx) == ENOBUFS) {
cur_rx 1251 dev/ic/xl.c cur_rx->xl_ptr->xl_status = htole32(0);
cur_rx 2301 dev/pci/if_bge.c struct bge_rx_bd *cur_rx;
cur_rx 2308 dev/pci/if_bge.c cur_rx = &sc->bge_rdata->
cur_rx 2311 dev/pci/if_bge.c rxidx = cur_rx->bge_idx;
cur_rx 2314 dev/pci/if_bge.c if (cur_rx->bge_flags & BGE_RXBDFLAG_JUMBO_RING) {
cur_rx 2319 dev/pci/if_bge.c if (cur_rx->bge_flags & BGE_RXBDFLAG_ERROR) {
cur_rx 2328 dev/pci/if_bge.c cur_rx->bge_len - ETHER_CRC_LEN +
cur_rx 2346 dev/pci/if_bge.c if (cur_rx->bge_flags & BGE_RXBDFLAG_ERROR) {
cur_rx 2367 dev/pci/if_bge.c cur_rx->bge_len);
cur_rx 2371 dev/pci/if_bge.c m->m_pkthdr.len = m->m_len = cur_rx->bge_len - ETHER_CRC_LEN;
cur_rx 2383 dev/pci/if_bge.c if ((cur_rx->bge_ip_csum ^ 0xffff) == 0)
cur_rx 2388 dev/pci/if_bge.c if (cur_rx->bge_flags & BGE_RXBDFLAG_TCP_UDP_CSUM) {
cur_rx 2390 dev/pci/if_bge.c cur_rx->bge_tcp_udp_csum;
cur_rx 895 dev/pci/if_lge.c struct lge_rx_desc *cur_rx;
cur_rx 909 dev/pci/if_lge.c cur_rx = &sc->lge_ldata->lge_rx_list[i];
cur_rx 910 dev/pci/if_lge.c rxctl = cur_rx->lge_ctl;
cur_rx 911 dev/pci/if_lge.c rxsts = cur_rx->lge_sts;
cur_rx 912 dev/pci/if_lge.c m = cur_rx->lge_mbuf;
cur_rx 913 dev/pci/if_lge.c cur_rx->lge_mbuf = NULL;
cur_rx 914 dev/pci/if_lge.c total_len = LGE_RXBYTES(cur_rx);
cur_rx 1609 dev/pci/if_msk.c struct sk_chain *cur_rx;
cur_rx 1622 dev/pci/if_msk.c cur_rx = &sc_if->sk_cdata.sk_rx_chain[cur];
cur_rx 1628 dev/pci/if_msk.c m = cur_rx->sk_mbuf;
cur_rx 1629 dev/pci/if_msk.c cur_rx->sk_mbuf = NULL;
cur_rx 1274 dev/pci/if_nge.c struct nge_desc *cur_rx;
cur_rx 1285 dev/pci/if_nge.c cur_rx = &sc->nge_ldata->nge_rx_list[i];
cur_rx 1286 dev/pci/if_nge.c rxstat = cur_rx->nge_rxstat;
cur_rx 1287 dev/pci/if_nge.c extsts = cur_rx->nge_extsts;
cur_rx 1288 dev/pci/if_nge.c m = cur_rx->nge_mbuf;
cur_rx 1289 dev/pci/if_nge.c cur_rx->nge_mbuf = NULL;
cur_rx 1290 dev/pci/if_nge.c total_len = NGE_RXBYTES(cur_rx);
cur_rx 1301 dev/pci/if_nge.c nge_newbuf(sc, cur_rx, m);
cur_rx 1319 dev/pci/if_nge.c if (nge_newbuf(sc, cur_rx, NULL) == ENOBUFS) {
cur_rx 1323 dev/pci/if_nge.c nge_newbuf(sc, cur_rx, m);
cur_rx 1296 dev/pci/if_sis.c struct sis_desc *cur_rx;
cur_rx 1302 dev/pci/if_sis.c for(cur_rx = sc->sis_cdata.sis_rx_pdsc; SIS_OWNDESC(cur_rx);
cur_rx 1303 dev/pci/if_sis.c cur_rx = cur_rx->sis_nextdesc) {
cur_rx 1306 dev/pci/if_sis.c ((caddr_t)cur_rx - sc->sc_listkva),
cur_rx 1310 dev/pci/if_sis.c rxstat = cur_rx->sis_rxstat;
cur_rx 1311 dev/pci/if_sis.c m = cur_rx->sis_mbuf;
cur_rx 1312 dev/pci/if_sis.c cur_rx->sis_mbuf = NULL;
cur_rx 1313 dev/pci/if_sis.c total_len = SIS_RXBYTES(cur_rx);
cur_rx 1325 dev/pci/if_sis.c sis_newbuf(sc, cur_rx, m);
cur_rx 1330 dev/pci/if_sis.c bus_dmamap_sync(sc->sc_dmat, cur_rx->map, 0,
cur_rx 1331 dev/pci/if_sis.c cur_rx->map->dm_mapsize, BUS_DMASYNC_POSTREAD);
cur_rx 1342 dev/pci/if_sis.c if (sis_newbuf(sc, cur_rx, NULL) == 0) {
cur_rx 1351 dev/pci/if_sis.c sis_newbuf(sc, cur_rx, m);
cur_rx 1371 dev/pci/if_sis.c sc->sis_cdata.sis_rx_pdsc = cur_rx;
cur_rx 1729 dev/pci/if_sk.c struct sk_chain *cur_rx;
cur_rx 1755 dev/pci/if_sk.c cur_rx = &sc_if->sk_cdata.sk_rx_chain[cur];
cur_rx 1763 dev/pci/if_sk.c m = cur_rx->sk_mbuf;
cur_rx 1764 dev/pci/if_sk.c cur_rx->sk_mbuf = NULL;
cur_rx 614 dev/pci/if_ste.c struct ste_chain_onefrag *cur_rx;
cur_rx 617 dev/pci/if_ste.c cur_rx = sc->ste_cdata.ste_rx_head;
cur_rx 619 dev/pci/if_ste.c cur_rx = cur_rx->ste_next;
cur_rx 621 dev/pci/if_ste.c if (cur_rx == sc->ste_cdata.ste_rx_head)
cur_rx 623 dev/pci/if_ste.c } while (cur_rx->ste_ptr->ste_status == 0);
cur_rx 626 dev/pci/if_ste.c sc->ste_cdata.ste_rx_head = cur_rx;
cur_rx 640 dev/pci/if_ste.c struct ste_chain_onefrag *cur_rx;
cur_rx 651 dev/pci/if_ste.c cur_rx = sc->ste_cdata.ste_rx_head;
cur_rx 652 dev/pci/if_ste.c sc->ste_cdata.ste_rx_head = cur_rx->ste_next;
cur_rx 662 dev/pci/if_ste.c cur_rx->ste_ptr->ste_status = 0;
cur_rx 675 dev/pci/if_ste.c cur_rx->ste_ptr->ste_status = 0;
cur_rx 680 dev/pci/if_ste.c m = cur_rx->ste_mbuf;
cur_rx 681 dev/pci/if_ste.c total_len = cur_rx->ste_ptr->ste_status & STE_RXSTAT_FRAMELEN;
cur_rx 690 dev/pci/if_ste.c if (ste_newbuf(sc, cur_rx, NULL) == ENOBUFS) {
cur_rx 692 dev/pci/if_ste.c cur_rx->ste_ptr->ste_status = 0;
cur_rx 709 dev/pci/if_ste.c cur_rx->ste_ptr->ste_status = 0;
cur_rx 1737 dev/pci/if_ti.c struct ti_rx_desc *cur_rx;
cur_rx 1743 dev/pci/if_ti.c cur_rx =
cur_rx 1745 dev/pci/if_ti.c rxidx = cur_rx->ti_idx;
cur_rx 1748 dev/pci/if_ti.c if (cur_rx->ti_flags & TI_BDFLAG_JUMBO_RING) {
cur_rx 1752 dev/pci/if_ti.c if (cur_rx->ti_flags & TI_BDFLAG_ERROR) {
cur_rx 1761 dev/pci/if_ti.c cur_rx->ti_len + ETHER_ALIGN, 0, ifp, NULL);
cur_rx 1770 dev/pci/if_ti.c } else if (cur_rx->ti_flags & TI_BDFLAG_MINI_RING) {
cur_rx 1776 dev/pci/if_ti.c if (cur_rx->ti_flags & TI_BDFLAG_ERROR) {
cur_rx 1793 dev/pci/if_ti.c if (cur_rx->ti_flags & TI_BDFLAG_ERROR) {
cur_rx 1809 dev/pci/if_ti.c m->m_pkthdr.len = m->m_len = cur_rx->ti_len;
cur_rx 1821 dev/pci/if_ti.c if ((cur_rx->ti_ip_cksum ^ 0xffff) == 0)
cur_rx 1119 dev/pci/if_tl.c struct tl_chain_onefrag *cur_rx;
cur_rx 1125 dev/pci/if_tl.c cur_rx = sc->tl_cdata.tl_rx_head;
cur_rx 1126 dev/pci/if_tl.c if (!(cur_rx->tl_ptr->tlist_cstat & TL_CSTAT_FRAMECMP))
cur_rx 1129 dev/pci/if_tl.c sc->tl_cdata.tl_rx_head = cur_rx->tl_next;
cur_rx 1130 dev/pci/if_tl.c m = cur_rx->tl_mbuf;
cur_rx 1131 dev/pci/if_tl.c total_len = cur_rx->tl_ptr->tlist_frsize;
cur_rx 1133 dev/pci/if_tl.c if (tl_newbuf(sc, cur_rx) == ENOBUFS) {
cur_rx 1135 dev/pci/if_tl.c cur_rx->tl_ptr->tlist_frsize = MCLBYTES;
cur_rx 1136 dev/pci/if_tl.c cur_rx->tl_ptr->tlist_cstat = TL_CSTAT_READY;
cur_rx 1137 dev/pci/if_tl.c cur_rx->tl_ptr->tl_frag.tlist_dcnt = MCLBYTES;
cur_rx 1142 dev/pci/if_tl.c VTOPHYS(cur_rx->tl_ptr);
cur_rx 1143 dev/pci/if_tl.c sc->tl_cdata.tl_rx_tail->tl_next = cur_rx;
cur_rx 1144 dev/pci/if_tl.c sc->tl_cdata.tl_rx_tail = cur_rx;
cur_rx 954 dev/pci/if_vge.c struct vge_rx_desc *cur_rx;
cur_rx 970 dev/pci/if_vge.c cur_rx = &sc->vge_ldata.vge_rx_list[i];
cur_rx 972 dev/pci/if_vge.c total_len = VGE_RXBYTES(cur_rx);
cur_rx 973 dev/pci/if_vge.c rxstat = letoh32(cur_rx->vge_sts);
cur_rx 974 dev/pci/if_vge.c rxctl = letoh32(cur_rx->vge_ctl);
cur_rx 925 dev/pci/if_vr.c struct vr_chain_onefrag *cur_rx;
cur_rx 941 dev/pci/if_vr.c cur_rx = sc->vr_cdata.vr_rx_head;
cur_rx 942 dev/pci/if_vr.c sc->vr_cdata.vr_rx_head = cur_rx->vr_nextdesc;
cur_rx 973 dev/pci/if_vr.c cur_rx->vr_ptr->vr_status = htole32(VR_RXSTAT);
cur_rx 974 dev/pci/if_vr.c cur_rx->vr_ptr->vr_data =
cur_rx 975 dev/pci/if_vr.c htole32(cur_rx->vr_map->dm_segs[0].ds_addr +
cur_rx 977 dev/pci/if_vr.c cur_rx->vr_ptr->vr_ctl = htole32(VR_RXCTL | VR_RXLEN);
cur_rx 985 dev/pci/if_vr.c total_len = VR_RXBYTES(letoh32(cur_rx->vr_ptr->vr_status));
cur_rx 996 dev/pci/if_vr.c bus_dmamap_sync(sc->sc_dmat, cur_rx->vr_map, 0,
cur_rx 997 dev/pci/if_vr.c cur_rx->vr_map->dm_mapsize,
cur_rx 999 dev/pci/if_vr.c m0 = m_devget(cur_rx->vr_buf + sizeof(u_int64_t) - ETHER_ALIGN,
cur_rx 1001 dev/pci/if_vr.c bus_dmamap_sync(sc->sc_dmat, cur_rx->vr_map, 0,
cur_rx 1002 dev/pci/if_vr.c cur_rx->vr_map->dm_mapsize,
cur_rx 1006 dev/pci/if_vr.c cur_rx->vr_ptr->vr_status = htole32(VR_RXSTAT);
cur_rx 1007 dev/pci/if_vr.c cur_rx->vr_ptr->vr_data =
cur_rx 1008 dev/pci/if_vr.c htole32(cur_rx->vr_map->dm_segs[0].ds_addr +
cur_rx 1010 dev/pci/if_vr.c cur_rx->vr_ptr->vr_ctl = htole32(VR_RXCTL | VR_RXLEN);
cur_rx 1005 dev/pci/if_wb.c struct wb_chain_onefrag *cur_rx;
cur_rx 1015 dev/pci/if_wb.c cur_rx = sc->wb_cdata.wb_rx_head;
cur_rx 1016 dev/pci/if_wb.c sc->wb_cdata.wb_rx_head = cur_rx->wb_nextdesc;
cur_rx 1018 dev/pci/if_wb.c m = cur_rx->wb_mbuf;
cur_rx 1021 dev/pci/if_wb.c (WB_RXBYTES(cur_rx->wb_ptr->wb_status) < WB_MIN_FRAMELEN) ||
cur_rx 1022 dev/pci/if_wb.c (WB_RXBYTES(cur_rx->wb_ptr->wb_status) > ETHER_MAX_DIX_LEN) ||
cur_rx 1026 dev/pci/if_wb.c wb_newbuf(sc, cur_rx, m);
cur_rx 1037 dev/pci/if_wb.c wb_newbuf(sc, cur_rx, m);
cur_rx 1042 dev/pci/if_wb.c total_len = WB_RXBYTES(cur_rx->wb_ptr->wb_status);
cur_rx 1055 dev/pci/if_wb.c wb_newbuf(sc, cur_rx, m);
cur_rx 1155 dev/pci/musycc.c struct dma_desc *cur_rx, *start_rx;
cur_rx 1163 dev/pci/musycc.c start_rx = cur_rx = mg->mg_dma_d[channel].rx_prod;
cur_rx 1164 dev/pci/musycc.c if (cur_rx == NULL)
cur_rx 1168 dev/pci/musycc.c ((caddr_t)cur_rx - mg->mg_listkva),
cur_rx 1172 dev/pci/musycc.c rxstat = letoh32(cur_rx->status);
cur_rx 1176 dev/pci/musycc.c m = cur_rx->mbuf;
cur_rx 1177 dev/pci/musycc.c cur_rx->mbuf = NULL;
cur_rx 1191 dev/pci/musycc.c musycc_newbuf(mg, cur_rx, m);
cur_rx 1192 dev/pci/musycc.c cur_rx = cur_rx->nextdesc;
cur_rx 1198 dev/pci/musycc.c bus_dmamap_sync(mg->mg_dmat, cur_rx->map, 0,
cur_rx 1199 dev/pci/musycc.c cur_rx->map->dm_mapsize, BUS_DMASYNC_POSTREAD);
cur_rx 1200 dev/pci/musycc.c if (musycc_newbuf(mg, cur_rx, NULL) != 0) {
cur_rx 1201 dev/pci/musycc.c cur_rx = cur_rx->nextdesc;
cur_rx 1206 dev/pci/musycc.c cur_rx = cur_rx->nextdesc;
cur_rx 1221 dev/pci/musycc.c } while (cur_rx != start_rx);
cur_rx 1223 dev/pci/musycc.c mg->mg_dma_d[channel].rx_prod = cur_rx;
cur_rx 1225 dev/pci/musycc.c if ((cur_rx == start_rx && consumed) || forcekick) {
cur_rx 1231 dev/pci/musycc.c (caddr_t)cur_rx - mg->mg_listkva);
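
Most of the descriptor-based drivers indexed above use cur_rx for the same
receive-ring walk: while the hardware has released a descriptor, read its
status word (byte-swapped with letoh32() where the ring lives in little-endian
memory), detach the filled mbuf, reload the slot through the driver's
*_newbuf() routine, and recycle the old mbuf back into the slot on an error
or allocation failure.  The sketch below is a simplified, hypothetical model
of that shared pattern, not the code of any one driver; the names rx_desc,
rx_newbuf, rx_input, RD_OWN, RD_ERROR, and RD_LEN are stand-ins for the
per-driver equivalents (dc_desc/dc_newbuf, sis_desc/sis_newbuf, and so on),
and the bus_dmamap_sync() calls are omitted.

/*
 * Hypothetical, simplified model of the RX-ring walk shared by the
 * descriptor-based drivers listed above.
 */
#include <stdint.h>

#define RX_RING   64
#define RD_OWN    0x80000000u   /* NIC still owns this descriptor */
#define RD_ERROR  0x40000000u   /* receive error of some kind */
#define RD_LEN    0x0000ffffu   /* received frame length */

struct mbuf;                    /* opaque buffer type */

struct rx_desc {
	uint32_t	 rd_status; /* status word written back by the NIC */
	struct mbuf	*rd_mbuf;   /* buffer currently loaded in the slot */
};

/*
 * Stand-ins for the per-driver helpers.  rx_newbuf() loads a fresh mbuf
 * (or recycles the one passed in), hands the slot back to the NIC by
 * setting RD_OWN, and returns 0 on success; rx_input() passes a received
 * frame up the stack; letoh32() converts little-endian to host order.
 */
int  rx_newbuf(struct rx_desc *, struct mbuf *);
void rx_input(struct mbuf *, uint32_t);
uint32_t letoh32(uint32_t);

void
rx_ring_walk(struct rx_desc *ring, int *idx)
{
	int i;

	for (i = *idx; !(letoh32(ring[i].rd_status) & RD_OWN);
	    i = (i + 1) % RX_RING) {
		struct rx_desc *cur_rx = &ring[i];
		uint32_t rxstat = letoh32(cur_rx->rd_status);
		struct mbuf *m = cur_rx->rd_mbuf;

		cur_rx->rd_mbuf = NULL;

		if (rxstat & RD_ERROR) {
			/* Bad frame: recycle the old mbuf into the slot. */
			rx_newbuf(cur_rx, m);
			continue;
		}
		if (rx_newbuf(cur_rx, NULL) != 0) {
			/* No replacement buffer: drop the frame, reuse m. */
			rx_newbuf(cur_rx, m);
			continue;
		}
		rx_input(m, rxstat & RD_LEN);
	}
	*idx = i;	/* remember where the walk stopped */
}

Recycling the old mbuf on failure, as nge_newbuf(sc, cur_rx, m) and
sis_newbuf(sc, cur_rx, m) do above, keeps the ring fully populated even
under mbuf exhaustion: the driver loses one frame but never ends up with
an empty descriptor the hardware cannot fill.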