ifd 60 altq/altq_rmclass.c #define reset_cutoff(ifd) { ifd->cutoff_ = RM_MAXDEPTH; }
ifd 175 altq/altq_rmclass.c rmc_newclass(int pri, struct rm_ifdat *ifd, u_int nsecPerByte,
ifd 224 altq/altq_rmclass.c cl->ifdat_ = ifd;
ifd 295 altq/altq_rmclass.c if ((peer = ifd->active_[pri]) != NULL) {
ifd 298 altq/altq_rmclass.c while (peer->peer_ != ifd->active_[pri])
ifd 302 altq/altq_rmclass.c ifd->active_[pri] = cl;
ifd 321 altq/altq_rmclass.c if (ifd->wrr_) {
ifd 322 altq/altq_rmclass.c ifd->num_[pri]++;
ifd 323 altq/altq_rmclass.c ifd->alloc_[pri] += cl->allotment_;
ifd 324 altq/altq_rmclass.c rmc_wrr_set_weights(ifd);
ifd 334 altq/altq_rmclass.c struct rm_ifdat *ifd;
ifd 338 altq/altq_rmclass.c ifd = cl->ifdat_;
ifd 371 altq/altq_rmclass.c if (ifd->wrr_) {
ifd 372 altq/altq_rmclass.c ifd->alloc_[cl->pri_] += cl->allotment_ - old_allotment;
ifd 373 altq/altq_rmclass.c rmc_wrr_set_weights(ifd);
ifd 389 altq/altq_rmclass.c rmc_wrr_set_weights(struct rm_ifdat *ifd)
ifd 399 altq/altq_rmclass.c if (ifd->num_[i] == 0)
ifd 400 altq/altq_rmclass.c ifd->M_[i] = 0;
ifd 402 altq/altq_rmclass.c ifd->M_[i] = ifd->alloc_[i] /
ifd 403 altq/altq_rmclass.c (ifd->num_[i] * ifd->maxpkt_);
ifd 411 altq/altq_rmclass.c if (ifd->active_[i] != NULL) {
ifd 412 altq/altq_rmclass.c clh = cl = ifd->active_[i];
ifd 415 altq/altq_rmclass.c if (ifd->M_[i] == 0)
ifd 419 altq/altq_rmclass.c ifd->M_[i];
ifd 427 altq/altq_rmclass.c rmc_get_weight(struct rm_ifdat *ifd, int pri)
ifd 430 altq/altq_rmclass.c return (ifd->M_[pri]);
ifd 525 altq/altq_rmclass.c rmc_delete_class(struct rm_ifdat *ifd, struct rm_class *cl)
ifd 572 altq/altq_rmclass.c if ((p = ifd->active_[cl->pri_]) != NULL) {
ifd 582 altq/altq_rmclass.c if (ifd->active_[cl->pri_] == cl)
ifd 583 altq/altq_rmclass.c ifd->active_[cl->pri_] = cl->peer_;
ifd 586 altq/altq_rmclass.c ifd->active_[cl->pri_] = NULL;
ifd 593 altq/altq_rmclass.c if (ifd->wrr_) {
ifd 594 altq/altq_rmclass.c ifd->alloc_[cl->pri_] -= cl->allotment_;
ifd 595 altq/altq_rmclass.c ifd->num_[cl->pri_]--;
ifd 596 altq/altq_rmclass.c rmc_wrr_set_weights(ifd);
ifd 605 altq/altq_rmclass.c rmc_depth_recompute(ifd->root_);
ifd 645 altq/altq_rmclass.c rmc_init(struct ifaltq *ifq, struct rm_ifdat *ifd, u_int nsecPerByte,
ifd 656 altq/altq_rmclass.c bzero((char *)ifd, sizeof (*ifd));
ifd 658 altq/altq_rmclass.c ifd->ifq_ = ifq;
ifd 659 altq/altq_rmclass.c ifd->restart = restart;
ifd 660 altq/altq_rmclass.c ifd->maxqueued_ = maxqueued;
ifd 661 altq/altq_rmclass.c ifd->ns_per_byte_ = nsecPerByte;
ifd 662 altq/altq_rmclass.c ifd->maxpkt_ = mtu;
ifd 663 altq/altq_rmclass.c ifd->wrr_ = (flags & RMCF_WRR) ? 1 : 0;
ifd 664 altq/altq_rmclass.c ifd->efficient_ = (flags & RMCF_EFFICIENT) ? 1 : 0;
ifd 666 altq/altq_rmclass.c ifd->maxiftime_ = mtu * nsecPerByte / 1000 * 16;
ifd 668 altq/altq_rmclass.c ifd->maxiftime_ /= 4;
ifd 671 altq/altq_rmclass.c reset_cutoff(ifd);
ifd 672 altq/altq_rmclass.c CBQTRACE(rmc_init, 'INIT', ifd->cutoff_);
ifd 678 altq/altq_rmclass.c ifd->alloc_[i] = 0;
ifd 679 altq/altq_rmclass.c ifd->M_[i] = 0;
ifd 680 altq/altq_rmclass.c ifd->num_[i] = 0;
ifd 681 altq/altq_rmclass.c ifd->na_[i] = 0;
ifd 682 altq/altq_rmclass.c ifd->active_[i] = NULL;
ifd 688 altq/altq_rmclass.c ifd->qi_ = 0;
ifd 689 altq/altq_rmclass.c ifd->qo_ = 0;
ifd 691 altq/altq_rmclass.c ifd->class_[i] = NULL;
ifd 692 altq/altq_rmclass.c ifd->curlen_[i] = 0;
ifd 693 altq/altq_rmclass.c ifd->borrowed_[i] = NULL;
ifd 699 altq/altq_rmclass.c if ((ifd->root_ = rmc_newclass(0, ifd,
ifd 707 altq/altq_rmclass.c ifd->root_->depth_ = 0;
ifd 725 altq/altq_rmclass.c struct rm_ifdat *ifd = cl->ifdat_;
ifd 730 altq/altq_rmclass.c if (ifd->cutoff_ > 0) {
ifd 732 altq/altq_rmclass.c if (ifd->cutoff_ > cl->depth_)
ifd 733 altq/altq_rmclass.c ifd->cutoff_ = cl->depth_;
ifd 746 altq/altq_rmclass.c borrow->depth_ < ifd->cutoff_) {
ifd 748 altq/altq_rmclass.c ifd->cutoff_ = borrow->depth_;
ifd 749 altq/altq_rmclass.c CBQTRACE(rmc_queue_packet, 'ffob', ifd->cutoff_);
ifd 756 altq/altq_rmclass.c else if ((ifd->cutoff_ > 1) && cl->borrow_) {
ifd 758 altq/altq_rmclass.c ifd->cutoff_ = cl->borrow_->depth_;
ifd 772 altq/altq_rmclass.c ifd->na_[cpri]++;
ifd 790 altq/altq_rmclass.c rmc_tl_satisfied(struct rm_ifdat *ifd, struct timeval *now)
ifd 796 altq/altq_rmclass.c if ((bp = ifd->active_[i]) != NULL) {
ifd 800 altq/altq_rmclass.c ifd->cutoff_ = p->depth_;
ifd 808 altq/altq_rmclass.c reset_cutoff(ifd);
ifd 853 altq/altq_rmclass.c struct rm_ifdat *ifd = cl->ifdat_;
ifd 855 altq/altq_rmclass.c ifd->borrowed_[ifd->qi_] = NULL;
ifd 876 altq/altq_rmclass.c (cl->depth_ > ifd->cutoff_)) {
ifd 893 altq/altq_rmclass.c CBQTRACE(rmc_under_limit, 'ffou', ifd->cutoff_);
ifd 909 altq/altq_rmclass.c ifd->borrowed_[ifd->qi_] = cl;
ifd 931 altq/altq_rmclass.c _rmc_wrr_dequeue_next(struct rm_ifdat *ifd, int op)
ifd 945 altq/altq_rmclass.c if (op == ALTDQ_REMOVE && ifd->pollcache_) {
ifd 946 altq/altq_rmclass.c cl = ifd->pollcache_;
ifd 948 altq/altq_rmclass.c if (ifd->efficient_) {
ifd 954 altq/altq_rmclass.c ifd->pollcache_ = NULL;
ifd 959 altq/altq_rmclass.c ifd->pollcache_ = NULL;
ifd 960 altq/altq_rmclass.c ifd->borrowed_[ifd->qi_] = NULL;
ifd 966 altq/altq_rmclass.c if (ifd->na_[cpri] == 0)
ifd 979 altq/altq_rmclass.c cl = ifd->active_[cpri];
ifd 993 altq/altq_rmclass.c ifd->borrowed_[ifd->qi_] = NULL;
ifd 1002 altq/altq_rmclass.c } while (cl != ifd->active_[cpri]);
ifd 1017 altq/altq_rmclass.c if (first != NULL && ifd->cutoff_ < ifd->root_->depth_) {
ifd 1018 altq/altq_rmclass.c ifd->cutoff_++;
ifd 1019 altq/altq_rmclass.c CBQTRACE(_rmc_wrr_dequeue_next, 'ojda', ifd->cutoff_);
ifd 1028 altq/altq_rmclass.c reset_cutoff(ifd);
ifd 1029 altq/altq_rmclass.c CBQTRACE(_rmc_wrr_dequeue_next, 'otsr', ifd->cutoff_);
ifd 1031 altq/altq_rmclass.c if (!ifd->efficient_ || first == NULL)
ifd 1042 altq/altq_rmclass.c ifd->borrowed_[ifd->qi_] = cl->borrow_;
ifd 1043 altq/altq_rmclass.c ifd->cutoff_ = cl->borrow_->depth_;
ifd 1054 altq/altq_rmclass.c ifd->na_[cpri]--;
ifd 1063 altq/altq_rmclass.c ifd->active_[cl->pri_] = cl->peer_;
ifd 1065 altq/altq_rmclass.c ifd->active_[cl->pri_] = cl;
ifd 1067 altq/altq_rmclass.c ifd->class_[ifd->qi_] = cl;
ifd 1068 altq/altq_rmclass.c ifd->curlen_[ifd->qi_] = m_pktlen(m);
ifd 1069 altq/altq_rmclass.c ifd->now_[ifd->qi_] = now;
ifd 1070 altq/altq_rmclass.c ifd->qi_ = (ifd->qi_ + 1) % ifd->maxqueued_;
ifd 1071 altq/altq_rmclass.c ifd->queued_++;
ifd 1075 altq/altq_rmclass.c ifd->pollcache_ = cl;
ifd 1087 altq/altq_rmclass.c _rmc_prr_dequeue_next(struct rm_ifdat *ifd, int op)
ifd 1100 altq/altq_rmclass.c if (op == ALTDQ_REMOVE && ifd->pollcache_) {
ifd 1101 altq/altq_rmclass.c cl = ifd->pollcache_;
ifd 1103 altq/altq_rmclass.c ifd->pollcache_ = NULL;
ifd 1107 altq/altq_rmclass.c ifd->pollcache_ = NULL;
ifd 1108 altq/altq_rmclass.c ifd->borrowed_[ifd->qi_] = NULL;
ifd 1114 altq/altq_rmclass.c if (ifd->na_[cpri] == 0)
ifd 1116 altq/altq_rmclass.c cl = ifd->active_[cpri];
ifd 1127 altq/altq_rmclass.c } while (cl != ifd->active_[cpri]);
ifd 1135 altq/altq_rmclass.c if (first != NULL && ifd->cutoff_ < ifd->root_->depth_) {
ifd 1136 altq/altq_rmclass.c ifd->cutoff_++;
ifd 1145 altq/altq_rmclass.c reset_cutoff(ifd);
ifd 1146 altq/altq_rmclass.c if (!ifd->efficient_ || first == NULL)
ifd 1157 altq/altq_rmclass.c ifd->borrowed_[ifd->qi_] = cl->borrow_;
ifd 1158 altq/altq_rmclass.c ifd->cutoff_ = cl->borrow_->depth_;
ifd 1169 altq/altq_rmclass.c ifd->na_[cpri]--;
ifd 1171 altq/altq_rmclass.c ifd->active_[cpri] = cl->peer_;
ifd 1173 altq/altq_rmclass.c ifd->class_[ifd->qi_] = cl;
ifd 1174 altq/altq_rmclass.c ifd->curlen_[ifd->qi_] = m_pktlen(m);
ifd 1175 altq/altq_rmclass.c ifd->now_[ifd->qi_] = now;
ifd 1176 altq/altq_rmclass.c ifd->qi_ = (ifd->qi_ + 1) % ifd->maxqueued_;
ifd 1177 altq/altq_rmclass.c ifd->queued_++;
ifd 1181 altq/altq_rmclass.c ifd->pollcache_ = cl;
ifd 1201 altq/altq_rmclass.c rmc_dequeue_next(struct rm_ifdat *ifd, int mode)
ifd 1203 altq/altq_rmclass.c if (ifd->queued_ >= ifd->maxqueued_)
ifd 1205 altq/altq_rmclass.c else if (ifd->wrr_)
ifd 1206 altq/altq_rmclass.c return (_rmc_wrr_dequeue_next(ifd, mode));
ifd 1208 altq/altq_rmclass.c return (_rmc_prr_dequeue_next(ifd, mode));
ifd 1225 altq/altq_rmclass.c rmc_update_class_util(struct rm_ifdat *ifd)
ifd 1236 altq/altq_rmclass.c if ((cl = ifd->class_[ifd->qo_]) == NULL)
ifd 1239 altq/altq_rmclass.c pktlen = ifd->curlen_[ifd->qo_];
ifd 1240 altq/altq_rmclass.c borrowed = ifd->borrowed_[ifd->qo_];
ifd 1255 altq/altq_rmclass.c nowp = &ifd->now_[ifd->qo_];
ifd 1258 altq/altq_rmclass.c pkt_time = ifd->curlen_[ifd->qo_] * ifd->ns_per_byte_;
ifd 1261 altq/altq_rmclass.c pkt_time = ifd->curlen_[ifd->qo_] * ifd->ns_per_byte_ / 1000;
ifd 1264 altq/altq_rmclass.c if (TV_LT(nowp, &ifd->ifnow_)) {
ifd 1273 altq/altq_rmclass.c TV_DELTA(&ifd->ifnow_, nowp, iftime);
ifd 1274 altq/altq_rmclass.c if (iftime+pkt_time < ifd->maxiftime_) {
ifd 1275 altq/altq_rmclass.c TV_ADD_DELTA(&ifd->ifnow_, pkt_time, &ifd->ifnow_);
ifd 1277 altq/altq_rmclass.c TV_ADD_DELTA(nowp, ifd->maxiftime_, &ifd->ifnow_);
ifd 1280 altq/altq_rmclass.c TV_ADD_DELTA(nowp, pkt_time, &ifd->ifnow_);
ifd 1283 altq/altq_rmclass.c if (TV_LT(nowp, &ifd->ifnow_)) {
ifd 1284 altq/altq_rmclass.c TV_ADD_DELTA(&ifd->ifnow_, pkt_time, &ifd->ifnow_);
ifd 1286 altq/altq_rmclass.c TV_ADD_DELTA(nowp, pkt_time, &ifd->ifnow_);
ifd 1291 altq/altq_rmclass.c TV_DELTA(&ifd->ifnow_, &cl->last_, idle);
ifd 1344 altq/altq_rmclass.c cl->last_ = ifd->ifnow_;
ifd 1360 altq/altq_rmclass.c cl = ifd->class_[ifd->qo_];
ifd 1361 altq/altq_rmclass.c if (borrowed && (ifd->cutoff_ >= borrowed->depth_)) {
ifd 1364 altq/altq_rmclass.c rmc_tl_satisfied(ifd, nowp);
ifd 1365 altq/altq_rmclass.c CBQTRACE(rmc_update_class_util, 'broe', ifd->cutoff_);
ifd 1367 altq/altq_rmclass.c ifd->cutoff_ = borrowed->depth_;
ifd 1372 altq/altq_rmclass.c reset_cutoff(ifd);
ifd 1374 altq/altq_rmclass.c rmc_tl_satisfied(ifd, &now);
ifd 1376 altq/altq_rmclass.c CBQTRACE(rmc_update_class_util, 'broe', ifd->cutoff_);
ifd 1378 altq/altq_rmclass.c ifd->cutoff_ = borrowed->depth_;
ifd 1387 altq/altq_rmclass.c ifd->borrowed_[ifd->qo_] = NULL;
ifd 1388 altq/altq_rmclass.c ifd->class_[ifd->qo_] = NULL;
ifd 1389 altq/altq_rmclass.c ifd->qo_ = (ifd->qo_ + 1) % ifd->maxqueued_;
ifd 1390 altq/altq_rmclass.c ifd->queued_--;
ifd 1406 altq/altq_rmclass.c struct rm_ifdat *ifd = cl->ifdat_;
ifd 1411 altq/altq_rmclass.c ifd->na_[cl->pri_]--;
ifd 1416 altq/altq_rmclass.c struct rm_ifdat *ifd = cl->ifdat_;
ifd 1421 altq/altq_rmclass.c ifd->na_[cl->pri_]--;
ifd 1518 altq/altq_rmclass.c struct rm_ifdat *ifd = cl->ifdat_;
ifd 1526 altq/altq_rmclass.c if (ifd->queued_ < ifd->maxqueued_ && ifd->restart != NULL) {
ifd 1528 altq/altq_rmclass.c (ifd->restart)(ifd->ifq_);
ifd 263 dev/usb/usbdi_util.c usb_interface_descriptor_t *ifd = usbd_get_interface_descriptor(iface);
ifd 268 dev/usb/usbdi_util.c if (ifd == NULL)
ifd 274 dev/usb/usbdi_util.c USETW(req.wIndex, ifd->bInterfaceNumber);
ifd 283 dev/usb/usbdi_util.c usb_interface_descriptor_t *ifd = usbd_get_interface_descriptor(iface);
ifd 288 dev/usb/usbdi_util.c if (ifd == NULL)
ifd 294 dev/usb/usbdi_util.c USETW(req.wIndex, ifd->bInterfaceNumber);
ifd 303 dev/usb/usbdi_util.c usb_interface_descriptor_t *ifd = usbd_get_interface_descriptor(iface);
ifd 308 dev/usb/usbdi_util.c if (ifd == NULL)
ifd 314 dev/usb/usbdi_util.c USETW(req.wIndex, ifd->bInterfaceNumber);
ifd 322 dev/usb/usbdi_util.c usb_interface_descriptor_t *ifd = usbd_get_interface_descriptor(iface);
ifd 327 dev/usb/usbdi_util.c if (ifd == NULL)
ifd 333 dev/usb/usbdi_util.c USETW(req.wIndex, ifd->bInterfaceNumber);
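
The rmc_wrr_set_weights() entries above (altq_rmclass.c lines 389-419) compute a per-priority weight as the priority's total allotment divided by the number of active classes times the maximum packet size. The following minimal stand-alone sketch shows that calculation; the names wrr_state, alloc, num, maxpkt and weight are hypothetical stand-ins for struct rm_ifdat and its fields, not the kernel definitions.

/*
 * Minimal sketch of the weight computation referenced above
 * (cf. the entries at altq_rmclass.c:399-403).  Illustration only,
 * with stand-in names; not the kernel code.
 */
#include <stdio.h>

#define MAXPRIO	8			/* assumed number of priorities */

struct wrr_state {
	int alloc[MAXPRIO];		/* sum of class allotments per priority */
	int num[MAXPRIO];		/* number of classes at each priority */
	int maxpkt;			/* maximum packet size, in bytes */
	int weight[MAXPRIO];		/* computed weight, cf. ifd->M_[i] */
};

static void
wrr_set_weights(struct wrr_state *w)
{
	int i;

	for (i = 0; i < MAXPRIO; i++) {
		if (w->num[i] == 0)
			w->weight[i] = 0;	/* no classes at this priority */
		else
			w->weight[i] = w->alloc[i] /
			    (w->num[i] * w->maxpkt);	/* per-class allotment, in packets */
	}
}

int
main(void)
{
	struct wrr_state w = { .maxpkt = 1500 };

	w.alloc[0] = 6000;	/* e.g. two classes with 3000 bytes of allotment each */
	w.num[0] = 2;
	wrr_set_weights(&w);
	printf("priority 0 weight: %d\n", w.weight[0]);	/* prints 2 */
	return (0);
}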
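
The usbdi_util.c entries all follow one pattern: look up the interface descriptor for an iface and address the control request to that interface by storing bInterfaceNumber into the little-endian wIndex field. Below is a self-contained user-space sketch of that pattern; the struct definitions and the USETW macro are simplified stand-ins, not the real usbdi types.

/*
 * Minimal sketch of the wIndex pattern in the usbdi_util.c entries above.
 * The types below are hypothetical stand-ins for usb_interface_descriptor_t
 * and usb_device_request_t; illustration only.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint8_t uWord[2];	/* 16-bit field stored in little-endian order */
#define USETW(w, v)	((w)[0] = (uint8_t)(v), (w)[1] = (uint8_t)((v) >> 8))

struct interface_descriptor {	/* stand-in for usb_interface_descriptor_t */
	uint8_t bInterfaceNumber;
};

struct device_request {		/* stand-in for usb_device_request_t */
	uint8_t bmRequestType;
	uint8_t bRequest;
	uWord wValue;
	uWord wIndex;
	uWord wLength;
};

int
main(void)
{
	struct interface_descriptor ifd = { .bInterfaceNumber = 1 };
	struct device_request req = { 0 };

	USETW(req.wIndex, ifd.bInterfaceNumber);	/* target the interface */
	printf("wIndex bytes: %02x %02x\n", req.wIndex[0], req.wIndex[1]);
	return (0);
}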