crd                87 arch/i386/i386/via.c #define	VIAC3_SID(crd,ses)		(((crd) << 28) | ((ses) & 0x0fffffff))
crd               342 arch/i386/i386/via.c viac3_crypto_swauth(struct cryptop *crp, struct cryptodesc *crd,
crd               352 arch/i386/i386/via.c 	return (swcr_authcompute(crp, crd, sw, buf, type));
crd               356 arch/i386/i386/via.c viac3_crypto_encdec(struct cryptop *crp, struct cryptodesc *crd,
crd               362 arch/i386/i386/via.c 	if ((crd->crd_len % 16) != 0) {
crd               367 arch/i386/i386/via.c 	sc->op_buf = malloc(crd->crd_len, M_DEVBUF, M_NOWAIT);
crd               373 arch/i386/i386/via.c 	if (crd->crd_flags & CRD_F_ENCRYPT) {
crd               376 arch/i386/i386/via.c 		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
crd               377 arch/i386/i386/via.c 			bcopy(crd->crd_iv, sc->op_iv, 16);
crd               381 arch/i386/i386/via.c 		if ((crd->crd_flags & CRD_F_IV_PRESENT) == 0) {
crd               384 arch/i386/i386/via.c 				    crd->crd_inject, 16, sc->op_iv);
crd               387 arch/i386/i386/via.c 				    crd->crd_inject, 16, sc->op_iv);
crd               390 arch/i386/i386/via.c 				    crp->crp_buf + crd->crd_inject, 16);
crd               395 arch/i386/i386/via.c 		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
crd               396 arch/i386/i386/via.c 			bcopy(crd->crd_iv, sc->op_iv, 16);
crd               400 arch/i386/i386/via.c 				    crd->crd_inject, 16, sc->op_iv);
crd               403 arch/i386/i386/via.c 				    crd->crd_inject, 16, sc->op_iv);
crd               405 arch/i386/i386/via.c 				bcopy(crp->crp_buf + crd->crd_inject,
crd               412 arch/i386/i386/via.c 		    crd->crd_skip, crd->crd_len, sc->op_buf);
crd               415 arch/i386/i386/via.c 		    crd->crd_skip, crd->crd_len, sc->op_buf);
crd               417 arch/i386/i386/via.c 		bcopy(crp->crp_buf + crd->crd_skip, sc->op_buf, crd->crd_len);
crd               421 arch/i386/i386/via.c 	    crd->crd_len / 16, sc->op_iv);
crd               425 arch/i386/i386/via.c 		    crd->crd_skip, crd->crd_len, sc->op_buf);
crd               428 arch/i386/i386/via.c 		    crd->crd_skip, crd->crd_len, sc->op_buf);
crd               430 arch/i386/i386/via.c 		bcopy(sc->op_buf, crp->crp_buf + crd->crd_skip,
crd               431 arch/i386/i386/via.c 		    crd->crd_len);
crd               434 arch/i386/i386/via.c 	if (crd->crd_flags & CRD_F_ENCRYPT) {
crd               437 arch/i386/i386/via.c 			    crd->crd_skip + crd->crd_len - 16, 16,
crd               441 arch/i386/i386/via.c 			    crd->crd_skip + crd->crd_len - 16, 16,
crd               444 arch/i386/i386/via.c 			bcopy(crp->crp_buf + crd->crd_skip +
crd               445 arch/i386/i386/via.c 			    crd->crd_len - 16, ses->ses_iv, 16);
crd               449 arch/i386/i386/via.c 		bzero(sc->op_buf, crd->crd_len);
crd               462 arch/i386/i386/via.c 	struct cryptodesc *crd;
crd               477 arch/i386/i386/via.c 	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd               478 arch/i386/i386/via.c 		switch (crd->crd_alg) {
crd               480 arch/i386/i386/via.c 			if ((err = viac3_crypto_encdec(crp, crd, ses, sc,
crd               491 arch/i386/i386/via.c 			if ((err = viac3_crypto_swauth(crp, crd, ses->swd,
crd               191 arch/i386/pci/glxsb.c #define	GLXSB_SID(crd,ses)		(((crd) << 28) | ((ses) & 0x0fffffff))
crd               569 arch/i386/pci/glxsb.c glxsb_crypto_swauth(struct cryptop *crp, struct cryptodesc *crd,
crd               579 arch/i386/pci/glxsb.c 	return (swcr_authcompute(crp, crd, sw, buf, type));
crd               583 arch/i386/pci/glxsb.c glxsb_crypto_encdec(struct cryptop *crp, struct cryptodesc *crd,
crd               594 arch/i386/pci/glxsb.c 	if (crd == NULL || (crd->crd_len % SB_AES_BLOCK_SIZE) != 0) {
crd               600 arch/i386/pci/glxsb.c 	xlen = crd->crd_len > GLXSB_MAX_AES_LEN ?
crd               601 arch/i386/pci/glxsb.c 	    GLXSB_MAX_AES_LEN : crd->crd_len;
crd               613 arch/i386/pci/glxsb.c 	if (crd->crd_flags & CRD_F_ENCRYPT) {
crd               615 arch/i386/pci/glxsb.c 		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
crd               616 arch/i386/pci/glxsb.c 			bcopy(crd->crd_iv, op_iv, sizeof(op_iv));
crd               620 arch/i386/pci/glxsb.c 		if ((crd->crd_flags & CRD_F_IV_PRESENT) == 0) {
crd               623 arch/i386/pci/glxsb.c 				    crd->crd_inject, sizeof(op_iv), op_iv);
crd               626 arch/i386/pci/glxsb.c 				    crd->crd_inject, sizeof(op_iv), op_iv);
crd               629 arch/i386/pci/glxsb.c 				    crp->crp_buf + crd->crd_inject, sizeof(op_iv));
crd               633 arch/i386/pci/glxsb.c 		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
crd               634 arch/i386/pci/glxsb.c 			bcopy(crd->crd_iv, op_iv, sizeof(op_iv));
crd               638 arch/i386/pci/glxsb.c 				    crd->crd_inject, sizeof(op_iv), op_iv);
crd               641 arch/i386/pci/glxsb.c 				    crd->crd_inject, sizeof(op_iv), op_iv);
crd               643 arch/i386/pci/glxsb.c 				bcopy(crp->crp_buf + crd->crd_inject,
crd               649 arch/i386/pci/glxsb.c 	tlen = crd->crd_len;
crd               658 arch/i386/pci/glxsb.c 			    crd->crd_skip + offset, len, op_src);
crd               661 arch/i386/pci/glxsb.c 			    crd->crd_skip + offset, len, op_src);
crd               663 arch/i386/pci/glxsb.c 			bcopy(crp->crp_buf + crd->crd_skip + offset, op_src,
crd               675 arch/i386/pci/glxsb.c 			    crd->crd_skip + offset, len, op_dst);
crd               678 arch/i386/pci/glxsb.c 			    crd->crd_skip + offset, len, op_dst);
crd               680 arch/i386/pci/glxsb.c 			bcopy(op_dst, crp->crp_buf + crd->crd_skip + offset,
crd               698 arch/i386/pci/glxsb.c 		if (crd->crd_flags & CRD_F_ENCRYPT) {
crd               721 arch/i386/pci/glxsb.c 	struct cryptodesc *crd;
crd               731 arch/i386/pci/glxsb.c 	crd = crp->crp_desc;
crd               732 arch/i386/pci/glxsb.c 	if (crd == NULL) {
crd               744 arch/i386/pci/glxsb.c 	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd               745 arch/i386/pci/glxsb.c 		switch (crd->crd_alg) {
crd               747 arch/i386/pci/glxsb.c 			if ((err = glxsb_crypto_encdec(crp, crd, ses, sc,
crd               758 arch/i386/pci/glxsb.c 			if ((err = glxsb_crypto_swauth(crp, crd, ses->ses_swd,
crd               506 crypto/crypto.c 	struct cryptodesc *crd;
crd               554 crypto/crypto.c 	for (crd = crp->crp_desc; crd->crd_next; crd = crd->crd_next)
crd               555 crypto/crypto.c 		crd->CRD_INI.cri_next = &(crd->crd_next->CRD_INI);
crd               571 crypto/crypto.c 	struct cryptodesc *crd;
crd               579 crypto/crypto.c 	while ((crd = crp->crp_desc) != NULL) {
crd               580 crypto/crypto.c 		crp->crp_desc = crd->crd_next;
crd               581 crypto/crypto.c 		pool_put(&cryptodesc_pool, crd);
crd               594 crypto/crypto.c 	struct cryptodesc *crd;
crd               616 crypto/crypto.c 		crd = pool_get(&cryptodesc_pool, PR_NOWAIT);
crd               617 crypto/crypto.c 		if (crd == NULL) {
crd               623 crypto/crypto.c 		bzero(crd, sizeof(struct cryptodesc));
crd               624 crypto/crypto.c 		crd->crd_next = crp->crp_desc;
crd               625 crypto/crypto.c 		crp->crp_desc = crd;
crd                79 crypto/cryptosoft.c swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
crd                94 crypto/cryptosoft.c 	if (crd->crd_len % blks)
crd               103 crypto/cryptosoft.c 	if (crd->crd_flags & CRD_F_ENCRYPT) {
crd               105 crypto/cryptosoft.c 		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
crd               106 crypto/cryptosoft.c 			bcopy(crd->crd_iv, iv, ivlen);
crd               111 crypto/cryptosoft.c 		if (!(crd->crd_flags & CRD_F_IV_PRESENT)) {
crd               112 crypto/cryptosoft.c 			COPYBACK(outtype, buf, crd->crd_inject, ivlen, iv);
crd               117 crypto/cryptosoft.c 		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
crd               118 crypto/cryptosoft.c 			bcopy(crd->crd_iv, iv, ivlen);
crd               121 crypto/cryptosoft.c 			COPYDATA(outtype, buf, crd->crd_inject, ivlen, iv);
crd               132 crypto/cryptosoft.c 		m = m_getptr(m, crd->crd_skip, &k);
crd               136 crypto/cryptosoft.c 		i = crd->crd_len;
crd               149 crypto/cryptosoft.c 				} else if (crd->crd_flags & CRD_F_ENCRYPT) {
crd               221 crypto/cryptosoft.c 				} else if (crd->crd_flags & CRD_F_ENCRYPT) {
crd               257 crypto/cryptosoft.c 		count = crd->crd_skip;
crd               262 crypto/cryptosoft.c 		i = crd->crd_len;
crd               276 crypto/cryptosoft.c 				} else if (crd->crd_flags & CRD_F_ENCRYPT) {
crd               339 crypto/cryptosoft.c 				} else if (crd->crd_flags & CRD_F_ENCRYPT) {
crd               383 crypto/cryptosoft.c swcr_authcompute(struct cryptop *crp, struct cryptodesc *crd,
crd               399 crypto/cryptosoft.c 		err = m_apply((struct mbuf *) buf, crd->crd_skip, crd->crd_len,
crd               403 crypto/cryptosoft.c 		err = cuio_apply((struct uio *) buf, crd->crd_skip,
crd               404 crypto/cryptosoft.c 		    crd->crd_len,
crd               444 crypto/cryptosoft.c 		COPYBACK(outtype, buf, crd->crd_inject, axf->authsize, aalg);
crd               455 crypto/cryptosoft.c swcr_compdec(struct cryptodesc *crd, struct swcr_data *sw,
crd               470 crypto/cryptosoft.c 	MALLOC(data, u_int8_t *, crd->crd_len, M_CRYPTO_DATA,  M_NOWAIT);
crd               473 crypto/cryptosoft.c 	COPYDATA(outtype, buf, crd->crd_skip, crd->crd_len, data);
crd               475 crypto/cryptosoft.c 	if (crd->crd_flags & CRD_F_COMP)
crd               476 crypto/cryptosoft.c 		result = cxf->compress(data, crd->crd_len, &out);
crd               478 crypto/cryptosoft.c 		result = cxf->decompress(data, crd->crd_len, &out);
crd               489 crypto/cryptosoft.c 	if (crd->crd_flags & CRD_F_COMP) {
crd               490 crypto/cryptosoft.c 		if (result > crd->crd_len) {
crd               497 crypto/cryptosoft.c 	COPYBACK(outtype, buf, crd->crd_skip, result, out);
crd               498 crypto/cryptosoft.c 	if (result < crd->crd_len) {
crd               499 crypto/cryptosoft.c 		adj = result - crd->crd_len;
crd               501 crypto/cryptosoft.c 			adj = result - crd->crd_len;
crd               507 crypto/cryptosoft.c 			adj = crd->crd_len - result;
crd               834 crypto/cryptosoft.c 	struct cryptodesc *crd;
crd               860 crypto/cryptosoft.c 	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd               872 crypto/cryptosoft.c 		    sw && sw->sw_alg != crd->crd_alg;
crd               892 crypto/cryptosoft.c 			if ((crp->crp_etype = swcr_encdec(crd, sw,
crd               906 crypto/cryptosoft.c 			if ((crp->crp_etype = swcr_authcompute(crp, crd, sw,
crd               912 crypto/cryptosoft.c 			if ((crp->crp_etype = swcr_compdec(crd, sw,
crd              2257 dev/pci/hifn7751.c 	struct cryptodesc *crd;
crd              2323 dev/pci/hifn7751.c 		for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd              2324 dev/pci/hifn7751.c 			if (crd->crd_alg != CRYPTO_DES_CBC &&
crd              2325 dev/pci/hifn7751.c 			    crd->crd_alg != CRYPTO_3DES_CBC &&
crd              2326 dev/pci/hifn7751.c 			    crd->crd_alg != CRYPTO_AES_CBC)
crd              2328 dev/pci/hifn7751.c 			ivlen = ((crd->crd_alg == CRYPTO_AES_CBC) ?
crd              2332 dev/pci/hifn7751.c 				    crd->crd_skip + crd->crd_len - ivlen, ivlen,
crd              2336 dev/pci/hifn7751.c 				    crd->crd_skip + crd->crd_len - ivlen, ivlen,
crd              2351 dev/pci/hifn7751.c 		for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd              2354 dev/pci/hifn7751.c 			if (crd->crd_alg == CRYPTO_MD5)
crd              2356 dev/pci/hifn7751.c 			else if (crd->crd_alg == CRYPTO_SHA1)
crd              2358 dev/pci/hifn7751.c 			else if (crd->crd_alg == CRYPTO_MD5_HMAC ||
crd              2359 dev/pci/hifn7751.c 			    crd->crd_alg == CRYPTO_SHA1_HMAC)
crd              2366 dev/pci/hifn7751.c 				    crd->crd_inject, len, macbuf);
crd              2387 dev/pci/hifn7751.c 	struct cryptodesc *crd = crp->crp_desc;
crd              2390 dev/pci/hifn7751.c 	cmd->compcrd = crd;
crd              2402 dev/pci/hifn7751.c 	if ((crd->crd_flags & CRD_F_COMP) == 0)
crd              2404 dev/pci/hifn7751.c 	if (crd->crd_alg == CRYPTO_LZS_COMP)
crd              2434 dev/pci/hifn7751.c 		if ((crd->crd_flags & CRD_F_COMP) == 0)
crd               311 dev/pci/hifn7751var.h #define HIFN_SID(crd,ses)	(((crd) << 28) | ((ses) & 0x7ff))
crd              1141 dev/pci/ises.c 	struct cryptodesc *maccrd, *enccrd, *crd;
crd              1206 dev/pci/ises.c 	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd              1207 dev/pci/ises.c 		switch (crd->crd_alg) {
crd              1214 dev/pci/ises.c 			maccrd = crd;
crd              1219 dev/pci/ises.c 			    (maccrd && (crd->crd_flags & CRD_F_ENCRYPT)))
crd              1221 dev/pci/ises.c 			enccrd = crd;
crd              1300 dev/pci/ises.c 		switch (crd->crd_alg) {
crd              1465 dev/pci/ises.c 	struct cryptodesc *crd;
crd              1478 dev/pci/ises.c 		for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd              1479 dev/pci/ises.c 			if (crd->crd_alg != CRYPTO_DES_CBC &&
crd              1480 dev/pci/ises.c 			    crd->crd_alg != CRYPTO_3DES_CBC)
crd              1484 dev/pci/ises.c 				    crd->crd_skip + crd->crd_len - 8, 8, sccr);
crd              1487 dev/pci/ises.c 				    crd->crd_skip + crd->crd_len - 8, 8, sccr);
crd              1491 dev/pci/ises.c 	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd              1492 dev/pci/ises.c 		if (crd->crd_alg != CRYPTO_MD5_HMAC &&
crd              1493 dev/pci/ises.c 		    crd->crd_alg != CRYPTO_SHA1_HMAC &&
crd              1494 dev/pci/ises.c 		    crd->crd_alg != CRYPTO_RIPEMD160_HMAC)
crd              1498 dev/pci/ises.c 			   crd->crd_inject, 12, (u_int8_t *)q->q_macbuf);
crd               348 dev/pci/isesreg.h #define	ISES_SID(crd,ses)	(((crd) << 28) | ((ses) & 0x0fffffff))
crd               878 dev/pci/noct.c 	struct cryptodesc *crd;
crd               893 dev/pci/noct.c 			crd = crp->crp_desc;
crd               894 dev/pci/noct.c 			switch (crd->crd_alg) {
crd               913 dev/pci/noct.c 					    crd->crd_inject, len,
crd               919 dev/pci/noct.c 			if (crd->crd_alg == CRYPTO_DES_CBC ||
crd               920 dev/pci/noct.c 			    crd->crd_alg == CRYPTO_3DES_CBC) {
crd               923 dev/pci/noct.c 					    crd->crd_skip, crd->crd_len,
crd               927 dev/pci/noct.c 					    crd->crd_skip, crd->crd_len,
crd               933 dev/pci/noct.c 			bus_dmamem_unmap(sc->sc_dmat, q->q_buf, crd->crd_len);
crd               962 dev/pci/noct.c 	struct cryptodesc *crd;
crd               966 dev/pci/noct.c 	crd = crp->crp_desc;
crd               969 dev/pci/noct.c 	if (crd->crd_next != NULL) {
crd               974 dev/pci/noct.c 	switch (crd->crd_alg) {
crd               977 dev/pci/noct.c 		noct_ea_start_hash(sc, q, crp, crd);
crd               981 dev/pci/noct.c 		noct_ea_start_des(sc, q, crp, crd);
crd               999 dev/pci/noct.c noct_ea_start_hash(sc, q, crp, crd)
crd              1003 dev/pci/noct.c 	struct cryptodesc *crd;
crd              1009 dev/pci/noct.c 	if (crd->crd_len > 0x4800) {
crd              1014 dev/pci/noct.c 	if ((err = bus_dmamem_alloc(sc->sc_dmat, crd->crd_len, PAGE_SIZE, 0,
crd              1019 dev/pci/noct.c 	    crd->crd_len, (caddr_t *)&q->q_buf, BUS_DMA_WAITOK)) != 0)
crd              1022 dev/pci/noct.c 	if ((err = bus_dmamap_create(sc->sc_dmat, crd->crd_len, 1,
crd              1023 dev/pci/noct.c 	    crd->crd_len, 0, BUS_DMA_WAITOK, &q->q_dmamap)) != 0)
crd              1027 dev/pci/noct.c 	    rseg, crd->crd_len, BUS_DMA_WAITOK)) != 0)
crd              1032 dev/pci/noct.c 		    crd->crd_skip, crd->crd_len, q->q_buf);
crd              1035 dev/pci/noct.c 		    crd->crd_skip, crd->crd_len, q->q_buf);
crd              1055 dev/pci/noct.c 	switch (crd->crd_alg) {
crd              1065 dev/pci/noct.c 	sc->sc_eacmd[wp].buf[1] |= htole32(crd->crd_len);
crd              1096 dev/pci/noct.c 	bus_dmamem_unmap(sc->sc_dmat, q->q_buf, crd->crd_len);
crd              1108 dev/pci/noct.c noct_ea_start_des(sc, q, crp, crd)
crd              1112 dev/pci/noct.c 	struct cryptodesc *crd;
crd              1120 dev/pci/noct.c 	if (crd->crd_len > 0x4800) {
crd              1125 dev/pci/noct.c 	if ((crd->crd_len & 3) != 0) {
crd              1130 dev/pci/noct.c 	if (crd->crd_alg == CRYPTO_DES_CBC) {
crd              1132 dev/pci/noct.c 			key[i] = key[i + 8] = key[i + 16] = crd->crd_key[i];
crd              1135 dev/pci/noct.c 			key[i] = crd->crd_key[i];
crd              1138 dev/pci/noct.c 	if (crd->crd_flags & CRD_F_ENCRYPT) {
crd              1139 dev/pci/noct.c 		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
crd              1140 dev/pci/noct.c 			bcopy(crd->crd_iv, iv, 8);
crd              1144 dev/pci/noct.c 		if (!(crd->crd_flags & CRD_F_IV_PRESENT)) {
crd              1147 dev/pci/noct.c 				    crd->crd_inject, 8, iv);
crd              1150 dev/pci/noct.c 				    crd->crd_inject, 8, iv);
crd              1153 dev/pci/noct.c 		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
crd              1154 dev/pci/noct.c 			bcopy(crd->crd_iv, iv, 8);
crd              1157 dev/pci/noct.c 			    crd->crd_inject, 8, iv);
crd              1160 dev/pci/noct.c 			    crd->crd_inject, 8, iv);
crd              1163 dev/pci/noct.c 	if ((err = bus_dmamem_alloc(sc->sc_dmat, crd->crd_len, PAGE_SIZE, 0,
crd              1168 dev/pci/noct.c 	    crd->crd_len, (caddr_t *)&q->q_buf, BUS_DMA_WAITOK)) != 0)
crd              1171 dev/pci/noct.c 	if ((err = bus_dmamap_create(sc->sc_dmat, crd->crd_len, 1,
crd              1172 dev/pci/noct.c 	    crd->crd_len, 0, BUS_DMA_WAITOK, &q->q_dmamap)) != 0)
crd              1176 dev/pci/noct.c 	    rseg, crd->crd_len, BUS_DMA_WAITOK)) != 0)
crd              1181 dev/pci/noct.c 		    crd->crd_skip, crd->crd_len, q->q_buf);
crd              1184 dev/pci/noct.c 		    crd->crd_skip, crd->crd_len, q->q_buf);
crd              1207 dev/pci/noct.c 	if (crd->crd_flags & CRD_F_ENCRYPT)
crd              1213 dev/pci/noct.c 	sc->sc_eacmd[wp].buf[1] |= htole32(crd->crd_len);
crd              1257 dev/pci/noct.c 	bus_dmamem_unmap(sc->sc_dmat, q->q_buf, crd->crd_len);
crd               107 dev/pci/noctvar.h #define	NOCT_SID(crd, sesn)	(((crd) << 28) | ((sesn) & 0x0fffffff))
crd              1638 dev/pci/safe.c 	struct cryptodesc *crd;
crd              1675 dev/pci/safe.c 		for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd              1678 dev/pci/safe.c 			if (crd->crd_alg == CRYPTO_DES_CBC ||
crd              1679 dev/pci/safe.c 			    crd->crd_alg == CRYPTO_3DES_CBC) {
crd              1681 dev/pci/safe.c 			} else if (crd->crd_alg == CRYPTO_AES_CBC) {
crd              1687 dev/pci/safe.c 					crd->crd_skip + crd->crd_len - ivsize,
crd              1692 dev/pci/safe.c 					crd->crd_skip + crd->crd_len - ivsize,
crd              1702 dev/pci/safe.c 		for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd              1703 dev/pci/safe.c 			if (!(crd->crd_alg == CRYPTO_MD5_HMAC ||
crd              1704 dev/pci/safe.c 			    crd->crd_alg == CRYPTO_SHA1_HMAC))
crd              1706 dev/pci/safe.c 			if (crd->crd_alg == CRYPTO_SHA1_HMAC) {
crd              1717 dev/pci/safe.c 					crd->crd_inject, 12,
crd                57 dev/pci/safevar.h #define	SAFE_SID(crd, sesn)	(((crd) << 28) | ((sesn) & 0x0fffffff))
crd              1220 dev/pci/ubsec.c 	struct cryptodesc *crd;
crd              1247 dev/pci/ubsec.c 		for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd              1248 dev/pci/ubsec.c 			if (crd->crd_alg != CRYPTO_DES_CBC &&
crd              1249 dev/pci/ubsec.c 			    crd->crd_alg != CRYPTO_3DES_CBC)
crd              1253 dev/pci/ubsec.c 				    crd->crd_skip + crd->crd_len - 8, 8,
crd              1257 dev/pci/ubsec.c 				    crd->crd_skip + crd->crd_len - 8, 8,
crd              1264 dev/pci/ubsec.c 	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
crd              1265 dev/pci/ubsec.c 		if (crd->crd_alg != CRYPTO_MD5_HMAC &&
crd              1266 dev/pci/ubsec.c 		    crd->crd_alg != CRYPTO_SHA1_HMAC)
crd              1270 dev/pci/ubsec.c 			    crd->crd_inject, 12,
crd                47 dev/pci/ubsecvar.h #define	UBSEC_SID(crd, sesn)	(((crd) << 28) | ((sesn) & 0x0fffffff))
crd              2908 dev/softraid.c 	struct cryptodesc	*crd;
crd              2938 dev/softraid.c 	for (i = 0, crd = crp->crp_desc; crd; i++, crd = crd->crd_next) {
crd              2939 dev/softraid.c 		crd->crd_skip = 512 * i;
crd              2940 dev/softraid.c 		crd->crd_len = 512;
crd              2941 dev/softraid.c 		crd->crd_inject = 0;
crd              2942 dev/softraid.c 		crd->crd_flags = flags;
crd              2943 dev/softraid.c 		crd->crd_alg = CRYPTO_AES_CBC;
crd              2944 dev/softraid.c 		crd->crd_klen = 256;
crd              2945 dev/softraid.c 		crd->crd_rnd = 14;
crd              2946 dev/softraid.c 		crd->crd_key = sd->mds.mdd_raidc.src_key;
crd              2947 dev/softraid.c 		memset(crd->crd_iv, blk + i, sizeof(crd->crd_iv));
crd               745 netinet/ip_ah.c 	struct cryptodesc *crd;
crd               756 netinet/ip_ah.c 	crd = crp->crp_desc;
crd               215 netinet/ip_ipcomp.c 	struct cryptodesc *crd;
crd               224 netinet/ip_ipcomp.c 	crd = crp->crp_desc;
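
The matches above all revolve around the same pattern: a `struct cryptop` carries a linked list of `struct cryptodesc` descriptors (`crp_desc`, chained through `crd_next`), and each consumer walks that list, switching on `crd_alg` and consulting the `CRD_F_ENCRYPT` / `CRD_F_IV_EXPLICIT` / `CRD_F_IV_PRESENT` flags to decide where the IV comes from and whether it gets written back at `crd_inject`. The sketch below is a minimal, self-contained illustration of that walk; the struct definitions, flag values, and algorithm constants are simplified stand-ins for the real ones in crypto.h, not the kernel's actual declarations.

	/*
	 * Simplified stand-ins for the crypto.h structures referenced in the
	 * listing above (via.c, glxsb.c, cryptosoft.c, ...).  Field names
	 * match the real ones; values and layout are illustrative only.
	 */
	#include <stdio.h>
	#include <string.h>

	#define CRD_F_ENCRYPT		0x01	/* descriptor encrypts (vs. decrypts) */
	#define CRD_F_IV_PRESENT	0x02	/* IV already present in the buffer */
	#define CRD_F_IV_EXPLICIT	0x04	/* IV supplied in crd_iv */

	#define CRYPTO_AES_CBC		1	/* stand-in algorithm numbers */
	#define CRYPTO_SHA1_HMAC	2

	struct cryptodesc {
		int		crd_alg;	/* algorithm for this descriptor */
		int		crd_flags;	/* CRD_F_* flags */
		int		crd_skip;	/* offset of payload in crp_buf */
		int		crd_len;	/* length of payload to process */
		int		crd_inject;	/* offset for IV or MAC in crp_buf */
		unsigned char	crd_iv[16];	/* explicit IV, if CRD_F_IV_EXPLICIT */
		struct cryptodesc *crd_next;	/* next descriptor in the chain */
	};

	struct cryptop {
		struct cryptodesc *crp_desc;	/* head of the descriptor chain */
		unsigned char	*crp_buf;	/* data buffer (contiguous case only) */
	};

	/* Walk the descriptor chain the way the drivers in the listing do. */
	static int
	process(struct cryptop *crp)
	{
		struct cryptodesc *crd;
		unsigned char iv[16] = { 0 };	/* stand-in for ses->ses_iv */

		for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
			switch (crd->crd_alg) {
			case CRYPTO_AES_CBC:
				if (crd->crd_flags & CRD_F_ENCRYPT) {
					/* Encrypt: caller may hand us the IV ... */
					if (crd->crd_flags & CRD_F_IV_EXPLICIT)
						memcpy(iv, crd->crd_iv, sizeof(iv));
					/* ... and may want it copied into the buffer. */
					if ((crd->crd_flags & CRD_F_IV_PRESENT) == 0)
						memcpy(crp->crp_buf + crd->crd_inject,
						    iv, sizeof(iv));
				} else {
					/* Decrypt: IV is explicit or read from the buffer. */
					if (crd->crd_flags & CRD_F_IV_EXPLICIT)
						memcpy(iv, crd->crd_iv, sizeof(iv));
					else
						memcpy(iv, crp->crp_buf + crd->crd_inject,
						    sizeof(iv));
				}
				printf("AES-CBC: skip %d len %d\n",
				    crd->crd_skip, crd->crd_len);
				break;
			case CRYPTO_SHA1_HMAC:
				/* MAC result would be written at crd_inject. */
				printf("HMAC: skip %d len %d inject %d\n",
				    crd->crd_skip, crd->crd_len, crd->crd_inject);
				break;
			default:
				return (-1);	/* EINVAL in the real drivers */
			}
		}
		return (0);
	}

	int
	main(void)
	{
		unsigned char buf[64] = { 0 };	/* 16-byte IV slot + 48 bytes payload */
		struct cryptodesc mac = { CRYPTO_SHA1_HMAC, 0, 16, 48, 0,
		    { 0 }, NULL };
		struct cryptodesc enc = { CRYPTO_AES_CBC, CRD_F_ENCRYPT, 16, 48, 0,
		    { 0 }, &mac };
		struct cryptop crp = { &enc, buf };

		return (process(&crp) == 0 ? 0 : 1);
	}

In the drivers listed above the same walk additionally distinguishes mbuf, uio, and flat buffers (the `m_copydata`/`cuio_copydata`/`bcopy` triplets), but the chain traversal and flag handling follow this shape.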