pm                577 arch/i386/i386/pmap.c pmap_exec_account(struct pmap *pm, vaddr_t va,
pm                580 arch/i386/i386/pmap.c 	if (pm == pmap_kernel())
pm                584 arch/i386/i386/pmap.c 	    pm != vm_map_pmap(&curproc->p_vmspace->vm_map))
pm                588 arch/i386/i386/pmap.c 		pmap_tlb_shootpage(pm, va);
pm                598 arch/i386/i386/pmap.c 	if ((opte & PG_X) && (npte & PG_X) == 0 && va == pm->pm_hiexec) {
pm                602 arch/i386/i386/pmap.c 		pm->pm_hiexec = I386_MAX_EXE_ADDR;
pm                603 arch/i386/i386/pmap.c 		setcslimit(pm, tf, pcb, I386_MAX_EXE_ADDR);
pm                616 arch/i386/i386/pmap.c 	struct pmap *pm = vm_map_pmap(map);
pm                630 arch/i386/i386/pmap.c 	if (va <= pm->pm_hiexec) {
pm                634 arch/i386/i386/pmap.c 	pm->pm_hiexec = va;
pm                641 arch/i386/i386/pmap.c 	setcslimit(pm, tf, pcb, va);
pm                647 arch/i386/i386/pmap.c setcslimit(struct pmap *pm, struct trapframe *tf, struct pcb *pcb,
pm                666 arch/i386/i386/pmap.c 	setsegment(&pm->pm_codeseg, 0, atop(limit),
pm                673 arch/i386/i386/pmap.c 	    pm->pm_codeseg;
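
The pmap.c lines above (577-673) maintain OpenBSD/i386's per-pmap executable high-water mark: pmap_exec_fixup() raises pm_hiexec when a higher user page becomes executable, pmap_exec_account() drops it back to I386_MAX_EXE_ADDR once that topmost page loses PG_X, and setcslimit() rebuilds pm_codeseg so the code segment limit tracks it. A simplified standalone sketch of just the pm_hiexec bookkeeping (made-up types and constant; no trapframe, pcb or segment descriptor work):

/*
 * Simplified sketch of the pm_hiexec bookkeeping in
 * pmap_exec_fixup()/pmap_exec_account(): track the highest executable
 * user address and fall back to a default when that mapping goes away.
 * FAKE_MAX_EXE_ADDR and struct fake_pmap are stand-ins, not kernel types.
 */
#include <stdint.h>
#include <stdio.h>

#define FAKE_MAX_EXE_ADDR	0x20000000UL	/* stand-in for I386_MAX_EXE_ADDR */

struct fake_pmap {
	uintptr_t pm_hiexec;		/* highest executable VA seen */
};

/* a page at va became executable: possibly raise the limit */
static void
exec_fixup(struct fake_pmap *pm, uintptr_t va)
{
	if (va > pm->pm_hiexec)
		pm->pm_hiexec = va;	/* the real code calls setcslimit() here */
}

/* the topmost executable page at va lost PG_X: revert to the default */
static void
exec_account(struct fake_pmap *pm, uintptr_t va)
{
	if (va == pm->pm_hiexec)
		pm->pm_hiexec = FAKE_MAX_EXE_ADDR;
}

int
main(void)
{
	struct fake_pmap pm = { .pm_hiexec = FAKE_MAX_EXE_ADDR };

	exec_fixup(&pm, 0x30001000UL);
	printf("hiexec after fixup: %#lx\n", (unsigned long)pm.pm_hiexec);
	exec_account(&pm, 0x30001000UL);
	printf("hiexec after unmap: %#lx\n", (unsigned long)pm.pm_hiexec);
	return 0;
}
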
pm               2791 arch/i386/i386/pmap.c 	struct pmap *kpm = pmap_kernel(), *pm;
pm               2841 arch/i386/i386/pmap.c 		LIST_FOREACH(pm, &pmaps, pm_list) {
pm               2842 arch/i386/i386/pmap.c 			pm->pm_pdir[PDSLOT_KERN + nkpde] =
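
The lines at pmap.c:2791-2842 are the kernel-growth path: once a new kernel PDE exists in the kernel pmap (kpm), the same entry is copied into pm_pdir[PDSLOT_KERN + nkpde] of every pmap on the global pmaps list, so every address space sees the enlarged kernel area. A toy version of that propagation step (invented types and slot numbers, using a BSD-style <sys/queue.h> list like the original):

/*
 * Toy version of the LIST_FOREACH propagation at pmap.c:2841-2842:
 * publish a freshly created kernel PDE to every address space.
 * struct toy_pmap, NPDES and PDSLOT_KERN are made up for the example.
 */
#include <sys/queue.h>
#include <stdint.h>
#include <stdio.h>

#define NPDES		4
#define PDSLOT_KERN	2		/* first kernel PD slot (made up) */

struct toy_pmap {
	uint32_t pm_pdir[NPDES];	/* stand-in page directory */
	LIST_ENTRY(toy_pmap) pm_list;
};

LIST_HEAD(, toy_pmap) pmaps = LIST_HEAD_INITIALIZER(pmaps);

static void
grow_kernel_pde(struct toy_pmap *kpm, int nkpde, uint32_t new_pde)
{
	struct toy_pmap *pm;

	kpm->pm_pdir[PDSLOT_KERN + nkpde] = new_pde;
	LIST_FOREACH(pm, &pmaps, pm_list)
		pm->pm_pdir[PDSLOT_KERN + nkpde] = new_pde;
}

int
main(void)
{
	struct toy_pmap kpm = { { 0 } }, user = { { 0 } };

	LIST_INSERT_HEAD(&pmaps, &user, pm_list);
	grow_kernel_pde(&kpm, 0, 0x1234007);
	printf("user pmap kernel PDE: %#x\n",
	    (unsigned)user.pm_pdir[PDSLOT_KERN]);
	return 0;
}
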
pm               2938 arch/i386/i386/pmap.c pmap_tlb_shootpage(struct pmap *pm, vaddr_t va)
pm               2946 arch/i386/i386/pmap.c 		if (ci == self || !pmap_is_active(pm, ci->ci_cpuid) ||
pm               2970 arch/i386/i386/pmap.c 	if (pmap_is_curpmap(pm))
pm               2975 arch/i386/i386/pmap.c pmap_tlb_shootrange(struct pmap *pm, vaddr_t sva, vaddr_t eva)
pm               2984 arch/i386/i386/pmap.c 		if (ci == self || !pmap_is_active(pm, ci->ci_cpuid) ||
pm               3009 arch/i386/i386/pmap.c 	if (pmap_is_curpmap(pm))
pm               3059 arch/i386/i386/pmap.c pmap_tlb_shootpage(struct pmap *pm, vaddr_t va)
pm               3061 arch/i386/i386/pmap.c 	if (pmap_is_curpmap(pm))
pm               3067 arch/i386/i386/pmap.c pmap_tlb_shootrange(struct pmap *pm, vaddr_t sva, vaddr_t eva)
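
pmap.c carries two builds of pmap_tlb_shootpage()/pmap_tlb_shootrange(): the first set (2938-3009) walks the other CPUs, skipping itself and any CPU on which the pmap is not active before notifying the rest, and finally flushes locally when pmap_is_curpmap(pm); the second set (3059 onward) is evidently the uniprocessor build and reduces to the local flush. A toy sketch of the target-selection step only (invented cpu table and bitmask bookkeeping; no IPIs are sent):

/*
 * Toy CPU selection in the spirit of pmap_tlb_shootpage(): collect the
 * set of CPUs that must be interrupted, skipping the calling CPU and
 * CPUs where the pmap is not active.  struct toy_cpu is made up.
 */
#include <stdint.h>
#include <stdio.h>

#define NCPUS 4

struct toy_cpu {
	int		ci_cpuid;
	uint32_t	ci_active_pmaps;	/* one bit per pmap id */
};

static const struct toy_cpu cpus[NCPUS] = {
	{ 0, 0x1 }, { 1, 0x3 }, { 2, 0x0 }, { 3, 0x1 },
};

static int
toy_pmap_is_active(uint32_t pmap_id, const struct toy_cpu *ci)
{
	return (ci->ci_active_pmaps & (1u << pmap_id)) != 0;
}

/* which CPUs, other than self, need a shootdown for this pmap? */
static uint32_t
shootdown_targets(uint32_t pmap_id, int self)
{
	uint32_t mask = 0;
	int i;

	for (i = 0; i < NCPUS; i++) {
		if (cpus[i].ci_cpuid == self ||
		    !toy_pmap_is_active(pmap_id, &cpus[i]))
			continue;
		mask |= 1u << cpus[i].ci_cpuid;
	}
	return mask;
}

int
main(void)
{
	/* pmap 0 is active on CPUs 0, 1 and 3; we run on CPU 0 */
	printf("shootdown target mask: %#x\n",
	    (unsigned)shootdown_targets(0, 0));
	return 0;
}
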
pm                510 arch/i386/i386/pmapae.c #define	PDE(pm,i)	(((pd_entry_t *)(pm)->pm_pdir)[(i)])
pm               2173 arch/i386/i386/pmapae.c 	struct pmap *kpm = pmap_kernel(), *pm;
pm               2225 arch/i386/i386/pmapae.c 		LIST_FOREACH(pm, &pmaps, pm_list) {
pm               2226 arch/i386/i386/pmapae.c 			PDE(pm, PDSLOT_KERN + nkpde) =
pm                351 arch/i386/include/pmap.h #define	pmap_update(pm)			/* nada */
pm                296 arch/i386/stand/libsa/memprobe.c 	bios_memmap_t *pm = bios_memmap, *im;
pm                304 arch/i386/stand/libsa/memprobe.c 	if ((pm = bios_E820(bios_memmap)) == NULL) {
pm                307 arch/i386/stand/libsa/memprobe.c 		pm = bios_E801(im);
pm                308 arch/i386/stand/libsa/memprobe.c 		if (pm == NULL)
pm                310 arch/i386/stand/libsa/memprobe.c 			pm = bios_8800(im);
pm                311 arch/i386/stand/libsa/memprobe.c 		if (pm == NULL)
pm                312 arch/i386/stand/libsa/memprobe.c 			pm = badprobe(im);
pm                313 arch/i386/stand/libsa/memprobe.c 		if (pm == NULL) {
pm                315 arch/i386/stand/libsa/memprobe.c 			pm = im;
pm                318 arch/i386/stand/libsa/memprobe.c 	pm->type = BIOS_MAP_END;
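
memprobe.c:296-318 tries each BIOS memory-map probe in order of preference: bios_E820() first, then bios_E801(), then the old bios_8800() call, then badprobe(), and finally falls back to the existing map (pm = im) before terminating the result with BIOS_MAP_END. A standalone sketch of the same fallback chain with dummy probes (none of these are the real libsa routines):

/*
 * Fallback-chain sketch modelled on memprobe(): try the most capable
 * probe first and fall back until something answers.  probe_e820() and
 * friends are dummies standing in for bios_E820()/bios_E801()/bios_8800().
 */
#include <stddef.h>
#include <stdio.h>

struct memmap { const char *source; };

static struct memmap *probe_e820(void)   { return NULL; }	/* "fails" */
static struct memmap *probe_e801(void)
{
	static struct memmap m = { "E801" };
	return &m;						/* "works" */
}
static struct memmap *probe_8800(void)   { return NULL; }
static struct memmap *probe_manual(void) { return NULL; }

static struct memmap *
memprobe(void)
{
	struct memmap *pm;

	if ((pm = probe_e820()) != NULL)
		return pm;
	if ((pm = probe_e801()) != NULL)
		return pm;
	if ((pm = probe_8800()) != NULL)
		return pm;
	return probe_manual();	/* may still be NULL: use static defaults */
}

int
main(void)
{
	struct memmap *pm = memprobe();

	printf("memory map from: %s\n", pm != NULL ? pm->source : "defaults");
	return 0;
}
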
pm               2857 dev/audio.c    	struct mixer_asyncs **pm, *m;
pm               2859 dev/audio.c    	for(pm = &sc->sc_async_mixer; *pm; pm = &(*pm)->next) {
pm               2860 dev/audio.c    		if ((*pm)->proc == p) {
pm               2861 dev/audio.c    			m = *pm;
pm               2862 dev/audio.c    			*pm = m->next;
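
audio.c:2857-2862 unhooks one process's entry from the singly linked sc_async_mixer list by walking a pointer to the link itself, so removing the head and removing an interior node are the same single assignment. The same idiom as a standalone toy list:

/*
 * Pointer-to-pointer removal as in the sc_async_mixer loop: pm always
 * points at the link that refers to the current node, so unlinking is
 * one assignment regardless of the node's position.  Toy list, not the
 * audio(4) structures.
 */
#include <stdio.h>
#include <stdlib.h>

struct node {
	int		proc;	/* stand-in for the struct proc pointer */
	struct node	*next;
};

static void
remove_proc(struct node **head, int proc)
{
	struct node **pm, *m;

	for (pm = head; *pm != NULL; pm = &(*pm)->next) {
		if ((*pm)->proc == proc) {
			m = *pm;
			*pm = m->next;		/* unlink */
			free(m);
			return;
		}
	}
}

int
main(void)
{
	struct node *head = NULL, *n;
	int i;

	for (i = 3; i >= 1; i--) {		/* build 1 -> 2 -> 3 */
		n = malloc(sizeof(*n));
		n->proc = i;
		n->next = head;
		head = n;
	}
	remove_proc(&head, 2);
	for (n = head; n != NULL; n = n->next)
		printf("proc %d\n", n->proc);
	return 0;
}
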
pm               1215 dev/pci/if_de.c 	const tulip_phy_modedata_t * const pm = &attr->attr_modes[PHY_MODE_100TX];
pm               1216 dev/pci/if_de.c 	data = tulip_mii_readreg(sc, sc->tulip_phyaddr, pm->pm_regno);
pm               1217 dev/pci/if_de.c 	if ((data & pm->pm_mask) == pm->pm_value)
pm               1221 dev/pci/if_de.c 	const tulip_phy_modedata_t * const pm = &attr->attr_modes[PHY_MODE_100T4];
pm               1222 dev/pci/if_de.c 	data = tulip_mii_readreg(sc, sc->tulip_phyaddr, pm->pm_regno);
pm               1223 dev/pci/if_de.c 	if ((data & pm->pm_mask) == pm->pm_value)
pm               1227 dev/pci/if_de.c 	const tulip_phy_modedata_t * const pm = &attr->attr_modes[PHY_MODE_10T];
pm               1228 dev/pci/if_de.c 	data = tulip_mii_readreg(sc, sc->tulip_phyaddr, pm->pm_regno);
pm               1229 dev/pci/if_de.c 	if ((data & pm->pm_mask) == pm->pm_value)
pm               1233 dev/pci/if_de.c 	const tulip_phy_modedata_t * const pm = &attr->attr_modes[PHY_MODE_FULLDUPLEX];
pm               1234 dev/pci/if_de.c 	data = tulip_mii_readreg(sc, sc->tulip_phyaddr, pm->pm_regno);
pm               1235 dev/pci/if_de.c 	idx += ((data & pm->pm_mask) == pm->pm_value ? 4 : 0);
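
if_de.c:1215-1235 classifies the PHY's current mode by reading one MII register per candidate (100TX, 100T4, 10T, then full duplex as a +4 index offset) and testing the masked value against the expected pm_value from the attr_modes table. A toy version of that mask/value table match (invented register layout and a faked register read; not the tulip MII code):

/*
 * Mask/value table match in the style of the attr_modes checks: each
 * candidate mode names a register, a mask and the value the masked
 * register must equal.  The register contents and layout are made up.
 */
#include <stdio.h>

struct phy_mode {
	const char	*pm_name;
	int		pm_regno;
	unsigned short	pm_mask;
	unsigned short	pm_value;
};

static const struct phy_mode modes[] = {
	{ "100TX", 0x11, 0x4000, 0x4000 },
	{ "100T4", 0x11, 0x2000, 0x2000 },
	{ "10T",   0x11, 0x1000, 0x1000 },
};

/* pretend MII read: report a 100TX link */
static unsigned short
mii_readreg(int regno)
{
	(void)regno;
	return 0x4000;
}

int
main(void)
{
	size_t i;

	for (i = 0; i < sizeof(modes) / sizeof(modes[0]); i++) {
		const struct phy_mode *pm = &modes[i];
		unsigned short data = mii_readreg(pm->pm_regno);

		if ((data & pm->pm_mask) == pm->pm_value) {
			printf("link mode: %s\n", pm->pm_name);
			break;
		}
	}
	return 0;
}
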
pm                471 dev/pci/pci.c  	const struct pci_matchid *pm;
pm                474 dev/pci/pci.c  	for (i = 0, pm = ids; i < nent; i++, pm++)
pm                475 dev/pci/pci.c  		if (PCI_VENDOR(pa->pa_id) == pm->pm_vid &&
pm                476 dev/pci/pci.c  		    PCI_PRODUCT(pa->pa_id) == pm->pm_pid)
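
pci.c:471-476 is the pci_matchbyid()-style table walk: step through the driver's array of struct pci_matchid entries and report a hit only when both the vendor and the product ID of the probed device match. A standalone sketch with made-up IDs (no real PCI access):

/*
 * ID-table match in the style of the pci.c loop above: compare a
 * device's vendor/product pair against a driver-supplied list.
 * The table contents here are examples, not a real driver's list.
 */
#include <stdio.h>

struct toy_matchid {
	unsigned short	pm_vid;
	unsigned short	pm_pid;
};

static const struct toy_matchid supported[] = {
	{ 0x8086, 0x100e },
	{ 0x8086, 0x10d3 },
};

static int
matchbyid(unsigned short vid, unsigned short pid,
    const struct toy_matchid *ids, int nent)
{
	const struct toy_matchid *pm;
	int i;

	for (i = 0, pm = ids; i < nent; i++, pm++)
		if (vid == pm->pm_vid && pid == pm->pm_pid)
			return 1;
	return 0;
}

int
main(void)
{
	int n = sizeof(supported) / sizeof(supported[0]);

	printf("0x8086:0x10d3 -> %d\n", matchbyid(0x8086, 0x10d3, supported, n));
	printf("0x10ec:0x8139 -> %d\n", matchbyid(0x10ec, 0x8139, supported, n));
	return 0;
}
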
pm               1501 nfs/nfs_socket.c nfs_realign(struct mbuf **pm, int hsiz)
pm               1508 nfs/nfs_socket.c 	while ((m = *pm) != NULL) {
pm               1518 nfs/nfs_socket.c 		pm = &m->m_next;
pm               1539 nfs/nfs_socket.c 		m_freem(*pm);
pm               1540 nfs/nfs_socket.c 		*pm = n;
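
nfs_realign() (nfs_socket.c:1501-1540) walks the mbuf chain through an indirect pointer (pm = &m->m_next), stops at the first mbuf whose data the XDR code cannot use in place, copies the remainder into a fresh chain, frees the old tail with m_freem(), and splices the copy back in through *pm. A toy sketch of the alignment check and splice using plain heap buffers (the real routine's mbuf handling and exact alignment rules are omitted):

/*
 * Alignment check + splice in the spirit of nfs_realign(): walk a
 * buffer chain via an indirect pointer and, at the first payload that
 * is not 32-bit aligned, replace the rest of the chain with one
 * aligned copy.  struct chunk is a stand-in for struct mbuf.
 */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct chunk {
	char		*base;	/* allocation, for free() */
	char		*data;	/* payload start, possibly misaligned */
	size_t		len;
	struct chunk	*next;
};

static void
free_chain(struct chunk *m)
{
	struct chunk *next;

	for (; m != NULL; m = next) {
		next = m->next;
		free(m->base);
		free(m);
	}
}

static void
realign(struct chunk **pm)
{
	struct chunk *m, *n, *t;
	size_t total = 0, off = 0;

	/* advance pm until a misaligned payload is found */
	while ((m = *pm) != NULL) {
		if (((uintptr_t)m->data & 0x3) != 0)
			break;
		pm = &m->next;
	}
	if (m == NULL)
		return;				/* chain is fine as is */

	for (t = m; t != NULL; t = t->next)	/* size of the bad tail */
		total += t->len;

	n = malloc(sizeof(*n));			/* one aligned replacement */
	n->base = n->data = malloc(total);	/* malloc() memory is aligned */
	n->len = total;
	n->next = NULL;
	for (t = m; t != NULL; t = t->next) {
		memcpy(n->data + off, t->data, t->len);
		off += t->len;
	}

	free_chain(*pm);	/* drop the misaligned tail... */
	*pm = n;		/* ...and splice the copy back in */
}

int
main(void)
{
	struct chunk a, *b = malloc(sizeof(*b)), *head = &a;

	a.base = a.data = malloc(8);		/* aligned payload */
	memcpy(a.data, "RPCHDR", 7);
	a.len = 7;
	a.next = b;

	b->base = malloc(16);
	memcpy(b->base + 1, "payload", 8);
	b->data = b->base + 1;			/* deliberately misaligned */
	b->len = 8;
	b->next = NULL;

	realign(&head);
	printf("tail now \"%s\", aligned: %d\n", a.next->data,
	    ((uintptr_t)a.next->data & 0x3) == 0);
	return 0;
}
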