pve               475 arch/i386/i386/pmap.c pmap_tmpmap_pvepte(struct pv_entry *pve)
pve               478 arch/i386/i386/pmap.c 	if (pve->pv_pmap == pmap_kernel())
pve               483 arch/i386/i386/pmap.c 	if (pmap_is_curpmap(pve->pv_pmap))
pve               484 arch/i386/i386/pmap.c 		return(vtopte(pve->pv_va));
pve               486 arch/i386/i386/pmap.c 	return(((pt_entry_t *)pmap_tmpmap_pa(VM_PAGE_TO_PHYS(pve->pv_ptp)))
pve               487 arch/i386/i386/pmap.c 	       + ptei((unsigned)pve->pv_va));
pve               495 arch/i386/i386/pmap.c pmap_tmpunmap_pvepte(struct pv_entry *pve)
pve               498 arch/i386/i386/pmap.c 	if (pmap_is_curpmap(pve->pv_pmap))
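The two tmpmap/tmpunmap paths above differ only in how the PTE for pve->pv_va is located: when pve->pv_pmap is the kernel or current pmap, the recursive mapping makes the PTE directly addressable via vtopte(); otherwise the PTP page is temporarily mapped and indexed with ptei(). A minimal sketch of the index arithmetic, assuming the classic non-PAE i386 layout (4 KB pages, 1024-entry page tables); the constants and helper names here are illustrative, not copied from the headers:

	#include <stdint.h>

	#define PGSHIFT      12      /* 4 KB pages */
	#define PTES_PER_PT  1024    /* non-PAE: 1024 32-bit PTEs per page table */

	/* page number of a virtual address (what atop() computes) */
	static inline uint32_t va_to_pfn(uint32_t va) { return va >> PGSHIFT; }

	/* index of va within its page table (what ptei() computes) */
	static inline uint32_t va_to_ptei(uint32_t va)
	{
		return (va >> PGSHIFT) & (PTES_PER_PT - 1);
	}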
pve              1302 arch/i386/i386/pmap.c pmap_enter_pv(struct vm_page *pg, struct pv_entry *pve, struct pmap *pmap,
pve              1305 arch/i386/i386/pmap.c 	pve->pv_pmap = pmap;
pve              1306 arch/i386/i386/pmap.c 	pve->pv_va = va;
pve              1307 arch/i386/i386/pmap.c 	pve->pv_ptp = ptp;			/* NULL for kernel pmap */
pve              1308 arch/i386/i386/pmap.c 	pve->pv_next = pg->mdpage.pv_list;	/* add to ... */
pve              1309 arch/i386/i386/pmap.c 	pg->mdpage.pv_list = pve;			/* ... locked list */
pve              1324 arch/i386/i386/pmap.c 	struct pv_entry *pve, **prevptr;
pve              1327 arch/i386/i386/pmap.c 	while ((pve = *prevptr) != NULL) {
pve              1328 arch/i386/i386/pmap.c 		if (pve->pv_pmap == pmap && pve->pv_va == va) {	/* match? */
pve              1329 arch/i386/i386/pmap.c 			*prevptr = pve->pv_next;		/* remove it! */
pve              1332 arch/i386/i386/pmap.c 		prevptr = &pve->pv_next;		/* previous pointer */
pve              1334 arch/i386/i386/pmap.c 	return(pve);				/* return removed pve */
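Taken together, the fields referenced in pmap_enter_pv() and pmap_remove_pv() (pv_pmap, pv_va, pv_ptp, pv_next) imply a pv_entry that records one mapping of a physical page and chains onto that page's pv list. The removal loop walks the list through a pointer-to-pointer so the matching entry can be unlinked without a special case for the list head. A self-contained sketch of that idiom, using stand-in types rather than the kernel's declarations:

	#include <stddef.h>

	struct pv_entry {
		struct pv_entry *pv_next;   /* next mapping of the same page */
		void            *pv_pmap;   /* owning pmap (stand-in type) */
		unsigned long    pv_va;     /* virtual address of the mapping */
	};

	/* Unlink and return the entry matching (pmap, va), or NULL.
	 * prevptr always points at the link that references *pve, so
	 * unlinking is "*prevptr = pve->pv_next" for head and middle alike. */
	static struct pv_entry *
	pv_list_remove(struct pv_entry **head, void *pmap, unsigned long va)
	{
		struct pv_entry *pve, **prevptr = head;

		while ((pve = *prevptr) != NULL) {
			if (pve->pv_pmap == pmap && pve->pv_va == va) {
				*prevptr = pve->pv_next;
				break;
			}
			prevptr = &pve->pv_next;
		}
		return pve;
	}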
pve              1876 arch/i386/i386/pmap.c 	struct pv_entry *pve;
pve              1934 arch/i386/i386/pmap.c 		pve = pmap_remove_pv(pg, pmap, startva);
pve              1935 arch/i386/i386/pmap.c 		if (pve) {
pve              1936 arch/i386/i386/pmap.c 			pve->pv_next = pv_tofree;
pve              1937 arch/i386/i386/pmap.c 			pv_tofree = pve;
pve              1961 arch/i386/i386/pmap.c 	struct pv_entry *pve;
pve              2005 arch/i386/i386/pmap.c 	pve = pmap_remove_pv(pg, pmap, va);
pve              2006 arch/i386/i386/pmap.c 	if (pve)
pve              2007 arch/i386/i386/pmap.c 		pmap_free_pv(pmap, pve);
pve              2242 arch/i386/i386/pmap.c 	struct pv_entry *pve;
pve              2254 arch/i386/i386/pmap.c 	for (pve = pg->mdpage.pv_list ; pve != NULL ; pve = pve->pv_next) {
pve              2255 arch/i386/i386/pmap.c 		ptes = pmap_map_ptes(pve->pv_pmap);	/* locks pmap */
pve              2258 arch/i386/i386/pmap.c 		if (pve->pv_va >= uvm.pager_sva && pve->pv_va < uvm.pager_eva)
pve              2260 arch/i386/i386/pmap.c 		if (pve->pv_ptp && (pve->pv_pmap->pm_pdir[pdei(pve->pv_va)] &
pve              2262 arch/i386/i386/pmap.c 		    != VM_PAGE_TO_PHYS(pve->pv_ptp)) {
pve              2264 arch/i386/i386/pmap.c 			       pg, pve->pv_va, pve->pv_ptp);
pve              2267 arch/i386/i386/pmap.c 			       (pve->pv_pmap->pm_pdir[pdei(pve->pv_va)] &
pve              2268 arch/i386/i386/pmap.c 				PG_FRAME), VM_PAGE_TO_PHYS(pve->pv_ptp));
pve              2274 arch/i386/i386/pmap.c 		opte = i386_atomic_testset_ul(&ptes[atop(pve->pv_va)], 0);
pve              2277 arch/i386/i386/pmap.c 			pve->pv_pmap->pm_stats.wired_count--;
pve              2278 arch/i386/i386/pmap.c 		pve->pv_pmap->pm_stats.resident_count--;
pve              2284 arch/i386/i386/pmap.c 		if (pve->pv_ptp) {
pve              2285 arch/i386/i386/pmap.c 			pve->pv_ptp->wire_count--;
pve              2286 arch/i386/i386/pmap.c 			if (pve->pv_ptp->wire_count <= 1) {
pve              2288 arch/i386/i386/pmap.c 				    &pve->pv_pmap->pm_pdir[pdei(pve->pv_va)],
pve              2291 arch/i386/i386/pmap.c 				    ((vaddr_t)ptes) + pve->pv_ptp->offset);
pve              2297 arch/i386/i386/pmap.c 				pmap_tlb_shootpage(pve->pv_pmap,
pve              2298 arch/i386/i386/pmap.c 				    ((vaddr_t)PTE_BASE) + pve->pv_ptp->offset);
pve              2300 arch/i386/i386/pmap.c 				pve->pv_pmap->pm_stats.resident_count--;
pve              2302 arch/i386/i386/pmap.c 				if (pve->pv_pmap->pm_ptphint == pve->pv_ptp)
pve              2303 arch/i386/i386/pmap.c 					pve->pv_pmap->pm_ptphint =
pve              2304 arch/i386/i386/pmap.c 					    TAILQ_FIRST(&pve->pv_pmap->pm_obj.memq);
pve              2305 arch/i386/i386/pmap.c 				pve->pv_ptp->wire_count = 0;
pve              2307 arch/i386/i386/pmap.c 				uvm_pagerealloc(pve->pv_ptp, NULL, 0);
pve              2308 arch/i386/i386/pmap.c 				TAILQ_INSERT_TAIL(&empty_ptps, pve->pv_ptp,
pve              2313 arch/i386/i386/pmap.c 		pmap_tlb_shootpage(pve->pv_pmap, pve->pv_va);
pve              2315 arch/i386/i386/pmap.c 		pmap_unmap_ptes(pve->pv_pmap);	/* unlocks pmap */
pve              2342 arch/i386/i386/pmap.c 	struct pv_entry *pve;
pve              2353 arch/i386/i386/pmap.c 	for (pve = pg->mdpage.pv_list; pve != NULL && mybits == 0;
pve              2354 arch/i386/i386/pmap.c 	    pve = pve->pv_next) {
pve              2355 arch/i386/i386/pmap.c 		ptes = pmap_map_ptes(pve->pv_pmap);
pve              2356 arch/i386/i386/pmap.c 		pte = ptes[atop(pve->pv_va)];
pve              2357 arch/i386/i386/pmap.c 		pmap_unmap_ptes(pve->pv_pmap);
pve              2379 arch/i386/i386/pmap.c 	struct pv_entry *pve;
pve              2392 arch/i386/i386/pmap.c 	for (pve = pg->mdpage.pv_list; pve != NULL; pve = pve->pv_next) {
pve              2394 arch/i386/i386/pmap.c 		if (!pmap_valid_entry(pve->pv_pmap->pm_pdir[pdei(pve->pv_va)]))
pve              2399 arch/i386/i386/pmap.c 		ptes = pmap_map_ptes(pve->pv_pmap);	/* locks pmap */
pve              2400 arch/i386/i386/pmap.c 		npte = ptes[atop(pve->pv_va)];
pve              2405 arch/i386/i386/pmap.c 			    &ptes[atop(pve->pv_va)], npte);
pve              2406 arch/i386/i386/pmap.c 			pmap_tlb_shootpage(pve->pv_pmap, pve->pv_va);
pve              2408 arch/i386/i386/pmap.c 		pmap_unmap_ptes(pve->pv_pmap);	/* unlocks pmap */
pve              2598 arch/i386/i386/pmap.c 	struct pv_entry *pve = NULL;
pve              2696 arch/i386/i386/pmap.c 			pve = pmap_remove_pv(pg, pmap, va);
pve              2719 arch/i386/i386/pmap.c 		if (pve == NULL) {
pve              2720 arch/i386/i386/pmap.c 			pve = pmap_alloc_pv(pmap, ALLOCPV_NEED);
pve              2721 arch/i386/i386/pmap.c 			if (pve == NULL) {
pve              2733 arch/i386/i386/pmap.c 		pmap_enter_pv(pg, pve, pmap, va, ptp);
pve              2737 arch/i386/i386/pmap.c 		if (pve)
pve              2738 arch/i386/i386/pmap.c 			pmap_free_pv(pmap, pve);
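In the pmap_enter() excerpt above, the pv handling follows a reuse-or-allocate pattern: replacing an existing managed mapping recovers its pv_entry via pmap_remove_pv(), a fresh entry is allocated only if nothing was recovered, and a recovered entry that is not needed (the new mapping is unmanaged) is handed back with pmap_free_pv(). A hedged sketch of that control flow using hypothetical user-space stand-ins; alloc_pv/free_pv/enter_mapping are not the kernel functions:

	#include <stdlib.h>

	struct pv_entry { struct pv_entry *pv_next; };

	static struct pv_entry *alloc_pv(void) { return calloc(1, sizeof(struct pv_entry)); }
	static void free_pv(struct pv_entry *pve) { free(pve); }

	/* Returns 0 on success, -1 if no pv_entry could be obtained. */
	static int
	enter_mapping(struct pv_entry *recycled, int new_mapping_is_managed,
	    struct pv_entry **page_pv_list)
	{
		struct pv_entry *pve = recycled;   /* recovered from the old mapping, may be NULL */

		if (new_mapping_is_managed) {
			if (pve == NULL && (pve = alloc_pv()) == NULL)
				return -1;                 /* kernel path handles allocation failure */
			pve->pv_next = *page_pv_list;      /* like pmap_enter_pv: push onto page's list */
			*page_pv_list = pve;
		} else if (pve != NULL) {
			free_pv(pve);                      /* recovered entry no longer needed */
		}
		return 0;
	}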
pve               634 arch/i386/i386/pmapae.c pmap_tmpmap_pvepte_pae(struct pv_entry *pve)
pve               637 arch/i386/i386/pmapae.c 	if (pve->pv_pmap == pmap_kernel())
pve               642 arch/i386/i386/pmapae.c 	if (pmap_is_curpmap(pve->pv_pmap))
pve               643 arch/i386/i386/pmapae.c 		return(vtopte(pve->pv_va));
pve               645 arch/i386/i386/pmapae.c 	return(((pt_entry_t *)pmap_tmpmap_pa_pae(VM_PAGE_TO_PHYS(pve->pv_ptp)))
pve               646 arch/i386/i386/pmapae.c 	       + ptei((unsigned)pve->pv_va));
pve               654 arch/i386/i386/pmapae.c pmap_tmpunmap_pvepte_pae(struct pv_entry *pve)
pve               657 arch/i386/i386/pmapae.c 	if (pmap_is_curpmap(pve->pv_pmap))
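The pmapae.c variants mirror the non-PAE helpers; the difference that matters for the index arithmetic is that PAE page tables hold 512 64-bit entries, so ptei() masks the page number with 0x1ff rather than 0x3ff (and the PDE() accessor replaces direct pm_pdir[] indexing, as the later excerpts show). A small sketch of the PAE page-table index, again with illustrative constants:

	#include <stdint.h>

	#define PAE_PGSHIFT      12     /* still 4 KB pages */
	#define PAE_PTES_PER_PT  512    /* PAE: 512 64-bit PTEs per page table */

	/* index of va within its PAE page table */
	static inline uint32_t va_to_ptei_pae(uint32_t va)
	{
		return (va >> PAE_PGSHIFT) & (PAE_PTES_PER_PT - 1);
	}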
pve              1210 arch/i386/i386/pmapae.c 	struct pv_entry *pve;
pve              1271 arch/i386/i386/pmapae.c 		pve = pmap_remove_pv(&vm_physmem[bank].pmseg.pvhead[off], pmap,
pve              1275 arch/i386/i386/pmapae.c 		if (pve) {
pve              1276 arch/i386/i386/pmapae.c 			pve->pv_next = pv_tofree;
pve              1277 arch/i386/i386/pmapae.c 			pv_tofree = pve;
pve              1303 arch/i386/i386/pmapae.c 	struct pv_entry *pve;
pve              1352 arch/i386/i386/pmapae.c 	pve = pmap_remove_pv(&vm_physmem[bank].pmseg.pvhead[off], pmap, va);
pve              1355 arch/i386/i386/pmapae.c 	if (pve)
pve              1356 arch/i386/i386/pmapae.c 		pmap_free_pv(pmap, pve);
pve              1574 arch/i386/i386/pmapae.c 	struct pv_entry *pve;
pve              1600 arch/i386/i386/pmapae.c 	for (pve = pvh->pvh_list ; pve != NULL ; pve = pve->pv_next) {
pve              1601 arch/i386/i386/pmapae.c 		ptes = pmap_map_ptes_pae(pve->pv_pmap);	/* locks pmap */
pve              1604 arch/i386/i386/pmapae.c 		if (pve->pv_va >= uvm.pager_sva && pve->pv_va < uvm.pager_eva)
pve              1606 arch/i386/i386/pmapae.c 		if (pve->pv_ptp && (PDE(pve->pv_pmap,
pve              1607 arch/i386/i386/pmapae.c 		    pdei(pve->pv_va)) & PG_FRAME) !=
pve              1608 arch/i386/i386/pmapae.c 		    VM_PAGE_TO_PHYS(pve->pv_ptp)) {
pve              1610 arch/i386/i386/pmapae.c 			       pg, pve->pv_va, pve->pv_ptp);
pve              1613 arch/i386/i386/pmapae.c 			       (PDE(pve->pv_pmap, pdei(pve->pv_va)) &
pve              1614 arch/i386/i386/pmapae.c 				PG_FRAME), VM_PAGE_TO_PHYS(pve->pv_ptp));
pve              1620 arch/i386/i386/pmapae.c 		opte = ptes[atop(pve->pv_va)];
pve              1621 arch/i386/i386/pmapae.c 		ptes[atop(pve->pv_va)] = 0;			/* zap! */
pve              1624 arch/i386/i386/pmapae.c 			pve->pv_pmap->pm_stats.wired_count--;
pve              1625 arch/i386/i386/pmapae.c 		pve->pv_pmap->pm_stats.resident_count--;
pve              1629 arch/i386/i386/pmapae.c 			pmap_tlb_shootdown(pve->pv_pmap, pve->pv_va, opte,
pve              1636 arch/i386/i386/pmapae.c 		if (pve->pv_ptp) {
pve              1637 arch/i386/i386/pmapae.c 			pve->pv_ptp->wire_count--;
pve              1638 arch/i386/i386/pmapae.c 			if (pve->pv_ptp->wire_count <= 1) {
pve              1644 arch/i386/i386/pmapae.c 					pmap_tlb_shootdown(pve->pv_pmap,
pve              1645 arch/i386/i386/pmapae.c 					    pve->pv_va, opte, &cpumask);
pve              1647 arch/i386/i386/pmapae.c 				opte = i386_atomic_testset_uq(&PDE(pve->pv_pmap,
pve              1648 arch/i386/i386/pmapae.c 				    pdei(pve->pv_va)), 0);
pve              1650 arch/i386/i386/pmapae.c 				    ((vaddr_t)ptes) + pve->pv_ptp->offset,
pve              1657 arch/i386/i386/pmapae.c 				pmap_tlb_shootdown(pve->pv_pmap,
pve              1658 arch/i386/i386/pmapae.c 				    ((vaddr_t)PTE_BASE) + pve->pv_ptp->offset,
pve              1661 arch/i386/i386/pmapae.c 				pve->pv_pmap->pm_stats.resident_count--;
pve              1663 arch/i386/i386/pmapae.c 				if (pve->pv_pmap->pm_ptphint == pve->pv_ptp)
pve              1664 arch/i386/i386/pmapae.c 					pve->pv_pmap->pm_ptphint =
pve              1665 arch/i386/i386/pmapae.c 					    TAILQ_FIRST(&pve->pv_pmap->pm_obj.memq);
pve              1666 arch/i386/i386/pmapae.c 				pve->pv_ptp->wire_count = 0;
pve              1668 arch/i386/i386/pmapae.c 				uvm_pagerealloc(pve->pv_ptp, NULL, 0);
pve              1669 arch/i386/i386/pmapae.c 				TAILQ_INSERT_TAIL(&empty_ptps, pve->pv_ptp,
pve              1673 arch/i386/i386/pmapae.c 		pmap_unmap_ptes_pae(pve->pv_pmap);	/* unlocks pmap */
pve              1705 arch/i386/i386/pmapae.c 	struct pv_entry *pve;
pve              1735 arch/i386/i386/pmapae.c 	for (pve = pvh->pvh_list; pve != NULL && (*myattrs & testbits) == 0;
pve              1736 arch/i386/i386/pmapae.c 	     pve = pve->pv_next) {
pve              1737 arch/i386/i386/pmapae.c 		ptes = pmap_map_ptes_pae(pve->pv_pmap);
pve              1738 arch/i386/i386/pmapae.c 		pte = ptes[atop(pve->pv_va)];
pve              1739 arch/i386/i386/pmapae.c 		pmap_unmap_ptes_pae(pve->pv_pmap);
pve              1766 arch/i386/i386/pmapae.c 	struct pv_entry *pve;
pve              1787 arch/i386/i386/pmapae.c 	for (pve = pvh->pvh_list; pve != NULL; pve = pve->pv_next) {
pve              1789 arch/i386/i386/pmapae.c 		if (!pmap_valid_entry(PDE(pve->pv_pmap, pdei(pve->pv_va))))
pve              1794 arch/i386/i386/pmapae.c 		ptes = pmap_map_ptes_pae(pve->pv_pmap);	/* locks pmap */
pve              1795 arch/i386/i386/pmapae.c 		npte = ptes[atop(pve->pv_va)];
pve              1798 arch/i386/i386/pmapae.c 		if (ptes[atop(pve->pv_va)] != npte) {
pve              1799 arch/i386/i386/pmapae.c 			opte = i386_atomic_testset_uq(&ptes[atop(pve->pv_va)],
pve              1801 arch/i386/i386/pmapae.c 			pmap_tlb_shootdown(pve->pv_pmap,
pve              1802 arch/i386/i386/pmapae.c 			    atop(pve->pv_va), opte, &cpumask);
pve              1804 arch/i386/i386/pmapae.c 		pmap_unmap_ptes_pae(pve->pv_pmap);	/* unlocks pmap */
pve              1972 arch/i386/i386/pmapae.c 	struct pv_entry *pve;
pve              2075 arch/i386/i386/pmapae.c 			pve = pmap_remove_pv(pvh, pmap, va);
pve              2079 arch/i386/i386/pmapae.c 			pve = NULL;
pve              2082 arch/i386/i386/pmapae.c 		pve = NULL;
pve              2101 arch/i386/i386/pmapae.c 		if (pve == NULL) {
pve              2102 arch/i386/i386/pmapae.c 			pve = pmap_alloc_pv(pmap, ALLOCPV_NEED);
pve              2103 arch/i386/i386/pmapae.c 			if (pve == NULL) {
pve              2112 arch/i386/i386/pmapae.c 		pmap_enter_pv(pvh, pve, pmap, va, ptp);
pve              2117 arch/i386/i386/pmapae.c 		if (pve)
pve              2118 arch/i386/i386/pmapae.c 			pmap_free_pv(pmap, pve);