
Searched refs:pfn_to_pa (Results 1 – 23 of 23) sorted by relevance

/illumos-gate/usr/src/uts/i86pc/vm/
hat_pte.h
104 (pa_to_ma(pfn_to_pa(pfn)) | mmu.ptp_bits[(l) + 1])
107 ((pfn_to_pa(pfn & ~PFN_IS_FOREIGN_MFN) | mmu.pte_bits[l]) | \
109 (pa_to_ma(pfn_to_pa(pfn)) | mmu.pte_bits[l]))
112 (pfn_to_pa(pfn) | mmu.ptp_bits[(l) + 1])
114 (pfn_to_pa(pfn) | mmu.pte_bits[l])
295 #define pfn_to_pa(pfn) (mmu_ptob((paddr_t)(pfn))) macro
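For context, the definition at hat_pte.h line 295 above simply scales a page frame number up by the MMU page size (mmu_ptob() is the pages-to-bytes conversion). A minimal stand-alone sketch of that conversion, assuming 4 KB base pages (MMU_PAGESHIFT of 12) and hypothetical local stand-ins for the kernel types rather than the real headers:

#include <stdint.h>
#include <stdio.h>

typedef uint64_t paddr_t;       /* hypothetical stand-ins for the kernel types */
typedef uint64_t pfn_t;
#define MMU_PAGESHIFT   12      /* assumes 4 KB base pages */

/* pfn_to_pa(): shift the page frame number up into a physical byte address. */
static paddr_t
pfn_to_pa(pfn_t pfn)
{
        return ((paddr_t)pfn << MMU_PAGESHIFT);
}

int
main(void)
{
        /* PFN 0x1234 corresponds to physical address 0x1234000. */
        printf("0x%llx\n", (unsigned long long)pfn_to_pa(0x1234));
        return (0);
}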
hat_kdi.c
121 return (pfn_to_pa(mfn) | (pa & MMU_PAGEOFFSET)); in kdi_ptom()
140 return (pfn_to_pa(pfn) | (ma & MMU_PAGEOFFSET)); in kdi_mtop()
171 *pap = pfn_to_pa(pfn) + (vaddr & MMU_PAGEOFFSET); in kdi_vtop()
180 *pap = pfn_to_pa(CPU->cpu_current_hat->hat_htable->ht_pfn); in kdi_vtop()
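The kdi_ptom()/kdi_mtop()/kdi_vtop() hits above all follow the same pattern: pfn_to_pa() supplies the page-aligned base, and the low bits of the original address, masked with MMU_PAGEOFFSET, supply the offset within that page. A hedged, self-contained sketch of that composition (the PFN lookup itself is elided), again assuming 4 KB pages and hypothetical stand-in types:

#include <stdint.h>

typedef uint64_t paddr_t;       /* hypothetical stand-ins for the kernel types */
typedef uint64_t pfn_t;
#define MMU_PAGESHIFT   12
#define MMU_PAGEOFFSET  ((1ULL << MMU_PAGESHIFT) - 1)   /* low 12 bits */

/* Rebuild a full physical address from a PFN plus the in-page offset. */
static paddr_t
vtop_sketch(pfn_t pfn, uintptr_t vaddr)
{
        return (((paddr_t)pfn << MMU_PAGESHIFT) + (vaddr & MMU_PAGEOFFSET));
}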
htable.c
913 block_zero_no_xmm(kpm_vbase + pfn_to_pa(hat->hat_user_ptable), in htable_alloc()
1562 ptep = kbm_remap_window(pfn_to_pa(pfn), 0); in htable_attach()
1573 ptep = kbm_remap_window(pfn_to_pa(pfn), 0); in htable_attach()
1940 caddr_t va = kbm_remap_window(pfn_to_pa(pfn), 1); in x86pte_mapin()
2165 ma = pa_to_ma(PT_INDEX_PHYSADDR(pfn_to_pa(ht->ht_pfn), entry)); in x86pte_cas()
2174 ma = pa_to_ma(PT_INDEX_PHYSADDR(pfn_to_pa( in x86pte_cas()
2236 ma = pa_to_ma(PT_INDEX_PHYSADDR(pfn_to_pa(ht->ht_pfn), entry)); in x86pte_inval()
2390 set_pteval(pfn_to_pa(dest->ht_pfn), entry, in x86pte_copy()
2395 pfn_to_pa(dest->ht_hat->hat_user_ptable), in x86pte_copy()
kboot_mmu.c
366 x86pte_t pte_val = pa_to_ma(pfn_to_pa(pfn)) | PT_WRITABLE | in kbm_remap()
vm_machdep.c
1060 return (address_in_memlist(phys_install, pfn_to_pa(pf), 1)); in pf_is_memory()
1212 if (pa_to_ma(pfn_to_pa(pp->p_pagenum)) < in check_dma()
1215 if (pa_to_ma(pfn_to_pa(pp->p_pagenum)) >= in check_dma()
3224 pgaddr = pa_to_ma(pfn_to_pa(pp->p_pagenum)); in page_get_mnode_anylist()
3316 pgaddr = pa_to_ma(pfn_to_pa(pp->p_pagenum)); in page_get_mnode_anylist()
hat_i86.c
2185 !IS_P2ALIGNED(pfn_to_pa(pfn), pgsize)) in hat_memload_array()
4384 p = PT_INDEX_PHYSADDR(pfn_to_pa(ht->ht_pfn), entry); in hat_mempte_setup()
/illumos-gate/usr/src/uts/i86xpv/os/
xen_mmu.c
160 mach_addr = pa_to_ma(pfn_to_pa(va_to_pfn( in xen_relocate_start_info()
178 pa_to_ma(pfn_to_pa(va_to_pfn((caddr_t)mfn_list + off))); in xen_relocate_start_info()
228 kbm_map_ma(pfn_to_pa(xen_info->console.domU.mfn), addr, 0); in xen_relocate_start_info()
394 return (pfn_to_pa(pfn) + (ma & MMU_PAGEOFFSET)); in ma_to_pa()
mach_kdi.c
181 gdtpa = pfn_to_pa(va_to_pfn(bgdt)); in boot_kdi_tmpinit()
balloon.c
250 metasz = pfn_to_pa(metapgs); in balloon_init_new_pages()
354 memlist_add(pfn_to_pa(meta_start), num_pages, &mem->memlist, in balloon_init_new_pages()
xpv_panic.c
166 pte = pfn_to_pa(pfn) | PT_VALID; in xpv_panic_map()
/illumos-gate/usr/src/uts/intel/io/vmm/
vmm_sol_ept.c
65 const uint64_t paddr = pfn_to_pa(pfn) & EPT_PA_MASK; in ept_map_table()
72 const uint64_t paddr = pfn_to_pa(pfn) & EPT_PA_MASK; in ept_map_page()
vmm_sol_rvi.c
81 const uint64_t paddr = pfn_to_pa(pfn); in rvi_map_table()
91 const uint64_t paddr = pfn_to_pa(pfn); in rvi_map_page()
/illumos-gate/usr/src/uts/i86pc/io/gfx_private/
gfxp_vm.c
162 *pa = pa_to_ma(pfn_to_pa(hat_getpfnum(as->a_hat, addr))); in gfxp_va2pa()
164 *pa = pfn_to_pa(hat_getpfnum(as->a_hat, addr)); in gfxp_va2pa()
253 return (pfn_to_pa(xen_assign_pfn(btop(paddr)))); in gfxp_convert_addr()
/illumos-gate/usr/src/uts/i86pc/io/
rootnex.c
1152 pbase = pfn_to_pa(xen_assign_pfn(mmu_btop(rbase))); in rootnex_map_regspec()
2740 paddr = pfn_to_pa(pp->p_pagenum) + offset; in rootnex_need_bounce_seg()
2750 paddr = pfn_to_pa(pplist[pcnt]->p_pagenum); in rootnex_need_bounce_seg()
2786 paddr = pfn_to_pa(pp->p_pagenum); in rootnex_need_bounce_seg()
2791 paddr = pfn_to_pa(pplist[pcnt]->p_pagenum); in rootnex_need_bounce_seg()
2882 paddr = pfn_to_pa(pp->p_pagenum) + offset; in rootnex_get_sgl()
2903 paddr = pfn_to_pa(pplist[pcnt]->p_pagenum); in rootnex_get_sgl()
2988 paddr = pfn_to_pa(pp->p_pagenum); in rootnex_get_sgl()
2993 paddr = pfn_to_pa(pplist[pcnt]->p_pagenum); in rootnex_get_sgl()
3779 paddr = pfn_to_pa(hat_getpfnum(kas.a_hat, in rootnex_setup_cookie()
[all …]
immu_qinv.c
456 qinv->qinv_table.qinv_mem_paddr = pfn_to_pa( in qinv_setup()
481 qinv->qinv_sync.qinv_mem_paddr = pfn_to_pa( in qinv_setup()
immu_dvma.c
469 pgtable->hwpg_paddr = pfn_to_pa(hat_getpfnum(kas.a_hat, vaddr)); in pgtable_ctor()
2599 paddr = pfn_to_pa(page->p_pagenum) + offset; in immu_map_dvmaseg()
2609 paddr = pfn_to_pa(pparray[pcnt]->p_pagenum) + offset; in immu_map_dvmaseg()
2613 paddr = pfn_to_pa(hat_getpfnum(vas->a_hat, in immu_map_dvmaseg()
2662 paddr = pfn_to_pa(page->p_pagenum); in immu_map_dvmaseg()
2666 paddr = pfn_to_pa(pparray[pcnt]->p_pagenum); in immu_map_dvmaseg()
2670 paddr = pfn_to_pa(hat_getpfnum(vas->a_hat, vaddr)); in immu_map_dvmaseg()
immu_intrmap.c
375 intrmap->intrmap_paddr = pfn_to_pa( in init_unit()
immu.c
1344 paddr = pfn_to_pa(hat_getpfnum(kas.a_hat, vaddr)); in immu_init_inv_wait()
/illumos-gate/usr/src/uts/i86pc/os/
pci_cfgacc_x86.c
64 phys_addr = pfn_to_pa(xen_assign_pfn(mmu_btop(phys_addr))) | in pci_cfgacc_map()
mp_implfuncs.c
224 base = pfn_to_pa(xen_assign_pfn(mmu_btop(addr))) | in psm_map_phys_new()
fakebop.c
213 if (physmem != 0 && high_phys > pfn_to_pa(physmem)) in do_bop_phys_alloc()
214 high_phys = pfn_to_pa(physmem); in do_bop_phys_alloc()
2273 pa = pfn_to_pa(xen_assign_pfn(mmu_btop(pa))) | (pa & MMU_PAGEOFFSET); in vmap_phys()
startup.c
863 pfn_addr = pfn_to_pa(pfn); in avail_filter()
888 pfn_addr = pfn_to_pa(pfn); in avail_filter()
/illumos-gate/usr/src/uts/intel/os/
desctbls.c
601 gdtpa = pfn_to_pa(va_to_pfn(gdt0)); in init_gdt()
960 CPU->cpu_m.mcpu_gdtpa = pfn_to_pa(va_to_pfn(gdt)); in init_desctbls()