Lines Matching refs:va

156 caddr_t va = ktsb_base; in sfmmu_remap_kernel() local
160 ASSERT(va >= datava + MMU_PAGESIZE4M); in sfmmu_remap_kernel()
163 ASSERT(IS_P2ALIGNED(va, tsbsz)); in sfmmu_remap_kernel()
167 pfn = va_to_pfn(va); in sfmmu_remap_kernel()
181 sfmmu_tteload(kas.a_hat, &tte, va, NULL, flags); in sfmmu_remap_kernel()
183 va += MMU_PAGESIZE4M; in sfmmu_remap_kernel()
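
The sfmmu_remap_kernel() matches above suggest a loop that remaps the kernel TSB one 4M page at a time. A rough reconstruction follows; the TTE construction, the flags value, the tsbsz source and the loop bound are assumptions, not taken from the matches.

caddr_t va = ktsb_base;
uint_t tsbsz;			/* kernel TSB size in bytes; source not in the matches */
pfn_t pfn;
tte_t tte;
uint_t flags = 0;		/* real flags value not visible in the matches */

ASSERT(va >= datava + MMU_PAGESIZE4M);
ASSERT(IS_P2ALIGNED(va, tsbsz));

while (va < ktsb_base + tsbsz) {	/* loop bound assumed */
	pfn = va_to_pfn(va);
	/* ... build a 4M kernel TTE for pfn (omitted) ... */
	sfmmu_tteload(kas.a_hat, &tte, va, NULL, flags);
	va += MMU_PAGESIZE4M;
}
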
200 caddr_t va; in sfmmu_clear_user_tsbs() local
204 va = utsb_vabase; in sfmmu_clear_user_tsbs()
205 end_va = va + tsb_slab_size; in sfmmu_clear_user_tsbs()
206 while (va < end_va) { in sfmmu_clear_user_tsbs()
207 vtag_flushpage(va, (uint64_t)ksfmmup); in sfmmu_clear_user_tsbs()
208 va += MMU_PAGESIZE; in sfmmu_clear_user_tsbs()
212 va = utsb4m_vabase; in sfmmu_clear_user_tsbs()
213 end_va = va + tsb_slab_size; in sfmmu_clear_user_tsbs()
214 while (va < end_va) { in sfmmu_clear_user_tsbs()
215 vtag_flushpage(va, (uint64_t)ksfmmup); in sfmmu_clear_user_tsbs()
216 va += MMU_PAGESIZE; in sfmmu_clear_user_tsbs()
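
The sfmmu_clear_user_tsbs() matches cover nearly the whole body, so the loop shape can be read straight off them: both user TSB virtual ranges are demapped one base page at a time. Only the declarations and the comments below are filled in.

caddr_t va, end_va;

/* demap the first user TSB slab */
va = utsb_vabase;
end_va = va + tsb_slab_size;
while (va < end_va) {
	vtag_flushpage(va, (uint64_t)ksfmmup);
	va += MMU_PAGESIZE;
}

/* demap the 4M user TSB slab */
va = utsb4m_vabase;
end_va = va + tsb_slab_size;
while (va < end_va) {
	vtag_flushpage(va, (uint64_t)ksfmmup);
	va += MMU_PAGESIZE;
}
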
254 caddr_t va = ktsb_base; in sfmmu_set_tlb() local
260 (void) prom_dtlb_load(index, tte, va); in sfmmu_set_tlb()
261 va += MMU_PAGESIZE4M; in sfmmu_set_tlb()
288 kdi_tlb_page_lock(caddr_t va, int do_dtlb) in kdi_tlb_page_lock() argument
291 pfn_t pfn = va_to_pfn(va); in kdi_tlb_page_lock()
297 vtag_flushpage(va, (uint64_t)ksfmmup); in kdi_tlb_page_lock()
299 sfmmu_itlb_ld_kva(va, &tte); in kdi_tlb_page_lock()
301 sfmmu_dtlb_ld_kva(va, &tte); in kdi_tlb_page_lock()
306 kdi_tlb_page_unlock(caddr_t va, int do_dtlb) in kdi_tlb_page_unlock() argument
308 vtag_flushpage(va, (uint64_t)ksfmmup); in kdi_tlb_page_unlock()
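
For the kdi routines the matches give the call sequence but not the TTE setup. The sketch below keeps only the calls that actually appear; the locked-TTE construction and the do_dtlb test are assumptions.

void
kdi_tlb_page_lock(caddr_t va, int do_dtlb)
{
	tte_t tte;
	pfn_t pfn = va_to_pfn(va);

	/* ... build a locked kernel TTE for pfn (not in the matches) ... */

	vtag_flushpage(va, (uint64_t)ksfmmup);	/* drop any existing mapping */
	sfmmu_itlb_ld_kva(va, &tte);		/* load into the ITLB */
	if (do_dtlb)				/* condition assumed from the argument name */
		sfmmu_dtlb_ld_kva(va, &tte);
}

void
kdi_tlb_page_unlock(caddr_t va, int do_dtlb)
{
	vtag_flushpage(va, (uint64_t)ksfmmup);	/* demap the locked page */
}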