
Searched refs:gpa (Results 1 – 25 of 42) sorted by relevance


/illumos-gate/usr/src/test/bhyve-tests/tests/vmm/
npt_ops.c
296 #define BMAP_IDX(gpa) ((gpa) / (PAGESZ * 8)) argument
297 #define BMAP_BIT(gpa) (((gpa) / PAGESZ) % 8) argument
309 for (uintptr_t gpa = 0; gpa < (TEST_MEM_SZ / 2); gpa += PAGESZ) { in test_op_reset_dirty() local
310 datap[gpa] = 0xff; in test_op_reset_dirty()
315 for (uintptr_t gpa = 0; gpa < TEST_MEM_SZ; gpa += (2 * PAGESZ)) { in test_op_reset_dirty() local
316 bits[BMAP_IDX(gpa)] |= (1 << BMAP_BIT(gpa)); in test_op_reset_dirty()
329 for (uintptr_t gpa = 0; gpa < TEST_MEM_SZ; gpa += PAGESZ) { in test_op_reset_dirty() local
330 const bool is_even_page = (BMAP_BIT(gpa) % 2) == 0; in test_op_reset_dirty()
332 (bits[BMAP_IDX(gpa)] & (1 << BMAP_BIT(gpa))) != 0; in test_op_reset_dirty()
337 "missing dirty bit set at gpa %08lx", gpa); in test_op_reset_dirty()
[all …]
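
The BMAP_IDX/BMAP_BIT macros in the hits above index a one-bit-per-page dirty bitmap. A minimal standalone sketch of the same index arithmetic, assuming a 4096-byte PAGESZ and an illustrative 64 KiB region (the region size and the main() harness are not taken from the test itself):

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#define	PAGESZ		4096UL
#define	TEST_MEM_SZ	(64 * 1024UL)	/* illustrative, not the test's value */

/* One dirty bit per page: byte index into the bitmap, then bit within it. */
#define	BMAP_IDX(gpa)	((gpa) / (PAGESZ * 8))
#define	BMAP_BIT(gpa)	(((gpa) / PAGESZ) % 8)

int
main(void)
{
	uint8_t bits[TEST_MEM_SZ / PAGESZ / 8];

	(void) memset(bits, 0, sizeof (bits));

	/* Mark every other page dirty. */
	for (uintptr_t gpa = 0; gpa < TEST_MEM_SZ; gpa += (2 * PAGESZ)) {
		bits[BMAP_IDX(gpa)] |= (1 << BMAP_BIT(gpa));
	}

	/* Every even-numbered page should now have its bit set. */
	for (uintptr_t gpa = 0; gpa < TEST_MEM_SZ; gpa += PAGESZ) {
		const bool dirty =
		    (bits[BMAP_IDX(gpa)] & (1 << BMAP_BIT(gpa))) != 0;
		assert(dirty == (((gpa / PAGESZ) % 2) == 0));
	}
	return (0);
}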
mem_partial.c
100 for (uintptr_t gpa = 0; gpa < TOTAL_SZ; gpa++) { in main() local
101 uint8_t *ptr = guest_mem + gpa; in main()
113 for (uintptr_t gpa = UPPER_OFF; gpa < UPPER_OFF + UPPER_SZ; gpa++) { in main() local
114 uint8_t *ptr = guest_mem + gpa; in main()
117 gpa, *ptr); in main()
/illumos-gate/usr/src/uts/intel/io/vmm/
vmm_gpt.c
521 gpa += incr; in vmm_gpt_populate_region_lvl()
527 for (; gpa < end; gpa += incr, prev = node) { in vmm_gpt_populate_region_lvl()
530 ASSERT3U(node->vgn_gpa, ==, gpa); in vmm_gpt_populate_region_lvl()
587 uint64_t gpa = addr; in vmm_gpt_populate_region() local
589 while (gpa < end) { in vmm_gpt_populate_region()
604 gpa = next->vgn_gpa; in vmm_gpt_populate_region()
684 uint64_t gpa = addr; in vmm_gpt_vacate_region() local
695 gpa = node->vgn_gpa; in vmm_gpt_vacate_region()
707 gpa = addr; in vmm_gpt_vacate_region()
717 gpa = next->vgn_gpa; in vmm_gpt_vacate_region()
[all …]
vmm_instruction_emul.c
86 uint64_t gpa; member
2239 ASSERT(vie->mmio_req_read.gpa == gpa); in vie_mmio_read()
2256 vie->mmio_req_read.gpa = gpa; in vie_mmio_read()
2263 vie->mmio_req_read.gpa = gpa; in vie_mmio_read()
2284 ASSERT(vie->mmio_req_write.gpa == gpa); in vie_mmio_write()
2297 vie->mmio_req_write.gpa = gpa; in vie_mmio_write()
2303 vie->mmio_req_write.gpa = gpa; in vie_mmio_write()
2321 uint64_t gpa; in vie_emulate_mmio() local
2674 vme->u.mmio.gpa = vie->mmio_req_read.gpa; in vie_exitinfo()
2679 vme->u.mmio.gpa = vie->mmio_req_write.gpa; in vie_exitinfo()
[all …]
vmm_vm.c
363 int err = vmspace_ensure_mapped(vms, gpa, in vmspace_bits_operate()
408 vmspace_clients_invalidate(vms, gpa, len); in vmspace_bits_operate()
554 error = vmspace_map(vmspace, obj, 0, gpa, len, in vmm_mmio_alloc()
871 vmsm = vm_mapping_find(vms, gpa, PAGESIZE); in vmspace_ensure_mapped()
906 ASSERT0(gpa & PAGEOFFSET); in vmspace_lookup_map()
909 vmm_gpt_walk(gpt, gpa, entries, MAX_GPT_LEVEL); in vmspace_lookup_map()
964 for (uintptr_t gpa = addr & PAGEMASK; gpa < end; gpa += PAGESIZE) { in vmspace_populate() local
1284 ASSERT0(gpa & PAGEOFFSET); in vmc_hold_ext()
1304 vmp->vmp_gpa = gpa; in vmc_hold_ext()
1639 const uintptr_t gpa = (uintptr_t)off; in vm_segmap_space() local
[all …]
vmm.c
179 vm_paddr_t gpa; member
842 if (mm->len != 0 && gpa >= mm->gpa && gpa < mm->gpa + mm->len) in vm_mem_allocated()
971 map->gpa = gpa; in vm_mmap_memseg()
988 if (m->gpa == gpa && m->len == len && in vm_munmap_memseg()
1008 if (mm->len == 0 || mm->gpa < *gpa) in vm_mmap_getnext()
1010 if (mmnext == NULL || mm->gpa < mmnext->gpa) in vm_mmap_getnext()
1015 *gpa = mmnext->gpa; in vm_mmap_getnext()
1106 gpa = mm->gpa; in vm_iommu_modify()
1107 while (gpa < mm->gpa + mm->len) { in vm_iommu_modify()
3743 uint64_t gpa; in vm_copy_setup() local
[all …]
/illumos-gate/usr/src/cmd/bhyve/
bootrom.c
76 uint64_t gpa; member
91 offset = addr - var.gpa; in bootrom_var_mem_handler()
136 vm_paddr_t gpa; in bootrom_alloc() local
162 if (gpa < gpa_allocbot) { in bootrom_alloc()
167 gpa = gpa_allocbot; in bootrom_alloc()
174 segoff = gpa - gpa_base; in bootrom_alloc()
182 gpa_alloctop = gpa - 1; in bootrom_alloc()
184 gpa_allocbot = gpa + len; in bootrom_alloc()
188 *gpa_out = gpa; in bootrom_alloc()
299 gpa_alloctop = var.gpa - 1; in bootrom_loadrom()
[all …]
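
bootrom_alloc() above hands out guest-physical ranges inside a fixed ROM window, bumping gpa_allocbot upward for bottom allocations and gpa_alloctop downward for top ones. A simplified sketch of that two-ended bump allocation, with a caller-supplied window and a hypothetical top_down flag (the real function takes different arguments and also tracks the backing segment offset):

#include <stdbool.h>
#include <stdint.h>

/* Window bounds are supplied by the caller; the names here are illustrative. */
static uint64_t gpa_allocbot;	/* next free gpa, growing upward */
static uint64_t gpa_alloctop;	/* last free gpa, growing downward */

void
gpa_alloc_init(uint64_t base, uint64_t size)
{
	gpa_allocbot = base;
	gpa_alloctop = base + size - 1;
}

/* Returns 0 and the allocated gpa, or -1 once the two ends would collide. */
int
gpa_alloc(uint64_t len, bool top_down, uint64_t *gpa_out)
{
	uint64_t gpa;

	if (len == 0 || gpa_alloctop - gpa_allocbot + 1 < len)
		return (-1);
	if (top_down) {
		gpa = gpa_alloctop - len + 1;
		gpa_alloctop = gpa - 1;
	} else {
		gpa = gpa_allocbot;
		gpa_allocbot = gpa + len;
	}
	*gpa_out = gpa;
	return (0);
}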
gdb.c
102 uint64_t gpa; member
897 if (bp->gpa == gpa) in find_breakpoint()
908 uint64_t gpa; in gdb_cpu_breakpoint() local
1008 uint64_t gpa, gva, val; in gdb_read_mem() local
1067 gpa++; in gdb_read_mem()
1092 gpa += bytes; in gdb_read_mem()
1120 uint64_t gpa, gva, val; in gdb_write_mem() local
1183 gpa++; in gdb_write_mem()
1208 gpa += bytes; in gdb_write_mem()
1269 uint64_t gpa; in update_sw_breakpoint() local
[all …]
mem.c
154 typedef int (mem_cb_t)(struct vcpu *vcpu, uint64_t gpa, struct mem_range *mr,
158 mem_read(struct vcpu *vcpu, uint64_t gpa, uint64_t *rval, int size, void *arg) in mem_read() argument
163 error = (*mr->handler)(vcpu, MEM_F_READ, gpa, size, rval, mr->arg1, in mem_read()
169 mem_write(struct vcpu *vcpu, uint64_t gpa, uint64_t wval, int size, void *arg) in mem_write() argument
174 error = (*mr->handler)(vcpu, MEM_F_WRITE, gpa, size, &wval, mr->arg1, in mem_write()
258 return (access_memory(vcpu, mmio->gpa, emulate_mem_cb, mmio)); in emulate_mem()
279 read_mem(struct vcpu *vcpu, uint64_t gpa, uint64_t *rval, int size) in read_mem() argument
286 return (access_memory(vcpu, gpa, rw_mem_cb, &rma)); in read_mem()
290 write_mem(struct vcpu *vcpu, uint64_t gpa, uint64_t wval, int size) in write_mem() argument
297 return (access_memory(vcpu, gpa, rw_mem_cb, &rma)); in write_mem()
basl.c
203 uint64_t gpa; in basl_finish_patch_checksums() local
230 gpa = BHYVE_ACPI_BASE + table->off + checksum->start; in basl_finish_patch_checksums()
231 if ((gpa < BHYVE_ACPI_BASE) || in basl_finish_patch_checksums()
232 (gpa < BHYVE_ACPI_BASE + table->off)) { in basl_finish_patch_checksums()
238 gva = vm_map_gpa(table->ctx, gpa, len); in basl_finish_patch_checksums()
241 __func__, gpa, gpa + len); in basl_finish_patch_checksums()
295 uint64_t gpa, val; local
322 gpa = BHYVE_ACPI_BASE + table->off;
323 if (gpa < BHYVE_ACPI_BASE) {
329 gva = vm_map_gpa(table->ctx, gpa, table->len);
[all …]
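
basl_finish_patch_checksums() above locates each ACPI table in guest memory (BHYVE_ACPI_BASE + table->off) and patches its checksum byte. The ACPI rule itself is that every byte of the table, including the checksum byte, must sum to zero modulo 256; a standalone sketch of that patching step (the buffer handling here is illustrative, not basl's):

#include <stddef.h>
#include <stdint.h>

/* Sum of every byte in the table, modulo 256. */
static uint8_t
acpi_checksum(const uint8_t *table, size_t len)
{
	uint8_t sum = 0;

	for (size_t i = 0; i < len; i++)
		sum += table[i];
	return (sum);
}

/* Patch the checksum byte at 'off' so the whole table sums to zero. */
void
patch_checksum(uint8_t *table, size_t len, size_t off)
{
	table[off] = 0;
	table[off] -= acpi_checksum(table, len);
	/* Now acpi_checksum(table, len) == 0. */
}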
mem.h
57 int read_mem(struct vcpu *vpu, uint64_t gpa, uint64_t *rval, int size);
61 int write_mem(struct vcpu *vcpu, uint64_t gpa, uint64_t wval, int size);
/illumos-gate/usr/src/lib/libvmmapi/common/
vmmapi.c
341 memmap.gpa = gpa; in vm_mmap_memseg()
389 munmap.gpa = gpa; in vm_munmap_memseg()
404 memmap.gpa = *gpa; in vm_mmap_getnext()
407 *gpa = memmap.gpa; in vm_mmap_getnext()
1096 .gpa = gpa, in vm_readwrite_kernemu_device()
1267 pptmmio.gpa = gpa; in vm_map_pptdev_mmio()
1284 pptmmio.gpa = gpa; in vm_unmap_pptdev_mmio()
1365 pptmmio.gpa = gpa; in vm_map_pptdev_mmio()
1738 gpapte.gpa = gpa; in vm_get_gpa_pmap()
1780 *gpa = gg.gpa; in vm_gla2gpa()
[all …]
vmmapi.h
115 int vm_mmap_getnext(struct vmctx *ctx, vm_paddr_t *gpa, int *segid,
142 int vm_mmap_memseg(struct vmctx *ctx, vm_paddr_t gpa, int segid,
145 int vm_munmap_memseg(struct vmctx *ctx, vm_paddr_t gpa, size_t len);
171 int vm_get_gpa_pmap(struct vmctx *, uint64_t gpa, uint64_t *pte, int *num);
173 uint64_t gla, int prot, uint64_t *gpa, int *fault);
176 uint64_t *gpa, int *fault);
239 vm_paddr_t gpa, bool write, int size, uint64_t *value);
256 vm_paddr_t gpa, size_t len, vm_paddr_t hpa);
258 vm_paddr_t gpa, size_t len);
268 int vm_map_pptdev_mmio(struct vmctx *ctx, int pptfd, vm_paddr_t gpa,
[all …]
/illumos-gate/usr/src/uts/intel/io/vmm/intel/
vtd.c
681 KASSERT(gpa + len > gpa, ("%s: invalid gpa range %lx/%lx", __func__, in vtd_update_mapping()
682 gpa, len)); in vtd_update_mapping()
684 "domain maxaddr %lx", __func__, gpa, len, dom->maxaddr)); in vtd_update_mapping()
686 if (gpa & PAGE_MASK) in vtd_update_mapping()
687 panic("vtd_create_mapping: unaligned gpa 0x%0lx", gpa); in vtd_update_mapping()
707 (gpa & (spsize - 1)) == 0 && in vtd_update_mapping()
719 ptpindex = (gpa >> ptpshift) & 0x1FF; in vtd_update_mapping()
740 if ((gpa & ((1UL << ptpshift) - 1)) != 0) in vtd_update_mapping()
741 panic("gpa 0x%lx and ptpshift %d mismatch", gpa, ptpshift); in vtd_update_mapping()
766 vtd_remove_mapping(void *arg, vm_paddr_t gpa, uint64_t len) in vtd_remove_mapping() argument
[all …]
/illumos-gate/usr/src/uts/intel/io/vmm/io/
iommu.h
50 typedef uint64_t (*iommu_create_mapping_t)(void *domain, vm_paddr_t gpa,
52 typedef uint64_t (*iommu_remove_mapping_t)(void *domain, vm_paddr_t gpa,
78 void iommu_create_mapping(void *domain, vm_paddr_t gpa, vm_paddr_t hpa,
80 void iommu_remove_mapping(void *domain, vm_paddr_t gpa, size_t len);
iommu.c
227 iommu_create_mapping(void *domain, vm_paddr_t gpa, vm_paddr_t hpa, size_t len) in iommu_create_mapping() argument
236 mapped = ops->create_mapping(domain, gpa, hpa, remaining); in iommu_create_mapping()
237 gpa += mapped; in iommu_create_mapping()
244 iommu_remove_mapping(void *domain, vm_paddr_t gpa, size_t len) in iommu_remove_mapping() argument
253 unmapped = ops->remove_mapping(domain, gpa, remaining); in iommu_remove_mapping()
254 gpa += unmapped; in iommu_remove_mapping()
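
iommu_create_mapping() and iommu_remove_mapping() above loop because a single backend call may only cover part of the requested range (for example one superpage at a time) and returns how many bytes it handled. A generic sketch of that chunked-mapping loop, using an illustrative callback type rather than the real iommu_ops:

#include <stddef.h>
#include <stdint.h>

typedef uint64_t vm_paddr_t;

/* Maps up to 'len' bytes at gpa/hpa and returns how many bytes it covered. */
typedef uint64_t (*map_chunk_t)(void *domain, vm_paddr_t gpa, vm_paddr_t hpa,
    uint64_t len);

void
map_all(void *domain, map_chunk_t map_chunk, vm_paddr_t gpa, vm_paddr_t hpa,
    size_t len)
{
	uint64_t mapped, remaining;

	/* Assumes the backend always makes forward progress (mapped > 0). */
	for (remaining = len; remaining > 0; remaining -= mapped) {
		mapped = map_chunk(domain, gpa, hpa, remaining);
		gpa += mapped;
		hpa += mapped;
	}
}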
ppt.h
33 int ppt_map_mmio(struct vm *vm, int pptfd, vm_paddr_t gpa, size_t len,
35 int ppt_unmap_mmio(struct vm *vm, int pptfd, vm_paddr_t gpa, size_t len);
42 boolean_t ppt_is_mmio(struct vm *vm, vm_paddr_t gpa);
ppt.c
86 vm_paddr_t gpa; member
880 (void) vm_unmap_mmio(vm, seg->gpa, seg->len); in ppt_unmap_all_mmio()
975 ppt_is_mmio(struct vm *vm, vm_paddr_t gpa) in ppt_is_mmio() argument
992 if (gpa >= seg->gpa && gpa < seg->gpa + seg->len) { in ppt_is_mmio()
1133 ppt_map_mmio(struct vm *vm, int pptfd, vm_paddr_t gpa, size_t len, in ppt_map_mmio() argument
1140 (hpa & PAGEOFFSET) != 0 || gpa + len < gpa || hpa + len < hpa) { in ppt_map_mmio()
1164 err = vm_map_mmio(vm, gpa, len, hpa); in ppt_map_mmio()
1166 seg->gpa = gpa; in ppt_map_mmio()
1197 if (seg->gpa == gpa && seg->len == len) { in ppt_unmap_mmio()
1198 err = vm_unmap_mmio(vm, seg->gpa, seg->len); in ppt_unmap_mmio()
[all …]
vhpet.h
43 int vhpet_mmio_write(struct vm *vm, int vcpuid, uint64_t gpa, uint64_t val,
45 int vhpet_mmio_read(struct vm *vm, int vcpuid, uint64_t gpa, uint64_t *val,
vioapic.h
55 int vioapic_mmio_write(struct vm *vm, int vcpuid, uint64_t gpa, uint64_t wval,
57 int vioapic_mmio_read(struct vm *vm, int vcpuid, uint64_t gpa, uint64_t *rval,
/illumos-gate/usr/src/test/bhyve-tests/tests/inst_emul/
imul.c
45 if (vexit->u.mmio.gpa < MMIO_TEST_BASE || in handle_test_mmio()
46 vexit->u.mmio.gpa >= MMIO_TEST_END) { in handle_test_mmio()
57 const uint16_t addr = vexit->u.mmio.gpa; in handle_test_mmio()
/illumos-gate/usr/src/uts/intel/sys/
vmm_dev.h
63 vm_paddr_t gpa; member
74 vm_paddr_t gpa; member
149 vm_paddr_t gpa; member
202 uint64_t gpa; /* in */ member
235 uint64_t gpa; member
285 uint64_t gpa; member
/illumos-gate/usr/src/test/bhyve-tests/tests/common/
in_guest.c
92 uint64_t gpa, pte_loc; in populate_identity_table() local
95 for (gpa = 0, pte_loc = MEM_LOC_PAGE_TABLE_2M; in populate_identity_table()
96 gpa < 0x100000000; in populate_identity_table()
100 for (uint_t i = 0; i < 512; i++, ptep++, gpa += 0x200000) { in populate_identity_table()
101 *ptep = gpa | PT_VALID | PT_WRITABLE | PT_PAGESIZE; in populate_identity_table()
103 if (gpa >= 0xc0000000) { in populate_identity_table()
108 assert(gpa == 0x100000000 && pte_loc == MEM_LOC_PAGE_TABLE_1G); in populate_identity_table()
227 vexit->u.mmio.gpa, in test_fail_vmexit()
650 if (vexit->u.mmio.gpa != addr || in vexit_match_mmio()
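
populate_identity_table() above builds identity mappings for the low 4 GiB out of 2 MiB large-page entries (gpa | PT_VALID | PT_WRITABLE | PT_PAGESIZE). A minimal sketch of filling one 512-entry directory that way, using the standard x86 PDE bit values for the PT_* flags (the fill_identity_2m helper is illustrative, not part of the test library):

#include <stdint.h>

#define	PT_VALID	0x001UL		/* present */
#define	PT_WRITABLE	0x002UL		/* writable */
#define	PT_PAGESIZE	0x080UL		/* PS: entry maps a 2 MiB page */

/*
 * Fill one 512-entry page directory with identity-mapped 2 MiB entries
 * starting at 'base', so a single table covers base .. base + 1 GiB.
 */
void
fill_identity_2m(uint64_t *table, uint64_t base)
{
	uint64_t gpa = base;

	for (unsigned int i = 0; i < 512; i++, gpa += 0x200000) {
		table[i] = gpa | PT_VALID | PT_WRITABLE | PT_PAGESIZE;
	}
}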
/illumos-gate/usr/src/uts/intel/io/vmm/sys/
vmm_instruction_emul.h
55 const struct vm_guest_paging *paging, uint64_t gpa);
95 uint64_t gla, int prot, uint64_t *gpa, int *is_fault);
102 struct vm_guest_paging *paging, uint64_t gla, int prot, uint64_t *gpa,
vmm_kernel.h
162 int vm_mmap_memseg(struct vm *vm, vm_paddr_t gpa, int segid, vm_ooffset_t off,
164 int vm_munmap_memseg(struct vm *vm, vm_paddr_t gpa, size_t len);
167 int vm_map_mmio(struct vm *vm, vm_paddr_t gpa, size_t len, vm_paddr_t hpa);
168 int vm_unmap_mmio(struct vm *vm, vm_paddr_t gpa, size_t len);
177 int vm_mmap_getnext(struct vm *vm, vm_paddr_t *gpa, int *segid,
182 bool vm_mem_allocated(struct vm *vm, int vcpuid, vm_paddr_t gpa);
224 int vm_service_mmio_read(struct vm *vm, int cpuid, uint64_t gpa, uint64_t *rval,
226 int vm_service_mmio_write(struct vm *vm, int cpuid, uint64_t gpa, uint64_t wval,
346 uint64_t gpa; member
