hv: remove pgentry_present field in struct pgtable

The page table entry present check is page table type
  specific and static, e.g. only bit 0 of a page table
  entry needs to be checked for MMU page tables and
  bits 2~0 for EPT page tables, hence there is no need
  to perform this check through a callback function every time.

  This patch removes the 'pgentry_present' callback field and
  adds a new bitmask field for the page entry present check.
  This gives better performance, especially when the
  check is executed frequently.

Tracked-On: #7327
Signed-off-by: Yonghua Huang <yonghua.huang@intel.com>
Reviewed-by: Eddie Dong <eddie.dong@intel.com>
Reviewed-by: Fei Li <fei1.li@intel.com>
Reviewed-by: Yu Wang <yu1.wang@intel.com>
Yonghua Huang 2022-04-18 15:41:53 +03:00 committed by acrnsi-robot
parent deb35a0de9
commit 80292a482d
4 changed files with 25 additions and 30 deletions
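
For readers skimming the diff below, here is a minimal standalone sketch of the pattern this patch switches to: the per-table-type present callback is replaced by a mask stored in the table descriptor plus one shared static inline helper, so the check compiles to a simple AND instead of an indirect call. The names and mask values in this sketch are illustrative stand-ins; the real EPT_RWX, PAGE_PRESENT and struct pgtable definitions are the ones touched in the diff.

#include <stdbool.h>
#include <stdint.h>

/* Illustrative mask values; the real EPT_RWX and PAGE_PRESENT macros
 * come from the hypervisor page-table headers. */
#define SKETCH_EPT_RWX       0x7UL   /* EPT present check: bits 2~0 (R/W/X) */
#define SKETCH_PAGE_PRESENT  0x1UL   /* MMU present check: bit 0 (P) */

/* Cut-down stand-in for struct pgtable: only the new field is shown. */
struct pgtable_sketch {
	uint64_t pgentry_present_mask;   /* SKETCH_EPT_RWX or SKETCH_PAGE_PRESENT */
};

/* After this patch, every present check is a shared inline mask test
 * instead of an indirect call through a per-type callback such as
 * ept_pgentry_present()/ppt_pgentry_present(). */
static inline bool pgentry_present_sketch(const struct pgtable_sketch *table, uint64_t pte)
{
	return ((table->pgentry_present_mask & pte) != 0UL);
}

A caller that previously wrote 'if (table->pgentry_present(*pte) == 0UL)' now writes 'if (!pgentry_present(table, *pte))', which the compiler can fold into a single AND-and-test with no function-pointer dereference.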


@@ -146,11 +146,6 @@ static inline bool use_large_page(enum _page_table_level level, uint64_t prot)
return ret;
}
-static inline uint64_t ept_pgentry_present(uint64_t pte)
-{
-return pte & EPT_RWX;
-}
static inline void ept_clflush_pagewalk(const void* etry)
{
iommu_flush_cache(etry, sizeof(uint64_t));
@@ -188,7 +183,7 @@ void init_ept_pgtable(struct pgtable *table, uint16_t vm_id)
table->pool = &ept_page_pool[vm_id];
table->default_access_right = EPT_RWX;
-table->pgentry_present = ept_pgentry_present;
+table->pgentry_present_mask = EPT_RWX;
table->clflush_pagewalk = ept_clflush_pagewalk;
table->large_page_support = ept_large_page_support;
@@ -439,12 +434,12 @@ void walk_ept_table(struct acrn_vm *vm, pge_handler cb)
for (i = 0UL; i < PTRS_PER_PML4E; i++) {
pml4e = pml4e_offset((uint64_t *)get_eptp(vm), i << PML4E_SHIFT);
-if (table->pgentry_present(*pml4e) == 0UL) {
+if (!pgentry_present(table, (*pml4e))) {
continue;
}
for (j = 0UL; j < PTRS_PER_PDPTE; j++) {
pdpte = pdpte_offset(pml4e, j << PDPTE_SHIFT);
-if (table->pgentry_present(*pdpte) == 0UL) {
+if (!pgentry_present(table, (*pdpte))) {
continue;
}
if (pdpte_large(*pdpte) != 0UL) {
@@ -453,7 +448,7 @@ void walk_ept_table(struct acrn_vm *vm, pge_handler cb)
}
for (k = 0UL; k < PTRS_PER_PDE; k++) {
pde = pde_offset(pdpte, k << PDE_SHIFT);
-if (table->pgentry_present(*pde) == 0UL) {
+if (!pgentry_present(table, (*pde))) {
continue;
}
if (pde_large(*pde) != 0UL) {
@@ -462,7 +457,7 @@ void walk_ept_table(struct acrn_vm *vm, pge_handler cb)
}
for (m = 0UL; m < PTRS_PER_PTE; m++) {
pte = pte_offset(pde, m << PTE_SHIFT);
-if (table->pgentry_present(*pte) != 0UL) {
+if (pgentry_present(table, (*pte))) {
cb(pte, PTE_SIZE);
}
}


@@ -80,19 +80,15 @@ static inline void ppt_clflush_pagewalk(const void* entry __attribute__((unused)
{
}
-static inline uint64_t ppt_pgentry_present(uint64_t pte)
-{
-return pte & PAGE_PRESENT;
-}
static inline void ppt_nop_tweak_exe_right(uint64_t *entry __attribute__((unused))) {}
static inline void ppt_nop_recover_exe_right(uint64_t *entry __attribute__((unused))) {}
static const struct pgtable ppt_pgtable = {
.default_access_right = (PAGE_PRESENT | PAGE_RW | PAGE_USER),
+.pgentry_present_mask = PAGE_PRESENT,
.pool = &ppt_page_pool,
.large_page_support = ppt_large_page_support,
-.pgentry_present = ppt_pgentry_present,
.clflush_pagewalk = ppt_clflush_pagewalk,
.tweak_exe_right = ppt_nop_tweak_exe_right,
.recover_exe_right = ppt_nop_recover_exe_right,


@@ -47,7 +47,7 @@ static void try_to_free_pgtable_page(const struct pgtable *table,
for (index = 0UL; index < PTRS_PER_PTE; index++) {
uint64_t *pte = pt_page + index;
-if ((table->pgentry_present(*pte) != 0UL)) {
+if (pgentry_present(table, (*pte))) {
break;
}
}
@@ -142,7 +142,7 @@ static void modify_or_del_pte(uint64_t *pde, uint64_t vaddr_start, uint64_t vadd
for (; index < PTRS_PER_PTE; index++) {
uint64_t *pte = pt_page + index;
-if ((table->pgentry_present(*pte) == 0UL)) {
+if (!pgentry_present(table, (*pte))) {
/*suppress warning message for low memory (< 1MBytes),as service VM
* will update MTTR attributes for this region by default whether it
* is present or not.
@@ -182,7 +182,7 @@ static void modify_or_del_pde(uint64_t *pdpte, uint64_t vaddr_start, uint64_t va
uint64_t *pde = pd_page + index;
uint64_t vaddr_next = (vaddr & PDE_MASK) + PDE_SIZE;
-if (table->pgentry_present(*pde) == 0UL) {
+if (!pgentry_present(table, (*pde))) {
if (type == MR_MODIFY) {
pr_warn("%s, addr: 0x%lx pde is not present.\n", __func__, vaddr);
}
@@ -229,7 +229,7 @@ static void modify_or_del_pdpte(const uint64_t *pml4e, uint64_t vaddr_start, uin
uint64_t *pdpte = pdpt_page + index;
uint64_t vaddr_next = (vaddr & PDPTE_MASK) + PDPTE_SIZE;
-if (table->pgentry_present(*pdpte) == 0UL) {
+if (!pgentry_present(table, (*pdpte))) {
if (type == MR_MODIFY) {
pr_warn("%s, vaddr: 0x%lx pdpte is not present.\n", __func__, vaddr);
}
@@ -283,7 +283,7 @@ void pgtable_modify_or_del_map(uint64_t *pml4_page, uint64_t vaddr_base, uint64_
while (vaddr < vaddr_end) {
vaddr_next = (vaddr & PML4E_MASK) + PML4E_SIZE;
pml4e = pml4e_offset(pml4_page, vaddr);
-if ((table->pgentry_present(*pml4e) == 0UL) && (type == MR_MODIFY)) {
+if ((!pgentry_present(table, (*pml4e))) && (type == MR_MODIFY)) {
ASSERT(false, "invalid op, pml4e not present");
} else {
modify_or_del_pdpte(pml4e, vaddr, vaddr_end, prot_set, prot_clr, table, type);
@@ -309,7 +309,7 @@ static void add_pte(const uint64_t *pde, uint64_t paddr_start, uint64_t vaddr_st
for (; index < PTRS_PER_PTE; index++) {
uint64_t *pte = pt_page + index;
-if (table->pgentry_present(*pte) != 0UL) {
+if (pgentry_present(table, (*pte))) {
pr_fatal("%s, pte 0x%lx is already present!\n", __func__, vaddr);
} else {
set_pgentry(pte, paddr | prot, table);
@@ -345,7 +345,7 @@ static void add_pde(const uint64_t *pdpte, uint64_t paddr_start, uint64_t vaddr_
if (pde_large(*pde) != 0UL) {
pr_fatal("%s, pde 0x%lx is already present!\n", __func__, vaddr);
} else {
-if (table->pgentry_present(*pde) == 0UL) {
+if (!pgentry_present(table, (*pde))) {
if (table->large_page_support(IA32E_PD, prot) &&
mem_aligned_check(paddr, PDE_SIZE) &&
mem_aligned_check(vaddr, PDE_SIZE) &&
@@ -394,7 +394,7 @@ static void add_pdpte(const uint64_t *pml4e, uint64_t paddr_start, uint64_t vadd
if (pdpte_large(*pdpte) != 0UL) {
pr_fatal("%s, pdpte 0x%lx is already present!\n", __func__, vaddr);
} else {
-if (table->pgentry_present(*pdpte) == 0UL) {
+if (!pgentry_present(table, (*pdpte))) {
if (table->large_page_support(IA32E_PDPT, prot) &&
mem_aligned_check(paddr, PDPTE_SIZE) &&
mem_aligned_check(vaddr, PDPTE_SIZE) &&
@@ -444,7 +444,7 @@ void pgtable_add_map(uint64_t *pml4_page, uint64_t paddr_base, uint64_t vaddr_ba
while (vaddr < vaddr_end) {
vaddr_next = (vaddr & PML4E_MASK) + PML4E_SIZE;
pml4e = pml4e_offset(pml4_page, vaddr);
-if (table->pgentry_present(*pml4e) == 0UL) {
+if (!pgentry_present(table, (*pml4e))) {
void *pdpt_page = alloc_page(table->pool);
construct_pgentry(pml4e, pdpt_page, table->default_access_right, table);
}
@@ -507,7 +507,6 @@ void *pgtable_create_trusty_root(const struct pgtable *table,
return pml4_base;
}
/**
* @pre (pml4_page != NULL) && (pg_size != NULL)
*/
@@ -518,25 +517,25 @@ const uint64_t *pgtable_lookup_entry(uint64_t *pml4_page, uint64_t addr, uint64_
uint64_t *pml4e, *pdpte, *pde, *pte;
pml4e = pml4e_offset(pml4_page, addr);
-present = (table->pgentry_present(*pml4e) != 0UL);
+present = pgentry_present(table, (*pml4e));
if (present) {
pdpte = pdpte_offset(pml4e, addr);
-present = (table->pgentry_present(*pdpte) != 0UL);
+present = pgentry_present(table, (*pdpte));
if (present) {
if (pdpte_large(*pdpte) != 0UL) {
*pg_size = PDPTE_SIZE;
pret = pdpte;
} else {
pde = pde_offset(pdpte, addr);
-present = (table->pgentry_present(*pde) != 0UL);
+present = pgentry_present(table, (*pde));
if (present) {
if (pde_large(*pde) != 0UL) {
*pg_size = PDE_SIZE;
pret = pde;
} else {
pte = pte_offset(pde, addr);
-present = (table->pgentry_present(*pte) != 0UL);
+present = pgentry_present(table, (*pte));
if (present) {
*pg_size = PTE_SIZE;
pret = pte;


@@ -170,14 +170,19 @@ enum _page_table_level {
struct pgtable {
uint64_t default_access_right;
+uint64_t pgentry_present_mask;
struct page_pool *pool;
bool (*large_page_support)(enum _page_table_level level, uint64_t prot);
-uint64_t (*pgentry_present)(uint64_t pte);
void (*clflush_pagewalk)(const void *p);
void (*tweak_exe_right)(uint64_t *entry);
void (*recover_exe_right)(uint64_t *entry);
};
+static inline bool pgentry_present(const struct pgtable *table, uint64_t pte)
+{
+return ((table->pgentry_present_mask & (pte)) != 0UL);
+}
/**
* @brief Address space translation
*