Lines Matching defs:context (KVM MMU context setup in arch/x86/kvm/mmu.c)

3452 				   struct kvm_mmu *context)
3454 context->page_fault = nonpaging_page_fault;
3455 context->gva_to_gpa = nonpaging_gva_to_gpa;
3456 context->sync_page = nonpaging_sync_page;
3457 context->invlpg = nonpaging_invlpg;
3458 context->update_pte = nonpaging_update_pte;
3459 context->root_level = 0;
3460 context->shadow_root_level = PT32E_ROOT_LEVEL;
3461 context->root_hpa = INVALID_PAGE;
3462 context->direct_map = true;
3463 context->nx = false;
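
The block at lines 3452-3463 is the non-paging initializer: with guest paging disabled, every callback is the nonpaging_* variant, the shadow root is PAE-format (PT32E_ROOT_LEVEL), and direct_map is true. The gva_to_gpa hook presumably degenerates to an identity translation, since there are no guest page tables to walk; a minimal sketch of that idea (not the verbatim kernel body):

static gpa_t nonpaging_gva_to_gpa(struct kvm_vcpu *vcpu, gva_t vaddr,
				  u32 access, struct x86_exception *exception)
{
	/* No guest paging: a guest "virtual" address is already a guest
	 * physical address, so translation is the identity and never
	 * reports a fault.
	 */
	if (exception)
		exception->error_code = 0;
	return vaddr;
}
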
3522 struct kvm_mmu *context)
3529 context->bad_mt_xwr = 0;
3531 if (!context->nx)
3543 switch (context->root_level) {
3546 context->rsvd_bits_mask[0][1] = 0;
3547 context->rsvd_bits_mask[0][0] = 0;
3548 context->rsvd_bits_mask[1][0] = context->rsvd_bits_mask[0][0];
3551 context->rsvd_bits_mask[1][1] = 0;
3557 context->rsvd_bits_mask[1][1] = rsvd_bits(17, 21);
3560 context->rsvd_bits_mask[1][1] = rsvd_bits(13, 21);
3563 context->rsvd_bits_mask[0][2] =
3566 context->rsvd_bits_mask[0][1] = exb_bit_rsvd |
3568 context->rsvd_bits_mask[0][0] = exb_bit_rsvd |
3570 context->rsvd_bits_mask[1][1] = exb_bit_rsvd |
3573 context->rsvd_bits_mask[1][0] = context->rsvd_bits_mask[0][0];
3576 context->rsvd_bits_mask[0][3] = exb_bit_rsvd |
3578 context->rsvd_bits_mask[0][2] = exb_bit_rsvd |
3580 context->rsvd_bits_mask[0][1] = exb_bit_rsvd |
3582 context->rsvd_bits_mask[0][0] = exb_bit_rsvd |
3584 context->rsvd_bits_mask[1][3] = context->rsvd_bits_mask[0][3];
3585 context->rsvd_bits_mask[1][2] = exb_bit_rsvd |
3588 context->rsvd_bits_mask[1][1] = exb_bit_rsvd |
3591 context->rsvd_bits_mask[1][0] = context->rsvd_bits_mask[0][0];
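
Lines 3522-3591 are from reset_rsvds_bits_mask(), which fills context->rsvd_bits_mask[large][level-1] for the guest's own page-table format: index [0][...] applies to ordinary entries, [1][...] to large-page (PSE/2M/1G) entries, and the switch on context->root_level (line 3543) distinguishes legacy 32-bit, PAE and 4-level paging. The rsvd_bits(lo, hi) helper used at lines 3557/3560 presumably just builds an inclusive bit mask; a sketch under that assumption:

/* Hedged sketch: a mask with bits s..e (inclusive) set. */
static inline u64 rsvd_bits(int s, int e)
{
	return ((1ULL << (e - s + 1)) - 1) << s;
}

For example, rsvd_bits(13, 21) at line 3560 marks PDE bits 21:13 as reserved for a 4MB mapping when PSE36 is not available, while rsvd_bits(17, 21) at line 3557 leaves bits 16:13 usable for the extra physical-address bits that PSE36 provides.
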
3597 struct kvm_mmu *context, bool execonly)
3602 context->rsvd_bits_mask[0][3] =
3604 context->rsvd_bits_mask[0][2] =
3606 context->rsvd_bits_mask[0][1] =
3608 context->rsvd_bits_mask[0][0] = rsvd_bits(maxphyaddr, 51);
3611 context->rsvd_bits_mask[1][3] = context->rsvd_bits_mask[0][3];
3612 context->rsvd_bits_mask[1][2] =
3614 context->rsvd_bits_mask[1][1] =
3616 context->rsvd_bits_mask[1][0] = context->rsvd_bits_mask[0][0];
3624 context->bad_mt_xwr |= (1ull << pte);
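
Lines 3597-3624 are the EPT counterpart, reset_rsvds_bits_mask_ept(). Besides the per-level reserved-physical-address masks, it builds context->bad_mt_xwr: a 64-bit map in which bit n is set when the low six bits of an EPT entry (the XWR permissions plus the memory type) equal n and form an illegal combination, which is what the |= at line 3624 accumulates. A hedged sketch of how a walker could consume both structures (hypothetical helper name, not the verbatim kernel code):

/* Bit 7 of an entry (the large/leaf bit) selects which reserved-bits
 * mask applies; bad_mt_xwr is indexed by the entry's low 6 bits.
 */
static bool gpte_has_rsvd_bits(struct kvm_mmu *mmu, u64 gpte, int level)
{
	int bit7 = (gpte >> 7) & 1;

	return (gpte & mmu->rsvd_bits_mask[bit7][level - 1]) != 0 ||
	       (mmu->bad_mt_xwr & (1ULL << (gpte & 0x3f))) != 0;
}
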
3712 struct kvm_mmu *context,
3715 context->nx = is_nx(vcpu);
3716 context->root_level = level;
3718 reset_rsvds_bits_mask(vcpu, context);
3719 update_permission_bitmask(vcpu, context, false);
3720 update_last_pte_bitmap(vcpu, context);
3723 context->page_fault = paging64_page_fault;
3724 context->gva_to_gpa = paging64_gva_to_gpa;
3725 context->sync_page = paging64_sync_page;
3726 context->invlpg = paging64_invlpg;
3727 context->update_pte = paging64_update_pte;
3728 context->shadow_root_level = level;
3729 context->root_hpa = INVALID_PAGE;
3730 context->direct_map = false;
3734 struct kvm_mmu *context)
3736 paging64_init_context_common(vcpu, context, PT64_ROOT_LEVEL);
3740 struct kvm_mmu *context)
3742 context->nx = false;
3743 context->root_level = PT32_ROOT_LEVEL;
3745 reset_rsvds_bits_mask(vcpu, context);
3746 update_permission_bitmask(vcpu, context, false);
3747 update_last_pte_bitmap(vcpu, context);
3749 context->page_fault = paging32_page_fault;
3750 context->gva_to_gpa = paging32_gva_to_gpa;
3751 context->sync_page = paging32_sync_page;
3752 context->invlpg = paging32_invlpg;
3753 context->update_pte = paging32_update_pte;
3754 context->shadow_root_level = PT32E_ROOT_LEVEL;
3755 context->root_hpa = INVALID_PAGE;
3756 context->direct_map = false;
3760 struct kvm_mmu *context)
3762 paging64_init_context_common(vcpu, context, PT32E_ROOT_LEVEL);
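
Lines 3712-3762 show that a single common initializer, paging64_init_context_common(), serves both 4-level long mode (PT64_ROOT_LEVEL at line 3736) and PAE (PT32E_ROOT_LEVEL at line 3762, reached through paging32E_init_context()): PAE entries are 64 bits wide, so the paging64_* walkers handle them, and only legacy 2-level 32-bit paging gets its own paging32_* callbacks (lines 3740-3756).
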
3767 struct kvm_mmu *context = vcpu->arch.walk_mmu;
3769 context->base_role.word = 0;
3770 context->page_fault = tdp_page_fault;
3771 context->sync_page = nonpaging_sync_page;
3772 context->invlpg = nonpaging_invlpg;
3773 context->update_pte = nonpaging_update_pte;
3774 context->shadow_root_level = kvm_x86_ops->get_tdp_level();
3775 context->root_hpa = INVALID_PAGE;
3776 context->direct_map = true;
3777 context->set_cr3 = kvm_x86_ops->set_tdp_cr3;
3778 context->get_cr3 = get_cr3;
3779 context->get_pdptr = kvm_pdptr_read;
3780 context->inject_page_fault = kvm_inject_page_fault;
3783 context->nx = false;
3784 context->gva_to_gpa = nonpaging_gva_to_gpa;
3785 context->root_level = 0;
3787 context->nx = is_nx(vcpu);
3788 context->root_level = PT64_ROOT_LEVEL;
3789 reset_rsvds_bits_mask(vcpu, context);
3790 context->gva_to_gpa = paging64_gva_to_gpa;
3792 context->nx = is_nx(vcpu);
3793 context->root_level = PT32E_ROOT_LEVEL;
3794 reset_rsvds_bits_mask(vcpu, context);
3795 context->gva_to_gpa = paging64_gva_to_gpa;
3797 context->nx = false;
3798 context->root_level = PT32_ROOT_LEVEL;
3799 reset_rsvds_bits_mask(vcpu, context);
3800 context->gva_to_gpa = paging32_gva_to_gpa;
3803 update_permission_bitmask(vcpu, context, false);
3804 update_last_pte_bitmap(vcpu, context);
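
Lines 3767-3804 are from init_kvm_tdp_mmu(). With two-dimensional paging (EPT/NPT) the fault handler becomes tdp_page_fault and direct_map is true, yet nx, root_level, gva_to_gpa and the reserved-bit masks are still filled in from the guest's own paging mode, because KVM still walks the guest page tables in software (for emulation and gva-to-gpa translation). The four groups at lines 3783-3800 are presumably guarded by the usual guest-mode tests; a sketch under that assumption (the conditions themselves are not among the matched lines):

if (!is_paging(vcpu)) {
	/* lines 3783-3785: guest paging disabled */
} else if (is_long_mode(vcpu)) {
	/* lines 3787-3790: 4-level paging */
} else if (is_pae(vcpu)) {
	/* lines 3792-3795: PAE, still walked with the paging64 helpers */
} else {
	/* lines 3797-3800: legacy 32-bit paging */
}
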
3807 void kvm_init_shadow_mmu(struct kvm_vcpu *vcpu, struct kvm_mmu *context)
3814 nonpaging_init_context(vcpu, context);
3816 paging64_init_context(vcpu, context);
3818 paging32E_init_context(vcpu, context);
3820 paging32_init_context(vcpu, context);
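
kvm_init_shadow_mmu() (line 3807) picks one of the four initializers above. The calls at lines 3814-3820 are presumably selected by the same guest-mode checks; roughly (the conditions are not among the matched lines):

if (!is_paging(vcpu))
	nonpaging_init_context(vcpu, context);
else if (is_long_mode(vcpu))
	paging64_init_context(vcpu, context);
else if (is_pae(vcpu))
	paging32E_init_context(vcpu, context);
else
	paging32_init_context(vcpu, context);
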
3830 void kvm_init_shadow_ept_mmu(struct kvm_vcpu *vcpu, struct kvm_mmu *context,
3836 context->shadow_root_level = kvm_x86_ops->get_tdp_level();
3838 context->nx = true;
3839 context->page_fault = ept_page_fault;
3840 context->gva_to_gpa = ept_gva_to_gpa;
3841 context->sync_page = ept_sync_page;
3842 context->invlpg = ept_invlpg;
3843 context->update_pte = ept_update_pte;
3844 context->root_level = context->shadow_root_level;
3845 context->root_hpa = INVALID_PAGE;
3846 context->direct_map = false;
3848 update_permission_bitmask(vcpu, context, true);
3849 reset_rsvds_bits_mask_ept(vcpu, context, execonly);
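
Finally, kvm_init_shadow_ept_mmu() (lines 3830-3849) prepares the context for nested EPT: the callbacks are the ept_* walkers, nx is set unconditionally because EPT entries always carry an execute-permission bit (with the execonly parameter deciding whether execute-only mappings are legal), root_level mirrors shadow_root_level, and the permission bitmap plus the EPT reserved-bit/bad_mt_xwr masks are derived last via update_permission_bitmask() and reset_rsvds_bits_mask_ept().
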