diff --git a/include/arch/arm/arch/machine/debug.h b/include/arch/arm/arch/machine/debug.h
index b16e13e5be4..164d2d4f391 100644
--- a/include/arch/arm/arch/machine/debug.h
+++ b/include/arch/arm/arch/machine/debug.h
@@ -86,7 +86,9 @@ static inline void setHDCRTrapDebugExceptionState(bool_t enable_trapping)
 {
     word_t hdcr;
 #ifdef CONFIG_ARCH_AARCH64
+    word_t hcr;
     MRS("mdcr_el2", hdcr);
+    MRS("hcr_el2", hcr);
 #else
     MRC(ARM_CP15_HDCR, hdcr);
 #endif
@@ -98,6 +100,11 @@ static inline void setHDCRTrapDebugExceptionState(bool_t enable_trapping)
                  | BIT(HDCR_DEBUG_TDA_SHIFT)
                  | BIT(HDCR_DEBUG_TDRA_SHIFT)
                  | BIT(HDCR_DEBUG_TDOSA_SHIFT));
+#ifdef CONFIG_ARCH_AARCH64
+        // @alwin: I think only this is necessary on aarch64 actually
+        hcr |= (HCR_TGE);
+#endif
+
     } else {
         /* Let the PL1 Guest VM handle debug events on its own */
         hdcr &= ~(BIT(HDCR_DEBUG_TDE_SHIFT)
@@ -105,8 +112,13 @@ static inline void setHDCRTrapDebugExceptionState(bool_t enable_trapping)
                   | BIT(HDCR_DEBUG_TDA_SHIFT)
                   | BIT(HDCR_DEBUG_TDRA_SHIFT)
                   | BIT(HDCR_DEBUG_TDOSA_SHIFT));
+#ifdef CONFIG_ARCH_AARCH64
+        // @alwin: I think only this is necessary on aarch64 actually
+        hcr &= ~HCR_TGE;
+#endif
     }
 #ifdef CONFIG_ARCH_AARCH64
     MSR("mdcr_el2", hdcr);
+    MSR("hcr_el2", hcr);
 #else
     MCR(ARM_CP15_HDCR, hdcr);
 #endif
diff --git a/include/arch/arm/armv/armv8-a/64/armv/vcpu.h b/include/arch/arm/armv/armv8-a/64/armv/vcpu.h
index 64c1f9e8351..7f8d9403ab7 100644
--- a/include/arch/arm/armv/armv8-a/64/armv/vcpu.h
+++ b/include/arch/arm/armv/armv8-a/64/armv/vcpu.h
@@ -627,11 +627,21 @@ static inline void armv_vcpu_boot_init(void)
     /* set the SCTLR_EL1 for running native seL4 threads */
     MSR(REG_SCTLR_EL1, SCTLR_EL1_NATIVE);
     isb();
+
+    initHDCR();
 }
 
 static inline void armv_vcpu_save(vcpu_t *vcpu, UNUSED bool_t active)
 {
     vcpu_save_reg_range(vcpu, seL4_VCPUReg_TTBR0, seL4_VCPUReg_SPSR_EL1);
+
+#ifdef ARM_HYP_CP14_SAVE_AND_RESTORE_VCPU_THREADS
+    /* This is done when we are asked to save and restore the CP14 debug context
+     * of VCPU threads; the register context is saved into the underlying TCB.
+     */
+    saveAllBreakpointState(vcpu->vcpuTCB);
+#endif
+    isb();
 }
 
 static inline void vcpu_enable(vcpu_t *vcpu)
@@ -641,6 +651,13 @@ static inline void vcpu_enable(vcpu_t *vcpu)
     isb();
 
     set_gic_vcpu_ctrl_hcr(vcpu->vgic.hcr);
+#if !defined(ARM_CP14_SAVE_AND_RESTORE_NATIVE_THREADS) && defined(ARM_HYP_CP14_SAVE_AND_RESTORE_VCPU_THREADS)
+    restore_user_debug_context(vcpu->vcpuTCB);
+#endif
+#if defined(ARM_HYP_TRAP_CP14_IN_NATIVE_USER_THREADS)
+    setHDCRTrapDebugExceptionState(false);
+#endif
+
 #ifdef CONFIG_HAVE_FPU
     vcpu_restore_reg(vcpu, seL4_VCPUReg_CPACR);
 #endif
@@ -672,6 +689,13 @@ static inline void vcpu_disable(vcpu_t *vcpu)
     setHCR(HCR_NATIVE);
     isb();
 
+#if defined(ARM_HYP_CP14_SAVE_AND_RESTORE_VCPU_THREADS)
+    loadAllDisabledBreakpointState();
+#endif
+#if defined(ARM_HYP_TRAP_CP14_IN_NATIVE_USER_THREADS)
+    setHDCRTrapDebugExceptionState(true);
+#endif
+
 #ifdef CONFIG_HAVE_FPU
     /* Allow FPU instructions in EL0 and EL1 for native
      * threads by setting the CPACR_EL1. The CPTR_EL2 is
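
For review convenience, here is a minimal sketch of how the AArch64 path of setHDCRTrapDebugExceptionState reads with this change applied, with the CONFIG_ARCH_AARCH64/AArch32 preprocessor branches elided. The MRS/MSR accessors, HCR_TGE and the HDCR_DEBUG_*_SHIFT bit positions are the ones already used in the diff; the branch comments are paraphrased, so treat this as an illustration of the intended behaviour rather than the exact patched header.

    static inline void setHDCRTrapDebugExceptionState(bool_t enable_trapping)
    {
        word_t hdcr, hcr;
        MRS("mdcr_el2", hdcr);
        MRS("hcr_el2", hcr);
        if (enable_trapping) {
            /* Trap debug register accesses and debug exceptions from
             * native threads up to EL2. */
            hdcr |= (BIT(HDCR_DEBUG_TDE_SHIFT)
                     | BIT(HDCR_DEBUG_TDA_SHIFT)
                     | BIT(HDCR_DEBUG_TDRA_SHIFT)
                     | BIT(HDCR_DEBUG_TDOSA_SHIFT));
            hcr |= HCR_TGE;
        } else {
            /* Let the EL1 guest VM handle debug events on its own. */
            hdcr &= ~(BIT(HDCR_DEBUG_TDE_SHIFT)
                      | BIT(HDCR_DEBUG_TDA_SHIFT)
                      | BIT(HDCR_DEBUG_TDRA_SHIFT)
                      | BIT(HDCR_DEBUG_TDOSA_SHIFT));
            hcr &= ~HCR_TGE;
        }
        MSR("mdcr_el2", hdcr);
        MSR("hcr_el2", hcr);
    }

In this patch, enabling trapping sets the MDCR_EL2 debug trap bits together with HCR_EL2.TGE so that debug events raised by native threads are taken to EL2; disabling it clears both so a guest running at EL1 can field its own debug exceptions.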