diff --git a/sys/arch/aarch64/aarch64/cpufunc_asm_armv8.S b/sys/arch/aarch64/aarch64/cpufunc_asm_armv8.S
index 40efba6dcf46..24be1e4584a2 100644
--- a/sys/arch/aarch64/aarch64/cpufunc_asm_armv8.S
+++ b/sys/arch/aarch64/aarch64/cpufunc_asm_armv8.S
@@ -42,9 +42,14 @@
 
 /*
  * Macro to handle the cache. This takes the start address in x0, length
- * in x1. It will corrupt x2-x5.
+ * in x1. It will corrupt x2-x7.
  */
 .macro cache_handle_range	dcop = "", icop = ""
+#ifdef __HAVE_PREEMPTION
+0:	mrs	x6, tpidr_el1		/* x6 := curlwp */
+	ldr	x7, [x6, #L_NCSW]	/* x7 := curlwp->l_ncsw */
+#endif
+
 	mrs	x3, ctr_el0
 	mov	x4, #4			/* size of word */
 .ifnb \dcop
@@ -80,6 +85,16 @@
 	dsb	ish
 	isb
 .endif
+
+#ifdef __HAVE_PREEMPTION
+	ldr	x6, [x6, #L_NCSW]	/* x6 := curlwp->l_ncsw again */
+	cmp	x6, x7			/* retry if l_ncsw changed */
+	b.ne	2f			/* forward branch predict not-taken */
+	ret
+2:	b	0b
+#else
+	ret
+#endif
 .endm
 
 
@@ -97,7 +112,6 @@ END(aarch64_cpuid)
  */
 ENTRY(aarch64_dcache_wb_range)
 	cache_handle_range	dcop = cvac
-	ret
 END(aarch64_dcache_wb_range)
 
 /*
@@ -105,7 +119,6 @@ END(aarch64_dcache_wb_range)
  */
 ENTRY(aarch64_dcache_wbinv_range)
 	cache_handle_range	dcop = civac
-	ret
 END(aarch64_dcache_wbinv_range)
 
 /*
@@ -116,7 +129,6 @@ END(aarch64_dcache_wbinv_range)
  */
 ENTRY(aarch64_dcache_inv_range)
 	cache_handle_range	dcop = ivac
-	ret
 END(aarch64_dcache_inv_range)
 
 /*
@@ -124,7 +136,6 @@ END(aarch64_dcache_inv_range)
  */
 ENTRY(aarch64_idcache_wbinv_range)
 	cache_handle_range	dcop = civac, icop = ivau
-	ret
 END(aarch64_idcache_wbinv_range)
 
 /*
@@ -132,7 +143,6 @@ END(aarch64_idcache_wbinv_range)
  */
 ENTRY(aarch64_icache_sync_range)
 	cache_handle_range	dcop = cvau, icop = ivau
-	ret
 END(aarch64_icache_sync_range)
 
 /*
@@ -140,7 +150,6 @@ END(aarch64_icache_sync_range)
  */
 ENTRY(aarch64_icache_inv_range)
 	cache_handle_range	icop = ivau
-	ret
 END(aarch64_icache_inv_range)
 
 /*