From 82ad1b90eb0cdae18c3b7b38ccde93a7b3ece4b7 Mon Sep 17 00:00:00 2001
From: Taylor R Campbell
Date: Sat, 12 Feb 2022 15:45:11 +0000
Subject: [PATCH] hppa: __cpu_simple_lock membar audit.

---
 sys/arch/hppa/include/lock.h | 33 ++++++++++++++++++++++++---------
 1 file changed, 24 insertions(+), 9 deletions(-)

diff --git a/sys/arch/hppa/include/lock.h b/sys/arch/hppa/include/lock.h
index 6077711a4dc7..290891a8a890 100644
--- a/sys/arch/hppa/include/lock.h
+++ b/sys/arch/hppa/include/lock.h
@@ -91,6 +91,25 @@ __cpu_simple_lock_init(__cpu_simple_lock_t *alp)
 	    __SIMPLELOCK_RAW_UNLOCKED;
 }
 
+static __inline int
+__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
+{
+	volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
+
+	if (__ldcw(__aptr) == __SIMPLELOCK_RAW_LOCKED)
+		return 0;
+
+	/*
+	 * __cpu_simple_lock_try must be a load-acquire operation, but
+	 * HPPA's LDCW does not appear to guarantee load-acquire
+	 * semantics, so we have to do LDCW and then an explicit SYNC
+	 * to make a load-acquire operation that pairs with a preceding
+	 * store-release in __cpu_simple_unlock.
+	 */
+	__sync();
+	return 1;
+}
+
 static __inline void
 __cpu_simple_lock(__cpu_simple_lock_t *alp)
 {
@@ -103,24 +122,20 @@ __cpu_simple_lock(__cpu_simple_lock_t *alp)
 	 * some work.
 	 */
 
-	while (__ldcw(__aptr) == __SIMPLELOCK_RAW_LOCKED)
+	while (!__cpu_simple_lock_try(alp))
 		while (*__aptr == __SIMPLELOCK_RAW_LOCKED)
 			;
 }
 
-static __inline int
-__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
-{
-	volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
-
-	return (__ldcw(__aptr) != __SIMPLELOCK_RAW_LOCKED);
-}
-
 static __inline void
 __cpu_simple_unlock(__cpu_simple_lock_t *alp)
 {
 	volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
 
+	/*
+	 * SYNC and then store makes a store-release that pairs with
+	 * the load-acquire in a subsequent __cpu_simple_lock_try.
+	 */
 	__sync();
 	*__aptr = __SIMPLELOCK_RAW_UNLOCKED;
 }
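
Not part of the patch, but a minimal sketch of how the acquire/release
pairing above is intended to be used.  The __cpu_simple_lock_* calls and
__cpu_simple_lock_t are the interfaces touched by the patch; the
<machine/lock.h> include path, the producer/consumer functions, and the
shared_data variable are assumptions for illustration only.

	#include <machine/lock.h>	/* assumed include path for the patched lock.h */

	static __cpu_simple_lock_t lock;
	static int shared_data;

	void
	setup(void)
	{

		__cpu_simple_lock_init(&lock);
	}

	void
	producer(void)
	{

		__cpu_simple_lock(&lock);
		shared_data = 42;		/* store made while holding the lock */
		__cpu_simple_unlock(&lock);	/* SYNC, then store: release */
	}

	void
	consumer(void)
	{
		int v;

		__cpu_simple_lock(&lock);	/* LDCW, then SYNC: acquire */
		v = shared_data;		/* ordered after the acquire, so this
						 * observes producer's 42 whenever
						 * producer's unlock happened first */
		__cpu_simple_unlock(&lock);
		(void)v;
	}

Without the SYNC after a successful LDCW, the load of shared_data could be
reordered before the lock acquisition and miss the producer's store even
though the producer had already unlocked, which is the hazard the patch's
comments describe.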