Index: arch/m68k/include/lock.h
===================================================================
RCS file: /cvsroot/src/sys/arch/m68k/include/lock.h,v
retrieving revision 1.17
diff -u -p -r1.17 lock.h
--- arch/m68k/include/lock.h	13 Feb 2022 13:41:44 -0000	1.17
+++ arch/m68k/include/lock.h	15 Dec 2025 15:39:10 -0000
@@ -1,7 +1,7 @@
 /*	$NetBSD: lock.h,v 1.17 2022/02/13 13:41:44 riastradh Exp $	*/
 
 /*-
- * Copyright (c) 2000 The NetBSD Foundation, Inc.
+ * Copyright (c) 2000, 2025 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
@@ -68,21 +68,63 @@ __cpu_simple_lock_set(__cpu_simple_lock_
 	*__ptr = __SIMPLELOCK_LOCKED;
 }
 
+#if defined(__HAVE_M68K_BROKEN_RMC) && defined(_KERNEL)
+#include <sys/atomic.h>
+
+union __cpu_simple_lock_un {
+	unsigned int __cslu_lw;
+	unsigned char __cslu_bytes[4];
+};
+
+#define __cslu_addr(v) \
+	(unsigned int *)((vaddr_t)(v) & ~3U)
+#define __cslu_idx(v) \
+	(int)((vaddr_t)(v) & 3U)
+
+__CTASSERT(sizeof(__cpu_simple_lock_t) == 1);
+#endif /* __HAVE_M68K_BROKEN_RMC && _KERNEL */
+
 static __inline void
 __cpu_simple_lock(__cpu_simple_lock_t *alp)
 {
-
+#if defined(__HAVE_M68K_BROKEN_RMC) && defined(_KERNEL)
+	unsigned int *__addr = __cslu_addr(alp);
+	int __idx = __cslu_idx(alp);
+	union __cpu_simple_lock_un __old, __new;
+
+	for (;;) {
+		__old.__cslu_lw = __new.__cslu_lw = atomic_load_relaxed(__addr);
+		__old.__cslu_bytes[__idx] = __SIMPLELOCK_UNLOCKED;
+		__new.__cslu_bytes[__idx] = __SIMPLELOCK_LOCKED;
+		if (atomic_cas_uint(__addr, __old.__cslu_lw,
+		    __new.__cslu_lw) == __old.__cslu_lw) {
+			return;
+		}
+	}
+#else
 	__asm volatile(
 		"1:	tas	%0	\n"
 		"	jne	1b	\n"
 		: "=m" (*alp)
 		: /* no inputs */
 		: "cc", "memory");
+#endif /* __HAVE_M68K_BROKEN_RMC && _KERNEL */
 }
 
 static __inline int
 __cpu_simple_lock_try(__cpu_simple_lock_t *alp)
 {
+#if defined(__HAVE_M68K_BROKEN_RMC) && defined(_KERNEL)
+	unsigned int *__addr = __cslu_addr(alp);
+	int __idx = __cslu_idx(alp);
+	union __cpu_simple_lock_un __old, __new;
+
+	__old.__cslu_lw = __new.__cslu_lw = atomic_load_relaxed(__addr);
+	__old.__cslu_bytes[__idx] = __SIMPLELOCK_UNLOCKED;
+	__new.__cslu_bytes[__idx] = __SIMPLELOCK_LOCKED;
+	return atomic_cas_uint(__addr, __old.__cslu_lw,
+	    __new.__cslu_lw) == __old.__cslu_lw;
+#else
 	int __rv;
 
 	__asm volatile(
@@ -96,6 +138,7 @@ __cpu_simple_lock_try(__cpu_simple_lock_
 		: "cc", "memory");
 
 	return (__rv);
+#endif /* __HAVE_M68K_BROKEN_RMC && _KERNEL */
 }
 
 static __inline void
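
For readers who want to see the idea in isolation: the patch replaces the byte-wide TAS with a 32-bit compare-and-swap on the aligned word that contains the lock byte, so CPUs with broken read-modify-write cycles never issue a byte RMW. Below is a minimal userland sketch of that technique, not the NetBSD sources: the GCC/Clang __atomic_load_n and __sync_val_compare_and_swap builtins stand in for the kernel's atomic_load_relaxed() and atomic_cas_uint(), and every name (byte_lock_try, lock_word, BYTE_LOCKED, ...) is invented for the example.

/*
 * Sketch: try-acquire a one-byte lock via a CAS on its containing
 * 32-bit word.  Assumes GCC/Clang atomic builtins; illustration only.
 */
#include <stdint.h>
#include <stdio.h>

#define BYTE_UNLOCKED	0x00
#define BYTE_LOCKED	0x80

union lock_word {
	uint32_t	lw;		/* the whole aligned word */
	unsigned char	bytes[4];	/* its bytes, one of them the lock */
};

static int
byte_lock_try(unsigned char *lockp)
{
	/* Aligned word containing the lock byte, and the byte's offset. */
	uint32_t *addr = (uint32_t *)((uintptr_t)lockp & ~(uintptr_t)3);
	unsigned int idx = (uintptr_t)lockp & 3;
	union lock_word oldv, newv;

	/* Expected word has the byte unlocked; desired word has it locked. */
	oldv.lw = newv.lw = __atomic_load_n(addr, __ATOMIC_RELAXED);
	oldv.bytes[idx] = BYTE_UNLOCKED;
	newv.bytes[idx] = BYTE_LOCKED;

	/*
	 * The CAS succeeds only if the lock byte was unlocked and the
	 * three neighbouring bytes did not change in the meantime.
	 */
	return __sync_val_compare_and_swap(addr, oldv.lw, newv.lw) == oldv.lw;
}

int
main(void)
{
	/* Lock byte sharing its aligned word with unrelated data. */
	static union lock_word cell = {
		.bytes = { 0x11, BYTE_UNLOCKED, 0x22, 0x33 }
	};

	printf("first try:  %d\n", byte_lock_try(&cell.bytes[1]));	/* 1: acquired */
	printf("second try: %d\n", byte_lock_try(&cell.bytes[1]));	/* 0: already held */
	return 0;
}

Note the design point the patch relies on: because the expected word pins the other three bytes, a concurrent store to a neighbouring byte merely makes the CAS fail, and the locking loop (the for (;;) in __cpu_simple_lock) simply reloads and retries, so neighbouring data is never corrupted.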