s390/atomic: Implement arch_atomic_inc() / arch_atomic_dec()

Implement arch_atomic_inc() / arch_atomic_dec() functions which result
in a single instruction if compiled for z196 or newer architectures.

Reduces the kernel image size by ~6K (defconfig):

bloat-o-meter:
add/remove: 0/0 grow/shrink: 12/1005 up/down: 106/-6404 (-6298)

Reviewed-by: Alexander Gordeev <agordeev@linux.ibm.com>
Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
Signed-off-by: Alexander Gordeev <agordeev@linux.ibm.com>
Author: Heiko Carstens
Date: 2024-12-04 12:30:58 +01:00
Committed by: Alexander Gordeev
Parent: 7ad0075005
Commit: d809df72b5

View File

@@ -45,6 +45,18 @@ static __always_inline void arch_atomic_add(int i, atomic_t *v)
}
#define arch_atomic_add arch_atomic_add
/*
 * arch_atomic_inc() - atomically increment v->counter by 1.
 *
 * Delegates to __atomic_add_const() with a constant operand; per the
 * commit message this lets the compiler emit a single instruction when
 * building for z196 or newer (presumably an add-immediate such as ASI —
 * confirm against __atomic_add_const's definition). No return value;
 * callers that need the result use the *_return/fetch_* variants.
 */
static __always_inline void arch_atomic_inc(atomic_t *v)
{
__atomic_add_const(1, &v->counter);
}
/* Tell the generic atomic fallback machinery this op is provided. */
#define arch_atomic_inc arch_atomic_inc
/*
 * arch_atomic_dec() - atomically decrement v->counter by 1.
 *
 * Mirror of arch_atomic_inc(): __atomic_add_const() with a constant -1
 * so the compiler can fold it into a single add-immediate instruction
 * on z196 or newer, per the commit message.
 */
static __always_inline void arch_atomic_dec(atomic_t *v)
{
__atomic_add_const(-1, &v->counter);
}
/* Tell the generic atomic fallback machinery this op is provided. */
#define arch_atomic_dec arch_atomic_dec
/*
 * Subtraction is expressed as addition of the negated operand; the
 * (int) cast normalizes the argument's type before negation.
 */
#define arch_atomic_sub(_i, _v) arch_atomic_add(-(int)(_i), _v)
#define arch_atomic_sub_return(_i, _v) arch_atomic_add_return(-(int)(_i), _v)
#define arch_atomic_fetch_sub(_i, _v) arch_atomic_fetch_add(-(int)(_i), _v)
@@ -122,6 +134,18 @@ static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
}
#define arch_atomic64_add arch_atomic64_add
/*
 * arch_atomic64_inc() - atomically increment the 64-bit v->counter by 1.
 *
 * 64-bit counterpart of arch_atomic_inc(); the constant operand lets
 * the compiler emit a single instruction on z196+ per the commit
 * message. NOTE(review): the (long *) cast assumes long is 64-bit here
 * (counter is presumably s64) — confirm against atomic64_t's layout.
 */
static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
__atomic64_add_const(1, (long *)&v->counter);
}
/* Tell the generic atomic fallback machinery this op is provided. */
#define arch_atomic64_inc arch_atomic64_inc
/*
 * arch_atomic64_dec() - atomically decrement the 64-bit v->counter by 1.
 *
 * Mirror of arch_atomic64_inc() with a constant -1 operand. Same
 * (long *) cast caveat: assumes 64-bit long — confirm against
 * atomic64_t's counter type.
 */
static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
__atomic64_add_const(-1, (long *)&v->counter);
}
/* Tell the generic atomic fallback machinery this op is provided. */
#define arch_atomic64_dec arch_atomic64_dec
static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
return arch_xchg(&v->counter, new);