s390/futex: Generate futex atomic op functions

Clean up the futex atomic op inline assembly and generate a function for
each futex atomic op. This hopefully makes the code a bit more readable.

Acked-by: Ilya Leoshkevich <iii@linux.ibm.com>
Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
Signed-off-by: Alexander Gordeev <agordeev@linux.ibm.com>
This commit is contained in:
Heiko Carstens
2025-01-21 15:55:00 +01:00
committed by Alexander Gordeev
parent 884f0582b2
commit 9e8f72f773

View File

@@ -8,56 +8,64 @@
#include <asm/mmu_context.h>
#include <asm/errno.h>
/*
 * NOTE(review): this span is the pre-commit version of the code (the
 * lines this commit removes) as shown in the diff view.
 *
 * One shared inline-assembly macro for all futex atomic ops, using
 * numbered operands. With the secondary address space selected
 * (sacf 256) it loads the old value from *uaddr (label 0:), applies
 * "insn" to derive newval (1:), and retries compare-and-swap
 * (2: cs / 3: jl 1b) until the store wins, then clears ret
 * (lhi %0,0). ret is preloaded with -EFAULT via the "0" (-EFAULT)
 * input constraint, so any user-access fault — routed by the four
 * EX_TABLE entries to label 4:, which restores the address space
 * (sacf 768) — leaves -EFAULT in ret.
 */
#define __futex_atomic_op(insn, ret, oldval, newval, uaddr, oparg) \
asm volatile( \
" sacf 256\n" \
"0: l %1,0(%6)\n" \
"1:"insn \
"2: cs %1,%2,0(%6)\n" \
"3: jl 1b\n" \
" lhi %0,0\n" \
"4: sacf 768\n" \
EX_TABLE(0b,4b) EX_TABLE(1b,4b) \
EX_TABLE(2b,4b) EX_TABLE(3b,4b) \
: "=d" (ret), "=&d" (oldval), "=&d" (newval), \
"=m" (*uaddr) \
: "0" (-EFAULT), "d" (oparg), "a" (uaddr), \
"m" (*uaddr) : "cc");
/*
 * Generate one static inline function per futex atomic op.
 *
 * Each generated __futex_atomic_<name>(oparg, old, uaddr) atomically
 * updates the user word at uaddr: with the secondary address space
 * selected (sacf 256) it loads *uaddr (label 0:), runs "insn" — which
 * computes [new] from [old] and [oparg] — (1:), and retries
 * compare-and-swap (2: cs / 3: jl 1b) until the store wins, then sets
 * rc to 0 and restores the address space (sacf 768) at 4:.
 * A user-access fault on any of 0:-3: is routed to 4: by the
 * EX_TABLE_UA_FAULT entries, which name %[rc] as the register to
 * receive the fault indication (presumably -EFAULT — the success path
 * is the only place rc is written explicitly).
 *
 * Returns 0 on success, negative error code on fault; *old receives
 * the value read from *uaddr. Uses symbolic asm operand names instead
 * of the old macro's positional %0..%6.
 */
#define FUTEX_OP_FUNC(name, insn) \
static inline int \
__futex_atomic_##name(int oparg, int *old, u32 __user *uaddr) \
{ \
int rc, new; \
\
asm_inline volatile( \
" sacf 256\n" \
"0: l %[old],%[uaddr]\n" \
"1:"insn \
"2: cs %[old],%[new],%[uaddr]\n" \
"3: jl 1b\n" \
" lhi %[rc],0\n" \
"4: sacf 768\n" \
EX_TABLE_UA_FAULT(0b, 4b, %[rc]) \
EX_TABLE_UA_FAULT(1b, 4b, %[rc]) \
EX_TABLE_UA_FAULT(2b, 4b, %[rc]) \
EX_TABLE_UA_FAULT(3b, 4b, %[rc]) \
: [rc] "=d" (rc), [old] "=&d" (*old), \
[new] "=&d" (new), [uaddr] "+Q" (*uaddr) \
: [oparg] "d" (oparg) \
: "cc"); \
return rc; \
}
/*
 * NOTE(review): diff view — removed (old) and added (new) lines are
 * interleaved below without +/- markers, so this span contains BOTH
 * versions of arch_futex_atomic_op_inuser and is not compilable
 * as-is. Old version: signature on the next two lines, the
 * __futex_atomic_op(...) calls, and the ret/oldval locals. New
 * version: the FUTEX_OP_FUNC instantiations, the two-line signature
 * after them, the __futex_atomic_<name>() calls, and the rc/old
 * locals.
 */
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
u32 __user *uaddr)
/*
 * Instantiate the five op helpers. Each insn string computes [new]
 * from [old] and [oparg]; "set" ignores [old] entirely.
 */
FUTEX_OP_FUNC(set, "lr %[new],%[oparg]\n")
FUTEX_OP_FUNC(add, "lr %[new],%[old]\n ar %[new],%[oparg]\n")
FUTEX_OP_FUNC(or, "lr %[new],%[old]\n or %[new],%[oparg]\n")
FUTEX_OP_FUNC(and, "lr %[new],%[old]\n nr %[new],%[oparg]\n")
FUTEX_OP_FUNC(xor, "lr %[new],%[old]\n xr %[new],%[oparg]\n")
/*
 * Dispatch a FUTEX_OP_* request to the matching generated helper.
 * On success (rc == 0) the value previously held at *uaddr is
 * reported back through *oval; on failure *oval is left untouched.
 */
static inline
int arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
int oldval = 0, newval, ret;
int old, rc;
switch (op) {
case FUTEX_OP_SET:
__futex_atomic_op("lr %2,%5\n",
ret, oldval, newval, uaddr, oparg);
rc = __futex_atomic_set(oparg, &old, uaddr);
break;
case FUTEX_OP_ADD:
__futex_atomic_op("lr %2,%1\nar %2,%5\n",
ret, oldval, newval, uaddr, oparg);
rc = __futex_atomic_add(oparg, &old, uaddr);
break;
case FUTEX_OP_OR:
__futex_atomic_op("lr %2,%1\nor %2,%5\n",
ret, oldval, newval, uaddr, oparg);
rc = __futex_atomic_or(oparg, &old, uaddr);
break;
case FUTEX_OP_ANDN:
/* and-not: the complement is taken here, so the helper is plain "and" */
__futex_atomic_op("lr %2,%1\nnr %2,%5\n",
ret, oldval, newval, uaddr, ~oparg);
rc = __futex_atomic_and(~oparg, &old, uaddr);
break;
case FUTEX_OP_XOR:
__futex_atomic_op("lr %2,%1\nxr %2,%5\n",
ret, oldval, newval, uaddr, oparg);
rc = __futex_atomic_xor(oparg, &old, uaddr);
break;
default:
ret = -ENOSYS;
rc = -ENOSYS;
}
/* Report the old user-space value only when the op succeeded. */
if (!ret)
*oval = oldval;
return ret;
if (!rc)
*oval = old;
return rc;
}
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,