alpha: Improve atomic_add_unless

Use ll/sc loops instead of C loops around cmpxchg.
Update the atomic64_add_unless block comment to match the code.

Reviewed-and-Tested-by: Matt Turner <mattst88@gmail.com>
Signed-off-by: Matt Turner <mattst88@gmail.com>
Signed-off-by: Richard Henderson <rth@twiddle.net>
Richard Henderson 2013-07-11 07:42:14 -07:00 committed by Matt Turner
parent a5c6eae4d6
commit 6da7539734
1 changed file with 37 additions and 23 deletions
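
Both the removed cmpxchg loops and the new ll/sc sequences below implement the same "add unless equal" operation; the win is that on Alpha each atomic_cmpxchg() is itself an ll/sc retry loop with barriers, so open-coding lets the compare and the conditional store share a single ldl_l/stl_c sequence instead of nesting one retry loop inside another. For reference, here is a minimal portable sketch of that operation using the GCC/Clang __atomic builtins (illustrative only, not kernel code; the function name is made up):

/* Illustrative only: "add a to *v unless *v == u" via compiler builtins;
 * returns the previous value, the convention __atomic_add_unless() uses. */
static int add_unless_sketch(int *v, int a, int u)
{
	int old = __atomic_load_n(v, __ATOMIC_RELAXED);

	while (old != u) {
		/* On failure the builtin writes the current value of *v
		 * back into 'old', so we re-check against 'u' and retry. */
		if (__atomic_compare_exchange_n(v, &old, old + a,
						0 /* strong CAS */,
						__ATOMIC_SEQ_CST,
						__ATOMIC_RELAXED))
			break;
	}
	return old;
}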

@@ -186,17 +186,24 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
  */
 static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 {
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
+	int c, new, old;
+	smp_mb();
+	__asm__ __volatile__(
+	"1:	ldl_l	%[old],%[mem]\n"
+	"	cmpeq	%[old],%[u],%[c]\n"
+	"	addl	%[old],%[a],%[new]\n"
+	"	bne	%[c],2f\n"
+	"	stl_c	%[new],%[mem]\n"
+	"	beq	%[new],3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: [old] "=&r"(old), [new] "=&r"(new), [c] "=&r"(c)
+	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"((long)u)
+	: "memory");
+	smp_mb();
+	return old;
 }
@@ -207,21 +214,28 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
+ * Returns true iff @v was not @u.
  */
 static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 {
-	long c, old;
-	c = atomic64_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic64_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c != (u);
+	long c, tmp;
+	smp_mb();
+	__asm__ __volatile__(
+	"1:	ldq_l	%[tmp],%[mem]\n"
+	"	cmpeq	%[tmp],%[u],%[c]\n"
+	"	addq	%[tmp],%[a],%[tmp]\n"
+	"	bne	%[c],2f\n"
+	"	stq_c	%[tmp],%[mem]\n"
+	"	beq	%[tmp],3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: [tmp] "=&r"(tmp), [c] "=&r"(c)
+	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"(u)
+	: "memory");
+	smp_mb();
+	return !c;
 }
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
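
On the comment fix: atomic64_add_unless() returns a truth value (nonzero iff the add happened because @v was not @u), unlike __atomic_add_unless(), which returns the old value; the updated kernel-doc now says so. A hypothetical caller pattern that depends on that convention (illustrative, not part of this commit):

/* Hypothetical example: take a reference only while the count is still
 * nonzero.  Returns nonzero iff the increment happened, i.e. iff the
 * object was still live. */
static inline int try_get_ref(atomic64_t *count)
{
	return atomic64_inc_not_zero(count);
}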