mirror of
https://github.com/torvalds/linux.git
synced 2024-12-22 02:52:56 +00:00
0f58189d4a
As noticed by Mikulas Patocka, the backoff macros don't completely nop out for UP builds, we still get a branch always and a delay slot nop. Fix this by making the branch to the backoff spin loop selective, then we can nop out the spin loop completely. Signed-off-by: David S. Miller <davem@davemloft.net>
142 lines
2.7 KiB
SPARC Assembly
/* bitops.S: Sparc64 atomic bit operations.
 *
 * Copyright (C) 2000, 2007 David S. Miller (davem@davemloft.net)
 */

#include <asm/asi.h>
#include <asm/backoff.h>

	.text
.globl test_and_set_bit
|
|
.type test_and_set_bit,#function
|
|
test_and_set_bit: /* %o0=nr, %o1=addr */
|
|
BACKOFF_SETUP(%o3)
|
|
srlx %o0, 6, %g1
|
|
mov 1, %o2
|
|
sllx %g1, 3, %g3
|
|
and %o0, 63, %g2
|
|
sllx %o2, %g2, %o2
|
|
add %o1, %g3, %o1
|
|
1: ldx [%o1], %g7
|
|
or %g7, %o2, %g1
|
|
casx [%o1], %g7, %g1
|
|
cmp %g7, %g1
|
|
bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
|
|
and %g7, %o2, %g2
|
|
clr %o0
|
|
movrne %g2, 1, %o0
|
|
retl
|
|
nop
|
|
2: BACKOFF_SPIN(%o3, %o4, 1b)
|
|
.size test_and_set_bit, .-test_and_set_bit
|
|
|
|
.globl test_and_clear_bit
|
|
.type test_and_clear_bit,#function
|
|
test_and_clear_bit: /* %o0=nr, %o1=addr */
|
|
BACKOFF_SETUP(%o3)
|
|
srlx %o0, 6, %g1
|
|
mov 1, %o2
|
|
sllx %g1, 3, %g3
|
|
and %o0, 63, %g2
|
|
sllx %o2, %g2, %o2
|
|
add %o1, %g3, %o1
|
|
1: ldx [%o1], %g7
|
|
andn %g7, %o2, %g1
|
|
casx [%o1], %g7, %g1
|
|
cmp %g7, %g1
|
|
bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
|
|
and %g7, %o2, %g2
|
|
clr %o0
|
|
movrne %g2, 1, %o0
|
|
retl
|
|
nop
|
|
2: BACKOFF_SPIN(%o3, %o4, 1b)
|
|
.size test_and_clear_bit, .-test_and_clear_bit
|
|
|
|
.globl test_and_change_bit
|
|
.type test_and_change_bit,#function
|
|
test_and_change_bit: /* %o0=nr, %o1=addr */
|
|
BACKOFF_SETUP(%o3)
|
|
srlx %o0, 6, %g1
|
|
mov 1, %o2
|
|
sllx %g1, 3, %g3
|
|
and %o0, 63, %g2
|
|
sllx %o2, %g2, %o2
|
|
add %o1, %g3, %o1
|
|
1: ldx [%o1], %g7
|
|
xor %g7, %o2, %g1
|
|
casx [%o1], %g7, %g1
|
|
cmp %g7, %g1
|
|
bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
|
|
and %g7, %o2, %g2
|
|
clr %o0
|
|
movrne %g2, 1, %o0
|
|
retl
|
|
nop
|
|
2: BACKOFF_SPIN(%o3, %o4, 1b)
|
|
.size test_and_change_bit, .-test_and_change_bit
|
|
|
|
.globl set_bit
|
|
.type set_bit,#function
|
|
set_bit: /* %o0=nr, %o1=addr */
|
|
BACKOFF_SETUP(%o3)
|
|
srlx %o0, 6, %g1
|
|
mov 1, %o2
|
|
sllx %g1, 3, %g3
|
|
and %o0, 63, %g2
|
|
sllx %o2, %g2, %o2
|
|
add %o1, %g3, %o1
|
|
1: ldx [%o1], %g7
|
|
or %g7, %o2, %g1
|
|
casx [%o1], %g7, %g1
|
|
cmp %g7, %g1
|
|
bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
|
|
nop
|
|
retl
|
|
nop
|
|
2: BACKOFF_SPIN(%o3, %o4, 1b)
|
|
.size set_bit, .-set_bit
|
|
|
|
.globl clear_bit
|
|
.type clear_bit,#function
|
|
clear_bit: /* %o0=nr, %o1=addr */
|
|
BACKOFF_SETUP(%o3)
|
|
srlx %o0, 6, %g1
|
|
mov 1, %o2
|
|
sllx %g1, 3, %g3
|
|
and %o0, 63, %g2
|
|
sllx %o2, %g2, %o2
|
|
add %o1, %g3, %o1
|
|
1: ldx [%o1], %g7
|
|
andn %g7, %o2, %g1
|
|
casx [%o1], %g7, %g1
|
|
cmp %g7, %g1
|
|
bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
|
|
nop
|
|
retl
|
|
nop
|
|
2: BACKOFF_SPIN(%o3, %o4, 1b)
|
|
.size clear_bit, .-clear_bit
|
|
|
|
.globl change_bit
|
|
.type change_bit,#function
|
|
change_bit: /* %o0=nr, %o1=addr */
|
|
BACKOFF_SETUP(%o3)
|
|
srlx %o0, 6, %g1
|
|
mov 1, %o2
|
|
sllx %g1, 3, %g3
|
|
and %o0, 63, %g2
|
|
sllx %o2, %g2, %o2
|
|
add %o1, %g3, %o1
|
|
1: ldx [%o1], %g7
|
|
xor %g7, %o2, %g1
|
|
casx [%o1], %g7, %g1
|
|
cmp %g7, %g1
|
|
bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
|
|
nop
|
|
retl
|
|
nop
|
|
2: BACKOFF_SPIN(%o3, %o4, 1b)
|
|
.size change_bit, .-change_bit
|