CFI_ADJUST_CFA_OFFSET -4
.endm
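/*
 * Out-of-line atomic64_t helpers for 386/486 CPUs, which lack cmpxchg8b.
 * Each routine is bracketed by LOCK/UNLOCK, which appear to serialize the
 * operation by saving flags and disabling interrupts; that is only enough
 * on uniprocessor systems (SMP would need real spinlocks here).
 */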
#define BEGIN(op) \
.macro endp; \
CFI_ENDPROC; \
ENDPROC(atomic64_##op##_386); \
.purgem endp; \
.endm; \
ENTRY(atomic64_##op##_386); \
CFI_STARTPROC; \
	LOCK v;

#define ENDP endp

#define RET \
	UNLOCK v; \
	ret
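/*
 * BEGIN(op) opens ENTRY(atomic64_<op>_386), starts the CFI region and takes
 * the "lock". It also defines a one-shot endp macro, so that ENDP (or
 * RET_ENDP below) can emit the matching CFI_ENDPROC/ENDPROC and .purgem
 * itself before the next operation redefines it. RET drops the "lock" and
 * returns; RET_ENDP is RET followed by ENDP.
 */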
#define RET_ENDP \
	RET; \
	ENDP
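/*
 * Register conventions used below: the atomic64_t pointer lives in whatever
 * register v is #defined to for each routine; 64-bit values are passed and
 * returned in %eax:%edx (low:high), and atomic64_set takes the new value in
 * %ebx:%ecx. atomic64_read returns the current value of *v.
 */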
#define v %ecx
BEGIN(read)
	movl  (v), %eax
	movl 4(v), %edx
RET_ENDP
#undef v
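/* atomic64_set: store the 64-bit value passed in %ebx:%ecx into *v. */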
#define v %esi
BEGIN(set)
	movl %ebx,  (v)
	movl %ecx, 4(v)
RET_ENDP
#undef v
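/*
 * atomic64_xchg: return the old value in %eax:%edx (as read does) and store
 * the new value from %ebx:%ecx (as set does); the register usage here is
 * assumed to match read/set above.
 */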
#define v %esi
BEGIN(xchg)
	movl  (v), %eax
	movl 4(v), %edx
	movl %ebx,  (v)
	movl %ecx, 4(v)
RET_ENDP
#undef v
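/* atomic64_add: *v += %eax:%edx, as an add/adc carry chain on memory. */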
#define v %ecx
BEGIN(add)
	addl %eax,  (v)
	adcl %edx, 4(v)
RET_ENDP
#undef v
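/* atomic64_add_return: *v += %eax:%edx; the new value is returned in %eax:%edx. */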
#define v %ecx
BEGIN(add_return)
	addl  (v), %eax
	adcl 4(v), %edx
	movl %eax,  (v)
	movl %edx, 4(v)
RET_ENDP
#undef v
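/* atomic64_sub: *v -= %eax:%edx, as a sub/sbb borrow chain on memory. */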
#define v %ecx
BEGIN(sub)
	subl %eax,  (v)
	sbbl %edx, 4(v)
RET_ENDP
#undef v
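/*
 * atomic64_sub_return: negate the 64-bit operand in %eax:%edx, then reuse
 * the add_return sequence (add, then store back); the add-and-store tail is
 * assumed to mirror add_return above. The new value is returned in %eax:%edx.
 */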
#define v %ecx
BEGIN(sub_return)
	negl %edx
	negl %eax
	sbbl $0, %edx
	addl  (v), %eax
	adcl 4(v), %edx
	movl %eax,  (v)
	movl %edx, 4(v)
RET_ENDP
#undef v
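/* atomic64_inc: *v += 1. */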
#define v %esi
BEGIN(inc)
	addl $1,  (v)
	adcl $0, 4(v)
RET_ENDP
#undef v
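/*
 * atomic64_inc_return: load *v, add 1 and store it back, returning the new
 * value in %eax:%edx; the load/store framing is assumed to mirror add_return.
 */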
#define v %esi
BEGIN(inc_return)
	movl  (v), %eax
	movl 4(v), %edx
	addl $1, %eax
	adcl $0, %edx
	movl %eax,  (v)
	movl %edx, 4(v)
RET_ENDP
#undef v
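/* atomic64_dec: *v -= 1. */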
#define v %esi
BEGIN(dec)
	subl $1,  (v)
	sbbl $0, 4(v)
RET_ENDP
#undef v
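/*
 * atomic64_dec_return: load *v, subtract 1 and store it back, returning the
 * new value in %eax:%edx (framing assumed, as for inc_return).
 */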
#define v %esi
BEGIN(dec_return)
	movl  (v), %eax
	movl 4(v), %edx
	subl $1, %eax
	sbbl $0, %edx
	movl %eax,  (v)
	movl %edx, 4(v)
RET_ENDP
#undef v
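/*
 * atomic64_add_unless: add %eax:%edx to *v unless *v equals the value in
 * %esi:%edi; %eax is 1 on return if the add was done, 0 otherwise. The
 * comparison works because %esi:%edi is advanced by the addend as well, so
 * (*v + addend) == (unless + addend) iff *v == unless.
 */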
#define v %ecx
BEGIN(add_unless)
	addl %eax, %esi
	adcl %edx, %edi
	addl  (v), %eax
	adcl 4(v), %edx
	cmpl %eax, %esi
	je 3f
1:
	movl %eax,  (v)
	movl %edx, 4(v)
	movl $1, %eax
2:
	RET
3:
	cmpl %edx, %edi
	jne 1b
	xorl %eax, %eax
	jmp 2b
ENDP
#undef v
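/*
 * atomic64_inc_not_zero: increment *v only if it is non-zero; %eax is 1 on
 * return if the increment happened, 0 otherwise. The load and increment
 * steps are assumed to follow the read and inc patterns above.
 */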
#define v %esi
BEGIN(inc_not_zero)
	movl  (v), %eax
	movl 4(v), %edx
	testl %eax, %eax
	je 3f
1:
	addl $1,  (v)
	adcl $0, 4(v)
	movl $1, %eax
2:
	RET
3:
	testl %edx, %edx
	jne 1b
	jmp 2b
ENDP
#undef v
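/*
 * atomic64_dec_if_positive: compute *v - 1 in %eax:%edx and store it back
 * only if the result is not negative; the decremented value is returned in
 * %eax:%edx either way (load/store framing assumed, as for dec_return).
 */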
#define v %esi
BEGIN(dec_if_positive)
	movl  (v), %eax
	movl 4(v), %edx
	subl $1, %eax
	sbbl $0, %edx
	js 1f
	movl %eax,  (v)
	movl %edx, 4(v)
1:
RET_ENDP
#undef v