/*	version 1.9, 2008/02/10 13:25:46	*/
/*	version 1.9.4.2, 2008/06/04 02:02:58	*/

/*
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 */

#ifdef _KERNEL |
#ifdef _KERNEL |
#define LOCK(n) .Lpatch/**/n: lock |
#define LOCK(n) .Lpatch/**/n: lock |
#define ALIAS(f, t) STRONG_ALIAS(f,t) |
#define ALIAS(f, t) STRONG_ALIAS(f,t) |
#define END(a) _ALIGN_TEXT; LABEL(a) |
#define ENDLABEL(a) _ALIGN_TEXT; LABEL(a) |
#else |
#else |
#define LOCK(n) lock |
#define LOCK(n) lock |
#define ALIAS(f, t) WEAK_ALIAS(f,t) |
#define ALIAS(f, t) WEAK_ALIAS(f,t) |
#define END(a) /* nothing */ |
#define ENDLABEL(a) /* nothing */ |
#endif |
#endif |
|
|
.text |
.text |
|
|
/* 32-bit */ |
/* 32-bit */ |
|
|
NENTRY(_atomic_add_32) |
ENTRY(_atomic_add_32) |
LOCK(1) |
LOCK(1) |
addl %esi, (%rdi) |
addl %esi, (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_add_32_nv) |
ENTRY(_atomic_add_32_nv) |
movl %esi, %eax |
movl %esi, %eax |
LOCK(2) |
LOCK(2) |
xaddl %eax, (%rdi) |
xaddl %eax, (%rdi) |
addl %esi, %eax |
addl %esi, %eax |
ret |
ret |
|
|
NENTRY(_atomic_and_32) |
ENTRY(_atomic_and_32) |
LOCK(3) |
LOCK(3) |
andl %esi, (%rdi) |
andl %esi, (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_and_32_nv) |
ENTRY(_atomic_and_32_nv) |
movl (%rdi), %eax |
movl (%rdi), %eax |
1: |
1: |
movl %eax, %ecx |
movl %eax, %ecx |
Line 80 NENTRY(_atomic_and_32_nv) |
|
Line 73 NENTRY(_atomic_and_32_nv) |
|
movl %ecx, %eax |
movl %ecx, %eax |
ret |
ret |
|
|
NENTRY(_atomic_dec_32) |
ENTRY(_atomic_dec_32) |
LOCK(5) |
LOCK(5) |
decl (%rdi) |
decl (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_dec_32_nv) |
ENTRY(_atomic_dec_32_nv) |
movl $-1, %eax |
movl $-1, %eax |
LOCK(6) |
LOCK(6) |
xaddl %eax, (%rdi) |
xaddl %eax, (%rdi) |
decl %eax |
decl %eax |
ret |
ret |
|
|
NENTRY(_atomic_inc_32) |
ENTRY(_atomic_inc_32) |
LOCK(7) |
LOCK(7) |
incl (%rdi) |
incl (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_inc_32_nv) |
ENTRY(_atomic_inc_32_nv) |
movl $1, %eax |
movl $1, %eax |
LOCK(8) |
LOCK(8) |
xaddl %eax, (%rdi) |
xaddl %eax, (%rdi) |
incl %eax |
incl %eax |
ret |
ret |
|
|
NENTRY(_atomic_or_32) |
ENTRY(_atomic_or_32) |
LOCK(9) |
LOCK(9) |
orl %esi, (%rdi) |
orl %esi, (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_or_32_nv) |
ENTRY(_atomic_or_32_nv) |
movl (%rdi), %eax |
movl (%rdi), %eax |
1: |
1: |
movl %eax, %ecx |
movl %eax, %ecx |
Line 120 NENTRY(_atomic_or_32_nv) |
|
Line 113 NENTRY(_atomic_or_32_nv) |
|
movl %ecx, %eax |
movl %ecx, %eax |
ret |
ret |
|
|
NENTRY(_atomic_swap_32) |
ENTRY(_atomic_swap_32) |
movl %esi, %eax |
movl %esi, %eax |
xchgl %eax, (%rdi) |
xchgl %eax, (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_cas_32) |
ENTRY(_atomic_cas_32) |
movl %esi, %eax |
movl %esi, %eax |
LOCK(12) |
LOCK(12) |
cmpxchgl %edx, (%rdi) |
cmpxchgl %edx, (%rdi) |
/* %eax now contains the old value */ |
/* %eax now contains the old value */ |
ret |
ret |
|
|
NENTRY(_atomic_cas_32_ni) |
ENTRY(_atomic_cas_32_ni) |
movl %esi, %eax |
movl %esi, %eax |
cmpxchgl %edx, (%rdi) |
cmpxchgl %edx, (%rdi) |
/* %eax now contains the old value */ |
/* %eax now contains the old value */ |
Line 140 NENTRY(_atomic_cas_32_ni) |
|
Line 133 NENTRY(_atomic_cas_32_ni) |
|
|
|
/* 64-bit */

NENTRY(_atomic_add_64) |
ENTRY(_atomic_add_64) |
LOCK(13) |
LOCK(13) |
addq %rsi, (%rdi) |
addq %rsi, (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_add_64_nv) |
ENTRY(_atomic_add_64_nv) |
movq %rsi, %rax |
movq %rsi, %rax |
LOCK(14) |
LOCK(14) |
xaddq %rax, (%rdi) |
xaddq %rax, (%rdi) |
addq %rsi, %rax |
addq %rsi, %rax |
ret |
ret |
|
|
NENTRY(_atomic_and_64) |
ENTRY(_atomic_and_64) |
LOCK(15) |
LOCK(15) |
andq %rsi, (%rdi) |
andq %rsi, (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_and_64_nv) |
ENTRY(_atomic_and_64_nv) |
movq (%rdi), %rax |
movq (%rdi), %rax |
1: |
1: |
movq %rax, %rcx |
movq %rax, %rcx |
Line 168 NENTRY(_atomic_and_64_nv) |
|
Line 161 NENTRY(_atomic_and_64_nv) |
|
movq %rcx, %rax |
movq %rcx, %rax |
ret |
ret |
|
|
NENTRY(_atomic_dec_64) |
ENTRY(_atomic_dec_64) |
LOCK(17) |
LOCK(17) |
decq (%rdi) |
decq (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_dec_64_nv) |
ENTRY(_atomic_dec_64_nv) |
movq $-1, %rax |
movq $-1, %rax |
LOCK(18) |
LOCK(18) |
xaddq %rax, (%rdi) |
xaddq %rax, (%rdi) |
decq %rax |
decq %rax |
ret |
ret |
|
|
NENTRY(_atomic_inc_64) |
ENTRY(_atomic_inc_64) |
LOCK(19) |
LOCK(19) |
incq (%rdi) |
incq (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_inc_64_nv) |
ENTRY(_atomic_inc_64_nv) |
movq $1, %rax |
movq $1, %rax |
LOCK(20) |
LOCK(20) |
xaddq %rax, (%rdi) |
xaddq %rax, (%rdi) |
incq %rax |
incq %rax |
ret |
ret |
|
|
NENTRY(_atomic_or_64) |
ENTRY(_atomic_or_64) |
LOCK(21) |
LOCK(21) |
orq %rsi, (%rdi) |
orq %rsi, (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_or_64_nv) |
ENTRY(_atomic_or_64_nv) |
movq (%rdi), %rax |
movq (%rdi), %rax |
1: |
1: |
movq %rax, %rcx |
movq %rax, %rcx |
Line 208 NENTRY(_atomic_or_64_nv) |
|
Line 201 NENTRY(_atomic_or_64_nv) |
|
movq %rcx, %rax |
movq %rcx, %rax |
ret |
ret |
|
|
NENTRY(_atomic_swap_64) |
ENTRY(_atomic_swap_64) |
movq %rsi, %rax |
movq %rsi, %rax |
xchgq %rax, (%rdi) |
xchgq %rax, (%rdi) |
ret |
ret |
|
|
NENTRY(_atomic_cas_64) |
ENTRY(_atomic_cas_64) |
movq %rsi, %rax |
movq %rsi, %rax |
LOCK(24) |
LOCK(24) |
cmpxchgq %rdx, (%rdi) |
cmpxchgq %rdx, (%rdi) |
/* %eax now contains the old value */ |
/* %eax now contains the old value */ |
ret |
ret |
|
|
NENTRY(_atomic_cas_64_ni) |
ENTRY(_atomic_cas_64_ni) |
movq %rsi, %rax |
movq %rsi, %rax |
cmpxchgq %rdx, (%rdi) |
cmpxchgq %rdx, (%rdi) |
/* %eax now contains the old value */ |
/* %eax now contains the old value */ |
Line 228 NENTRY(_atomic_cas_64_ni) |
|
Line 221 NENTRY(_atomic_cas_64_ni) |
|
|
|
/* memory barriers */

NENTRY(_membar_consumer) |
ENTRY(_membar_consumer) |
LOCK(25) |
LOCK(25) |
addq $0, -8(%rsp) |
addq $0, -8(%rsp) |
ret |
ret |
END(membar_consumer_end) |
ENDLABEL(membar_consumer_end) |
|
|
NENTRY(_membar_producer) |
ENTRY(_membar_producer) |
/* A store is enough */ |
/* A store is enough */ |
movq $0, -8(%rsp) |
movq $0, -8(%rsp) |
ret |
ret |
END(membar_producer_end) |
ENDLABEL(membar_producer_end) |
|
|
NENTRY(_membar_sync) |
ENTRY(_membar_sync) |
LOCK(26) |
LOCK(26) |
addq $0, -8(%rsp) |
addq $0, -8(%rsp) |
ret |
ret |
END(membar_sync_end) |
ENDLABEL(membar_sync_end) |
|
|
#ifdef _KERNEL |
#ifdef _KERNEL |
NENTRY(sse2_lfence) |
ENTRY(sse2_lfence) |
lfence |
lfence |
ret |
ret |
END(sse2_lfence_end) |
ENDLABEL(sse2_lfence_end) |
|
|
NENTRY(sse2_mfence) |
ENTRY(sse2_mfence) |
mfence |
mfence |
ret |
ret |
END(sse2_mfence_end) |
ENDLABEL(sse2_mfence_end) |
|
|
atomic_lockpatch: |
atomic_lockpatch: |
.globl atomic_lockpatch |
.globl atomic_lockpatch |