Annotation of src/sys/arch/powerpc/include/lock.h, Revision 1.12.44.1
1.12.44.1! jdolecek 1: /* $NetBSD$ */
1.1 thorpej 2:
3: /*-
1.9 ad 4: * Copyright (c) 2000, 2007 The NetBSD Foundation, Inc.
1.1 thorpej 5: * All rights reserved.
6: *
7: * This code is derived from software contributed to The NetBSD Foundation
1.9 ad 8: * by Jason R. Thorpe and Andrew Doran.
1.1 thorpej 9: *
10: * Redistribution and use in source and binary forms, with or without
11: * modification, are permitted provided that the following conditions
12: * are met:
13: * 1. Redistributions of source code must retain the above copyright
14: * notice, this list of conditions and the following disclaimer.
15: * 2. Redistributions in binary form must reproduce the above copyright
16: * notice, this list of conditions and the following disclaimer in the
17: * documentation and/or other materials provided with the distribution.
18: *
19: * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
20: * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
21: * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22: * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
23: * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24: * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
25: * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
26: * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
27: * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
28: * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29: * POSSIBILITY OF SUCH DAMAGE.
30: */
31:
32: /*
33: * Machine-dependent spin lock operations.
34: */
35:
36: #ifndef _POWERPC_LOCK_H_
1.3 tsubai 37: #define _POWERPC_LOCK_H_
38:
1.10 skrll 39: static __inline int
1.12.44.1! jdolecek 40: __SIMPLELOCK_LOCKED_P(const __cpu_simple_lock_t *__ptr)
1.10 skrll 41: {
42: return *__ptr == __SIMPLELOCK_LOCKED;
43: }
44:
45: static __inline int
1.12.44.1! jdolecek 46: __SIMPLELOCK_UNLOCKED_P(const __cpu_simple_lock_t *__ptr)
1.10 skrll 47: {
48: return *__ptr == __SIMPLELOCK_UNLOCKED;
49: }
50:
51: static __inline void
52: __cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr)
53: {
54: *__ptr = __SIMPLELOCK_UNLOCKED;
55: }
56:
57: static __inline void
58: __cpu_simple_lock_set(__cpu_simple_lock_t *__ptr)
59: {
60: *__ptr = __SIMPLELOCK_LOCKED;
61: }
62:
1.8 perry 63: static __inline void
1.3 tsubai 64: __cpu_simple_lock_init(__cpu_simple_lock_t *alp)
65: {
66: *alp = __SIMPLELOCK_UNLOCKED;
1.7 perry 67: __asm volatile ("sync");
1.3 tsubai 68: }
69:
1.8 perry 70: static __inline void
1.3 tsubai 71: __cpu_simple_lock(__cpu_simple_lock_t *alp)
72: {
73: int old;
74:
1.7 perry 75: __asm volatile (" \
1.3 tsubai 76: \n\
77: 1: lwarx %0,0,%1 \n\
78: cmpwi %0,%2 \n\
79: beq+ 3f \n\
1.4 tsubai 80: 2: lwzx %0,0,%1 \n\
1.3 tsubai 81: cmpwi %0,%2 \n\
82: beq+ 1b \n\
83: b 2b \n\
84: 3: stwcx. %3,0,%1 \n\
85: bne- 1b \n\
86: isync \n\
87: \n"
88: : "=&r"(old)
89: : "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED)
90: : "memory");
91: }
92:
1.8 perry 93: static __inline int
1.3 tsubai 94: __cpu_simple_lock_try(__cpu_simple_lock_t *alp)
95: {
1.4 tsubai 96: int old, dummy;
1.3 tsubai 97:
1.7 perry 98: __asm volatile (" \
1.3 tsubai 99: \n\
100: 1: lwarx %0,0,%1 \n\
101: cmpwi %0,%2 \n\
102: bne 2f \n\
103: stwcx. %3,0,%1 \n\
104: bne- 1b \n\
1.4 tsubai 105: 2: stwcx. %3,0,%4 \n\
1.3 tsubai 106: isync \n\
1.4 tsubai 107: \n"
1.3 tsubai 108: : "=&r"(old)
1.4 tsubai 109: : "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED),
110: "r"(&dummy)
1.3 tsubai 111: : "memory");
112:
113: return (old == __SIMPLELOCK_UNLOCKED);
114: }
115:
1.8 perry 116: static __inline void
1.3 tsubai 117: __cpu_simple_unlock(__cpu_simple_lock_t *alp)
118: {
1.7 perry 119: __asm volatile ("sync");
1.3 tsubai 120: *alp = __SIMPLELOCK_UNLOCKED;
121: }
1.1 thorpej 122:
1.9 ad 123: static __inline void
124: mb_read(void)
125: {
126: __asm volatile ("isync" ::: "memory");
127: }
128:
/*
 * mb_write:
 *
 *	Write memory barrier: "sync" orders all prior storage accesses
 *	before any subsequent ones; the "memory" clobber prevents
 *	compiler reordering as well.
 */
static __inline void
mb_write(void)
{
	__asm volatile ("sync" ::: "memory");
}
134:
/*
 * mb_memory:
 *
 *	Full memory barrier.  Same "sync" as mb_write(); sync orders
 *	both loads and stores, so it serves as the full barrier too.
 */
static __inline void
mb_memory(void)
{
	__asm volatile ("sync" ::: "memory");
}
140:
1.1 thorpej 141: #endif /* _POWERPC_LOCK_H_ */
CVSweb <webmaster@jp.NetBSD.org>