
	/* $Id: fpm_atomic.h,v 1.3 2008/09/18 23:34:11 anight Exp $ */
	/* (c) 2007,2008 Andrei Nigmatulin */

#ifndef FPM_ATOMIC_H
#define FPM_ATOMIC_H 1

#if HAVE_INTTYPES_H
# include <inttypes.h>
#else
# include <stdint.h>
#endif
#include <sched.h>

#ifdef HAVE_BUILTIN_ATOMIC

/**
 * Each of the cases below (as provided by upstream) defines:
 * a signed machine word as atomic_int_t,
 * an unsigned machine word as atomic_uint_t,
 * and only ever uses volatile atomic_uint_t as atomic_t.
 */

typedef volatile unsigned long atomic_t;
#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)
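/*
 * Semantics: atomic_cmp_set(ptr, old, set) atomically stores `set` into
 * *ptr if *ptr still equals `old`, returning non-zero on success. Here it
 * maps directly onto GCC's __sync_bool_compare_and_swap(), which also
 * acts as a full memory barrier. Illustrative sketch (the `flag` variable
 * is hypothetical, not part of this header):
 *
 *     static atomic_t flag = 0;
 *
 *     if (atomic_cmp_set(&flag, 0, 1)) {
 *         ... flag was 0 and is now 1 ...
 *     }
 */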

#elif ( __i386__ || __i386 )

typedef int32_t                     atomic_int_t;
typedef uint32_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

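/*
 * Atomic fetch-and-add via LOCK XADD: the instruction exchanges the `add`
 * operand with *value and stores their sum, so `add` ends up holding the
 * previous contents of *value, which is what gets returned.
 */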
static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add) /* {{{ */
{
	__asm__ volatile ( "lock;" "xaddl %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}
/* }}} */

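/*
 * Atomic compare-and-swap via LOCK CMPXCHG: *lock is compared with `old`
 * (held in EAX); on a match `set` is stored and ZF is set, which SETE
 * captures into `res`. Returns 1 if the swap happened, 0 otherwise.
 */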
static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgl %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}
/* }}} */

#elif ( __amd64__ || __amd64 || __x86_64__ )

typedef int64_t                     atomic_int_t;
typedef uint64_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

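/* 64-bit variants of the i386 primitives above; XADDQ and CMPXCHGQ operate
 * on quadwords, but the semantics are identical. */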
static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add) /* {{{ */
{
	__asm__ volatile ( "lock;" "xaddq %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}
/* }}} */

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgq %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}
/* }}} */

#elif ( __arm__ || __arm ) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 2)) /* W-Mark Kubacki */
/* ARM relies on GCC's __sync atomic builtins, available since gcc 4.2;
 * older compilers fall through to the unsupported-processor error below */

#if (__arch64__ || __arch64)
typedef int64_t                     atomic_int_t;
typedef uint64_t                    atomic_uint_t;
#else
typedef int32_t                     atomic_int_t;
typedef uint32_t                    atomic_uint_t;
#endif

#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)

#elif ( __sparc__ || __sparc ) /* Marcin Ochab */

#if (__sparcv9 || __sparcv9__)

#if (__arch64__ || __arch64)
typedef uint64_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

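/*
 * SPARC v9 compare-and-swap via CASX: the value at *lock is compared with
 * `old`; on a match `new` is written to memory, and in either case the
 * register holding `new` receives the previous contents of *lock, which is
 * what gets returned. Callers detect success by comparing the result
 * against `old`.
 */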
static inline atomic_uint_t atomic_cas_64(atomic_t *lock, atomic_uint_t old, atomic_uint_t new) /* {{{ */
{
	__asm__ __volatile__("casx [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old): "memory");

	return new;
}
/* }}} */

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	return (atomic_cas_64(lock, old, set) == old);
}
/* }}} */
#else
typedef uint32_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

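/* 32-bit variant of the same protocol, using the CAS instruction. */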
static inline atomic_uint_t atomic_cas_32(atomic_t *lock, atomic_uint_t old, atomic_uint_t new) /* {{{ */
{
	__asm__ __volatile__("cas [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old): "memory");

	return new;
}
/* }}} */

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	return (atomic_cas_32(lock, old, set) == old);
}
/* }}} */
#endif

#else /* #if (__sparcv9 || __sparcv9__) */
#error Sparc v8 and predecessors are not and will not be supported (see bug report 53310)
#endif /* #if (__sparcv9 || __sparcv9__) */

#else

#error Unsupported processor. Please open a bug report (bugs.php.net).

#endif

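/*
 * Acquire `lock` by spinning on a 0 -> 1 compare-and-swap, calling
 * sched_yield() between attempts so the current holder can run even on a
 * single CPU. If try_once is non-zero, make exactly one attempt instead:
 * returns 1 on success, 0 if the lock was already held.
 */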
static inline int fpm_spinlock(atomic_t *lock, int try_once) /* {{{ */
{
	if (try_once) {
		return atomic_cmp_set(lock, 0, 1) ? 1 : 0;
	}

	for (;;) {

		if (atomic_cmp_set(lock, 0, 1)) {
			break;
		}

		sched_yield();
	}

	return 1;
}
/* }}} */

#define fpm_unlock(lock) ((lock) = 0)
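/*
 * Illustrative locking pattern (the `lock` variable here is hypothetical,
 * not part of this header). Note that fpm_spinlock() takes a pointer while
 * fpm_unlock() takes the lvalue itself:
 *
 *     static atomic_t lock = 0;
 *
 *     fpm_spinlock(&lock, 0);
 *     ... critical section ...
 *     fpm_unlock(lock);
 */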

#endif