/* $Id: fpm_atomic.h,v 1.3 2008/09/18 23:34:11 anight Exp $ */
/* (c) 2007,2008 Andrei Nigmatulin */

#ifndef FPM_ATOMIC_H
#define FPM_ATOMIC_H 1

#if HAVE_INTTYPES_H
# include <inttypes.h>
#else
# include <stdint.h>
#endif
#include <sched.h>

#ifdef HAVE_BUILTIN_ATOMIC

/**
 * All the architecture-specific cases below (as provided by upstream)
 * define a signed machine word as atomic_int_t and an unsigned machine
 * word as atomic_uint_t, and use only volatile atomic_uint_t as atomic_t.
 */

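/*
 * atomic_cmp_set(ptr, old, set) is a boolean compare-and-swap: it stores
 * `set` into *ptr only if *ptr still holds `old`, and returns nonzero on
 * success. A non-atomic, illustrative equivalent (for exposition only):
 *
 *     if (*ptr != old) return 0;
 *     *ptr = set;
 *     return 1;
 *
 * Every branch below provides this contract, either via the GCC
 * __sync_bool_compare_and_swap() builtin or via inline assembly.
 */
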
typedef volatile unsigned long atomic_t;
#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)

#elif ( __i386__ || __i386 )

typedef int32_t                     atomic_int_t;
typedef uint32_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

/* atomically add `add` to *value; returns the value held *before* the add */
static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add) /* {{{ */
{
	__asm__ volatile ( "lock;" "xaddl %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}
/* }}} */

/* compare-and-swap: store `set` into *lock iff it still equals `old`;
 * returns nonzero (the SETE result) on success */
static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgl %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}
/* }}} */

#elif ( __amd64__ || __amd64 || __x86_64__ )

typedef int64_t                     atomic_int_t;
typedef uint64_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

/* atomically add `add` to *value; returns the value held *before* the add */
static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add) /* {{{ */
{
	__asm__ volatile ( "lock;" "xaddq %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}
/* }}} */

/* compare-and-swap: store `set` into *lock iff it still equals `old`;
 * returns nonzero (the SETE result) on success */
static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgq %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}
/* }}} */

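/*
 * atomic_fetch_add() usage sketch (hypothetical caller): post-increment a
 * shared counter and observe the value it held before the addition. Note
 * that atomic_fetch_add() is only defined for the i386 and amd64 branches.
 *
 *     static atomic_t counter = 0;
 *     atomic_int_t prev = atomic_fetch_add(&counter, 1);
 */
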
#elif ( __arm__ || __arm ) /* W-Mark Kubacki */

#if (__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 2))
#error Unsupported processor. Please open a bug report (bugs.php.net).
#else /* GCC new enough for the __sync builtins */

#if (__arch64__ || __arch64)
typedef int64_t                     atomic_int_t;
typedef uint64_t                    atomic_uint_t;
#else
typedef int32_t                     atomic_int_t;
typedef uint32_t                    atomic_uint_t;
#endif

typedef volatile atomic_uint_t      atomic_t;

#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)

#endif /* defined (__GNUC__) && ... */

#elif ( __sparc__ || __sparc ) /* Marcin Ochab */

#if (__sparcv9 || __sparcv9__)

#if (__arch64__ || __arch64)
typedef uint64_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

/* casx: 64-bit compare-and-swap; returns the previous value of *lock,
 * so the swap succeeded iff the return value equals `old` */
static inline atomic_uint_t atomic_cas_64(atomic_t *lock, atomic_uint_t old, atomic_uint_t new) /* {{{ */
{
	__asm__ __volatile__("casx [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old) : "memory");

	return new;
}
/* }}} */

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	return (atomic_cas_64(lock, old, set) == old);
}
/* }}} */
#else
typedef uint32_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

/* cas: 32-bit compare-and-swap; returns the previous value of *lock,
 * so the swap succeeded iff the return value equals `old` */
static inline atomic_uint_t atomic_cas_32(atomic_t *lock, atomic_uint_t old, atomic_uint_t new) /* {{{ */
{
	__asm__ __volatile__("cas [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old) : "memory");

	return new;
}
/* }}} */

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	return (atomic_cas_32(lock, old, set) == old);
}
/* }}} */
#endif

#else /* #if (__sparcv9 || __sparcv9__) */
#error Sparc v8 and predecessors are not and will not be supported (see bug report 53310)
#endif /* #if (__sparcv9 || __sparcv9__) */

#else

#error Unsupported processor. Please open a bug report (bugs.php.net).

#endif

/* acquire a spinlock: if try_once is nonzero, make a single attempt and
 * return 1 on success, 0 if the lock was busy; otherwise spin (yielding
 * the CPU between attempts) until the lock is acquired, then return 1 */
static inline int fpm_spinlock(atomic_t *lock, int try_once) /* {{{ */
{
	if (try_once) {
		return atomic_cmp_set(lock, 0, 1) ? 1 : 0;
	}

	for (;;) {

		if (atomic_cmp_set(lock, 0, 1)) {
			break;
		}

		sched_yield();
	}

	return 1;
}
/* }}} */

#define fpm_unlock(lock) lock = 0
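
/*
 * Usage sketch (hypothetical caller, not part of this header): the lock
 * must be an atomic_t initialized to 0 (unlocked).
 *
 *     static atomic_t my_lock = 0;
 *
 *     fpm_spinlock(&my_lock, 0);            // spin until acquired
 *     // ... critical section ...
 *     fpm_unlock(my_lock);                  // release (plain volatile store)
 *
 *     if (fpm_spinlock(&my_lock, 1)) {      // single attempt: 0 if busy
 *         // ... critical section ...
 *         fpm_unlock(my_lock);
 *     }
 */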

#endif