/* (c) 2007,2008 Andrei Nigmatulin */

#ifndef FPM_ATOMIC_H
#define FPM_ATOMIC_H 1

#include <inttypes.h>
#include <sched.h>

#ifdef HAVE_BUILTIN_ATOMIC

/**
 * All the cases below (as provided by upstream) define:
 * a word as atomic_int_t,
 * an unsigned word as atomic_uint_t,
 * and only use volatile atomic_uint_t as atomic_t.
 */

typedef volatile unsigned long atomic_t;
#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)
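
/*
 * Illustrative sketch (not part of the upstream header): on this path
 * atomic_cmp_set() expands to __sync_bool_compare_and_swap(), which stores
 * `c` into `*a` only if `*a` still equals `b` and returns non-zero on
 * success. A hypothetical try-lock built on it:
 */
#if 0
static atomic_t example_lock = 0;	/* hypothetical lock word */

static int example_try_lock(void)
{
	/* only the caller that flips 0 -> 1 gets a non-zero result */
	return atomic_cmp_set(&example_lock, 0, 1);
}
#endif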

#elif ( __i386__ || __i386 )

typedef int32_t                     atomic_int_t;
typedef uint32_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add) /* {{{ */
{
	__asm__ volatile ( "lock;" "xaddl %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}
/* }}} */
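
/*
 * XADD leaves the previous contents of *value in the register operand, so
 * atomic_fetch_add() returns the value seen *before* the addition: with
 * v == 5, atomic_fetch_add(&v, 3) returns 5 and leaves v == 8.
 */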

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgl %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}
/* }}} */
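
/*
 * CMPXCHG compares EAX (the `old` argument) with *lock and, when they are
 * equal, writes `set` into *lock and raises ZF; SETE then turns ZF into the
 * 0/1 result, so a non-zero return means the swap happened.
 */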

#elif ( __amd64__ || __amd64 || __x86_64__ )

typedef int64_t                     atomic_int_t;
typedef uint64_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add) /* {{{ */
{
	__asm__ volatile ( "lock;" "xaddq %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}
/* }}} */

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgq %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}
/* }}} */
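
/*
 * These are the i386 variants above, widened to 64-bit words: xaddq and
 * cmpxchgq (with RAX) instead of xaddl and cmpxchgl (with EAX).
 */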

#elif ( __arm__ || __arm ) /* W-Mark Kubacki */

#if (__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 2))
#error GCC 4.2 or newer is required for the __sync atomic builtins
#endif /* defined (__GNUC__) &&... */

#if (__arch64__ || __arch64)
typedef int64_t                     atomic_int_t;
typedef uint64_t                    atomic_uint_t;
#else
typedef int32_t                     atomic_int_t;
typedef uint32_t                    atomic_uint_t;
#endif

typedef volatile atomic_uint_t      atomic_t;

#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)

#elif ( __sparc__ || __sparc ) /* Marcin Ochab */

#if (__sparcv9 || __sparcv9__)

#if (__arch64__ || __arch64)
typedef uint64_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

static inline atomic_uint_t atomic_cas_64(atomic_t *lock, atomic_uint_t old, atomic_uint_t new) /* {{{ */
{
	__asm__ __volatile__("casx [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old) : "memory");

	return new;
}
/* }}} */
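
/*
 * casx compares *lock with `old` and, when they match, stores `new`; either
 * way the register operand comes back holding the previous value of *lock,
 * which is what atomic_cas_64() returns.
 */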

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	return (atomic_cas_64(lock, old, set) == old);
}
/* }}} */
#else
typedef uint32_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

static inline atomic_uint_t atomic_cas_32(atomic_t *lock, atomic_uint_t old, atomic_uint_t new) /* {{{ */
{
	__asm__ __volatile__("cas [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old) : "memory");

	return new;
}
/* }}} */

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	return (atomic_cas_32(lock, old, set) == old);
}
/* }}} */
#endif

#else /* #if (__sparcv9 || __sparcv9__) */
#error Sparc v8 and predecessors are not and will not be supported (see bug report 53310)
#endif /* #if (__sparcv9 || __sparcv9__) */

#else

#error Unsupported processor. Please open a bug report (https://github.com/php/php-src/issues).

#endif

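/*
 * Acquire the spinlock: with try_once non-zero, make a single attempt and
 * return 1 on success, 0 on failure; otherwise spin, yielding the CPU
 * between attempts, and return 1 once the 0 -> 1 transition succeeds.
 */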
static inline int fpm_spinlock(atomic_t *lock, int try_once) /* {{{ */
{
	if (try_once) {
		return atomic_cmp_set(lock, 0, 1) ? 1 : 0;
	}

	for (;;) {

		if (atomic_cmp_set(lock, 0, 1)) {
			break;
		}

		sched_yield();
	}

	return 1;
}
/* }}} */

#define fpm_unlock(lock) lock = 0
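
/*
 * Usage sketch (illustrative, not part of the upstream header): guarding a
 * hypothetical shared counter. The lock word must start out zeroed, and
 * fpm_unlock() takes the lock variable itself rather than a pointer, since
 * it is a plain macro assignment.
 */
#if 0
static atomic_t counter_lock = 0;	/* hypothetical shared lock word */
static unsigned long counter = 0;	/* hypothetical shared state */

static void counter_add(unsigned long n)
{
	fpm_spinlock(&counter_lock, 0);	/* spin (yielding) until acquired */
	counter += n;
	fpm_unlock(counter_lock);	/* release: plain store of 0 */
}

static int counter_try_add(unsigned long n)
{
	if (!fpm_spinlock(&counter_lock, 1)) {	/* single attempt only */
		return 0;
	}
	counter += n;
	fpm_unlock(counter_lock);
	return 1;
}
#endif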

#endif