/* (c) 2007,2008 Andrei Nigmatulin */

#ifndef FPM_ATOMIC_H
#define FPM_ATOMIC_H 1

#if HAVE_INTTYPES_H
# include <inttypes.h>
#else
# include <stdint.h>
#endif
#include <sched.h>

#ifdef HAVE_BUILTIN_ATOMIC

/**
 * All the cases below (as provided by upstream) define:
 * a signed word as atomic_int_t,
 * an unsigned word as atomic_uint_t,
 * and use only volatile atomic_uint_t as atomic_t.
 */

typedef volatile unsigned long atomic_t;
#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)
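
/*
 * Note (added for clarity, not in upstream): atomic_cmp_set() is a boolean
 * compare-and-swap; __sync_bool_compare_and_swap() returns true iff *a held
 * b and c was stored, and it acts as a full memory barrier. A hypothetical
 * caller taking a lock:
 *
 *     atomic_t lock = 0;
 *     if (atomic_cmp_set(&lock, 0, 1)) {
 *         // we observed 0 and atomically stored 1: lock acquired
 *     }
 */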

#elif ( __i386__ || __i386 )

typedef int32_t                     atomic_int_t;
typedef uint32_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add) /* {{{ */
{
	__asm__ volatile ( "lock;" "xaddl %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}
/* }}} */
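
/*
 * Note (added for clarity, not in upstream): "lock xadd" atomically adds
 * the register to *value and leaves the old *value in the register, so
 * this returns the pre-increment value. As a single atomic step:
 *
 *     atomic_int_t old = *value;
 *     *value = old + add;
 *     return old;
 */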

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgl %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}
/* }}} */
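
/*
 * Note (added for clarity, not in upstream): "lock cmpxchgl" compares the
 * expected value in %eax (old) with *lock; on a match it stores `set` and
 * sets ZF, which "sete" copies into res. Returns 1 if the swap happened,
 * 0 otherwise.
 */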

#elif ( __amd64__ || __amd64 || __x86_64__ )

typedef int64_t                     atomic_int_t;
typedef uint64_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add) /* {{{ */
{
	__asm__ volatile ( "lock;" "xaddq %0, %1;" :
		"+r" (add) : "m" (*value) : "memory");

	return add;
}
/* }}} */

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	unsigned char res;

	__asm__ volatile ( "lock;" "cmpxchgq %3, %1;" "sete %0;" :
		"=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

	return res;
}
/* }}} */
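
/*
 * Note (added for clarity, not in upstream): these are the same operations
 * as the i386 versions above, widened to 64-bit operands (xaddq/cmpxchgq).
 */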

#elif ( __arm__ || __arm ) /* W-Mark Kubacki */

#if (__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 2))
# error __sync_bool_compare_and_swap() requires GCC >= 4.2 on ARM
#endif

#if (__arch64__ || __arch64)
typedef int64_t                     atomic_int_t;
typedef uint64_t                    atomic_uint_t;
#else
typedef int32_t                     atomic_int_t;
typedef uint32_t                    atomic_uint_t;
#endif

#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)

#elif ( __sparc__ || __sparc ) /* Marcin Ochab */

#if (__sparcv9 || __sparcv9__)

#if (__arch64__ || __arch64)
typedef uint64_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

static inline atomic_uint_t atomic_cas_64(atomic_t *lock, atomic_uint_t old, atomic_uint_t new) /* {{{ */
{
	__asm__ __volatile__("casx [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old) : "memory");

	return new;
}
/* }}} */
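
/*
 * Note (added for clarity, not in upstream): unlike the x86 helpers, "casx"
 * returns the value it observed at *lock rather than a boolean; the swap
 * succeeded iff that value equals `old`, which is what atomic_cmp_set()
 * below checks. The 32-bit "cas" variant further down behaves the same way.
 */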

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	return (atomic_cas_64(lock, old, set) == old);
}
/* }}} */
#else
typedef uint32_t                    atomic_uint_t;
typedef volatile atomic_uint_t      atomic_t;

static inline atomic_uint_t atomic_cas_32(atomic_t *lock, atomic_uint_t old, atomic_uint_t new) /* {{{ */
{
	__asm__ __volatile__("cas [%2], %3, %0 " : "=&r"(new) : "0"(new), "r"(lock), "r"(old) : "memory");

	return new;
}
/* }}} */

static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set) /* {{{ */
{
	return (atomic_cas_32(lock, old, set) == old);
}
/* }}} */
#endif

#else /* #if (__sparcv9 || __sparcv9__) */
#error Sparc v8 and predecessors are not and will not be supported (see bug report 53310)
#endif /* #if (__sparcv9 || __sparcv9__) */

#else

#error Unsupported processor. Please open a bug report (bugs.php.net).

#endif
static inline int fpm_spinlock(atomic_t *lock, int try_once) /* {{{ */
{
	if (try_once) {
		return atomic_cmp_set(lock, 0, 1) ? 1 : 0;
	}

	for (;;) {

		if (atomic_cmp_set(lock, 0, 1)) {
			break;
		}

		sched_yield();
	}

	return 1;
}
/* }}} */

#define fpm_unlock(lock) lock = 0
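
/*
 * Usage sketch (added for illustration; `lock` here is a hypothetical
 * caller-side variable, not part of this header):
 *
 *     static atomic_t lock = 0;
 *
 *     fpm_spinlock(&lock, 0);        // spin (yielding) until acquired
 *     // ... critical section ...
 *     fpm_unlock(lock);              // plain store of 0; ordering relies
 *                                    // on the volatile qualifier only
 *
 *     if (fpm_spinlock(&lock, 1)) {  // single non-blocking attempt
 *         // ... critical section ...
 *         fpm_unlock(lock);
 *     }
 */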

#endif