/*
   +----------------------------------------------------------------------+
   | Zend OPcache                                                         |
   +----------------------------------------------------------------------+
   | Copyright (c) The PHP Group                                          |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,     |
   | that is bundled with this package in the file LICENSE, and is       |
   | available through the world-wide-web at the following url:          |
   | https://www.php.net/license/3_01.txt                                |
   | If you did not receive a copy of the PHP license and are unable to  |
   | obtain it through the world-wide-web, please send a note to         |
   | license@php.net so we can mail you a copy immediately.              |
   +----------------------------------------------------------------------+
   | Authors: Andi Gutmans <andi@php.net>                                |
   |          Zeev Suraski <zeev@php.net>                                |
   |          Stanislav Malyshev <stas@zend.com>                         |
   |          Dmitry Stogov <dmitry@php.net>                             |
   +----------------------------------------------------------------------+
*/

/* pass 3: (Jump optimization)
 * - optimize series of JMPs
 */
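/* Each jump's target is repeatedly resolved: chains of unconditional JMPs
 * are followed, NOPs are skipped, and conditional jumps whose target
 * re-tests the same operand are short-circuited.  Jumps that end up
 * pointing at the very next opcode are then eliminated (JMP becomes a NOP,
 * JMPZ/JMPNZ go through zend_optimizer_convert_to_free_op1(), and
 * JMPZ_EX/JMPNZ_EX degrade to BOOL). */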

#include "Optimizer/zend_optimizer.h"
#include "Optimizer/zend_optimizer_internal.h"
#include "zend_API.h"
#include "zend_constants.h"
#include "zend_execute.h"
#include "zend_vm.h"

/* we use "jmp_hitlist" to avoid infinite loops during jmp optimization */
static zend_always_inline bool in_hitlist(zend_op *target, zend_op **jmp_hitlist, int jmp_hitlist_count)
{
	int i;

	for (i = 0; i < jmp_hitlist_count; i++) {
		if (jmp_hitlist[i] == target) {
			return 1;
		}
	}
	return 0;
}
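
/* CHECK_LOOP() records every jump target that has already been followed in
 * jmp_hitlist and breaks out of the resolution loop as soon as a target
 * repeats, so a cycle of JMPs cannot make the loops below spin forever. */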
#define CHECK_LOOP(target) \
	if (EXPECTED(!in_hitlist(target, jmp_hitlist, jmp_hitlist_count))) { \
		jmp_hitlist[jmp_hitlist_count++] = target; \
	} else { \
		break; \
	}
void zend_optimizer_pass3(zend_op_array *op_array, zend_optimizer_ctx *ctx)
{
	zend_op *opline;
	zend_op *end;
	zend_op *target;
	zend_op **jmp_hitlist;
	int jmp_hitlist_count;
	ALLOCA_FLAG(use_heap);
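
	/* one slot per opcode is enough: each opline is added to the hitlist at most once */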
	jmp_hitlist = (zend_op**)do_alloca(sizeof(zend_op*)*op_array->last, use_heap);
	opline = op_array->opcodes;
	end = opline + op_array->last;

	while (opline < end) {

		switch (opline->opcode) {
			case ZEND_JMP:
				jmp_hitlist_count = 0;

				target = ZEND_OP1_JMP_ADDR(opline);
				while (1) {
					if (target->opcode == ZEND_JMP) {
						/* convert JMP L1 ... L1: JMP L2 to JMP L2 .. L1: JMP L2 */
						target = ZEND_OP1_JMP_ADDR(target);
						CHECK_LOOP(target);
					} else if (target->opcode == ZEND_NOP) {
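						/* skip over NOPs */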
						target = target + 1;
					} else {
						break;
					}
					ZEND_SET_OP_JMP_ADDR(opline, opline->op1, target);
				}

				if (target == opline + 1) {
					/* convert L: JMP L+1 to NOP */
					MAKE_NOP(opline);
				} else if ((target->opcode == ZEND_RETURN ||
				            target->opcode == ZEND_RETURN_BY_REF ||
				            target->opcode == ZEND_GENERATOR_RETURN ||
				            target->opcode == ZEND_EXIT) &&
				           !(op_array->fn_flags & ZEND_ACC_HAS_FINALLY_BLOCK)) {
					/* JMP L, L: RETURN to immediate RETURN */
					*opline = *target;
					if (opline->op1_type == IS_CONST) {
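						/* give the duplicated RETURN its own literal (and its own reference to the value) */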
						zval zv;
						ZVAL_COPY(&zv, &ZEND_OP1_LITERAL(opline));
						opline->op1.constant = zend_optimizer_add_literal(op_array, &zv);
					}
				} else if (opline > op_array->opcodes &&
				           ((opline-1)->opcode == ZEND_JMPZ ||
				            (opline-1)->opcode == ZEND_JMPNZ)) {
					if (ZEND_OP2_JMP_ADDR(opline-1) == target) {
						/* JMPZ(X,L1), JMP(L1) -> NOP, JMP(L1) */
						zend_optimizer_convert_to_free_op1(op_array, opline - 1);
					}
				}
				break;

			case ZEND_JMP_SET:
			case ZEND_COALESCE:
				jmp_hitlist_count = 0;

				target = ZEND_OP2_JMP_ADDR(opline);
				while (1) {
					if (target->opcode == ZEND_JMP) {
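						/* follow JMPs at the short-circuit target, exactly as in the ZEND_JMP case above */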
						target = ZEND_OP1_JMP_ADDR(target);
						CHECK_LOOP(target);
					} else if (target->opcode == ZEND_NOP) {
						target = target + 1;
					} else {
						break;
					}
					ZEND_SET_OP_JMP_ADDR(opline, opline->op2, target);
				}
				break;

			case ZEND_JMPZ:
			case ZEND_JMPNZ:
				jmp_hitlist_count = 0;

				target = ZEND_OP2_JMP_ADDR(opline);
				while (1) {
					if (target->opcode == ZEND_JMP) {
						/* plain JMP */
						/* JMPZ(X,L1), L1: JMP(L2) => JMPZ(X,L2), L1: JMP(L2) */
						target = ZEND_OP1_JMP_ADDR(target);
						CHECK_LOOP(target);
					} else if (target->opcode == opline->opcode &&
					           SAME_VAR(opline->op1, target->op1)) {
						/* same opcode, testing the same variable */
						/* JMPZ(X,L1), L1: JMPZ(X,L2) => JMPZ(X,L2), L1: JMPZ(X,L2) */
						target = ZEND_OP2_JMP_ADDR(target);
						CHECK_LOOP(target);
					} else if (target->opcode == INV_COND(opline->opcode) &&
					           SAME_VAR(opline->op1, target->op1)) {
						/* convert JMPZ(X,L1), L1: JMPNZ(X,L2) to
						   JMPZ(X,L1+1) */
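						/* whenever this jump is taken, the inverted jump at L1 is not taken
						   and falls through, so we can branch straight to L1+1 */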
						target = target + 1;
					} else if (target->opcode == ZEND_NOP) {
						target = target + 1;
					} else {
						break;
					}
					ZEND_SET_OP_JMP_ADDR(opline, opline->op2, target);
				}

				/* convert L: JMPZ L+1 to NOP */
				if (target == opline + 1) {
					zend_optimizer_convert_to_free_op1(op_array, opline);
				}
				break;

			case ZEND_JMPZ_EX:
			case ZEND_JMPNZ_EX:
				jmp_hitlist_count = 0;

				target = ZEND_OP2_JMP_ADDR(opline);
				while (1) {
					if (target->opcode == ZEND_JMP) {
						/* plain JMP */
						/* JMPZ_EX(X,L1), L1: JMP(L2) => JMPZ_EX(X,L2), L1: JMP(L2) */
						target = ZEND_OP1_JMP_ADDR(target);
						CHECK_LOOP(target);
					} else if (target->opcode == opline->opcode-3 &&
					           (SAME_VAR(target->op1, opline->result) ||
					            SAME_VAR(target->op1, opline->op1))) {
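						/* opline->opcode - 3 maps JMPZ_EX/JMPNZ_EX onto the corresponding
						   plain JMPZ/JMPNZ */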
						/* convert T=JMPZ_EX(X,L1), L1: JMPZ(T,L2) to
						   JMPZ_EX(X,L2) */
						target = ZEND_OP2_JMP_ADDR(target);
						CHECK_LOOP(target);
					} else if (target->opcode == opline->opcode &&
					           target->result.var == opline->result.var &&
					           (SAME_VAR(target->op1, opline->result) ||
					            SAME_VAR(target->op1, opline->op1))) {
						/* convert T=JMPZ_EX(X,L1), L1: T=JMPZ_EX(T,L2) to
						   JMPZ_EX(X,L2) */
						target = ZEND_OP2_JMP_ADDR(target);
						CHECK_LOOP(target);
					} else if (target->opcode == INV_EX_COND(opline->opcode) &&
					           (SAME_VAR(target->op1, opline->result) ||
					            SAME_VAR(target->op1, opline->op1))) {
						/* convert T=JMPZ_EX(X,L1), L1: JMPNZ(T,L2) to
						   JMPZ_EX(X,L1+1) */
						target = target + 1;
					} else if (target->opcode == INV_EX_COND_EX(opline->opcode) &&
					           target->result.var == opline->result.var &&
					           (SAME_VAR(target->op1, opline->result) ||
					            SAME_VAR(target->op1, opline->op1))) {
						/* convert T=JMPZ_EX(X,L1), L1: T=JMPNZ_EX(T,L2) to
						   JMPZ_EX(X,L1+1) */
						target = target + 1;
					} else if (target->opcode == ZEND_BOOL &&
					           (SAME_VAR(target->op1, opline->result) ||
					            SAME_VAR(target->op1, opline->op1))) {
						/* convert Y = JMPZ_EX(X,L1), L1: Z = BOOL(Y) to
						   Z = JMPZ_EX(X,L1+1) */

						/* NOTE: This optimization pattern is not safe, but works, because
						 *       the result of the JMPZ_EX instruction is not used on the
						 *       following path and should be used once on the branch path.
						 *
						 *       The pattern works well only if jumps are processed in
						 *       direct order, otherwise it breaks JMPZ_EX sequences
						 *       too early.
						 */
						opline->result.var = target->result.var;
						target = target + 1;
						CHECK_LOOP(target);
					} else if (target->opcode == ZEND_NOP) {
						target = target + 1;
					} else {
						break;
					}
					ZEND_SET_OP_JMP_ADDR(opline, opline->op2, target);
				}

				/* convert L: T = JMPZ_EX X,L+1 to T = BOOL(X) */
				if (target == opline + 1) {
					opline->opcode = ZEND_BOOL;
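					/* BOOL takes no second operand, so drop the jump address op2 held */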
					opline->op2.num = 0;
				}
				break;
		}
		opline++;
	}
	free_alloca(jmp_hitlist, use_heap);
}