1 | /* Subroutines used by or related to instruction recognition. |
---|
2 | Copyright (C) 1987, 1988, 91-6, 1997 Free Software Foundation, Inc. |
---|
3 | |
---|
4 | This file is part of GNU CC. |
---|
5 | |
---|
6 | GNU CC is free software; you can redistribute it and/or modify |
---|
7 | it under the terms of the GNU General Public License as published by |
---|
8 | the Free Software Foundation; either version 2, or (at your option) |
---|
9 | any later version. |
---|
10 | |
---|
11 | GNU CC is distributed in the hope that it will be useful, |
---|
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
---|
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
---|
14 | GNU General Public License for more details. |
---|
15 | |
---|
16 | You should have received a copy of the GNU General Public License |
---|
17 | along with GNU CC; see the file COPYING. If not, write to |
---|
18 | the Free Software Foundation, 59 Temple Place - Suite 330, |
---|
19 | Boston, MA 02111-1307, USA. */ |
---|
20 | |
---|
21 | |
---|
22 | #include "config.h" |
---|
23 | #include <stdio.h> |
---|
24 | #include "rtl.h" |
---|
25 | #include "insn-config.h" |
---|
26 | #include "insn-attr.h" |
---|
27 | #include "insn-flags.h" |
---|
28 | #include "insn-codes.h" |
---|
29 | #include "recog.h" |
---|
30 | #include "regs.h" |
---|
31 | #include "hard-reg-set.h" |
---|
32 | #include "flags.h" |
---|
33 | #include "real.h" |
---|
34 | |
---|
35 | #ifndef STACK_PUSH_CODE |
---|
36 | #ifdef STACK_GROWS_DOWNWARD |
---|
37 | #define STACK_PUSH_CODE PRE_DEC |
---|
38 | #else |
---|
39 | #define STACK_PUSH_CODE PRE_INC |
---|
40 | #endif |
---|
41 | #endif |
---|
42 | |
---|
43 | /* Import from final.c: */ |
---|
44 | extern rtx alter_subreg (); |
---|
45 | |
---|
46 | static rtx *find_single_use_1 PROTO((rtx, rtx *)); |
---|
47 | |
---|
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)):
   before reload such a SUBREG may still be replaced by a reload
   register, so code here treats it specially.  */

int reload_completed;
---|
68 | |
---|
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  /* Disallow volatile memory operands; appropriate while new rtl
     is being generated.  */
  volatile_ok = 0;
}
---|
78 | |
---|
/* Like `init_recog_no_volatile', but allow volatile memory operands;
   used in passes that must recognize every valid insn.  */

void
init_recog ()
{
  volatile_ok = 1;
}
---|
84 | |
---|
85 | /* Try recognizing the instruction INSN, |
---|
86 | and return the code number that results. |
---|
87 | Remember the code so that repeated calls do not |
---|
88 | need to spend the time for actual rerecognition. |
---|
89 | |
---|
90 | This function is the normal interface to instruction recognition. |
---|
91 | The automatically-generated function `recog' is normally called |
---|
92 | through this one. (The only exception is in combine.c.) */ |
---|
93 | |
---|
94 | int |
---|
95 | recog_memoized (insn) |
---|
96 | rtx insn; |
---|
97 | { |
---|
98 | if (INSN_CODE (insn) < 0) |
---|
99 | INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR); |
---|
100 | return INSN_CODE (insn); |
---|
101 | } |
---|
102 | |
---|
103 | /* Check that X is an insn-body for an `asm' with operands |
---|
104 | and that the operands mentioned in it are legitimate. */ |
---|
105 | |
---|
106 | int |
---|
107 | check_asm_operands (x) |
---|
108 | rtx x; |
---|
109 | { |
---|
110 | int noperands = asm_noperands (x); |
---|
111 | rtx *operands; |
---|
112 | int i; |
---|
113 | |
---|
114 | if (noperands < 0) |
---|
115 | return 0; |
---|
116 | if (noperands == 0) |
---|
117 | return 1; |
---|
118 | |
---|
119 | operands = (rtx *) alloca (noperands * sizeof (rtx)); |
---|
120 | decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR); |
---|
121 | |
---|
122 | for (i = 0; i < noperands; i++) |
---|
123 | if (!general_operand (operands[i], VOIDmode)) |
---|
124 | return 0; |
---|
125 | |
---|
126 | return 1; |
---|
127 | } |
---|
128 | |
---|
/* Static data for the next two routines.

   The maximum number of changes supported is defined as the maximum
   number of operands times 5.  This allows for repeated substitutions
   inside complex indexed address, or, alternatively, changes in up
   to 5 insns.  */

#define MAX_CHANGE_LOCS	(MAX_RECOG_OPERANDS * 5)

/* The insn or MEM being changed, for each pending change (0 if the
   change was made without validation).  */
static rtx change_objects[MAX_CHANGE_LOCS];
/* The INSN_CODE each insn had before the change, so it can be
   restored if the group is cancelled.  */
static int change_old_codes[MAX_CHANGE_LOCS];
/* The location within the object where the replacement was stored.  */
static rtx *change_locs[MAX_CHANGE_LOCS];
/* The rtl previously at that location, for undoing the change.  */
static rtx change_olds[MAX_CHANGE_LOCS];

/* Number of changes currently pending in the group.  */
static int num_changes = 0;
---|
144 | |
---|
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl for
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is 
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
    rtx object;
    rtx *loc;
    rtx new;
    int in_group;
{
  rtx old = *loc;

  /* Replacing something with an equal value is trivially valid
     and need not be recorded.  */
  if (old == new || rtx_equal_p (old, new))
    return 1;

  /* A single (non-group) change may only be made when no group is
     pending; running out of change slots is a caller bug.  */
  if (num_changes >= MAX_CHANGE_LOCS
      || (in_group == 0 && num_changes != 0))
    abort ();

  /* Install the replacement immediately; validation (if any) happens
     in apply_change_group, and cancel_changes can undo it.  */
  *loc = new;

  /* Save the information describing this change.  */
  change_objects[num_changes] = object;
  change_locs[num_changes] = loc;
  change_olds[num_changes] = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      change_old_codes[num_changes] = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
---|
204 | |
---|
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  We do not allow modifying an ASM_OPERANDS after reload
     has completed because verifying the constraints is too difficult.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = change_objects[i];

      /* A zero object means this change was made unconditionally;
	 it needs no validation.  */
      if (object == 0)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      /* An insn is invalid if it fails to be recognized (and is not a
	 still-valid asm), or if, after reload, its operands no longer
	 satisfy the constraints.  Note the comma expression: the insn
	 must be extracted before constrain_operands can be called.  */
      else if ((recog_memoized (object) < 0
		&& (asm_noperands (PATTERN (object)) < 0
		    || ! check_asm_operands (PATTERN (object))
		    || reload_completed))
	       || (reload_completed
		   && (insn_extract (object),
		       ! constrain_operands (INSN_CODE (object), 1))))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      /* A two-element PARALLEL degenerates to its first element
		 once the trailing CLOBBER is dropped.  */
	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat = gen_rtx (PARALLEL, VOIDmode, 
				    gen_rtvec (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
    }

  if (i == num_changes)
    {
      /* Every change validated: commit the group.  */
      num_changes = 0;
      return 1;
    }
  else
    {
      /* Some change failed: undo the entire group.  */
      cancel_changes (0);
      return 0;
    }
}
---|
299 | |
---|
/* Return the number of changes so far in the current group.
   Useful for callers that want to cancel back to a checkpoint
   with `cancel_changes'.  */

int
num_validated_changes ()
{
  return num_changes;
}
---|
307 | |
---|
308 | /* Retract the changes numbered NUM and up. */ |
---|
309 | |
---|
310 | void |
---|
311 | cancel_changes (num) |
---|
312 | int num; |
---|
313 | { |
---|
314 | int i; |
---|
315 | |
---|
316 | /* Back out all the changes. Do this in the opposite order in which |
---|
317 | they were made. */ |
---|
318 | for (i = num_changes - 1; i >= num; i--) |
---|
319 | { |
---|
320 | *change_locs[i] = change_olds[i]; |
---|
321 | if (change_objects[i] && GET_CODE (change_objects[i]) != MEM) |
---|
322 | INSN_CODE (change_objects[i]) = change_old_codes[i]; |
---|
323 | } |
---|
324 | num_changes = num; |
---|
325 | } |
---|
326 | |
---|
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  register int i, j;
  register char *fmt;
  register rtx x = *loc;
  enum rtx_code code = GET_CODE (x);

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* For commutative or comparison operations, try replacing each argument
     separately and seeing if we made any changes.  If so, put a constant
     argument last.  */
  if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      int prev_changes = num_changes;

      validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
      validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
      if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
	{
	  /* Swap the operands; for a comparison, the condition code
	     must be reversed to preserve meaning.  */
	  validate_change (object, loc,
			   gen_rtx (GET_RTX_CLASS (code) == 'c' ? code
				    : swap_condition (code),
				    GET_MODE (x), XEXP (x, 1), XEXP (x, 0)),
			   1);
	  x = *loc;
	  code = GET_CODE (x);
	}
    }

  /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
     done the substitution, otherwise we won't.  */

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 plus_constant to try to simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
	validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
			 1);
      return;

    case MINUS:
      /* Subtracting a constant is folded to adding its negation.  */
      if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
	{
	  validate_change (object, loc,
			   plus_constant (XEXP (x, 0), - INTVAL (to)),
			   1);
	  return;
	}
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* In these cases, the operation to be performed depends on the mode
	 of the operand.  If we are replacing the operand with a VOIDmode
	 constant, we lose the information.  So try to simplify the operation
	 in that case.  If it fails, substitute in something that we know
	 won't be recognized.  */
      if (GET_MODE (to) == VOIDmode
	  && (XEXP (x, 0) == from
	      || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
		  && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
		  && REGNO (XEXP (x, 0)) == REGNO (from))))
	{
	  rtx new = simplify_unary_operation (code, GET_MODE (x), to,
					      GET_MODE (from));
	  if (new == 0)
	    /* A CLOBBER is never a recognizable operand, so the group
	       will fail, which is what we want here.  */
	    new = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

	  validate_change (object, loc, new, 1);
	  return;
	}
      break;

    case SUBREG:
      /* If we have a SUBREG of a register that we are replacing and we are
	 replacing it with a MEM, make a new MEM and try replacing the
	 SUBREG with it.  Don't do this if the MEM has a mode-dependent address
	 or if we would be widening it.  */

      if (SUBREG_REG (x) == from
	  && GET_CODE (from) == REG
	  && GET_CODE (to) == MEM
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to)
	  && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
	{
	  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
	  enum machine_mode mode = GET_MODE (x);
	  rtx new;

	  /* On a big-endian machine the low part of a value lives at the
	     high addresses within the word, so adjust the byte offset.  */
	  if (BYTES_BIG_ENDIAN)
	    offset += (MIN (UNITS_PER_WORD,
			    GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
		       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));

	  new = gen_rtx (MEM, mode, plus_constant (XEXP (to, 0), offset));
	  /* Copy the memory attributes from the MEM being substituted.  */
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
	  validate_change (object, loc, new, 1);
	  return;
	}
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (to);
	  int width = INTVAL (XEXP (x, 1));
	  int pos = INTVAL (XEXP (x, 2));

	  /* The mode the extraction insn wants its memory operand in
	     comes from the insn-operand tables for extv/extzv.  */
#ifdef HAVE_extzv
	  if (code == ZERO_EXTRACT)
	    wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
	  if (code == SIGN_EXTRACT)
	    wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
			  - offset);

	      /* The bit position is now relative to the narrower MEM.  */
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = gen_rtx (MEM, wanted_mode,
				plus_constant (XEXP (to, 0), offset));
	      RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
	      MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
	      MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }

  /* Recurse into every subexpression and subvector of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }
}
---|
522 | |
---|
523 | /* Try replacing every occurrence of FROM in INSN with TO. After all |
---|
524 | changes have been made, validate by seeing if INSN is still valid. */ |
---|
525 | |
---|
526 | int |
---|
527 | validate_replace_rtx (from, to, insn) |
---|
528 | rtx from, to, insn; |
---|
529 | { |
---|
530 | validate_replace_rtx_1 (&PATTERN (insn), from, to, insn); |
---|
531 | return apply_change_group (); |
---|
532 | } |
---|
533 | |
---|
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      /* A label or barrier ends the stretch of insns that could
	 consume the condition codes.  */
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      /* An insn that sets cc0 itself kills the old value.  */
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
#endif
---|
583 | |
---|
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  char *fmt;

  switch (code)
    {
    /* Constants cannot contain DEST, and a value inside a CLOBBER is
       being overwritten, not used.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      /* Only the address (or the underlying expression) can contain
	 a use of DEST.  */
      return find_single_use_1 (dest, &XEXP (x, 0));
      
    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* A direct appearance of DEST (or of the same hard/pseudo
	     register) makes X itself the innermost containing rtx.  */
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		/* Duplicate usage.  */
		return 0;
	    }
	}
    }

  return result;
}
---|
682 | |
---|
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      /* cc0 may only be used by the immediately following insn.  */
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  /* See the function comment: LOG_LINKS / REG_DEAD information is only
     trustworthy between flow analysis and reload, and only for pseudos.  */
  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  /* Scan forward to the insn where DEST dies or is set, stopping at a
     label (beyond which liveness information doesn't apply).  */
  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
      {
	/* Confirm that NEXT is linked back to INSN, i.e. INSN supplies
	   the value NEXT uses.  */
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
---|
749 | |
---|
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register enum rtx_code code = GET_CODE (op);
  int mode_altering_drug = 0;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	return 0;
#endif

      /* Look through the SUBREG and judge the underlying object.  */
      op = SUBREG_REG (op);
      code = GET_CODE (op);
#if 0
      /* No longer needed, since (SUBREG (MEM...))
	 will load the MEM into a reload reg in the MEM's own mode.  */
      mode_altering_drug = 1;
#endif
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;
      if (GET_CODE (y) == ADDRESSOF)
	return 1;
      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      /* This macro jumps to `win' below when the address is legitimate;
	 otherwise it falls through and we return 0.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  if (mode_altering_drug)
    return ! mode_dependent_address_p (XEXP (op, 0));
  return 1;
}
---|
846 | |
---|
847 | /* Return 1 if OP is a valid memory address for a memory reference |
---|
848 | of mode MODE. |
---|
849 | |
---|
850 | The main use of this function is as a predicate in match_operand |
---|
851 | expressions in the machine description. */ |
---|
852 | |
---|
int
address_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Thin wrapper: an address operand is valid exactly when OP is a
     legitimate memory address for MODE.  */
  return memory_address_p (mode, op);
}
---|
860 | |
---|
861 | /* Return 1 if OP is a register reference of mode MODE. |
---|
862 | If MODE is VOIDmode, accept a register in any mode. |
---|
863 | |
---|
864 | The main use of this function is as a predicate in match_operand |
---|
865 | expressions in the machine description. |
---|
866 | |
---|
867 | As a special exception, registers whose class is NO_REGS are |
---|
868 | not accepted by `register_operand'. The reason for this change |
---|
869 | is to allow the representation of special architecture artifacts |
---|
870 | (such as a condition code register) without extending the rtl |
---|
871 | definitions. Since registers of class NO_REGS cannot be used |
---|
872 | as registers in any case where register classes are examined, |
---|
873 | it is most consistent to keep this function from accepting them. */ |
---|
874 | |
---|
int
register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Reject OP outright if its mode disagrees with MODE
     (unless the caller passed VOIDmode to accept any mode).  */
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_SIZE
      /* Target-specific restriction: reject a paradoxical or narrowing
	 SUBREG of a hard register belonging to a class whose registers
	 cannot change size, except for complex modes (whose subwords
	 are legitimately accessed at a different size).  */
      if (GET_CODE (SUBREG_REG (op)) == REG
	  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
	  && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
				REGNO (SUBREG_REG (op)))
	  && (GET_MODE_SIZE (mode)
	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* Look through the SUBREG and test the underlying expression.  */
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
---|
915 | |
---|
916 | /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH |
---|
917 | or a hard register. */ |
---|
918 | |
---|
919 | int |
---|
920 | scratch_operand (op, mode) |
---|
921 | register rtx op; |
---|
922 | enum machine_mode mode; |
---|
923 | { |
---|
924 | return (GET_MODE (op) == mode |
---|
925 | && (GET_CODE (op) == SCRATCH |
---|
926 | || (GET_CODE (op) == REG |
---|
927 | && REGNO (op) < FIRST_PSEUDO_REGISTER))); |
---|
928 | } |
---|
929 | |
---|
930 | /* Return 1 if OP is a valid immediate operand for mode MODE. |
---|
931 | |
---|
932 | The main use of this function is as a predicate in match_operand |
---|
933 | expressions in the machine description. */ |
---|
934 | |
---|
int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* OP must be a constant, with a mode that is either absent (VOIDmode),
     or matching MODE, or MODE itself may be VOIDmode (accept any).
     Under PIC, the target may further restrict which constants are
     directly usable; the target's legitimacy check always applies.  */
  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}
---|
955 | |
---|
956 | /* Returns 1 if OP is an operand that is a CONST_INT. */ |
---|
957 | |
---|
958 | int |
---|
959 | const_int_operand (op, mode) |
---|
960 | register rtx op; |
---|
961 | enum machine_mode mode; |
---|
962 | { |
---|
963 | return GET_CODE (op) == CONST_INT; |
---|
964 | } |
---|
965 | |
---|
966 | /* Returns 1 if OP is an operand that is a constant integer or constant |
---|
967 | floating-point number. */ |
---|
968 | |
---|
969 | int |
---|
970 | const_double_operand (op, mode) |
---|
971 | register rtx op; |
---|
972 | enum machine_mode mode; |
---|
973 | { |
---|
974 | /* Don't accept CONST_INT or anything similar |
---|
975 | if the caller wants something floating. */ |
---|
976 | if (GET_MODE (op) == VOIDmode && mode != VOIDmode |
---|
977 | && GET_MODE_CLASS (mode) != MODE_INT |
---|
978 | && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT) |
---|
979 | return 0; |
---|
980 | |
---|
981 | return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT) |
---|
982 | && (mode == VOIDmode || GET_MODE (op) == mode |
---|
983 | || GET_MODE (op) == VOIDmode)); |
---|
984 | } |
---|
985 | |
---|
986 | /* Return 1 if OP is a general operand that is not an immediate operand. */ |
---|
987 | |
---|
int
nonimmediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Any general operand that is not a constant: registers and memory.  */
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
---|
995 | |
---|
996 | /* Return 1 if OP is a register reference or immediate value of mode MODE. */ |
---|
997 | |
---|
int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      /* Same constant-acceptance test as general_operand: modes must be
	 compatible, and the target (and PIC rules, if configured) must
	 consider the constant legitimate.  */
      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  /* Non-constants must match MODE exactly unless MODE is a wildcard.  */
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
---|
1041 | |
---|
1042 | /* Return 1 if OP is a valid operand that stands for pushing a |
---|
1043 | value of mode MODE onto the stack. |
---|
1044 | |
---|
1045 | The main use of this function is as a predicate in match_operand |
---|
1046 | expressions in the machine description. */ |
---|
1047 | |
---|
1048 | int |
---|
1049 | push_operand (op, mode) |
---|
1050 | rtx op; |
---|
1051 | enum machine_mode mode; |
---|
1052 | { |
---|
1053 | if (GET_CODE (op) != MEM) |
---|
1054 | return 0; |
---|
1055 | |
---|
1056 | if (GET_MODE (op) != mode) |
---|
1057 | return 0; |
---|
1058 | |
---|
1059 | op = XEXP (op, 0); |
---|
1060 | |
---|
1061 | if (GET_CODE (op) != STACK_PUSH_CODE) |
---|
1062 | return 0; |
---|
1063 | |
---|
1064 | return XEXP (op, 0) == stack_pointer_rtx; |
---|
1065 | } |
---|
1066 | |
---|
1067 | /* Return 1 if ADDR is a valid memory address for mode MODE. */ |
---|
1068 | |
---|
int
memory_address_p (mode, addr)
     enum machine_mode mode;
     register rtx addr;
{
  /* An ADDRESSOF (the address of a pseudo kept in memory) is always
     treated as a valid address here.  */
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  /* GO_IF_LEGITIMATE_ADDRESS is a target macro that jumps to the label
     `win' when ADDR is a legitimate address for MODE; falling through
     means the address is not valid.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
---|
1083 | |
---|
1084 | /* Return 1 if OP is a valid memory reference with mode MODE, |
---|
1085 | including a valid address. |
---|
1086 | |
---|
1087 | The main use of this function is as a predicate in match_operand |
---|
1088 | expressions in the machine description. */ |
---|
1089 | |
---|
1090 | int |
---|
1091 | memory_operand (op, mode) |
---|
1092 | register rtx op; |
---|
1093 | enum machine_mode mode; |
---|
1094 | { |
---|
1095 | rtx inner; |
---|
1096 | |
---|
1097 | if (! reload_completed) |
---|
1098 | /* Note that no SUBREG is a memory operand before end of reload pass, |
---|
1099 | because (SUBREG (MEM...)) forces reloading into a register. */ |
---|
1100 | return GET_CODE (op) == MEM && general_operand (op, mode); |
---|
1101 | |
---|
1102 | if (mode != VOIDmode && GET_MODE (op) != mode) |
---|
1103 | return 0; |
---|
1104 | |
---|
1105 | inner = op; |
---|
1106 | if (GET_CODE (inner) == SUBREG) |
---|
1107 | inner = SUBREG_REG (inner); |
---|
1108 | |
---|
1109 | return (GET_CODE (inner) == MEM && general_operand (op, mode)); |
---|
1110 | } |
---|
1111 | |
---|
1112 | /* Return 1 if OP is a valid indirect memory reference with mode MODE; |
---|
1113 | that is, a memory reference whose address is a general_operand. */ |
---|
1114 | |
---|
int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      /* Byte offset of the SUBREG within the underlying MEM.  */
      register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
      rtx inner = SUBREG_REG (op);

      /* On big-endian targets, data sits at the high end of a word, so
	 correct the offset by the difference in (word-clamped) sizes.  */
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  /* Ordinary case: a valid MEM whose address is itself a general operand.  */
  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
---|
1150 | |
---|
1151 | /* Return 1 if this is a comparison operator. This allows the use of |
---|
1152 | MATCH_OPERATOR to recognize all the branch insns. */ |
---|
1153 | |
---|
1154 | int |
---|
1155 | comparison_operator (op, mode) |
---|
1156 | register rtx op; |
---|
1157 | enum machine_mode mode; |
---|
1158 | { |
---|
1159 | return ((mode == VOIDmode || GET_MODE (op) == mode) |
---|
1160 | && GET_RTX_CLASS (GET_CODE (op)) == '<'); |
---|
1161 | } |
---|
1162 | |
---|
1163 | /* If BODY is an insn body that uses ASM_OPERANDS, |
---|
1164 | return the number of operands (both input and output) in the insn. |
---|
1165 | Otherwise return -1. */ |
---|
1166 | |
---|
int
asm_noperands (body)
     rtx body;
{
  if (GET_CODE (body) == ASM_OPERANDS)
    /* No output operands: return number of input operands.  */
    return ASM_OPERANDS_INPUT_LENGTH (body);
  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
    return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      /* Multiple output operands, or 1 output plus some clobbers:
	 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
      int i;
      int n_sets;

      /* Count backwards through CLOBBERs to determine number of SETs.  */
      for (i = XVECLEN (body, 0); i > 0; i--)
	{
	  if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
	    break;
	  if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
	    /* Anything other than a trailing CLOBBER disqualifies BODY.  */
	    return -1;
	}

      /* N_SETS is now number of output operands.  */
      n_sets = i;

      /* Verify that all the SETs we have
	 came from a single original asm_operands insn
	 (so that invalid combinations are blocked).  */
      for (i = 0; i < n_sets; i++)
	{
	  rtx elt = XVECEXP (body, 0, i);
	  if (GET_CODE (elt) != SET)
	    return -1;
	  if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
	    return -1;
	  /* If these ASM_OPERANDS rtx's came from different original insns
	     then they aren't allowed together.  */
	  if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
	      != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
	    return -1;
	}
      /* Total operand count: inputs (from the shared ASM_OPERANDS)
	 plus one output per SET.  */
      return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
	      + n_sets);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* 0 outputs, but some clobbers:
	 body is [(asm_operands ...) (clobber (reg ...))...].  */
      int i;

      /* Make sure all the other parallel things really are clobbers.  */
      for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	  return -1;

      return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
    }
  else
    /* BODY does not use ASM_OPERANDS in any recognized shape.  */
    return -1;
}
---|
1234 | |
---|
1235 | /* Assuming BODY is an insn body that uses ASM_OPERANDS, |
---|
1236 | copy its operands (both input and output) into the vector OPERANDS, |
---|
1237 | the locations of the operands within the insn into the vector OPERAND_LOCS, |
---|
1238 | and the constraints for the operands into CONSTRAINTS. |
---|
1239 | Write the modes of the operands into MODES. |
---|
1240 | Return the assembler-template. |
---|
1241 | |
---|
1242 | If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0, |
---|
1243 | we don't store that info. */ |
---|
1244 | |
---|
1245 | char * |
---|
1246 | decode_asm_operands (body, operands, operand_locs, constraints, modes) |
---|
1247 | rtx body; |
---|
1248 | rtx *operands; |
---|
1249 | rtx **operand_locs; |
---|
1250 | char **constraints; |
---|
1251 | enum machine_mode *modes; |
---|
1252 | { |
---|
1253 | register int i; |
---|
1254 | int noperands; |
---|
1255 | char *template = 0; |
---|
1256 | |
---|
1257 | if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS) |
---|
1258 | { |
---|
1259 | rtx asmop = SET_SRC (body); |
---|
1260 | /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */ |
---|
1261 | |
---|
1262 | noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1; |
---|
1263 | |
---|
1264 | for (i = 1; i < noperands; i++) |
---|
1265 | { |
---|
1266 | if (operand_locs) |
---|
1267 | operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1); |
---|
1268 | if (operands) |
---|
1269 | operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1); |
---|
1270 | if (constraints) |
---|
1271 | constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1); |
---|
1272 | if (modes) |
---|
1273 | modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1); |
---|
1274 | } |
---|
1275 | |
---|
1276 | /* The output is in the SET. |
---|
1277 | Its constraint is in the ASM_OPERANDS itself. */ |
---|
1278 | if (operands) |
---|
1279 | operands[0] = SET_DEST (body); |
---|
1280 | if (operand_locs) |
---|
1281 | operand_locs[0] = &SET_DEST (body); |
---|
1282 | if (constraints) |
---|
1283 | constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop); |
---|
1284 | if (modes) |
---|
1285 | modes[0] = GET_MODE (SET_DEST (body)); |
---|
1286 | template = ASM_OPERANDS_TEMPLATE (asmop); |
---|
1287 | } |
---|
1288 | else if (GET_CODE (body) == ASM_OPERANDS) |
---|
1289 | { |
---|
1290 | rtx asmop = body; |
---|
1291 | /* No output operands: BODY is (asm_operands ....). */ |
---|
1292 | |
---|
1293 | noperands = ASM_OPERANDS_INPUT_LENGTH (asmop); |
---|
1294 | |
---|
1295 | /* The input operands are found in the 1st element vector. */ |
---|
1296 | /* Constraints for inputs are in the 2nd element vector. */ |
---|
1297 | for (i = 0; i < noperands; i++) |
---|
1298 | { |
---|
1299 | if (operand_locs) |
---|
1300 | operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i); |
---|
1301 | if (operands) |
---|
1302 | operands[i] = ASM_OPERANDS_INPUT (asmop, i); |
---|
1303 | if (constraints) |
---|
1304 | constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i); |
---|
1305 | if (modes) |
---|
1306 | modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i); |
---|
1307 | } |
---|
1308 | template = ASM_OPERANDS_TEMPLATE (asmop); |
---|
1309 | } |
---|
1310 | else if (GET_CODE (body) == PARALLEL |
---|
1311 | && GET_CODE (XVECEXP (body, 0, 0)) == SET) |
---|
1312 | { |
---|
1313 | rtx asmop = SET_SRC (XVECEXP (body, 0, 0)); |
---|
1314 | int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */ |
---|
1315 | int nin = ASM_OPERANDS_INPUT_LENGTH (asmop); |
---|
1316 | int nout = 0; /* Does not include CLOBBERs. */ |
---|
1317 | |
---|
1318 | /* At least one output, plus some CLOBBERs. */ |
---|
1319 | |
---|
1320 | /* The outputs are in the SETs. |
---|
1321 | Their constraints are in the ASM_OPERANDS itself. */ |
---|
1322 | for (i = 0; i < nparallel; i++) |
---|
1323 | { |
---|
1324 | if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER) |
---|
1325 | break; /* Past last SET */ |
---|
1326 | |
---|
1327 | if (operands) |
---|
1328 | operands[i] = SET_DEST (XVECEXP (body, 0, i)); |
---|
1329 | if (operand_locs) |
---|
1330 | operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i)); |
---|
1331 | if (constraints) |
---|
1332 | constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1); |
---|
1333 | if (modes) |
---|
1334 | modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i))); |
---|
1335 | nout++; |
---|
1336 | } |
---|
1337 | |
---|
1338 | for (i = 0; i < nin; i++) |
---|
1339 | { |
---|
1340 | if (operand_locs) |
---|
1341 | operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i); |
---|
1342 | if (operands) |
---|
1343 | operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i); |
---|
1344 | if (constraints) |
---|
1345 | constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i); |
---|
1346 | if (modes) |
---|
1347 | modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i); |
---|
1348 | } |
---|
1349 | |
---|
1350 | template = ASM_OPERANDS_TEMPLATE (asmop); |
---|
1351 | } |
---|
1352 | else if (GET_CODE (body) == PARALLEL |
---|
1353 | && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS) |
---|
1354 | { |
---|
1355 | /* No outputs, but some CLOBBERs. */ |
---|
1356 | |
---|
1357 | rtx asmop = XVECEXP (body, 0, 0); |
---|
1358 | int nin = ASM_OPERANDS_INPUT_LENGTH (asmop); |
---|
1359 | |
---|
1360 | for (i = 0; i < nin; i++) |
---|
1361 | { |
---|
1362 | if (operand_locs) |
---|
1363 | operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i); |
---|
1364 | if (operands) |
---|
1365 | operands[i] = ASM_OPERANDS_INPUT (asmop, i); |
---|
1366 | if (constraints) |
---|
1367 | constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i); |
---|
1368 | if (modes) |
---|
1369 | modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i); |
---|
1370 | } |
---|
1371 | |
---|
1372 | template = ASM_OPERANDS_TEMPLATE (asmop); |
---|
1373 | } |
---|
1374 | |
---|
1375 | return template; |
---|
1376 | } |
---|
1377 | |
---|
1378 | /* Given an rtx *P, if it is a sum containing an integer constant term, |
---|
1379 | return the location (type rtx *) of the pointer to that constant term. |
---|
1380 | Otherwise, return a null pointer. */ |
---|
1381 | |
---|
1382 | static rtx * |
---|
1383 | find_constant_term_loc (p) |
---|
1384 | rtx *p; |
---|
1385 | { |
---|
1386 | register rtx *tem; |
---|
1387 | register enum rtx_code code = GET_CODE (*p); |
---|
1388 | |
---|
1389 | /* If *P IS such a constant term, P is its location. */ |
---|
1390 | |
---|
1391 | if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF |
---|
1392 | || code == CONST) |
---|
1393 | return p; |
---|
1394 | |
---|
1395 | /* Otherwise, if not a sum, it has no constant term. */ |
---|
1396 | |
---|
1397 | if (GET_CODE (*p) != PLUS) |
---|
1398 | return 0; |
---|
1399 | |
---|
1400 | /* If one of the summands is constant, return its location. */ |
---|
1401 | |
---|
1402 | if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0)) |
---|
1403 | && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1))) |
---|
1404 | return p; |
---|
1405 | |
---|
1406 | /* Otherwise, check each summand for containing a constant term. */ |
---|
1407 | |
---|
1408 | if (XEXP (*p, 0) != 0) |
---|
1409 | { |
---|
1410 | tem = find_constant_term_loc (&XEXP (*p, 0)); |
---|
1411 | if (tem != 0) |
---|
1412 | return tem; |
---|
1413 | } |
---|
1414 | |
---|
1415 | if (XEXP (*p, 1) != 0) |
---|
1416 | { |
---|
1417 | tem = find_constant_term_loc (&XEXP (*p, 1)); |
---|
1418 | if (tem != 0) |
---|
1419 | return tem; |
---|
1420 | } |
---|
1421 | |
---|
1422 | return 0; |
---|
1423 | } |
---|
1424 | |
---|
1425 | /* Return 1 if OP is a memory reference |
---|
1426 | whose address contains no side effects |
---|
1427 | and remains valid after the addition |
---|
1428 | of a positive integer less than the |
---|
1429 | size of the object being referenced. |
---|
1430 | |
---|
1431 | We assume that the original address is valid and do not check it. |
---|
1432 | |
---|
1433 | This uses strict_memory_address_p as a subroutine, so |
---|
1434 | don't use it before reload. */ |
---|
1435 | |
---|
1436 | int |
---|
1437 | offsettable_memref_p (op) |
---|
1438 | rtx op; |
---|
1439 | { |
---|
1440 | return ((GET_CODE (op) == MEM) |
---|
1441 | && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0))); |
---|
1442 | } |
---|
1443 | |
---|
1444 | /* Similar, but don't require a strictly valid mem ref: |
---|
1445 | consider pseudo-regs valid as index or base regs. */ |
---|
1446 | |
---|
1447 | int |
---|
1448 | offsettable_nonstrict_memref_p (op) |
---|
1449 | rtx op; |
---|
1450 | { |
---|
1451 | return ((GET_CODE (op) == MEM) |
---|
1452 | && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0))); |
---|
1453 | } |
---|
1454 | |
---|
1455 | /* Return 1 if Y is a memory address which contains no side effects |
---|
1456 | and would remain valid after the addition of a positive integer |
---|
1457 | less than the size of that mode. |
---|
1458 | |
---|
1459 | We assume that the original address is valid and do not check it. |
---|
1460 | We do check that it is valid for narrower modes. |
---|
1461 | |
---|
1462 | If STRICTP is nonzero, we require a strictly valid address, |
---|
1463 | for the sake of use in reload.c. */ |
---|
1464 | |
---|
int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     register rtx y;
{
  register enum rtx_code ycode = GET_CODE (y);
  register rtx z;
  rtx y1 = y;
  rtx *y2;
  /* Dispatch to the strict or non-strict address validator.  */
  int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);

  /* A constant address can always take an offset.  */
  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      /* Temporarily add the maximum offset in place, test, then restore.
	 Y2 points into Y's own rtl, so this briefly mutates the caller's
	 expression.  */
      y1 = *y2;
      *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* Auto-increment/decrement addresses can never take an offset.  */
  if (ycode == PRE_DEC || ycode == PRE_INC
      || ycode == POST_DEC || ycode == POST_INC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  */

  z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
---|
1519 | |
---|
1520 | /* Return 1 if ADDR is an address-expression whose effect depends |
---|
1521 | on the mode of the memory reference it is used in. |
---|
1522 | |
---|
1523 | Autoincrement addressing is a typical example of mode-dependence |
---|
1524 | because the amount of the increment depends on the mode. */ |
---|
1525 | |
---|
int
mode_dependent_address_p (addr)
     rtx addr;
{
  /* GO_IF_MODE_DEPENDENT_ADDRESS is a target macro that jumps to the
     label `win' when ADDR's meaning depends on the mode of the memory
     reference; falling through means it is mode-independent.  */
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
 win:
  return 1;
}
---|
1535 | |
---|
1536 | /* Return 1 if OP is a general operand |
---|
1537 | other than a memory ref with a mode dependent address. */ |
---|
1538 | |
---|
int
mode_independent_operand (op, mode)
     enum machine_mode mode;	/* K&R decls may be listed in any order.  */
     rtx op;
{
  rtx addr;

  /* OP must at least be a general operand.  */
  if (! general_operand (op, mode))
    return 0;

  /* Any non-MEM general operand has no address, hence no mode dependence.  */
  if (GET_CODE (op) != MEM)
    return 1;

  /* For a MEM, the target macro jumps to `lose' if the address's meaning
     depends on the mode of the reference.  */
  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
 lose:
  return 0;
}
---|
1558 | |
---|
1559 | /* Given an operand OP that is a valid memory reference |
---|
1560 | which satisfies offsettable_memref_p, |
---|
1561 | return a new memory reference whose address has been adjusted by OFFSET. |
---|
1562 | OFFSET should be positive and less than the size of the object referenced. |
---|
1563 | */ |
---|
1564 | |
---|
1565 | rtx |
---|
1566 | adj_offsettable_operand (op, offset) |
---|
1567 | rtx op; |
---|
1568 | int offset; |
---|
1569 | { |
---|
1570 | register enum rtx_code code = GET_CODE (op); |
---|
1571 | |
---|
1572 | if (code == MEM) |
---|
1573 | { |
---|
1574 | register rtx y = XEXP (op, 0); |
---|
1575 | register rtx new; |
---|
1576 | |
---|
1577 | if (CONSTANT_ADDRESS_P (y)) |
---|
1578 | { |
---|
1579 | new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset)); |
---|
1580 | RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op); |
---|
1581 | return new; |
---|
1582 | } |
---|
1583 | |
---|
1584 | if (GET_CODE (y) == PLUS) |
---|
1585 | { |
---|
1586 | rtx z = y; |
---|
1587 | register rtx *const_loc; |
---|
1588 | |
---|
1589 | op = copy_rtx (op); |
---|
1590 | z = XEXP (op, 0); |
---|
1591 | const_loc = find_constant_term_loc (&z); |
---|
1592 | if (const_loc) |
---|
1593 | { |
---|
1594 | *const_loc = plus_constant_for_output (*const_loc, offset); |
---|
1595 | return op; |
---|
1596 | } |
---|
1597 | } |
---|
1598 | |
---|
1599 | new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset)); |
---|
1600 | RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op); |
---|
1601 | return new; |
---|
1602 | } |
---|
1603 | abort (); |
---|
1604 | } |
---|
1605 | |
---|
1606 | #ifdef REGISTER_CONSTRAINTS |
---|
1607 | |
---|
1608 | /* Check the operands of an insn (found in recog_operands) |
---|
1609 | against the insn's operand constraints (found via INSN_CODE_NUM) |
---|
1610 | and return 1 if they are valid. |
---|
1611 | |
---|
1612 | WHICH_ALTERNATIVE is set to a number which indicates which |
---|
1613 | alternative of constraints was matched: 0 for the first alternative, |
---|
1614 | 1 for the next, etc. |
---|
1615 | |
---|
In addition, when two operands match
---|
1617 | and it happens that the output operand is (reg) while the |
---|
1618 | input operand is --(reg) or ++(reg) (a pre-inc or pre-dec), |
---|
1619 | make the output operand look like the input. |
---|
1620 | This is because the output operand is the one the template will print. |
---|
1621 | |
---|
1622 | This is used in final, just before printing the assembler code and by |
---|
1623 | the routines that determine an insn's attribute. |
---|
1624 | |
---|
1625 | If STRICT is a positive non-zero value, it means that we have been |
---|
1626 | called after reload has been completed. In that case, we must |
---|
1627 | do all checks strictly. If it is zero, it means that we have been called |
---|
1628 | before reload has completed. In that case, we first try to see if we can |
---|
1629 | find an alternative that matches strictly. If not, we try again, this |
---|
1630 | time assuming that reload will fix up the insn. This provides a "best |
---|
1631 | guess" for the alternative and is used to compute attributes of insns prior |
---|
1632 | to reload. A negative value of STRICT is used for this internal call. */ |
---|
1633 | |
---|
/* Record of one pair of operands that a matching constraint (`0' ... `4')
   said must be identical, for the case where operands_match_p returned 2:
   the input operand is a pre/post-inc/dec of the output.  After an
   alternative wins, constrain_operands copies the input's rtx over the
   output's so the template prints the side-effect form.
   Note: `this' is a valid identifier in C (though it would clash with
   the C++ keyword).  */
struct funny_match
{
  int this, other;
};
---|
1638 | |
---|
int
constrain_operands (insn_code_num, strict)
     int insn_code_num;
     int strict;
{
  /* Remaining (unscanned) constraint string for each operand; each pass
     of the outer loop consumes one comma-separated alternative.  */
  char *constraints[MAX_RECOG_OPERANDS];
  /* matching_operands[I] is the operand that a `0'-`4' constraint tied
     to operand I, or -1 if none; used by the earlyclobber check.  */
  int matching_operands[MAX_RECOG_OPERANDS];
  /* Whether each operand is read, written, or both, per `=' / `+'.  */
  enum op_type {OP_IN, OP_OUT, OP_INOUT} op_types[MAX_RECOG_OPERANDS];
  /* Nonzero if the operand carried a `&' (earlyclobber) this alternative.  */
  int earlyclobber[MAX_RECOG_OPERANDS];
  register int c;
  int noperands = insn_n_operands[insn_code_num];

  /* Pairs of matched operands needing the *x -> *--x fixup (see above).  */
  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;
  int nalternatives = insn_n_alternatives[insn_code_num];

  /* No operands or no constraints: anything the pattern matched is valid.  */
  if (noperands == 0 || nalternatives == 0)
    return 1;

  for (c = 0; c < noperands; c++)
    {
      constraints[c] = insn_operand_constraint[insn_code_num][c];
      matching_operands[c] = -1;
      op_types[c] = OP_IN;
    }

  which_alternative = 0;

  /* Try each alternative in turn; the first one where every operand
     satisfies its constraint wins.  */
  while (which_alternative < nalternatives)
    {
      register int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < noperands; opno++)
	{
	  register rtx op = recog_operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  register char *p = constraints[opno];
	  /* Hard-reg word offset contributed by a SUBREG, for class tests.  */
	  int offset = 0;
	  int win = 0;
	  int val;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (GET_RTX_CLASS (GET_CODE (op)) == '1')
	    op = XEXP (op, 0);

	  /* Look through a SUBREG; remember the word offset only for
	     hard registers, where it affects which regs are occupied.  */
	  if (GET_CODE (op) == SUBREG)
	    {
	      if (GET_CODE (SUBREG_REG (op)) == REG
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = SUBREG_WORD (op);
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  /* Scan this operand's constraint letters for the current
	     alternative; any one letter that accepts the operand wins.  */
	  while (*p && (c = *p++) != ',')
	    switch (c)
	      {
	      case '?':	/* Cost modifiers and commutativity marker: no
			   effect on whether the operand is accepted.  */
	      case '!':
	      case '*':
	      case '%':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		while (*p && *p != ',')
		  p++;
		break;

	      case '=':
		op_types[opno] = OP_OUT;
		break;

	      case '+':
		op_types[opno] = OP_INOUT;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		break;

	      case '0':
	      case '1':
	      case '2':
	      case '3':
	      case '4':
		/* This operand must be the same as a previous one.
		   This kind of constraint is used for instructions such
		   as add when they take only two operands.

		   Note that the lower-numbered operand is passed first.

		   If we are not testing strictly, assume that this constraint
		   will be satisfied.  */
		if (strict < 0)
		  val = 1;
		else
		  val = operands_match_p (recog_operand[c - '0'],
					  recog_operand[opno]);

		matching_operands[opno] = c - '0';
		matching_operands[c - '0'] = opno;

		if (val != 0)
		  win = 1;
		/* If output is *x and input is *--x,
		   arrange later to change the output to *--x as well,
		   since the output op is the one that will be printed.  */
		if (val == 2 && strict > 0)
		  {
		    funny_match[funny_match_index].this = opno;
		    funny_match[funny_match_index++].other = c - '0';
		  }
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p
			(insn_operand_mode[insn_code_num][opno], op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (strict < 0
		    || GENERAL_REGS == ALL_REGS
		    || GET_CODE (op) != REG
		    || (reload_in_progress
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		  win = 1;
		break;

	      case 'r':
		/* A register: pre-reload a pseudo or SCRATCH will do, since
		   reload can supply a hard reg; after reload the hard reg
		   (offset by any SUBREG word) must lie in GENERAL_REGS.  */
		if (strict < 0
		    || (strict == 0
			&& GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || (strict == 0 && GET_CODE (op) == SCRATCH)
		    || (GET_CODE (op) == REG
			&& ((GENERAL_REGS == ALL_REGS
			     && REGNO (op) < FIRST_PSEUDO_REGISTER)
			    || reg_fits_class_p (op, GENERAL_REGS,
						 offset, mode))))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case 'm':
		if (GET_CODE (op) == MEM
		    /* Before reload, accept what reload can turn into mem.  */
		    || (strict < 0 && CONSTANT_P (op))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      case '<':
		/* Memory with autodecrement addressing.  */
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		/* Memory with autoincrement addressing.  */
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
#ifndef REAL_ARITHMETIC
		/* Match any CONST_DOUBLE, but only if
		   we can examine the bits of it reliably.  */
		if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
		     || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
		    && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
		  break;
#endif
		if (GET_CODE (op) == CONST_DOUBLE)
		  win = 1;
		break;

	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE)
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		/* Machine-specific ranges of CONST_DOUBLE values.  */
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
		  win = 1;
		break;

	      case 's':
		/* Symbolic constant: reject integer constants here, then
		   fall through to the general constant test.  */
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
		/* fall through */
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		/* Integer constant (a VOIDmode CONST_DOUBLE is an integer
		   wider than a host word).  */
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		/* Machine-specific ranges of CONST_INT values.  */
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
		  win = 1;
		break;

#ifdef EXTRA_CONSTRAINT
	      case 'Q':
	      case 'R':
	      case 'S':
	      case 'T':
	      case 'U':
		if (EXTRA_CONSTRAINT (op, c))
		  win = 1;
		break;
#endif

	      case 'V':
		/* Memory that is NOT offsettable.  */
		if (GET_CODE (op) == MEM
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
			|| (reload_in_progress
			    && !(GET_CODE (op) == REG
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		/* Offsettable memory.  */
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || GET_CODE (op) == MEM))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		/* Any other letter names a machine-specific register class;
		   handled like `r' but against that class.  */
		if (strict < 0
		    || (strict == 0
			&& GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || (strict == 0 && GET_CODE (op) == SCRATCH)
		    || (GET_CODE (op) == REG
			&& reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
					     offset, mode)))
		  win = 1;
	      }

	  /* Resume at the next alternative on the next outer iteration.  */
	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0)
	    for (eopno = 0; eopno < noperands; eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && GET_CODE (recog_operand[eopno]) == REG)
		for (opno = 0; opno < noperands; opno++)
		  if ((GET_CODE (recog_operand[opno]) == MEM
		       || op_types[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *insn_operand_constraint[insn_code_num][opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_operand[opno],
						 recog_operand[eopno]))
		      && ! safe_from_earlyclobber (recog_operand[opno],
						   recog_operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      /* Apply the recorded *x -> *--x output fixups.  */
	      while (--funny_match_index >= 0)
		{
		  recog_operand[funny_match[funny_match_index].other]
		    = recog_operand[funny_match[funny_match_index].this];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }

  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (insn_code_num, -1);
  else
    return 0;
}
---|
1992 | |
---|
1993 | /* Return 1 iff OPERAND (assumed to be a REG rtx) |
---|
1994 | is a hard reg in class CLASS when its regno is offset by OFFSET |
---|
1995 | and changed to mode MODE. |
---|
1996 | If REG occupies multiple hard regs, all of them must be in CLASS. */ |
---|
1997 | |
---|
1998 | int |
---|
1999 | reg_fits_class_p (operand, class, offset, mode) |
---|
2000 | rtx operand; |
---|
2001 | register enum reg_class class; |
---|
2002 | int offset; |
---|
2003 | enum machine_mode mode; |
---|
2004 | { |
---|
2005 | register int regno = REGNO (operand); |
---|
2006 | if (regno < FIRST_PSEUDO_REGISTER |
---|
2007 | && TEST_HARD_REG_BIT (reg_class_contents[(int) class], |
---|
2008 | regno + offset)) |
---|
2009 | { |
---|
2010 | register int sr; |
---|
2011 | regno += offset; |
---|
2012 | for (sr = HARD_REGNO_NREGS (regno, mode) - 1; |
---|
2013 | sr > 0; sr--) |
---|
2014 | if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class], |
---|
2015 | regno + sr)) |
---|
2016 | break; |
---|
2017 | return sr == 0; |
---|
2018 | } |
---|
2019 | |
---|
2020 | return 0; |
---|
2021 | } |
---|
2022 | |
---|
2023 | #endif /* REGISTER_CONSTRAINTS */ |
---|