/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 91, 94-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "insn-flags.h"
#include "insn-codes.h"

static rtx break_out_memory_refs PROTO((rtx));
static void emit_stack_probe PROTO((rtx));

/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */

rtx
plus_constant_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code;
  register enum machine_mode mode;
  register rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        HOST_WIDE_INT lv, hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          /* Any rtl we create here must go in a saveable obstack, since
             we might have been called from within combine.  */
          push_obstacks_nochange ();
          rtl_in_saveable_obstack ();
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          pop_obstacks ();
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for a constant term in the sum and combine it
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Use a recursive call in case the remaining operand is something
         that we handle specially, such as a SYMBOL_REF.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return plus_constant (XEXP (x, 0), c + INTVAL (XEXP (x, 1)));
      else if (CONSTANT_P (XEXP (x, 0)))
        return gen_rtx (PLUS, mode,
                        plus_constant (XEXP (x, 0), c),
                        XEXP (x, 1));
      else if (CONSTANT_P (XEXP (x, 1)))
        return gen_rtx (PLUS, mode,
                        XEXP (x, 0),
                        plus_constant (XEXP (x, 1), c));
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx (PLUS, mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx (CONST, mode, x);
  else
    return x;
}

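/* Illustrative only: a minimal sketch of how plus_constant is typically
   used, kept inside "#if 0" so it is never compiled.  The helper name
   `example_field_address' and the byte offset 4 are hypothetical.  */
#if 0
static rtx
example_field_address (base)
     rtx base;			/* address of the enclosing object */
{
  /* base + 4; folds into a single constant when BASE is itself constant.  */
  rtx field_addr = plus_constant (base, 4);

  /* Turn the result into a valid SImode memory reference.  */
  return gen_rtx (MEM, SImode, memory_address (SImode, field_addr));
}
#endif
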
/* This is the same as `plus_constant', except that it handles LO_SUM.

   This function should be used via the `plus_constant_for_output' macro.  */

rtx
plus_constant_for_output_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code = GET_CODE (x);
  register enum machine_mode mode = GET_MODE (x);
  int all_constant = 0;

  if (GET_CODE (x) == LO_SUM)
    return gen_rtx (LO_SUM, mode, XEXP (x, 0),
                    plus_constant_for_output (XEXP (x, 1), c));

  else
    return plus_constant (x, c);
}

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  register rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx (PLUS, GET_MODE (x), x0, x1);
    }

  return x;
}

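/* Worked example (illustrative only): given X = (plus (reg 100) (const_int 8))
   and *CONSTPTR = (const_int 0), eliminate_constant_term returns (reg 100)
   and leaves *CONSTPTR = (const_int 8).  A sum with no constant term, such
   as (plus (reg 100) (reg 101)), is returned unchanged.  */
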
/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before it is next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        continue;
      if (GET_CODE (insn) == CODE_LABEL
          || GET_CODE (insn) == BARRIER)
        return 0;
      if (GET_CODE (insn) == INSN
          || GET_CODE (insn) == JUMP_INSN
          || GET_CODE (insn) == CALL_INSN)
        {
          if (reg_set_p (reg, insn))
            return 0;
          if (reg_mentioned_p (reg, PATTERN (insn)))
            return insn;
          if (GET_CODE (insn) == JUMP_INSN)
            {
              if (simplejump_p (insn))
                next = JUMP_LABEL (insn);
              else
                return 0;
            }
          if (GET_CODE (insn) == CALL_INSN
              && REGNO (reg) < FIRST_PSEUDO_REGISTER
              && call_used_regs[REGNO (reg)])
            return 0;
        }
      else
        abort ();
    }
  return 0;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (exp)
     tree exp;
{
  tree size = size_in_bytes (TREE_TYPE (exp));

  if (TREE_CODE (size) != INTEGER_CST
      && contains_placeholder_p (size))
    size = build (WITH_RECORD_EXPR, sizetype, size, exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
                      EXPAND_MEMORY_USE_BAD);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (x)
     register rtx x;
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = break_out_memory_refs (XEXP (x, 0));
      register rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}

#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      return x;

    case LABEL_REF:
      temp = gen_rtx (LABEL_REF, to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = gen_rtx (SYMBOL_REF, to_mode, XSTR (x, 0));
      SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
      CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
      return temp;

    case CONST:
      return gen_rtx (CONST, to_mode,
                      convert_memory_address (to_mode, XEXP (x, 0)));

    case PLUS:
    case MULT:
      /* For addition, if the second operand is a small constant, we can
         safely permute the conversion and addition operation.  We can
         always safely permute them if we are making the address narrower.
         In addition, always permute the operations if this is a constant.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
              && (INTVAL (XEXP (x, 1)) + 20000 < 40000
                  || CONSTANT_P (XEXP (x, 0)))))
        return gen_rtx (GET_CODE (x), to_mode,
                        convert_memory_address (to_mode, XEXP (x, 0)),
                        convert_memory_address (to_mode, XEXP (x, 1)));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
}
#endif

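/* Worked example (illustrative only): with ptr_mode == SImode and
   Pmode == DImode, converting (plus:SI (reg:SI 100) (const_int 4)) to
   DImode takes the PLUS/MULT case above because (const_int 4) is small,
   so the conversion is pushed inside the addition: only the register
   operand has to be extended, and the result is again a PLUS that
   ordinary address arithmetic insns can handle.  */
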
/* Given a memory address or facsimile X, construct a new address,
   currently equivalent, that is stable: future stores won't change it.

   X must be composed of constants, register and memory references
   combined with addition, subtraction and multiplication:
   in other words, just what you can get from expand_expr if sum_ok is 1.

   Works by making copies of all regs and memory locations used
   by X and combining them the same way X does.
   You could also stabilize the reference to this address
   by copying the address to a register with copy_to_reg;
   but then you wouldn't get indexed addressing in the reference.  */

rtx
copy_all_regs (x)
     register rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
          )
        x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = copy_all_regs (XEXP (x, 0));
      register rtx op1 = copy_all_regs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}

/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx = x;

  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) == ptr_mode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
           && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_p (mode, oldx))
        goto win2;

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_p (mode, y))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx (PLUS, GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_p (mode, y))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
          /* Don't copy an addr via a reg if it is one of our stack slots.  */
          && ! (GET_CODE (x) == PLUS
                && (XEXP (x, 0) == virtual_stack_vars_rtx
                    || XEXP (x, 0) == virtual_incoming_args_rtx)))
        {
          if (general_operand (x, Pmode))
            x = force_reg (Pmode, x);
          else
            x = force_operand (x, NULL_RTX);
        }
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, 1);
  else if (GET_CODE (x) == PLUS
           && GET_CODE (XEXP (x, 0)) == REG
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), 1);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

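/* Illustrative only: a minimal sketch, kept inside "#if 0", of the usual
   pattern for turning an arbitrary address computation into a usable
   memory reference and loading from it.  The helper name
   `example_load_word' is hypothetical.  */
#if 0
static rtx
example_load_word (addr)
     rtx addr;			/* any address expression, possibly not yet valid */
{
  rtx mem = gen_rtx (MEM, SImode, memory_address (SImode, addr));
  rtx temp = gen_reg_rtx (SImode);

  emit_move_insn (temp, mem);
  return temp;
}
#endif
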
/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
{
  if (GET_CODE (ref) != MEM)
    return ref;
  if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;
  /* Don't alter REF itself, since that is probably a stack slot.  */
  return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
}

/* Return a modified copy of X with its memory address copied
   into a temporary register to protect it from side effects.
   If X is not a MEM, it is returned unchanged (and not copied).
   Perhaps even if it is a MEM, if there is no need to change it.  */

rtx
stabilize (x)
     rtx x;
{
  register rtx addr;
  if (GET_CODE (x) != MEM)
    return x;
  addr = XEXP (x, 0);
  if (rtx_unstable_p (addr))
    {
      rtx temp = copy_all_regs (addr);
      rtx mem;
      if (GET_CODE (temp) != REG)
        temp = copy_to_reg (temp);
      mem = gen_rtx (MEM, GET_MODE (x), temp);

      /* Mark returned memref with in_struct if it's in an array or
         structure.  Copy const and volatile from original memref.  */

      MEM_IN_STRUCT_P (mem) = MEM_IN_STRUCT_P (x) || GET_CODE (addr) == PLUS;
      RTX_UNCHANGING_P (mem) = RTX_UNCHANGING_P (x);
      MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (x);
      return mem;
    }
  return x;
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (x)
     rtx x;
{
  register rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
    abort ();
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp, insn, set;

  if (GET_CODE (x) == REG)
    return x;
  temp = gen_reg_rtx (mode);
  insn = emit_move_insn (temp, x);

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp)
    {
      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

      if (note)
        XEXP (note, 0) = x;
      else
        REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, x, REG_NOTES (insn));
    }
  return temp;
}

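/* Worked example (illustrative only): force_reg (SImode, GEN_INT (42))
   emits (set (reg:SI N) (const_int 42)) for a fresh pseudo N and attaches
   a REG_EQUAL note carrying (const_int 42), so later passes may substitute
   the constant for the register.  */
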
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (x)
     rtx x;
{
  register rtx temp;
  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
    return x;
  temp = gen_reg_rtx (GET_MODE (x));
  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (x, target, mode)
     rtx x, target;
     enum machine_mode mode;
{
  register rtx temp;

  if (target && GET_CODE (target) == REG)
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is non-zero if this call is promoting args for a call.  */

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}

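/* Worked example (illustrative only): on a target whose PROMOTE_MODE macro
   widens sub-word integers to SImode, calling promote_mode on a `signed
   char' (QImode) returns SImode and leaves *PUNSIGNEDP at 0, telling the
   caller to use a sign-extending load or extension for that value.  */
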
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

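/* Illustrative only: a minimal sketch, kept inside "#if 0", of pairing the
   two adjustments to reserve and then release a scratch block on the stack.
   The 64-byte count is hypothetical.  */
#if 0
static void
example_scratch_block ()
{
  anti_adjust_stack (GEN_INT (64));	/* push: reserve 64 bytes */
  /* ... emit code that uses the newly reserved space ...  */
  adjust_stack (GEN_INT (64));		/* pop: release the 64 bytes */
}
#endif
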
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

rtx
round_push (size)
     rtx size;
{
#ifdef STACK_BOUNDARY
  int align = STACK_BOUNDARY / BITS_PER_UNIT;
  if (align == 1)
    return size;
  if (GET_CODE (size) == CONST_INT)
    {
      int new = (INTVAL (size) + align - 1) / align * align;
      if (INTVAL (size) != new)
        size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
                            NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
#endif /* STACK_BOUNDARY */
  return size;
}

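/* Worked example (illustrative only): with STACK_BOUNDARY == 64, ALIGN is
   8 bytes, so a constant SIZE of 13 becomes (13 + 8 - 1) / 8 * 8 = 16.
   A variable SIZE is rounded the same way at run time by the add, divide
   and multiply emitted above.  */
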
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) () = gen_move_insn;
  enum machine_mode mode = Pmode;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        {
          fcn = gen_save_stack_block;
          mode = insn_operand_mode[CODE_FOR_save_stack_block][0];
        }
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        {
          fcn = gen_save_stack_function;
          mode = insn_operand_mode[CODE_FOR_save_stack_function][0];
        }
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        {
          fcn = gen_save_stack_nonlocal;
          mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
        }
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
        abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx sa;
     rtx after;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) () = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

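/* Illustrative only: a minimal sketch, kept inside "#if 0", of how a caller
   pairs the two routines around code that moves the stack pointer (for
   example a variable-sized allocation), so the pointer is put back on exit
   from the block.  */
#if 0
static void
example_save_restore ()
{
  rtx save_area = 0;

  emit_stack_save (SAVE_BLOCK, &save_area, NULL_RTX);
  /* ... emit code that calls allocate_dynamic_stack_space ...  */
  emit_stack_restore (SAVE_BLOCK, save_area, NULL_RTX);
}
#endif
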
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    {
      if (GET_CODE (size) == CONST_INT)
        size = GEN_INT (INTVAL (size)
                        + (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
      else
        size = expand_binop (Pmode, add_optab, size,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);
    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                         NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif

  do_pending_stack_adjust ();

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align / BITS_PER_UNIT);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode;

      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
          && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
                (target, Pmode)))
        target = copy_to_mode_reg (Pmode, target);
      mode = insn_operand_mode[(int) CODE_FOR_allocate_stack][1];
      size = convert_modes (mode, ptr_mode, size, 1);
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][1]
          && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][1])
                (size, mode)))
        size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
      size = convert_modes (Pmode, ptr_mode, size, 1);
      anti_adjust_stack (size);
#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slot != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (address)
     rtx address;
{
  rtx memref = gen_rtx (MEM, word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (first, size)
     HOST_WIDE_INT first;
     rtx size;
{
  /* First see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  if (HAVE_check_stack)
    {
      rtx last_addr = force_operand (gen_rtx (STACK_GROW_OP, Pmode,
                                              stack_pointer_rtx,
                                              plus_constant (size, first)),
                                     NULL_RTX);

      if (insn_operand_predicate[(int) CODE_FOR_check_stack][0]
          && ! ((*insn_operand_predicate[(int) CODE_FOR_check_stack][0])
                (last_addr, Pmode)))
        last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
      return;
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  if (GET_CODE (size) == CONST_INT
      && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
         for values of N from 1 until it exceeds LAST.  If only one
         probe is needed, this will not generate any code.  Then probe
         at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
           offset < INTVAL (size);
           offset = offset + STACK_CHECK_PROBE_INTERVAL)
        emit_stack_probe (gen_rtx (STACK_GROW_OP, Pmode,
                                   stack_pointer_rtx, GEN_INT (offset)));

      emit_stack_probe (gen_rtx (STACK_GROW_OP, Pmode, stack_pointer_rtx,
                                 plus_constant (size, first)));
    }

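  /* Worked example (illustrative only): with FIRST == 0, SIZE == 10000 and
     a probe interval of 4096, the loop above probes at offsets 4096 and
     8192 from the stack pointer (in the direction of stack growth) and the
     final call probes at offset 10000.  */
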
  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
        = force_operand (gen_rtx (STACK_GROW_OP, Pmode, stack_pointer_rtx,
                                  GEN_INT (first
                                           + STACK_CHECK_PROBE_INTERVAL)),
                         NULL_RTX);
      rtx last_addr
        = force_operand (gen_rtx (STACK_GROW_OP, Pmode, stack_pointer_rtx,
                                  plus_constant (size, first)),
                         NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (GET_CODE (test_addr) != REG
          || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
        test_addr = force_reg (Pmode, test_addr);

      emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

      emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#endif

      if (temp != test_addr)
        abort ();

      emit_label (test_lab);
      emit_cmp_insn (test_addr, last_addr, CMP_OPCODE, NULL_RTX, Pmode, 1, 0);
      emit_jump_insn ((*bcc_gen_fctn[(int) CMP_OPCODE]) (loop_lab));
      emit_jump (end_lab);
      emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
      emit_label (end_lab);

      /* If we will be doing stupid optimization, show that test_addr
         is still live.  */
      if (obey_regdecls)
        emit_insn (gen_rtx (USE, VOIDmode, test_addr));

      emit_stack_probe (last_addr);
    }
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.  */

rtx
hard_function_value (valtype, func)
     tree valtype;
     tree func;
{
  rtx val = FUNCTION_VALUE (valtype, func);
  if (GET_CODE (val) == REG
      && GET_MODE (val) == BLKmode)
    {
      int bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;
      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != MAX_MACHINE_MODE;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      if (tmpmode == MAX_MACHINE_MODE)
        abort ();

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  return LIBCALL_VALUE (mode);
}

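/* Worked example (illustrative only): if FUNCTION_VALUE hands back a BLKmode
   register for a 6-byte structure, the loop above walks the MODE_INT modes
   from narrowest upward and stops at the first one of at least 6 bytes
   (DImode on a typical 32- or 64-bit target), and the register is given
   that mode.  */
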
/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (code)
     enum rtx_code code;
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}