/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */

void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
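
/* Deferred popping in brief (a descriptive sketch, not code from this
   file): while inhibit_defer_pop is zero, a call's argument-popping
   stack adjustment can simply be accumulated into pending_stack_adjust,
   and do_pending_stack_adjust later emits one combined adjust_stack for
   the whole sequence.  Regions that need the stack pointer to be exact
   bracket themselves with the NO_DEFER_POP/OK_DEFER_POP macros, which
   bump inhibit_defer_pop and so force immediate pops.  */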
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}
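
/* A minimal usage sketch (hypothetical labels and condition tree):

     jumpif (cond, yes_label);      jump when COND is nonzero
     jumpifnot (cond, no_label);    jump when COND is zero

   Each wrapper leaves the other do_jump label null, which means
   "fall through" for that outcome.  */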

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  return (rtx_cost (and_test, IF_THEN_ELSE)
          <= rtx_cost (shift_test, IF_THEN_ELSE));
}
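
/* For instance, prefer_and_bit_test (SImode, 3) asks the target's cost
   model whether "x & 8" is no more expensive than "(x >> 3) & 1" when
   used as a branch condition.  The cached rtxes above are just templates
   whose mode and constants are refreshed on each query, so no garbage
   is created per call.  */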

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */
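
/* For example, jumpifnot above is just do_jump (exp, label, NULL_RTX):
   the emitted code jumps to LABEL when EXP is zero and falls through
   when EXP is nonzero.  */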

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build2 (NE_EXPR, TREE_TYPE (exp),
                                   TREE_OPERAND (exp, 0),
                                   TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
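      /* For example, fold may have rewritten "x & 8" as "(x >> 3) & 1";
         when prefer_and_bit_test says the AND form is cheaper as a
         branch condition, the code below reconstructs "x & 8" before
         emitting the jump.  */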
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

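      /* E.g. testing "x & 0x80" with x an int: the AND fits in 8 bits,
         so the test can be done as a QImode sign-bit compare when the
         target provides a QImode compare pattern.  */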
      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
    case COND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

      {
        enum rtx_code rcode1;
        enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;
            rtx drop_through_label = 0;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();

            cmp0 = fold (build2 (tcode1, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build2 (tcode2, TREE_TYPE (exp), op0, op1));
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);

            if (drop_through_label)
              {
                do_pending_stack_adjust ();
                emit_label (drop_through_label);
              }
          }
      }
      break;

      /* Special case:
         __builtin_expect (<test>, 0)  and
         __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
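      /* E.g. for "if (__builtin_expect (x == 0, 0))" the call below lets
         expand_builtin_expect_jump emit the compare-and-branch for
         "x == 0" directly, with the expectation attached, instead of
         materializing a 0/1 value and then testing it.  */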
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
      /* Fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      do_pending_stack_adjust ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else
        {
          gcc_assert (GET_MODE (temp) != VOIDmode);

          /* The RTL optimizers prefer comparisons against pseudos.  */
          if (GET_CODE (temp) == SUBREG)
            {
              /* Compare promoted variables in their promoted mode.  */
              if (SUBREG_PROMOTED_VAR_P (temp)
                  && REG_P (XEXP (temp, 0)))
                temp = XEXP (temp, 0);
              else
                temp = copy_to_reg (temp);
            }
          do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                   NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                                   GET_MODE (temp), NULL_RTX,
                                   if_false_label, if_true_label);
        }
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
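
/* As a sketch, for a DImode compare on a 32-bit target the loop above
   emits (hi/lo being the high and low words of each operand):

     if (hi0 > hi1)  goto if_true_label;    signed iff !UNSIGNEDP
     if (hi0 != hi1) goto if_false_label;
     if (lo0 > lo1)  goto if_true_label;    always unsigned
     if (lo0 != lo1) goto if_false_label;
     goto if_false_label;

   i.e. only the high-order word is compared with the caller's
   signedness; lower words are only reached when all higher words
   are equal.  */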

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

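  /* E.g. for DImode on a 32-bit target this reduces "op0 == 0" to
     "(low_word | high_word) == 0", a single word_mode compare.  */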
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE and
   return an rtx describing the result; on a CC0 target the emitted
   compare sets CC0 and the returned rtx refers to cc0_rtx.  MODE is the
   machine mode of the comparison, not of the result.  The decision as
   to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      if (CONSTANT_P (tem))
        return tem;

      if (COMPARISON_P (tem))
        {
          code = GET_CODE (tem);
          op0 = XEXP (tem, 0);
          op1 = XEXP (tem, 1);
          mode = GET_MODE (op0);
          unsignedp = (code == GTU || code == LTU
                       || code == GEU || code == LEU);
        }
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }
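
  /* For example, "jump to L when (a < b) is false" becomes "jump to L
     when (a >= b)", so the single emitted branch targets L directly.
     This is skipped for floating modes, where reversing a condition is
     unsafe in the presence of NaNs.  */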

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

#include "gt-dojump.h"