/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

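/* As an illustration of how these heuristics interact (the numbers
   are hypothetical and target-dependent): with MOVE_MAX == 4 and
   MOVE_RATIO == 3, copying SIZE == 8 at word alignment costs two
   SImode moves, so MOVE_BY_PIECES_P (8, 32) holds and the copy is
   expanded inline; SIZE == 16 would cost four moves, the predicate
   fails, and the copy falls through to a movstr pattern or libcall.  */
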
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
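
/* The probing above simply asks recog whether a bare (set (reg) (mem))
   or (set (mem) (reg)) matches some insn pattern, for each mode and
   hard register, and caches the answers in direct_load and
   direct_store.  For instance (illustrative and target-dependent): on
   a target whose floating-point registers cannot be loaded in SImode,
   recog fails for those regnos, and direct_load[(int) SImode] is set
   only if some other register class accepts the load.  */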

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Retrieve a mark on the queue.  */

static rtx
mark_queue (void)
{
  return pending_chain;
}

/* Perform all the pending incrementations that have been enqueued
   after MARK was retrieved.  If MARK is null, perform all the
   pending incrementations.  */

static void
emit_insns_enqueued_after_mark (rtx mark)
{
  rtx p;

  /* The marked incrementation may have been emitted in the meantime
     through a call to emit_queue.  In this case, the mark is not valid
     anymore so do nothing.  */
  if (mark && ! QUEUED_BODY (mark))
    return;

  while ((p = pending_chain) != mark)
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
        case CODE_LABEL:
        case BARRIER:
        case NOTE:
          QUEUED_INSN (p) = body;
          emit_insn (body);
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      QUEUED_BODY (p) = 0;
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  emit_insns_enqueued_after_mark (NULL_RTX);
}
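
/* The expected calling pattern, sketched for illustration (callers
   such as the increment expanders follow it):

       rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
       ...
       rtx safe = protect_from_queue (q, 0);
       ... put SAFE, never Q itself, into the insns that need the
           pre-increment value ...
       emit_queue ();

   The QUEUED rtx returned by enqueue_insn must always pass through
   protect_from_queue before reaching an insn, and the protected value
   must not be held across an intervening emit_queue.  */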
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
        tab = trunc_optab;
      else
        abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
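
/* For illustration (a sketch, not code from elsewhere in GCC): a
   caller widening an unsigned SImode value SRC into DImode would write

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 1);

   With a zero_extendsidi2 pattern this emits a single extension insn;
   without one, the multiword path above moves the low word and fills
   the high word with zero because UNSIGNEDP is nonzero.  */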

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
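
/* A worked example of the CONST_INT adjustment above, with
   illustrative numbers: converting the QImode constant -1 (VAL == -1,
   WIDTH == 8) first masks VAL to 0xff; if UNSIGNEDP is zero the
   preserved sign bit (1 << 7) is set, so val |= (HOST_WIDE_INT) (-1) << 8
   widens VAL back to -1.  The same eight-bit pattern thus yields 255
   or -1 depending on UNSIGNEDP.  */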
\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
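
/* For example (illustrative; assumes MOVE_MAX == 4, all move insns
   available and no slow unaligned accesses): L == 11 is counted as two
   SImode moves covering 8 bytes, one HImode move covering 2, and one
   QImode move covering 1, so move_by_pieces_ninsns returns 4.  */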

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (!MEM_P (x))
    abort ();
  if (!MEM_P (y))
    abort ();
  if (size == 0)
    abort ();

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
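
/* A typical call, for illustration (the size and the rtx names D_RTX
   and S_RTX are made up): expanding the struct assignment D = S, where
   both operands are 64-byte BLKmode MEMs, amounts to

       emit_block_move (d_rtx, s_rtx, GEN_INT (64), BLOCK_OP_NORMAL);

   which resolves, in order of preference, to move_by_pieces, a movstr
   pattern, a memcpy libcall, or an explicit loop.  */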

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
#endif
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1469 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 | |
1470 | || (*pred) (x, BLKmode)) | |
1471 | && ((pred = insn_data[(int) code].operand[1].predicate) == 0 | |
1472 | || (*pred) (y, BLKmode)) | |
1473 | && ((pred = insn_data[(int) code].operand[3].predicate) == 0 | |
1474 | || (*pred) (opalign, VOIDmode))) | |
1475 | { | |
1476 | rtx op2; | |
1477 | rtx last = get_last_insn (); | |
1478 | rtx pat; | |
1479 | ||
1480 | op2 = convert_to_mode (mode, size, 1); | |
1481 | pred = insn_data[(int) code].operand[2].predicate; | |
1482 | if (pred != 0 && ! (*pred) (op2, mode)) | |
1483 | op2 = copy_to_mode_reg (mode, op2); | |
1484 | ||
1485 | /* ??? When called via emit_block_move_for_call, it'd be | |
1486 | nice if there were some way to inform the backend, so | |
1487 | that it doesn't fail the expansion because it thinks | |
1488 | emitting the libcall would be more efficient. */ | |
1489 | ||
1490 | pat = GEN_FCN ((int) code) (x, y, op2, opalign); | |
1491 | if (pat) | |
1492 | { | |
1493 | emit_insn (pat); | |
a5e9c810 | 1494 | volatile_ok = save_volatile_ok; |
4ca79136 | 1495 | return true; |
bbf6f052 | 1496 | } |
4ca79136 RH |
1497 | else |
1498 | delete_insns_since (last); | |
bbf6f052 | 1499 | } |
4ca79136 | 1500 | } |
bbf6f052 | 1501 | |
a5e9c810 | 1502 | volatile_ok = save_volatile_ok; |
4ca79136 RH |
1503 | return false; |
1504 | } | |
3ef1eef4 | 1505 | |
8f99553f | 1506 | /* A subroutine of emit_block_move. Expand a call to memcpy. |
4ca79136 | 1507 | Return the return value from memcpy, 0 otherwise. */ |
4bc973ae | 1508 | |
4ca79136 | 1509 | static rtx |
502b8322 | 1510 | emit_block_move_via_libcall (rtx dst, rtx src, rtx size) |
4ca79136 | 1511 | { |
ee960939 | 1512 | rtx dst_addr, src_addr; |
4ca79136 RH |
1513 | tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree; |
1514 | enum machine_mode size_mode; | |
1515 | rtx retval; | |
4bc973ae | 1516 | |
4ca79136 | 1517 | /* DST, SRC, or SIZE may have been passed through protect_from_queue. |
4bc973ae | 1518 | |
ee960939 OH |
1519 | It is unsafe to save the value generated by protect_from_queue and reuse |
1520 | it later. Consider what happens if emit_queue is called before the | |
1521 | return value from protect_from_queue is used. | |
4bc973ae | 1522 | |
ee960939 OH |
1523 | Expansion of the CALL_EXPR below will call emit_queue before we are |
1524 | finished emitting RTL for argument setup. So if we are not careful we | |
1525 | could get the wrong value for an argument. | |
4bc973ae | 1526 | |
ee960939 | 1527 | To avoid this problem we go ahead and emit code to copy the addresses of |
4dfa0342 | 1528 | DST and SRC and SIZE into new pseudos. |
4bc973ae | 1529 | |
ee960939 OH |
1530 | Note this is not strictly needed for library calls since they do not call |
1531 | emit_queue before loading their arguments. However, we may need to have | |
1532 | library calls call emit_queue in the future since failing to do so could | |
1533 | cause problems for targets which define SMALL_REGISTER_CLASSES and pass | |
1534 | arguments in registers. */ | |
1535 | ||
1536 | dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0)); | |
1537 | src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0)); | |
4ca79136 | 1538 | |
ee960939 OH |
1539 | dst_addr = convert_memory_address (ptr_mode, dst_addr); |
1540 | src_addr = convert_memory_address (ptr_mode, src_addr); | |
ee960939 OH |
1541 | |
1542 | dst_tree = make_tree (ptr_type_node, dst_addr); | |
1543 | src_tree = make_tree (ptr_type_node, src_addr); | |
4ca79136 | 1544 | |
8f99553f | 1545 | size_mode = TYPE_MODE (sizetype); |
ee960939 | 1546 | |
4ca79136 RH |
1547 | size = convert_to_mode (size_mode, size, 1); |
1548 | size = copy_to_mode_reg (size_mode, size); | |
1549 | ||
1550 | /* It is incorrect to use the libcall calling conventions to call | |
1551 | memcpy in this context. This could be a user call to memcpy and | |
1552 | the user may wish to examine the return value from memcpy. For | |
1553 | targets where libcalls and normal calls have different conventions | |
8f99553f | 1554 | for returning pointers, we could end up generating incorrect code. */ |
4ca79136 | 1555 | |
8f99553f | 1556 | size_tree = make_tree (sizetype, size); |
4ca79136 RH |
1557 | |
1558 | fn = emit_block_move_libcall_fn (true); | |
1559 | arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE); | |
8f99553f JM |
1560 | arg_list = tree_cons (NULL_TREE, src_tree, arg_list); |
1561 | arg_list = tree_cons (NULL_TREE, dst_tree, arg_list); | |
4ca79136 RH |
1562 | |
1563 | /* Now we have to build up the CALL_EXPR itself. */ | |
1564 | call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); | |
1565 | call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), | |
1566 | call_expr, arg_list, NULL_TREE); | |
4ca79136 RH |
1567 | |
1568 | retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); | |
1569 | ||
ee960939 OH |
1570 | /* If we are initializing a readonly value, show the above call clobbered |
1571 | it. Otherwise, a load from it may erroneously be hoisted from a loop, or | |
1572 | the delay slot scheduler might overlook conflicts and take nasty | |
1573 | decisions. */ | |
4ca79136 | 1574 | if (RTX_UNCHANGING_P (dst)) |
ee960939 OH |
1575 | add_function_usage_to |
1576 | (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode, | |
1577 | gen_rtx_CLOBBER (VOIDmode, dst), | |
1578 | NULL_RTX)); | |
4ca79136 | 1579 | |
8f99553f | 1580 | return retval; |
4ca79136 | 1581 | } |
52cf7115 | 1582 | |
4ca79136 RH |
1583 | /* A subroutine of emit_block_move_via_libcall. Create the tree node |
1584 | for the function we use for block copies. The first time FOR_CALL | |
1585 | is true, we call assemble_external. */ | |
52cf7115 | 1586 | |
4ca79136 RH |
1587 | static GTY(()) tree block_move_fn; |
1588 | ||
9661b15f | 1589 | void |
502b8322 | 1590 | init_block_move_fn (const char *asmspec) |
4ca79136 | 1591 | { |
9661b15f | 1592 | if (!block_move_fn) |
4ca79136 | 1593 | { |
8fd3cf4e | 1594 | tree args, fn; |
9661b15f | 1595 | |
8f99553f JM |
1596 | fn = get_identifier ("memcpy"); |
1597 | args = build_function_type_list (ptr_type_node, ptr_type_node, | |
1598 | const_ptr_type_node, sizetype, | |
1599 | NULL_TREE); | |
52cf7115 | 1600 | |
4ca79136 RH |
1601 | fn = build_decl (FUNCTION_DECL, fn, args); |
1602 | DECL_EXTERNAL (fn) = 1; | |
1603 | TREE_PUBLIC (fn) = 1; | |
1604 | DECL_ARTIFICIAL (fn) = 1; | |
1605 | TREE_NOTHROW (fn) = 1; | |
66c60e67 | 1606 | |
4ca79136 | 1607 | block_move_fn = fn; |
bbf6f052 | 1608 | } |
e9a25f70 | 1609 | |
9661b15f JJ |
1610 | if (asmspec) |
1611 | { | |
1612 | SET_DECL_RTL (block_move_fn, NULL_RTX); | |
1613 | SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec)); | |
1614 | } | |
1615 | } | |
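/* Editorial note: the FUNCTION_DECL built above corresponds to the
   standard prototype

       void *memcpy (void *, const void *, size_t);

   so calls expanded through it follow the normal C calling convention
   rather than the internal libcall one.  */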
1616 | ||
1617 | static tree | |
502b8322 | 1618 | emit_block_move_libcall_fn (int for_call) |
9661b15f JJ |
1619 | { |
1620 | static bool emitted_extern; | |
1621 | ||
1622 | if (!block_move_fn) | |
1623 | init_block_move_fn (NULL); | |
1624 | ||
4ca79136 RH |
1625 | if (for_call && !emitted_extern) |
1626 | { | |
1627 | emitted_extern = true; | |
9661b15f JJ |
1628 | make_decl_rtl (block_move_fn, NULL); |
1629 | assemble_external (block_move_fn); | |
4ca79136 RH |
1630 | } |
1631 | ||
9661b15f | 1632 | return block_move_fn; |
bbf6f052 | 1633 | } |
44bb111a RH |
1634 | |
1635 | /* A subroutine of emit_block_move. Copy the data via an explicit | |
1636 | loop. This is used only when libcalls are forbidden. */ | |
1637 | /* ??? It'd be nice to copy in hunks larger than QImode. */ | |
1638 | ||
1639 | static void | |
502b8322 AJ |
1640 | emit_block_move_via_loop (rtx x, rtx y, rtx size, |
1641 | unsigned int align ATTRIBUTE_UNUSED) | |
44bb111a RH |
1642 | { |
1643 | rtx cmp_label, top_label, iter, x_addr, y_addr, tmp; | |
1644 | enum machine_mode iter_mode; | |
1645 | ||
1646 | iter_mode = GET_MODE (size); | |
1647 | if (iter_mode == VOIDmode) | |
1648 | iter_mode = word_mode; | |
1649 | ||
1650 | top_label = gen_label_rtx (); | |
1651 | cmp_label = gen_label_rtx (); | |
1652 | iter = gen_reg_rtx (iter_mode); | |
1653 | ||
1654 | emit_move_insn (iter, const0_rtx); | |
1655 | ||
1656 | x_addr = force_operand (XEXP (x, 0), NULL_RTX); | |
1657 | y_addr = force_operand (XEXP (y, 0), NULL_RTX); | |
1658 | do_pending_stack_adjust (); | |
1659 | ||
44bb111a RH |
1660 | emit_jump (cmp_label); |
1661 | emit_label (top_label); | |
1662 | ||
1663 | tmp = convert_modes (Pmode, iter_mode, iter, true); | |
1664 | x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp); | |
1665 | y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp); | |
1666 | x = change_address (x, QImode, x_addr); | |
1667 | y = change_address (y, QImode, y_addr); | |
1668 | ||
1669 | emit_move_insn (x, y); | |
1670 | ||
1671 | tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter, | |
1672 | true, OPTAB_LIB_WIDEN); | |
1673 | if (tmp != iter) | |
1674 | emit_move_insn (iter, tmp); | |
1675 | ||
44bb111a RH |
1676 | emit_label (cmp_label); |
1677 | ||
1678 | emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode, | |
1679 | true, top_label); | |
44bb111a | 1680 | } |
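/* Editorial sketch: the RTL emitted above behaves like this C loop,
   with ITER held in a pseudo of the size's mode:

       for (iter = 0; iter < size; iter++)
         ((char *) x)[iter] = ((char *) y)[iter];

   Jumping to CMP_LABEL first places the test at the bottom, so the
   body is skipped entirely when SIZE is zero.  */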
bbf6f052 RK |
1681 | \f |
1682 | /* Copy all or part of a value X into registers starting at REGNO. | |
1683 | The number of registers to be filled is NREGS. */ | |
1684 | ||
1685 | void | |
502b8322 | 1686 | move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode) |
bbf6f052 RK |
1687 | { |
1688 | int i; | |
381127e8 | 1689 | #ifdef HAVE_load_multiple |
3a94c984 | 1690 | rtx pat; |
381127e8 RL |
1691 | rtx last; |
1692 | #endif | |
bbf6f052 | 1693 | |
72bb9717 RK |
1694 | if (nregs == 0) |
1695 | return; | |
1696 | ||
bbf6f052 RK |
1697 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) |
1698 | x = validize_mem (force_const_mem (mode, x)); | |
1699 | ||
1700 | /* See if the machine can do this with a load multiple insn. */ | |
1701 | #ifdef HAVE_load_multiple | |
c3a02afe | 1702 | if (HAVE_load_multiple) |
bbf6f052 | 1703 | { |
c3a02afe | 1704 | last = get_last_insn (); |
38a448ca | 1705 | pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x, |
c3a02afe RK |
1706 | GEN_INT (nregs)); |
1707 | if (pat) | |
1708 | { | |
1709 | emit_insn (pat); | |
1710 | return; | |
1711 | } | |
1712 | else | |
1713 | delete_insns_since (last); | |
bbf6f052 | 1714 | } |
bbf6f052 RK |
1715 | #endif |
1716 | ||
1717 | for (i = 0; i < nregs; i++) | |
38a448ca | 1718 | emit_move_insn (gen_rtx_REG (word_mode, regno + i), |
bbf6f052 RK |
1719 | operand_subword_force (x, i, mode)); |
1720 | } | |
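/* Editorial sketch: filling two consecutive word registers from a
   DImode pseudo on a 32-bit target (the starting register number 3 is
   hypothetical):

       move_block_to_reg (3, src, 2, DImode);

   This emits a load_multiple when the target provides one, otherwise
   a word-by-word copy through operand_subword_force.  */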
1721 | ||
1722 | /* Copy all or part of a BLKmode value X out of registers starting at REGNO. | |
c6b97fac | 1723 | The number of registers to be filled is NREGS. */ |
0040593d | 1724 | |
bbf6f052 | 1725 | void |
502b8322 | 1726 | move_block_from_reg (int regno, rtx x, int nregs) |
bbf6f052 RK |
1727 | { |
1728 | int i; | |
bbf6f052 | 1729 | |
2954d7db RK |
1730 | if (nregs == 0) |
1731 | return; | |
1732 | ||
bbf6f052 RK |
1733 | /* See if the machine can do this with a store multiple insn. */ |
1734 | #ifdef HAVE_store_multiple | |
c3a02afe | 1735 | if (HAVE_store_multiple) |
bbf6f052 | 1736 | { |
c6b97fac AM |
1737 | rtx last = get_last_insn (); |
1738 | rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno), | |
1739 | GEN_INT (nregs)); | |
c3a02afe RK |
1740 | if (pat) |
1741 | { | |
1742 | emit_insn (pat); | |
1743 | return; | |
1744 | } | |
1745 | else | |
1746 | delete_insns_since (last); | |
bbf6f052 | 1747 | } |
bbf6f052 RK |
1748 | #endif |
1749 | ||
1750 | for (i = 0; i < nregs; i++) | |
1751 | { | |
1752 | rtx tem = operand_subword (x, i, 1, BLKmode); | |
1753 | ||
1754 | if (tem == 0) | |
1755 | abort (); | |
1756 | ||
38a448ca | 1757 | emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i)); |
bbf6f052 RK |
1758 | } |
1759 | } | |
1760 | ||
084a1106 JDA |
1761 | /* Generate a PARALLEL rtx for a new non-consecutive group of registers from |
1762 | ORIG, where ORIG is a non-consecutive group of registers represented by | |
1763 | a PARALLEL. The clone is identical to the original except that the |
1764 | original set of registers is replaced by a new set of pseudo registers. | |
1765 | The new set has the same modes as the original set. */ | |
1766 | ||
1767 | rtx | |
502b8322 | 1768 | gen_group_rtx (rtx orig) |
084a1106 JDA |
1769 | { |
1770 | int i, length; | |
1771 | rtx *tmps; | |
1772 | ||
1773 | if (GET_CODE (orig) != PARALLEL) | |
1774 | abort (); | |
1775 | ||
1776 | length = XVECLEN (orig, 0); | |
703ad42b | 1777 | tmps = alloca (sizeof (rtx) * length); |
084a1106 JDA |
1778 | |
1779 | /* Skip a NULL entry in first slot. */ | |
1780 | i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1; | |
1781 | ||
1782 | if (i) | |
1783 | tmps[0] = 0; | |
1784 | ||
1785 | for (; i < length; i++) | |
1786 | { | |
1787 | enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0)); | |
1788 | rtx offset = XEXP (XVECEXP (orig, 0, i), 1); | |
1789 | ||
1790 | tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset); | |
1791 | } | |
1792 | ||
1793 | return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps)); | |
1794 | } | |
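/* Editorial sketch: the PARALLEL shape these group routines operate
   on.  A value whose first 8 bytes live in a DImode register and whose
   next 4 bytes live in an SImode register (hard register numbers
   hypothetical) is represented as

       (parallel [(expr_list (reg:DI 4) (const_int 0))
                  (expr_list (reg:SI 6) (const_int 8))])

   where each const_int is the byte offset of that piece within the
   whole value.  gen_group_rtx above rebuilds this shape over fresh
   pseudos.  */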
1795 | ||
6e985040 AM |
1796 | /* Emit code to move a block ORIG_SRC of type TYPE to a block DST, |
1797 | where DST is non-consecutive registers represented by a PARALLEL. | |
1798 | SSIZE represents the total size of block ORIG_SRC in bytes, or -1 | |
450b1728 | 1799 | if not known. */ |
fffa9c1d JW |
1800 | |
1801 | void | |
6e985040 | 1802 | emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize) |
fffa9c1d | 1803 | { |
aac5cc16 RH |
1804 | rtx *tmps, src; |
1805 | int start, i; | |
fffa9c1d | 1806 | |
aac5cc16 | 1807 | if (GET_CODE (dst) != PARALLEL) |
fffa9c1d JW |
1808 | abort (); |
1809 | ||
1810 | /* Check for a NULL entry, used to indicate that the parameter goes | |
1811 | both on the stack and in registers. */ | |
aac5cc16 RH |
1812 | if (XEXP (XVECEXP (dst, 0, 0), 0)) |
1813 | start = 0; | |
fffa9c1d | 1814 | else |
aac5cc16 RH |
1815 | start = 1; |
1816 | ||
703ad42b | 1817 | tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0)); |
aac5cc16 | 1818 | |
aac5cc16 RH |
1819 | /* Process the pieces. */ |
1820 | for (i = start; i < XVECLEN (dst, 0); i++) | |
1821 | { | |
1822 | enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0)); | |
770ae6cc RK |
1823 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); |
1824 | unsigned int bytelen = GET_MODE_SIZE (mode); | |
aac5cc16 RH |
1825 | int shift = 0; |
1826 | ||
1827 | /* Handle trailing fragments that run over the size of the struct. */ | |
8752c357 | 1828 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
aac5cc16 | 1829 | { |
6e985040 AM |
1830 | /* Arrange to shift the fragment to where it belongs. |
1831 | extract_bit_field loads to the lsb of the reg. */ | |
1832 | if ( | |
1833 | #ifdef BLOCK_REG_PADDING | |
1834 | BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start) | |
1835 | == (BYTES_BIG_ENDIAN ? upward : downward) | |
1836 | #else | |
1837 | BYTES_BIG_ENDIAN | |
1838 | #endif | |
1839 | ) | |
1840 | shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
aac5cc16 RH |
1841 | bytelen = ssize - bytepos; |
1842 | if (bytelen <= 0) | |
729a2125 | 1843 | abort (); |
aac5cc16 RH |
1844 | } |
1845 | ||
f3ce87a9 DE |
1846 | /* If we won't be loading directly from memory, protect the real source |
1847 | from strange tricks we might play; but make sure that the source can | |
1848 | be loaded directly into the destination. */ | |
1849 | src = orig_src; | |
3c0cb5de | 1850 | if (!MEM_P (orig_src) |
f3ce87a9 DE |
1851 | && (!CONSTANT_P (orig_src) |
1852 | || (GET_MODE (orig_src) != mode | |
1853 | && GET_MODE (orig_src) != VOIDmode))) | |
1854 | { | |
1855 | if (GET_MODE (orig_src) == VOIDmode) | |
1856 | src = gen_reg_rtx (mode); | |
1857 | else | |
1858 | src = gen_reg_rtx (GET_MODE (orig_src)); | |
04050c69 | 1859 | |
f3ce87a9 DE |
1860 | emit_move_insn (src, orig_src); |
1861 | } | |
1862 | ||
aac5cc16 | 1863 | /* Optimize the access just a bit. */ |
3c0cb5de | 1864 | if (MEM_P (src) |
6e985040 AM |
1865 | && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src)) |
1866 | || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)) | |
729a2125 | 1867 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
aac5cc16 RH |
1868 | && bytelen == GET_MODE_SIZE (mode)) |
1869 | { | |
1870 | tmps[i] = gen_reg_rtx (mode); | |
f4ef873c | 1871 | emit_move_insn (tmps[i], adjust_address (src, mode, bytepos)); |
fffa9c1d | 1872 | } |
7c4a6db0 JW |
1873 | else if (GET_CODE (src) == CONCAT) |
1874 | { | |
015b1ad1 JDA |
1875 | unsigned int slen = GET_MODE_SIZE (GET_MODE (src)); |
1876 | unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0))); | |
1877 | ||
1878 | if ((bytepos == 0 && bytelen == slen0) | |
1879 | || (bytepos != 0 && bytepos + bytelen <= slen)) | |
cbb92744 | 1880 | { |
015b1ad1 JDA |
1881 | /* The following assumes that the concatenated objects all |
1882 | have the same size. In this case, a simple calculation | |
1883 | can be used to determine the object and the bit field | |
1884 | to be extracted. */ | |
1885 | tmps[i] = XEXP (src, bytepos / slen0); | |
cbb92744 | 1886 | if (! CONSTANT_P (tmps[i]) |
f8cfc6aa | 1887 | && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)) |
cbb92744 | 1888 | tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT, |
015b1ad1 JDA |
1889 | (bytepos % slen0) * BITS_PER_UNIT, |
1890 | 1, NULL_RTX, mode, mode, ssize); | |
cbb92744 | 1891 | } |
58f69841 JH |
1892 | else if (bytepos == 0) |
1893 | { | |
015b1ad1 | 1894 | rtx mem = assign_stack_temp (GET_MODE (src), slen, 0); |
58f69841 | 1895 | emit_move_insn (mem, src); |
04050c69 | 1896 | tmps[i] = adjust_address (mem, mode, 0); |
58f69841 | 1897 | } |
7c4a6db0 JW |
1898 | else |
1899 | abort (); | |
1900 | } | |
9c0631a7 AH |
1901 | /* FIXME: A SIMD parallel will eventually lead to a subreg of a |
1902 | SIMD register, which is currently broken. While we get GCC | |
1903 | to emit proper RTL for these cases, let's dump to memory. */ | |
1904 | else if (VECTOR_MODE_P (GET_MODE (dst)) | |
f8cfc6aa | 1905 | && REG_P (src)) |
9c0631a7 AH |
1906 | { |
1907 | int slen = GET_MODE_SIZE (GET_MODE (src)); | |
1908 | rtx mem; | |
1909 | ||
1910 | mem = assign_stack_temp (GET_MODE (src), slen, 0); | |
1911 | emit_move_insn (mem, src); | |
1912 | tmps[i] = adjust_address (mem, mode, (int) bytepos); | |
1913 | } | |
d3a16cbd FJ |
1914 | else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode |
1915 | && XVECLEN (dst, 0) > 1) | |
1916 | tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos); |
f3ce87a9 | 1917 | else if (CONSTANT_P (src) |
f8cfc6aa | 1918 | || (REG_P (src) && GET_MODE (src) == mode)) |
2ee5437b | 1919 | tmps[i] = src; |
fffa9c1d | 1920 | else |
19caa751 RK |
1921 | tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT, |
1922 | bytepos * BITS_PER_UNIT, 1, NULL_RTX, | |
04050c69 | 1923 | mode, mode, ssize); |
fffa9c1d | 1924 | |
6e985040 | 1925 | if (shift) |
19caa751 RK |
1926 | expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift), |
1927 | tmps[i], 0, OPTAB_WIDEN); | |
fffa9c1d | 1928 | } |
19caa751 | 1929 | |
3a94c984 | 1930 | emit_queue (); |
aac5cc16 RH |
1931 | |
1932 | /* Copy the extracted pieces into the proper (probable) hard regs. */ | |
1933 | for (i = start; i < XVECLEN (dst, 0); i++) | |
1934 | emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]); | |
fffa9c1d JW |
1935 | } |
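/* Editorial sketch: loading a BLKmode argument into such a group.
   DST is a PARALLEL as sketched above and MEM the argument's memory
   home; the type's size supplies SSIZE so that trailing fragments are
   shifted into place:

       emit_group_load (dst, mem, type, int_size_in_bytes (type));  */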
1936 | ||
084a1106 JDA |
1937 | /* Emit code to move a block SRC to block DST, where SRC and DST are |
1938 | non-consecutive groups of registers, each represented by a PARALLEL. */ | |
1939 | ||
1940 | void | |
502b8322 | 1941 | emit_group_move (rtx dst, rtx src) |
084a1106 JDA |
1942 | { |
1943 | int i; | |
1944 | ||
1945 | if (GET_CODE (src) != PARALLEL | |
1946 | || GET_CODE (dst) != PARALLEL | |
1947 | || XVECLEN (src, 0) != XVECLEN (dst, 0)) | |
1948 | abort (); | |
1949 | ||
1950 | /* Skip first entry if NULL. */ | |
1951 | for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++) | |
1952 | emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), | |
1953 | XEXP (XVECEXP (src, 0, i), 0)); | |
1954 | } | |
1955 | ||
6e985040 AM |
1956 | /* Emit code to move a block SRC to a block ORIG_DST of type TYPE, |
1957 | where SRC is non-consecutive registers represented by a PARALLEL. | |
1958 | SSIZE represents the total size of block ORIG_DST, or -1 if not | |
1959 | known. */ | |
fffa9c1d JW |
1960 | |
1961 | void | |
6e985040 | 1962 | emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) |
fffa9c1d | 1963 | { |
aac5cc16 RH |
1964 | rtx *tmps, dst; |
1965 | int start, i; | |
fffa9c1d | 1966 | |
aac5cc16 | 1967 | if (GET_CODE (src) != PARALLEL) |
fffa9c1d JW |
1968 | abort (); |
1969 | ||
1970 | /* Check for a NULL entry, used to indicate that the parameter goes | |
1971 | both on the stack and in registers. */ | |
aac5cc16 RH |
1972 | if (XEXP (XVECEXP (src, 0, 0), 0)) |
1973 | start = 0; | |
fffa9c1d | 1974 | else |
aac5cc16 RH |
1975 | start = 1; |
1976 | ||
703ad42b | 1977 | tmps = alloca (sizeof (rtx) * XVECLEN (src, 0)); |
fffa9c1d | 1978 | |
aac5cc16 RH |
1979 | /* Copy the (probable) hard regs into pseudos. */ |
1980 | for (i = start; i < XVECLEN (src, 0); i++) | |
fffa9c1d | 1981 | { |
aac5cc16 RH |
1982 | rtx reg = XEXP (XVECEXP (src, 0, i), 0); |
1983 | tmps[i] = gen_reg_rtx (GET_MODE (reg)); | |
1984 | emit_move_insn (tmps[i], reg); | |
1985 | } | |
3a94c984 | 1986 | emit_queue (); |
fffa9c1d | 1987 | |
aac5cc16 RH |
1988 | /* If we won't be storing directly into memory, protect the real destination |
1989 | from strange tricks we might play. */ | |
1990 | dst = orig_dst; | |
10a9f2be JW |
1991 | if (GET_CODE (dst) == PARALLEL) |
1992 | { | |
1993 | rtx temp; | |
1994 | ||
1995 | /* We can get a PARALLEL dst if there is a conditional expression in | |
1996 | a return statement. In that case, the dst and src are the same, | |
1997 | so no action is necessary. */ | |
1998 | if (rtx_equal_p (dst, src)) | |
1999 | return; | |
2000 | ||
2001 | /* It is unclear if we can ever reach here, but we may as well handle | |
2002 | it. Allocate a temporary, and split this into a store/load to/from | |
2003 | the temporary. */ | |
2004 | ||
2005 | temp = assign_stack_temp (GET_MODE (dst), ssize, 0); | |
6e985040 AM |
2006 | emit_group_store (temp, src, type, ssize); |
2007 | emit_group_load (dst, temp, type, ssize); | |
10a9f2be JW |
2008 | return; |
2009 | } | |
3c0cb5de | 2010 | else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT) |
aac5cc16 RH |
2011 | { |
2012 | dst = gen_reg_rtx (GET_MODE (orig_dst)); | |
2013 | /* Make life a bit easier for combine. */ | |
8ae91fc0 | 2014 | emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst))); |
aac5cc16 | 2015 | } |
aac5cc16 RH |
2016 | |
2017 | /* Process the pieces. */ | |
2018 | for (i = start; i < XVECLEN (src, 0); i++) | |
2019 | { | |
770ae6cc | 2020 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); |
aac5cc16 | 2021 | enum machine_mode mode = GET_MODE (tmps[i]); |
770ae6cc | 2022 | unsigned int bytelen = GET_MODE_SIZE (mode); |
6ddae612 | 2023 | rtx dest = dst; |
aac5cc16 RH |
2024 | |
2025 | /* Handle trailing fragments that run over the size of the struct. */ | |
8752c357 | 2026 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
71bc0330 | 2027 | { |
6e985040 AM |
2028 | /* store_bit_field always takes its value from the lsb. |
2029 | Move the fragment to the lsb if it's not already there. */ | |
2030 | if ( | |
2031 | #ifdef BLOCK_REG_PADDING | |
2032 | BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start) | |
2033 | == (BYTES_BIG_ENDIAN ? upward : downward) | |
2034 | #else | |
2035 | BYTES_BIG_ENDIAN | |
2036 | #endif | |
2037 | ) | |
aac5cc16 RH |
2038 | { |
2039 | int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
2040 | expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift), | |
2041 | tmps[i], 0, OPTAB_WIDEN); | |
2042 | } | |
2043 | bytelen = ssize - bytepos; | |
71bc0330 | 2044 | } |
fffa9c1d | 2045 | |
6ddae612 JJ |
2046 | if (GET_CODE (dst) == CONCAT) |
2047 | { | |
2048 | if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) | |
2049 | dest = XEXP (dst, 0); | |
2050 | else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) | |
2051 | { | |
2052 | bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))); | |
2053 | dest = XEXP (dst, 1); | |
2054 | } | |
0d446150 JH |
2055 | else if (bytepos == 0 && XVECLEN (src, 0)) |
2056 | { | |
2057 | dest = assign_stack_temp (GET_MODE (dest), | |
2058 | GET_MODE_SIZE (GET_MODE (dest)), 0); | |
2059 | emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos), | |
2060 | tmps[i]); | |
2061 | dst = dest; | |
2062 | break; | |
2063 | } | |
6ddae612 JJ |
2064 | else |
2065 | abort (); | |
2066 | } | |
2067 | ||
aac5cc16 | 2068 | /* Optimize the access just a bit. */ |
3c0cb5de | 2069 | if (MEM_P (dest) |
6e985040 AM |
2070 | && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest)) |
2071 | || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)) | |
729a2125 | 2072 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
aac5cc16 | 2073 | && bytelen == GET_MODE_SIZE (mode)) |
6ddae612 | 2074 | emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]); |
aac5cc16 | 2075 | else |
6ddae612 | 2076 | store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, |
04050c69 | 2077 | mode, tmps[i], ssize); |
fffa9c1d | 2078 | } |
729a2125 | 2079 | |
3a94c984 | 2080 | emit_queue (); |
aac5cc16 RH |
2081 | |
2082 | /* Copy from the pseudo into the (probable) hard reg. */ | |
0d446150 | 2083 | if (orig_dst != dst) |
aac5cc16 | 2084 | emit_move_insn (orig_dst, dst); |
fffa9c1d JW |
2085 | } |
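/* Editorial sketch: the inverse operation, spilling a value held in a
   register group back to its BLKmode home:

       emit_group_store (mem, parallel, type, int_size_in_bytes (type));  */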
2086 | ||
c36fce9a GRK |
2087 | /* Generate code to copy a BLKmode object of TYPE out of a |
2088 | set of registers starting with SRCREG into TGTBLK. If TGTBLK | |
2089 | is null, a stack temporary is created. TGTBLK is returned. | |
2090 | ||
c988af2b RS |
2091 | The purpose of this routine is to handle functions that return |
2092 | BLKmode structures in registers. Some machines (the PA for example) | |
2093 | want to return all small structures in registers regardless of the | |
2094 | structure's alignment. */ | |
c36fce9a GRK |
2095 | |
2096 | rtx | |
502b8322 | 2097 | copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type) |
c36fce9a | 2098 | { |
19caa751 RK |
2099 | unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type); |
2100 | rtx src = NULL, dst = NULL; | |
2101 | unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD); | |
c988af2b | 2102 | unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0; |
19caa751 RK |
2103 | |
2104 | if (tgtblk == 0) | |
2105 | { | |
1da68f56 RK |
2106 | tgtblk = assign_temp (build_qualified_type (type, |
2107 | (TYPE_QUALS (type) | |
2108 | | TYPE_QUAL_CONST)), | |
2109 | 0, 1, 1); | |
19caa751 RK |
2110 | preserve_temp_slots (tgtblk); |
2111 | } | |
3a94c984 | 2112 | |
1ed1b4fb | 2113 | /* This code assumes srcreg is at least a full word. If it isn't, copy it |
9ac3e73b | 2114 | into a new pseudo which is a full word. */ |
0d7839da | 2115 | |
19caa751 RK |
2116 | if (GET_MODE (srcreg) != BLKmode |
2117 | && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD) | |
8df83eae | 2118 | srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type)); |
19caa751 | 2119 | |
c988af2b RS |
2120 | /* If the structure doesn't take up a whole number of words, see whether |
2121 | SRCREG is padded on the left or on the right. If it's on the left, | |
2122 | set PADDING_CORRECTION to the number of bits to skip. | |
2123 | ||
2124 | In most ABIs, the structure will be returned at the least significant end of |
2125 | the register, which translates to right padding on little-endian | |
2126 | targets and left padding on big-endian targets. The opposite | |
2127 | holds if the structure is returned at the most significant | |
2128 | end of the register. */ | |
2129 | if (bytes % UNITS_PER_WORD != 0 | |
2130 | && (targetm.calls.return_in_msb (type) | |
2131 | ? !BYTES_BIG_ENDIAN | |
2132 | : BYTES_BIG_ENDIAN)) | |
2133 | padding_correction | |
19caa751 RK |
2134 | = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT)); |
2135 | ||
2136 | /* Copy the structure BITSIZE bits at a time. |
3a94c984 | 2137 | |
19caa751 RK |
2138 | We could probably emit more efficient code for machines which do not use |
2139 | strict alignment, but it doesn't seem worth the effort at the current | |
2140 | time. */ | |
c988af2b | 2141 | for (bitpos = 0, xbitpos = padding_correction; |
19caa751 RK |
2142 | bitpos < bytes * BITS_PER_UNIT; |
2143 | bitpos += bitsize, xbitpos += bitsize) | |
2144 | { | |
3a94c984 | 2145 | /* We need a new source operand each time xbitpos is on a |
c988af2b | 2146 | word boundary and when xbitpos == padding_correction |
19caa751 RK |
2147 | (the first time through). */ |
2148 | if (xbitpos % BITS_PER_WORD == 0 | |
c988af2b | 2149 | || xbitpos == padding_correction) |
b47f8cfc JH |
2150 | src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, |
2151 | GET_MODE (srcreg)); | |
19caa751 RK |
2152 | |
2153 | /* We need a new destination operand each time bitpos is on | |
2154 | a word boundary. */ | |
2155 | if (bitpos % BITS_PER_WORD == 0) | |
2156 | dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode); | |
3a94c984 | 2157 | |
19caa751 RK |
2158 | /* Use xbitpos for the source extraction (right justified) and |
2159 | bitpos for the destination store (left justified). */ |
2160 | store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode, | |
2161 | extract_bit_field (src, bitsize, | |
2162 | xbitpos % BITS_PER_WORD, 1, | |
2163 | NULL_RTX, word_mode, word_mode, | |
04050c69 RK |
2164 | BITS_PER_WORD), |
2165 | BITS_PER_WORD); | |
19caa751 RK |
2166 | } |
2167 | ||
2168 | return tgtblk; | |
c36fce9a GRK |
2169 | } |
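/* Editorial sketch: expanding a call to a function that returns a
   small struct in registers, where TYPE is the struct type and
   VALUE_RTX the hard return register:

       rtx blk = copy_blkmode_from_reg (NULL_RTX, value_rtx, type);

   Passing a null TGTBLK lets the routine allocate the stack temporary
   itself; the returned MEM is then an ordinary BLKmode object.  */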
2170 | ||
94b25f81 RK |
2171 | /* Add a USE expression for REG to the (possibly empty) list pointed |
2172 | to by CALL_FUSAGE. REG must denote a hard register. */ | |
bbf6f052 RK |
2173 | |
2174 | void | |
502b8322 | 2175 | use_reg (rtx *call_fusage, rtx reg) |
b3f8cf4a | 2176 | { |
f8cfc6aa | 2177 | if (!REG_P (reg) |
0304dfbb | 2178 | || REGNO (reg) >= FIRST_PSEUDO_REGISTER) |
3a94c984 | 2179 | abort (); |
b3f8cf4a RK |
2180 | |
2181 | *call_fusage | |
38a448ca RH |
2182 | = gen_rtx_EXPR_LIST (VOIDmode, |
2183 | gen_rtx_USE (VOIDmode, reg), *call_fusage); | |
b3f8cf4a RK |
2184 | } |
2185 | ||
94b25f81 RK |
2186 | /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs, |
2187 | starting at REGNO. All of these registers must be hard registers. */ | |
b3f8cf4a RK |
2188 | |
2189 | void | |
502b8322 | 2190 | use_regs (rtx *call_fusage, int regno, int nregs) |
bbf6f052 | 2191 | { |
0304dfbb | 2192 | int i; |
bbf6f052 | 2193 | |
0304dfbb DE |
2194 | if (regno + nregs > FIRST_PSEUDO_REGISTER) |
2195 | abort (); | |
2196 | ||
2197 | for (i = 0; i < nregs; i++) | |
e50126e8 | 2198 | use_reg (call_fusage, regno_reg_rtx[regno + i]); |
bbf6f052 | 2199 | } |
fffa9c1d JW |
2200 | |
2201 | /* Add USE expressions to *CALL_FUSAGE for each REG contained in the | |
2202 | PARALLEL REGS. This is for calls that pass values in multiple | |
2203 | non-contiguous locations. The Irix 6 ABI has examples of this. */ | |
2204 | ||
2205 | void | |
502b8322 | 2206 | use_group_regs (rtx *call_fusage, rtx regs) |
fffa9c1d JW |
2207 | { |
2208 | int i; | |
2209 | ||
6bd35f86 DE |
2210 | for (i = 0; i < XVECLEN (regs, 0); i++) |
2211 | { | |
2212 | rtx reg = XEXP (XVECEXP (regs, 0, i), 0); | |
fffa9c1d | 2213 | |
6bd35f86 DE |
2214 | /* A NULL entry means the parameter goes both on the stack and in |
2215 | registers. This can also be a MEM for targets that pass values | |
2216 | partially on the stack and partially in registers. */ | |
f8cfc6aa | 2217 | if (reg != 0 && REG_P (reg)) |
6bd35f86 DE |
2218 | use_reg (call_fusage, reg); |
2219 | } | |
fffa9c1d | 2220 | } |
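/* Editorial sketch: building up a CALL_INSN_FUNCTION_USAGE list while
   expanding a call (register numbers hypothetical):

       rtx call_fusage = NULL_RTX;
       use_reg (&call_fusage, gen_rtx_REG (SImode, 4));
       use_regs (&call_fusage, 5, 2);

   The resulting list of USE expressions is attached to the CALL_INSN
   so that dataflow sees the argument registers as live at the call.  */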
bbf6f052 | 2221 | \f |
57814e5e | 2222 | |
cf5124f6 RS |
2223 | /* Determine whether the LEN bytes generated by CONSTFUN can be |
2224 | stored to memory using several move instructions. CONSTFUNDATA is | |
2225 | a pointer which will be passed as argument in every CONSTFUN call. | |
2226 | ALIGN is maximum alignment we can assume. Return nonzero if a | |
2227 | call to store_by_pieces should succeed. */ | |
2228 | ||
57814e5e | 2229 | int |
502b8322 AJ |
2230 | can_store_by_pieces (unsigned HOST_WIDE_INT len, |
2231 | rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode), | |
2232 | void *constfundata, unsigned int align) | |
57814e5e | 2233 | { |
98166639 | 2234 | unsigned HOST_WIDE_INT max_size, l; |
57814e5e JJ |
2235 | HOST_WIDE_INT offset = 0; |
2236 | enum machine_mode mode, tmode; | |
2237 | enum insn_code icode; | |
2238 | int reverse; | |
2239 | rtx cst; | |
2240 | ||
2c430630 RS |
2241 | if (len == 0) |
2242 | return 1; | |
2243 | ||
4977bab6 | 2244 | if (! STORE_BY_PIECES_P (len, align)) |
57814e5e JJ |
2245 | return 0; |
2246 | ||
2247 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) | |
2248 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) | |
2249 | align = MOVE_MAX * BITS_PER_UNIT; | |
2250 | ||
2251 | /* We would first store what we can in the largest integer mode, then go to | |
2252 | successively smaller modes. */ | |
2253 | ||
2254 | for (reverse = 0; | |
2255 | reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); | |
2256 | reverse++) | |
2257 | { | |
2258 | l = len; | |
2259 | mode = VOIDmode; | |
cf5124f6 | 2260 | max_size = STORE_MAX_PIECES + 1; |
57814e5e JJ |
2261 | while (max_size > 1) |
2262 | { | |
2263 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
2264 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2265 | if (GET_MODE_SIZE (tmode) < max_size) | |
2266 | mode = tmode; | |
2267 | ||
2268 | if (mode == VOIDmode) | |
2269 | break; | |
2270 | ||
2271 | icode = mov_optab->handlers[(int) mode].insn_code; | |
2272 | if (icode != CODE_FOR_nothing | |
2273 | && align >= GET_MODE_ALIGNMENT (mode)) | |
2274 | { | |
2275 | unsigned int size = GET_MODE_SIZE (mode); | |
2276 | ||
2277 | while (l >= size) | |
2278 | { | |
2279 | if (reverse) | |
2280 | offset -= size; | |
2281 | ||
2282 | cst = (*constfun) (constfundata, offset, mode); | |
2283 | if (!LEGITIMATE_CONSTANT_P (cst)) | |
2284 | return 0; | |
2285 | ||
2286 | if (!reverse) | |
2287 | offset += size; | |
2288 | ||
2289 | l -= size; | |
2290 | } | |
2291 | } | |
2292 | ||
2293 | max_size = GET_MODE_SIZE (mode); | |
2294 | } | |
2295 | ||
2296 | /* The code above should have handled everything. */ | |
2297 | if (l != 0) | |
2298 | abort (); | |
2299 | } | |
2300 | ||
2301 | return 1; | |
2302 | } | |
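/* Editorial sketch: how a builtin expander might pair this predicate
   with store_by_pieces below.  READ_STR_PIECE is a hypothetical
   callback, modeled on the string readers in builtins.c, that
   materializes bytes of a known string as a constant in MODE:

       static rtx
       read_str_piece (void *data, HOST_WIDE_INT offset,
                       enum machine_mode mode)
       {
         return c_readstr ((const char *) data + offset, mode);
       }

       if (can_store_by_pieces (len, read_str_piece, (void *) str, align))
         store_by_pieces (dest_mem, len, read_str_piece, (void *) str,
                          align, 0);

   c_readstr is the existing helper in builtins.c that assembles an
   integer constant from string bytes.  */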
2303 | ||
2304 | /* Generate several move instructions to store LEN bytes generated by | |
2305 | CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a | |
2306 | pointer which will be passed as argument in every CONSTFUN call. | |
8fd3cf4e JJ |
2307 | ALIGN is maximum alignment we can assume. |
2308 | If ENDP is 0 return to, if ENDP is 1 return memory at the end ala | |
2309 | mempcpy, and if ENDP is 2 return memory the end minus one byte ala | |
2310 | stpcpy. */ | |
57814e5e | 2311 | |
8fd3cf4e | 2312 | rtx |
502b8322 AJ |
2313 | store_by_pieces (rtx to, unsigned HOST_WIDE_INT len, |
2314 | rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode), | |
2315 | void *constfundata, unsigned int align, int endp) | |
57814e5e JJ |
2316 | { |
2317 | struct store_by_pieces data; | |
2318 | ||
2c430630 RS |
2319 | if (len == 0) |
2320 | { | |
2321 | if (endp == 2) | |
2322 | abort (); | |
2323 | return to; | |
2324 | } | |
2325 | ||
4977bab6 | 2326 | if (! STORE_BY_PIECES_P (len, align)) |
57814e5e JJ |
2327 | abort (); |
2328 | to = protect_from_queue (to, 1); | |
2329 | data.constfun = constfun; | |
2330 | data.constfundata = constfundata; | |
2331 | data.len = len; | |
2332 | data.to = to; | |
2333 | store_by_pieces_1 (&data, align); | |
8fd3cf4e JJ |
2334 | if (endp) |
2335 | { | |
2336 | rtx to1; | |
2337 | ||
2338 | if (data.reverse) | |
2339 | abort (); | |
2340 | if (data.autinc_to) | |
2341 | { | |
2342 | if (endp == 2) | |
2343 | { | |
2344 | if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0) | |
2345 | emit_insn (gen_add2_insn (data.to_addr, constm1_rtx)); | |
2346 | else | |
2347 | data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr, | |
2348 | -1)); | |
2349 | } | |
2350 | to1 = adjust_automodify_address (data.to, QImode, data.to_addr, | |
2351 | data.offset); | |
2352 | } | |
2353 | else | |
2354 | { | |
2355 | if (endp == 2) | |
2356 | --data.offset; | |
2357 | to1 = adjust_address (data.to, QImode, data.offset); | |
2358 | } | |
2359 | return to1; | |
2360 | } | |
2361 | else | |
2362 | return data.to; | |
57814e5e JJ |
2363 | } |
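/* Editorial sketch: ENDP selects which MEM is returned, matching the
   libc function being open-coded (CFN and DATA as in the sketch after
   can_store_by_pieces above):

       store_by_pieces (to, len, cfn, data, align, 0);  memcpy-style: TO
       store_by_pieces (to, len, cfn, data, align, 1);  mempcpy-style:
                                                        one past the end
       store_by_pieces (to, len, cfn, data, align, 2);  stpcpy-style:
                                                        the last byte  */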
2364 | ||
19caa751 RK |
2365 | /* Generate several move instructions to clear LEN bytes of block TO. (A MEM |
2366 | rtx with BLKmode). The caller must pass TO through protect_from_queue | |
2367 | before calling. ALIGN is maximum alignment we can assume. */ | |
9de08200 RK |
2368 | |
2369 | static void | |
342e2b74 | 2370 | clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align) |
9de08200 | 2371 | { |
57814e5e JJ |
2372 | struct store_by_pieces data; |
2373 | ||
2c430630 RS |
2374 | if (len == 0) |
2375 | return; | |
2376 | ||
57814e5e | 2377 | data.constfun = clear_by_pieces_1; |
df4ae160 | 2378 | data.constfundata = NULL; |
57814e5e JJ |
2379 | data.len = len; |
2380 | data.to = to; | |
2381 | store_by_pieces_1 (&data, align); | |
2382 | } | |
2383 | ||
2384 | /* Callback routine for clear_by_pieces. | |
2385 | Return const0_rtx unconditionally. */ | |
2386 | ||
2387 | static rtx | |
502b8322 AJ |
2388 | clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED, |
2389 | HOST_WIDE_INT offset ATTRIBUTE_UNUSED, | |
2390 | enum machine_mode mode ATTRIBUTE_UNUSED) | |
57814e5e JJ |
2391 | { |
2392 | return const0_rtx; | |
2393 | } | |
2394 | ||
2395 | /* Subroutine of clear_by_pieces and store_by_pieces. | |
2396 | Generate several move instructions to store LEN bytes of block TO. (A MEM | |
2397 | rtx with BLKmode). The caller must pass TO through protect_from_queue | |
2398 | before calling. ALIGN is maximum alignment we can assume. */ | |
2399 | ||
2400 | static void | |
502b8322 AJ |
2401 | store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED, |
2402 | unsigned int align ATTRIBUTE_UNUSED) | |
57814e5e JJ |
2403 | { |
2404 | rtx to_addr = XEXP (data->to, 0); | |
cf5124f6 | 2405 | unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1; |
fbe1758d AM |
2406 | enum machine_mode mode = VOIDmode, tmode; |
2407 | enum insn_code icode; | |
9de08200 | 2408 | |
57814e5e JJ |
2409 | data->offset = 0; |
2410 | data->to_addr = to_addr; | |
2411 | data->autinc_to | |
9de08200 RK |
2412 | = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC |
2413 | || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); | |
2414 | ||
57814e5e JJ |
2415 | data->explicit_inc_to = 0; |
2416 | data->reverse | |
9de08200 | 2417 | = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); |
57814e5e JJ |
2418 | if (data->reverse) |
2419 | data->offset = data->len; | |
9de08200 | 2420 | |
57814e5e | 2421 | /* If storing requires more than two move insns, |
9de08200 RK |
2422 | copy addresses to registers (to make displacements shorter) |
2423 | and use post-increment if available. */ | |
57814e5e JJ |
2424 | if (!data->autinc_to |
2425 | && move_by_pieces_ninsns (data->len, align) > 2) | |
9de08200 | 2426 | { |
3a94c984 | 2427 | /* Determine the main mode we'll be using. */ |
fbe1758d AM |
2428 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2429 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2430 | if (GET_MODE_SIZE (tmode) < max_size) | |
2431 | mode = tmode; | |
2432 | ||
57814e5e | 2433 | if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) |
9de08200 | 2434 | { |
57814e5e JJ |
2435 | data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len)); |
2436 | data->autinc_to = 1; | |
2437 | data->explicit_inc_to = -1; | |
9de08200 | 2438 | } |
3bdf5ad1 | 2439 | |
57814e5e JJ |
2440 | if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse |
2441 | && ! data->autinc_to) | |
9de08200 | 2442 | { |
57814e5e JJ |
2443 | data->to_addr = copy_addr_to_reg (to_addr); |
2444 | data->autinc_to = 1; | |
2445 | data->explicit_inc_to = 1; | |
9de08200 | 2446 | } |
3bdf5ad1 | 2447 | |
57814e5e JJ |
2448 | if (!data->autinc_to && CONSTANT_P (to_addr)) |
2449 | data->to_addr = copy_addr_to_reg (to_addr); | |
9de08200 RK |
2450 | } |
2451 | ||
e1565e65 | 2452 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) |
19caa751 | 2453 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) |
bdb429a5 | 2454 | align = MOVE_MAX * BITS_PER_UNIT; |
9de08200 | 2455 | |
57814e5e | 2456 | /* First store what we can in the largest integer mode, then go to |
9de08200 RK |
2457 | successively smaller modes. */ |
2458 | ||
2459 | while (max_size > 1) | |
2460 | { | |
9de08200 RK |
2461 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2462 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2463 | if (GET_MODE_SIZE (tmode) < max_size) | |
2464 | mode = tmode; | |
2465 | ||
2466 | if (mode == VOIDmode) | |
2467 | break; | |
2468 | ||
2469 | icode = mov_optab->handlers[(int) mode].insn_code; | |
19caa751 | 2470 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
57814e5e | 2471 | store_by_pieces_2 (GEN_FCN (icode), mode, data); |
9de08200 RK |
2472 | |
2473 | max_size = GET_MODE_SIZE (mode); | |
2474 | } | |
2475 | ||
2476 | /* The code above should have handled everything. */ | |
57814e5e | 2477 | if (data->len != 0) |
9de08200 RK |
2478 | abort (); |
2479 | } | |
2480 | ||
57814e5e | 2481 | /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate |
9de08200 RK |
2482 | with move instructions for mode MODE. GENFUN is the gen_... function |
2483 | to make a move insn for that mode. DATA has all the other info. */ | |
2484 | ||
2485 | static void | |
502b8322 AJ |
2486 | store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode, |
2487 | struct store_by_pieces *data) | |
9de08200 | 2488 | { |
3bdf5ad1 | 2489 | unsigned int size = GET_MODE_SIZE (mode); |
57814e5e | 2490 | rtx to1, cst; |
9de08200 RK |
2491 | |
2492 | while (data->len >= size) | |
2493 | { | |
3bdf5ad1 RK |
2494 | if (data->reverse) |
2495 | data->offset -= size; | |
9de08200 | 2496 | |
3bdf5ad1 | 2497 | if (data->autinc_to) |
630036c6 JJ |
2498 | to1 = adjust_automodify_address (data->to, mode, data->to_addr, |
2499 | data->offset); | |
3a94c984 | 2500 | else |
f4ef873c | 2501 | to1 = adjust_address (data->to, mode, data->offset); |
9de08200 | 2502 | |
940da324 | 2503 | if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) |
57814e5e JJ |
2504 | emit_insn (gen_add2_insn (data->to_addr, |
2505 | GEN_INT (-(HOST_WIDE_INT) size))); | |
9de08200 | 2506 | |
57814e5e JJ |
2507 | cst = (*data->constfun) (data->constfundata, data->offset, mode); |
2508 | emit_insn ((*genfun) (to1, cst)); | |
3bdf5ad1 | 2509 | |
940da324 | 2510 | if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) |
9de08200 | 2511 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); |
9de08200 | 2512 | |
3bdf5ad1 RK |
2513 | if (! data->reverse) |
2514 | data->offset += size; | |
9de08200 RK |
2515 | |
2516 | data->len -= size; | |
2517 | } | |
2518 | } | |
2519 | \f | |
19caa751 | 2520 | /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is |
8ac61af7 | 2521 | its length in bytes. */ |
e9a25f70 JL |
2522 | |
2523 | rtx | |
502b8322 | 2524 | clear_storage (rtx object, rtx size) |
bbf6f052 | 2525 | { |
e9a25f70 | 2526 | rtx retval = 0; |
3c0cb5de | 2527 | unsigned int align = (MEM_P (object) ? MEM_ALIGN (object) |
8ac61af7 | 2528 | : GET_MODE_ALIGNMENT (GET_MODE (object))); |
e9a25f70 | 2529 | |
fcf1b822 RK |
2530 | /* If OBJECT is not BLKmode and SIZE is the same size as its mode, |
2531 | just move a zero. Otherwise, do this a piece at a time. */ | |
69ef87e2 | 2532 | if (GET_MODE (object) != BLKmode |
fcf1b822 | 2533 | && GET_CODE (size) == CONST_INT |
4ca79136 | 2534 | && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object))) |
fcf1b822 RK |
2535 | emit_move_insn (object, CONST0_RTX (GET_MODE (object))); |
2536 | else | |
bbf6f052 | 2537 | { |
9de08200 RK |
2538 | object = protect_from_queue (object, 1); |
2539 | size = protect_from_queue (size, 0); | |
2540 | ||
6972c506 | 2541 | if (size == const0_rtx) |
2c430630 RS |
2542 | ; |
2543 | else if (GET_CODE (size) == CONST_INT | |
78762e3b | 2544 | && CLEAR_BY_PIECES_P (INTVAL (size), align)) |
9de08200 | 2545 | clear_by_pieces (object, INTVAL (size), align); |
4ca79136 RH |
2546 | else if (clear_storage_via_clrstr (object, size, align)) |
2547 | ; | |
9de08200 | 2548 | else |
4ca79136 RH |
2549 | retval = clear_storage_via_libcall (object, size); |
2550 | } | |
2551 | ||
2552 | return retval; | |
2553 | } | |
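/* Editorial sketch: zeroing a fresh BLKmode stack temporary.  The
   alignment is taken from the MEM itself, so no extra argument is
   needed:

       HOST_WIDE_INT bytes = int_size_in_bytes (type);
       rtx tmp = assign_stack_temp (BLKmode, bytes, 0);
       clear_storage (tmp, GEN_INT (bytes));  */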
2554 | ||
2555 | /* A subroutine of clear_storage. Expand a clrstr pattern; | |
2556 | return true if successful. */ | |
2557 | ||
2558 | static bool | |
502b8322 | 2559 | clear_storage_via_clrstr (rtx object, rtx size, unsigned int align) |
4ca79136 RH |
2560 | { |
2561 | /* Try the most limited insn first, because there's no point | |
2562 | including more than one in the machine description unless | |
2563 | the more limited one has some advantage. */ | |
2564 | ||
2565 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); | |
2566 | enum machine_mode mode; | |
2567 | ||
2568 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; | |
2569 | mode = GET_MODE_WIDER_MODE (mode)) | |
2570 | { | |
2571 | enum insn_code code = clrstr_optab[(int) mode]; | |
2572 | insn_operand_predicate_fn pred; | |
2573 | ||
2574 | if (code != CODE_FOR_nothing | |
2575 | /* We don't need MODE to be narrower than | |
2576 | BITS_PER_HOST_WIDE_INT here because if SIZE is less than | |
2577 | the mode mask, as it is returned by the macro, it will | |
2578 | definitely be less than the actual mode mask. */ | |
2579 | && ((GET_CODE (size) == CONST_INT | |
2580 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
2581 | <= (GET_MODE_MASK (mode) >> 1))) | |
2582 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) | |
2583 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 | |
2584 | || (*pred) (object, BLKmode)) | |
2585 | && ((pred = insn_data[(int) code].operand[2].predicate) == 0 | |
2586 | || (*pred) (opalign, VOIDmode))) | |
9de08200 | 2587 | { |
4ca79136 RH |
2588 | rtx op1; |
2589 | rtx last = get_last_insn (); | |
2590 | rtx pat; | |
9de08200 | 2591 | |
4ca79136 RH |
2592 | op1 = convert_to_mode (mode, size, 1); |
2593 | pred = insn_data[(int) code].operand[1].predicate; | |
2594 | if (pred != 0 && ! (*pred) (op1, mode)) | |
2595 | op1 = copy_to_mode_reg (mode, op1); | |
9de08200 | 2596 | |
4ca79136 RH |
2597 | pat = GEN_FCN ((int) code) (object, op1, opalign); |
2598 | if (pat) | |
9de08200 | 2599 | { |
4ca79136 RH |
2600 | emit_insn (pat); |
2601 | return true; | |
2602 | } | |
2603 | else | |
2604 | delete_insns_since (last); | |
2605 | } | |
2606 | } | |
9de08200 | 2607 | |
4ca79136 RH |
2608 | return false; |
2609 | } | |
9de08200 | 2610 | |
8f99553f | 2611 | /* A subroutine of clear_storage. Expand a call to memset. |
4ca79136 | 2612 | Return the return value of memset, 0 otherwise. */ |
9de08200 | 2613 | |
4ca79136 | 2614 | static rtx |
502b8322 | 2615 | clear_storage_via_libcall (rtx object, rtx size) |
4ca79136 RH |
2616 | { |
2617 | tree call_expr, arg_list, fn, object_tree, size_tree; | |
2618 | enum machine_mode size_mode; | |
2619 | rtx retval; | |
9de08200 | 2620 | |
4ca79136 | 2621 | /* OBJECT or SIZE may have been passed through protect_from_queue. |
52cf7115 | 2622 | |
4ca79136 RH |
2623 | It is unsafe to save the value generated by protect_from_queue |
2624 | and reuse it later. Consider what happens if emit_queue is | |
2625 | called before the return value from protect_from_queue is used. | |
52cf7115 | 2626 | |
4ca79136 RH |
2627 | Expansion of the CALL_EXPR below will call emit_queue before |
2628 | we are finished emitting RTL for argument setup. So if we are | |
2629 | not careful we could get the wrong value for an argument. | |
52cf7115 | 2630 | |
4ca79136 | 2631 | To avoid this problem we go ahead and emit code to copy OBJECT |
4dfa0342 | 2632 | and SIZE into new pseudos. |
52cf7115 | 2633 | |
4ca79136 RH |
2634 | Note this is not strictly needed for library calls since they |
2635 | do not call emit_queue before loading their arguments. However, | |
2636 | we may need to have library calls call emit_queue in the future | |
2637 | since failing to do so could cause problems for targets which | |
2638 | define SMALL_REGISTER_CLASSES and pass arguments in registers. */ | |
52cf7115 | 2639 | |
4ca79136 | 2640 | object = copy_to_mode_reg (Pmode, XEXP (object, 0)); |
52cf7115 | 2641 | |
8f99553f | 2642 | size_mode = TYPE_MODE (sizetype); |
4ca79136 RH |
2643 | size = convert_to_mode (size_mode, size, 1); |
2644 | size = copy_to_mode_reg (size_mode, size); | |
52cf7115 | 2645 | |
4ca79136 RH |
2646 | /* It is incorrect to use the libcall calling conventions to call |
2647 | memset in this context. This could be a user call to memset and | |
2648 | the user may wish to examine the return value from memset. For | |
2649 | targets where libcalls and normal calls have different conventions | |
8f99553f | 2650 | for returning pointers, we could end up generating incorrect code. */ |
4bc973ae | 2651 | |
4ca79136 | 2652 | object_tree = make_tree (ptr_type_node, object); |
8f99553f | 2653 | size_tree = make_tree (sizetype, size); |
4ca79136 RH |
2654 | |
2655 | fn = clear_storage_libcall_fn (true); | |
2656 | arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE); | |
8f99553f | 2657 | arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list); |
4ca79136 RH |
2658 | arg_list = tree_cons (NULL_TREE, object_tree, arg_list); |
2659 | ||
2660 | /* Now we have to build up the CALL_EXPR itself. */ | |
2661 | call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); | |
2662 | call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), | |
2663 | call_expr, arg_list, NULL_TREE); | |
4ca79136 RH |
2664 | |
2665 | retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); | |
2666 | ||
2667 | /* If we are initializing a readonly value, show the above call | |
2668 | clobbered it. Otherwise, a load from it may erroneously be | |
2669 | hoisted from a loop. */ | |
2670 | if (RTX_UNCHANGING_P (object)) | |
2671 | emit_insn (gen_rtx_CLOBBER (VOIDmode, object)); | |
2672 | ||
8f99553f | 2673 | return retval; |
4ca79136 RH |
2674 | } |
2675 | ||
2676 | /* A subroutine of clear_storage_via_libcall. Create the tree node | |
2677 | for the function we use for block clears. The first time FOR_CALL | |
2678 | is true, we call assemble_external. */ | |
2679 | ||
2680 | static GTY(()) tree block_clear_fn; | |
66c60e67 | 2681 | |
9661b15f | 2682 | void |
502b8322 | 2683 | init_block_clear_fn (const char *asmspec) |
4ca79136 | 2684 | { |
9661b15f | 2685 | if (!block_clear_fn) |
4ca79136 | 2686 | { |
9661b15f JJ |
2687 | tree fn, args; |
2688 | ||
8f99553f JM |
2689 | fn = get_identifier ("memset"); |
2690 | args = build_function_type_list (ptr_type_node, ptr_type_node, | |
2691 | integer_type_node, sizetype, | |
2692 | NULL_TREE); | |
4ca79136 RH |
2693 | |
2694 | fn = build_decl (FUNCTION_DECL, fn, args); | |
2695 | DECL_EXTERNAL (fn) = 1; | |
2696 | TREE_PUBLIC (fn) = 1; | |
2697 | DECL_ARTIFICIAL (fn) = 1; | |
2698 | TREE_NOTHROW (fn) = 1; | |
2699 | ||
2700 | block_clear_fn = fn; | |
bbf6f052 | 2701 | } |
e9a25f70 | 2702 | |
9661b15f JJ |
2703 | if (asmspec) |
2704 | { | |
2705 | SET_DECL_RTL (block_clear_fn, NULL_RTX); | |
2706 | SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec)); | |
2707 | } | |
2708 | } | |
2709 | ||
2710 | static tree | |
502b8322 | 2711 | clear_storage_libcall_fn (int for_call) |
9661b15f JJ |
2712 | { |
2713 | static bool emitted_extern; | |
2714 | ||
2715 | if (!block_clear_fn) | |
2716 | init_block_clear_fn (NULL); | |
2717 | ||
4ca79136 RH |
2718 | if (for_call && !emitted_extern) |
2719 | { | |
2720 | emitted_extern = true; | |
9661b15f JJ |
2721 | make_decl_rtl (block_clear_fn, NULL); |
2722 | assemble_external (block_clear_fn); | |
4ca79136 | 2723 | } |
bbf6f052 | 2724 | |
9661b15f | 2725 | return block_clear_fn; |
4ca79136 RH |
2726 | } |
2727 | \f | |
bbf6f052 RK |
2728 | /* Generate code to copy Y into X. |
2729 | Both Y and X must have the same mode, except that | |
2730 | Y can be a constant with VOIDmode. | |
2731 | This mode cannot be BLKmode; use emit_block_move for that. | |
2732 | ||
2733 | Return the last instruction emitted. */ | |
2734 | ||
2735 | rtx | |
502b8322 | 2736 | emit_move_insn (rtx x, rtx y) |
bbf6f052 RK |
2737 | { |
2738 | enum machine_mode mode = GET_MODE (x); | |
de1b33dd | 2739 | rtx y_cst = NULL_RTX; |
0c19a26f | 2740 | rtx last_insn, set; |
bbf6f052 RK |
2741 | |
2742 | x = protect_from_queue (x, 1); | |
2743 | y = protect_from_queue (y, 0); | |
2744 | ||
2745 | if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode)) | |
2746 | abort (); | |
2747 | ||
6de9cd9a | 2748 | if (CONSTANT_P (y)) |
de1b33dd | 2749 | { |
51286de6 | 2750 | if (optimize |
075fc17a | 2751 | && SCALAR_FLOAT_MODE_P (GET_MODE (x)) |
51286de6 RH |
2752 | && (last_insn = compress_float_constant (x, y))) |
2753 | return last_insn; | |
2754 | ||
0c19a26f RS |
2755 | y_cst = y; |
2756 | ||
51286de6 RH |
2757 | if (!LEGITIMATE_CONSTANT_P (y)) |
2758 | { | |
51286de6 | 2759 | y = force_const_mem (mode, y); |
3a04ff64 RH |
2760 | |
2761 | /* If the target's cannot_force_const_mem prevented the spill, | |
2762 | assume that the target's move expanders will also take care | |
2763 | of the non-legitimate constant. */ | |
2764 | if (!y) | |
2765 | y = y_cst; | |
51286de6 | 2766 | } |
de1b33dd | 2767 | } |
bbf6f052 RK |
2768 | |
2769 | /* If X or Y are memory references, verify that their addresses are valid | |
2770 | for the machine. */ | |
3c0cb5de | 2771 | if (MEM_P (x) |
bbf6f052 RK |
2772 | && ((! memory_address_p (GET_MODE (x), XEXP (x, 0)) |
2773 | && ! push_operand (x, GET_MODE (x))) | |
2774 | || (flag_force_addr | |
2775 | && CONSTANT_ADDRESS_P (XEXP (x, 0))))) | |
792760b9 | 2776 | x = validize_mem (x); |
bbf6f052 | 2777 | |
3c0cb5de | 2778 | if (MEM_P (y) |
bbf6f052 RK |
2779 | && (! memory_address_p (GET_MODE (y), XEXP (y, 0)) |
2780 | || (flag_force_addr | |
2781 | && CONSTANT_ADDRESS_P (XEXP (y, 0))))) | |
792760b9 | 2782 | y = validize_mem (y); |
bbf6f052 RK |
2783 | |
2784 | if (mode == BLKmode) | |
2785 | abort (); | |
2786 | ||
de1b33dd AO |
2787 | last_insn = emit_move_insn_1 (x, y); |
2788 | ||
f8cfc6aa | 2789 | if (y_cst && REG_P (x) |
0c19a26f RS |
2790 | && (set = single_set (last_insn)) != NULL_RTX |
2791 | && SET_DEST (set) == x | |
2792 | && ! rtx_equal_p (y_cst, SET_SRC (set))) | |
3d238248 | 2793 | set_unique_reg_note (last_insn, REG_EQUAL, y_cst); |
de1b33dd AO |
2794 | |
2795 | return last_insn; | |
261c4230 RS |
2796 | } |
2797 | ||
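/* A minimal sketch of the contract above (fragment only; the names are
   illustrative and assume this file's rtl-generation context):  */
#if 0
  rtx reg = gen_reg_rtx (SImode);   /* destination, SImode */
  rtx cst = GEN_INT (42);           /* constants may have VOIDmode */
  emit_move_insn (reg, cst);        /* emits (set (reg:SI) (const_int 42)) */
#endif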
2798 | /* Low level part of emit_move_insn. | |
2799 | Called just like emit_move_insn, but assumes X and Y | |
2800 | are basically valid. */ | |
2801 | ||
2802 | rtx | |
502b8322 | 2803 | emit_move_insn_1 (rtx x, rtx y) |
261c4230 RS |
2804 | { |
2805 | enum machine_mode mode = GET_MODE (x); | |
2806 | enum machine_mode submode; | |
2807 | enum mode_class class = GET_MODE_CLASS (mode); | |
261c4230 | 2808 | |
dbbbbf3b | 2809 | if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE) |
3a94c984 | 2810 | abort (); |
76bbe028 | 2811 | |
bbf6f052 RK |
2812 | if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) |
2813 | return | |
2814 | emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y)); | |
2815 | ||
89742723 | 2816 | /* Expand complex moves by moving real part and imag part, if possible. */ |
7308a047 | 2817 | else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT) |
27e58a70 | 2818 | && BLKmode != (submode = GET_MODE_INNER (mode)) |
7308a047 RS |
2819 | && (mov_optab->handlers[(int) submode].insn_code |
2820 | != CODE_FOR_nothing)) | |
2821 | { | |
2822 | /* Don't split destination if it is a stack push. */ | |
2823 | int stack = push_operand (x, GET_MODE (x)); | |
7308a047 | 2824 | |
79ce92d7 | 2825 | #ifdef PUSH_ROUNDING |
0e9cbd11 KH |
2826 | /* In case we output to the stack, but the size is smaller than what |
2827 | the machine can push exactly, we need to use move instructions. */ |
1a06f5fe | 2828 | if (stack |
bb93b973 RK |
2829 | && (PUSH_ROUNDING (GET_MODE_SIZE (submode)) |
2830 | != GET_MODE_SIZE (submode))) | |
1a06f5fe JH |
2831 | { |
2832 | rtx temp; | |
bb93b973 | 2833 | HOST_WIDE_INT offset1, offset2; |
1a06f5fe JH |
2834 | |
2835 | /* Do not use anti_adjust_stack, since we don't want to update | |
2836 | stack_pointer_delta. */ | |
2837 | temp = expand_binop (Pmode, | |
2838 | #ifdef STACK_GROWS_DOWNWARD | |
2839 | sub_optab, | |
2840 | #else | |
2841 | add_optab, | |
2842 | #endif | |
2843 | stack_pointer_rtx, | |
2844 | GEN_INT | |
bb93b973 RK |
2845 | (PUSH_ROUNDING |
2846 | (GET_MODE_SIZE (GET_MODE (x)))), | |
2847 | stack_pointer_rtx, 0, OPTAB_LIB_WIDEN); | |
2848 | ||
1a06f5fe JH |
2849 | if (temp != stack_pointer_rtx) |
2850 | emit_move_insn (stack_pointer_rtx, temp); | |
bb93b973 | 2851 | |
1a06f5fe JH |
2852 | #ifdef STACK_GROWS_DOWNWARD |
2853 | offset1 = 0; | |
2854 | offset2 = GET_MODE_SIZE (submode); | |
2855 | #else | |
2856 | offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))); | |
2857 | offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))) | |
2858 | + GET_MODE_SIZE (submode)); | |
2859 | #endif | |
bb93b973 | 2860 | |
1a06f5fe JH |
2861 | emit_move_insn (change_address (x, submode, |
2862 | gen_rtx_PLUS (Pmode, | |
2863 | stack_pointer_rtx, | |
2864 | GEN_INT (offset1))), | |
2865 | gen_realpart (submode, y)); | |
2866 | emit_move_insn (change_address (x, submode, | |
2867 | gen_rtx_PLUS (Pmode, | |
2868 | stack_pointer_rtx, | |
2869 | GEN_INT (offset2))), | |
2870 | gen_imagpart (submode, y)); | |
2871 | } | |
e9c0bd54 | 2872 | else |
79ce92d7 | 2873 | #endif |
7308a047 RS |
2874 | /* If this is a stack push, push the highpart first, so it |
2875 | will be in the argument order. | |
2876 | ||
2877 | In that case, change_address is used only to convert | |
2878 | the mode, not to change the address. */ | |
e9c0bd54 | 2879 | if (stack) |
c937357e | 2880 | { |
e33c0d66 RS |
2881 | /* Note that the real part always precedes the imag part in memory |
2882 | regardless of machine's endianness. */ | |
c937357e | 2883 | #ifdef STACK_GROWS_DOWNWARD |
a79b3dc7 RS |
2884 | emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), |
2885 | gen_imagpart (submode, y)); | |
2886 | emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), | |
2887 | gen_realpart (submode, y)); | |
c937357e | 2888 | #else |
a79b3dc7 RS |
2889 | emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), |
2890 | gen_realpart (submode, y)); | |
2891 | emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), | |
2892 | gen_imagpart (submode, y)); | |
c937357e RS |
2893 | #endif |
2894 | } | |
2895 | else | |
2896 | { | |
235ae7be DM |
2897 | rtx realpart_x, realpart_y; |
2898 | rtx imagpart_x, imagpart_y; | |
2899 | ||
405f63da MM |
2900 | /* If this is a complex value with each part being smaller than a |
2901 | word, the usual calling sequence will likely pack the pieces into | |
2902 | a single register. Unfortunately, SUBREG of hard registers only | |
2903 | deals in terms of words, so we have a problem converting input | |
2904 | arguments to the CONCAT of two registers that is used elsewhere | |
2905 | for complex values. If this is before reload, we can copy it into | |
2906 | memory and reload. FIXME, we should see about using extract and | |
2907 | insert on integer registers, but complex short and complex char | |
2908 | variables should be rarely used. */ | |
3a94c984 | 2909 | if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD |
405f63da MM |
2910 | && (reload_in_progress | reload_completed) == 0) |
2911 | { | |
bb93b973 RK |
2912 | int packed_dest_p |
2913 | = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER); | |
2914 | int packed_src_p | |
2915 | = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER); | |
405f63da MM |
2916 | |
2917 | if (packed_dest_p || packed_src_p) | |
2918 | { | |
2919 | enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT) | |
2920 | ? MODE_FLOAT : MODE_INT); | |
2921 | ||
1da68f56 RK |
2922 | enum machine_mode reg_mode |
2923 | = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1); | |
405f63da MM |
2924 | |
2925 | if (reg_mode != BLKmode) | |
2926 | { | |
2927 | rtx mem = assign_stack_temp (reg_mode, | |
2928 | GET_MODE_SIZE (mode), 0); | |
f4ef873c | 2929 | rtx cmem = adjust_address (mem, mode, 0); |
405f63da | 2930 | |
405f63da MM |
2931 | if (packed_dest_p) |
2932 | { | |
2933 | rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0); | |
bb93b973 | 2934 | |
405f63da MM |
2935 | emit_move_insn_1 (cmem, y); |
2936 | return emit_move_insn_1 (sreg, mem); | |
2937 | } | |
2938 | else | |
2939 | { | |
2940 | rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0); | |
bb93b973 | 2941 | |
405f63da MM |
2942 | emit_move_insn_1 (mem, sreg); |
2943 | return emit_move_insn_1 (x, cmem); | |
2944 | } | |
2945 | } | |
2946 | } | |
2947 | } | |
2948 | ||
235ae7be DM |
2949 | realpart_x = gen_realpart (submode, x); |
2950 | realpart_y = gen_realpart (submode, y); | |
2951 | imagpart_x = gen_imagpart (submode, x); | |
2952 | imagpart_y = gen_imagpart (submode, y); | |
2953 | ||
2954 | /* Show the output dies here. This is necessary for SUBREGs | |
2955 | of pseudos since we cannot track their lifetimes correctly; | |
c14c6529 RH |
2956 | hard regs shouldn't appear here except as return values. |
2957 | We never want to emit such a clobber after reload. */ | |
2958 | if (x != y | |
235ae7be DM |
2959 | && ! (reload_in_progress || reload_completed) |
2960 | && (GET_CODE (realpart_x) == SUBREG | |
2961 | || GET_CODE (imagpart_x) == SUBREG)) | |
bb93b973 | 2962 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); |
2638126a | 2963 | |
a79b3dc7 RS |
2964 | emit_move_insn (realpart_x, realpart_y); |
2965 | emit_move_insn (imagpart_x, imagpart_y); | |
c937357e | 2966 | } |
7308a047 | 2967 | |
7a1ab50a | 2968 | return get_last_insn (); |
7308a047 RS |
2969 | } |
2970 | ||
a3600c71 HPN |
2971 | /* Handle MODE_CC modes: If we don't have a special move insn for this mode, |
2972 | find a mode to do it in. If we have a movcc, use it. Otherwise, | |
2973 | find the MODE_INT mode of the same width. */ | |
2974 | else if (GET_MODE_CLASS (mode) == MODE_CC | |
2975 | && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) | |
2976 | { | |
2977 | enum insn_code insn_code; | |
2978 | enum machine_mode tmode = VOIDmode; | |
2979 | rtx x1 = x, y1 = y; | |
2980 | ||
2981 | if (mode != CCmode | |
2982 | && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing) | |
2983 | tmode = CCmode; | |
2984 | else | |
2985 | for (tmode = QImode; tmode != VOIDmode; | |
2986 | tmode = GET_MODE_WIDER_MODE (tmode)) | |
2987 | if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode)) | |
2988 | break; | |
2989 | ||
2990 | if (tmode == VOIDmode) | |
2991 | abort (); | |
2992 | ||
2993 | /* Get X and Y in TMODE. We can't use gen_lowpart here because it | |
2994 | may call change_address which is not appropriate if we were | |
2995 | called when a reload was in progress. We don't have to worry | |
2996 | about changing the address since the size in bytes is supposed to | |
2997 | be the same. Copy the MEM to change the mode and move any | |
2998 | substitutions from the old MEM to the new one. */ | |
2999 | ||
3000 | if (reload_in_progress) | |
3001 | { | |
3002 | x = gen_lowpart_common (tmode, x1); | |
3c0cb5de | 3003 | if (x == 0 && MEM_P (x1)) |
a3600c71 HPN |
3004 | { |
3005 | x = adjust_address_nv (x1, tmode, 0); | |
3006 | copy_replacements (x1, x); | |
3007 | } | |
3008 | ||
3009 | y = gen_lowpart_common (tmode, y1); | |
3c0cb5de | 3010 | if (y == 0 && MEM_P (y1)) |
a3600c71 HPN |
3011 | { |
3012 | y = adjust_address_nv (y1, tmode, 0); | |
3013 | copy_replacements (y1, y); | |
3014 | } | |
3015 | } | |
3016 | else | |
3017 | { | |
3018 | x = gen_lowpart (tmode, x); | |
3019 | y = gen_lowpart (tmode, y); | |
3020 | } | |
502b8322 | 3021 | |
a3600c71 HPN |
3022 | insn_code = mov_optab->handlers[(int) tmode].insn_code; |
3023 | return emit_insn (GEN_FCN (insn_code) (x, y)); | |
3024 | } | |
3025 | ||
5581fc91 RS |
3026 | /* Try using a move pattern for the corresponding integer mode. This is |
3027 | only safe when simplify_subreg can convert MODE constants into integer | |
3028 | constants. At present, it can only do this reliably if the value | |
3029 | fits within a HOST_WIDE_INT. */ | |
3030 | else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
3031 | && (submode = int_mode_for_mode (mode)) != BLKmode | |
3032 | && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing) | |
3033 | return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code) | |
3034 | (simplify_gen_subreg (submode, x, mode, 0), | |
3035 | simplify_gen_subreg (submode, y, mode, 0))); | |
3036 | ||
cffa2189 R |
3037 | /* This will handle any multi-word or full-word mode that lacks a move_insn |
3038 | pattern. However, you will get better code if you define such patterns, | |
bbf6f052 | 3039 | even if they must turn into multiple assembler instructions. */ |
cffa2189 | 3040 | else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD) |
bbf6f052 RK |
3041 | { |
3042 | rtx last_insn = 0; | |
3ef1eef4 | 3043 | rtx seq, inner; |
235ae7be | 3044 | int need_clobber; |
bb93b973 | 3045 | int i; |
3a94c984 | 3046 | |
a98c9f1a RK |
3047 | #ifdef PUSH_ROUNDING |
3048 | ||
3049 | /* If X is a push on the stack, do the push now and replace | |
3050 | X with a reference to the stack pointer. */ | |
3051 | if (push_operand (x, GET_MODE (x))) | |
3052 | { | |
918a6124 GK |
3053 | rtx temp; |
3054 | enum rtx_code code; | |
0fb7aeda | 3055 | |
918a6124 GK |
3056 | /* Do not use anti_adjust_stack, since we don't want to update |
3057 | stack_pointer_delta. */ | |
3058 | temp = expand_binop (Pmode, | |
3059 | #ifdef STACK_GROWS_DOWNWARD | |
3060 | sub_optab, | |
3061 | #else | |
3062 | add_optab, | |
3063 | #endif | |
3064 | stack_pointer_rtx, | |
3065 | GEN_INT | |
bb93b973 RK |
3066 | (PUSH_ROUNDING |
3067 | (GET_MODE_SIZE (GET_MODE (x)))), | |
a426c92e | 3068 | stack_pointer_rtx, 0, OPTAB_LIB_WIDEN); |
bb93b973 | 3069 | |
0fb7aeda KH |
3070 | if (temp != stack_pointer_rtx) |
3071 | emit_move_insn (stack_pointer_rtx, temp); | |
918a6124 GK |
3072 | |
3073 | code = GET_CODE (XEXP (x, 0)); | |
bb93b973 | 3074 | |
918a6124 GK |
3075 | /* Just hope that small offsets off SP are OK. */ |
3076 | if (code == POST_INC) | |
0fb7aeda | 3077 | temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
bb93b973 RK |
3078 | GEN_INT (-((HOST_WIDE_INT) |
3079 | GET_MODE_SIZE (GET_MODE (x))))); | |
918a6124 | 3080 | else if (code == POST_DEC) |
0fb7aeda | 3081 | temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
918a6124 GK |
3082 | GEN_INT (GET_MODE_SIZE (GET_MODE (x)))); |
3083 | else | |
3084 | temp = stack_pointer_rtx; | |
3085 | ||
3086 | x = change_address (x, VOIDmode, temp); | |
a98c9f1a RK |
3087 | } |
3088 | #endif | |
3a94c984 | 3089 | |
3ef1eef4 RK |
3090 | /* If we are in reload, see if either operand is a MEM whose address |
3091 | is scheduled for replacement. */ | |
3c0cb5de | 3092 | if (reload_in_progress && MEM_P (x) |
3ef1eef4 | 3093 | && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0)) |
f1ec5147 | 3094 | x = replace_equiv_address_nv (x, inner); |
3c0cb5de | 3095 | if (reload_in_progress && MEM_P (y) |
3ef1eef4 | 3096 | && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0)) |
f1ec5147 | 3097 | y = replace_equiv_address_nv (y, inner); |
3ef1eef4 | 3098 | |
235ae7be | 3099 | start_sequence (); |
15a7a8ec | 3100 | |
235ae7be | 3101 | need_clobber = 0; |
bbf6f052 | 3102 | for (i = 0; |
3a94c984 | 3103 | i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; |
bbf6f052 RK |
3104 | i++) |
3105 | { | |
3106 | rtx xpart = operand_subword (x, i, 1, mode); | |
3107 | rtx ypart = operand_subword (y, i, 1, mode); | |
3108 | ||
3109 | /* If we can't get a part of Y, put Y into memory if it is a | |
3110 | constant. Otherwise, force it into a register. If we still | |
3111 | can't get a part of Y, abort. */ | |
3112 | if (ypart == 0 && CONSTANT_P (y)) | |
3113 | { | |
3114 | y = force_const_mem (mode, y); | |
3115 | ypart = operand_subword (y, i, 1, mode); | |
3116 | } | |
3117 | else if (ypart == 0) | |
3118 | ypart = operand_subword_force (y, i, mode); | |
3119 | ||
3120 | if (xpart == 0 || ypart == 0) | |
3121 | abort (); | |
3122 | ||
235ae7be DM |
3123 | need_clobber |= (GET_CODE (xpart) == SUBREG); |
3124 | ||
bbf6f052 RK |
3125 | last_insn = emit_move_insn (xpart, ypart); |
3126 | } | |
6551fa4d | 3127 | |
2f937369 | 3128 | seq = get_insns (); |
235ae7be DM |
3129 | end_sequence (); |
3130 | ||
3131 | /* Show the output dies here. This is necessary for SUBREGs | |
3132 | of pseudos since we cannot track their lifetimes correctly; | |
3133 | hard regs shouldn't appear here except as return values. | |
3134 | We never want to emit such a clobber after reload. */ | |
3135 | if (x != y | |
3136 | && ! (reload_in_progress || reload_completed) | |
3137 | && need_clobber != 0) | |
bb93b973 | 3138 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); |
235ae7be DM |
3139 | |
3140 | emit_insn (seq); | |
3141 | ||
bbf6f052 RK |
3142 | return last_insn; |
3143 | } | |
3144 | else | |
3145 | abort (); | |
3146 | } | |
51286de6 RH |
3147 | |
3148 | /* If Y is representable exactly in a narrower mode, and the target can | |
3149 | perform the extension directly from constant or memory, then emit the | |
3150 | move as an extension. */ | |
3151 | ||
3152 | static rtx | |
502b8322 | 3153 | compress_float_constant (rtx x, rtx y) |
51286de6 RH |
3154 | { |
3155 | enum machine_mode dstmode = GET_MODE (x); | |
3156 | enum machine_mode orig_srcmode = GET_MODE (y); | |
3157 | enum machine_mode srcmode; | |
3158 | REAL_VALUE_TYPE r; | |
3159 | ||
3160 | REAL_VALUE_FROM_CONST_DOUBLE (r, y); | |
3161 | ||
3162 | for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode)); | |
3163 | srcmode != orig_srcmode; | |
3164 | srcmode = GET_MODE_WIDER_MODE (srcmode)) | |
3165 | { | |
3166 | enum insn_code ic; | |
3167 | rtx trunc_y, last_insn; | |
3168 | ||
3169 | /* Skip if the target can't extend this way. */ | |
3170 | ic = can_extend_p (dstmode, srcmode, 0); | |
3171 | if (ic == CODE_FOR_nothing) | |
3172 | continue; | |
3173 | ||
3174 | /* Skip if the narrowed value isn't exact. */ | |
3175 | if (! exact_real_truncate (srcmode, &r)) | |
3176 | continue; | |
3177 | ||
3178 | trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode); | |
3179 | ||
3180 | if (LEGITIMATE_CONSTANT_P (trunc_y)) | |
3181 | { | |
3182 | /* Skip if the target needs extra instructions to perform | |
3183 | the extension. */ | |
3184 | if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode)) | |
3185 | continue; | |
3186 | } | |
3187 | else if (float_extend_from_mem[dstmode][srcmode]) | |
3188 | trunc_y = validize_mem (force_const_mem (srcmode, trunc_y)); | |
3189 | else | |
3190 | continue; | |
3191 | ||
3192 | emit_unop_insn (ic, x, trunc_y, UNKNOWN); | |
3193 | last_insn = get_last_insn (); | |
3194 | ||
f8cfc6aa | 3195 | if (REG_P (x)) |
0c19a26f | 3196 | set_unique_reg_note (last_insn, REG_EQUAL, y); |
51286de6 RH |
3197 | |
3198 | return last_insn; | |
3199 | } | |
3200 | ||
3201 | return NULL_RTX; | |
3202 | } | |
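/* For example, if a DFmode (const_double 1.0) is exactly representable
   in SFmode and the target can extend SFmode to DFmode directly, the
   move is emitted schematically as
     (set (reg:DF x) (float_extend:DF (const_double:SF 1.0)))
   with a REG_EQUAL note recording the original DFmode constant.  */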
bbf6f052 RK |
3203 | \f |
3204 | /* Pushing data onto the stack. */ | |
3205 | ||
3206 | /* Push a block of length SIZE (perhaps variable) | |
3207 | and return an rtx to address the beginning of the block. | |
3208 | Note that it is not possible for the value returned to be a QUEUED. | |
3209 | The value may be virtual_outgoing_args_rtx. | |
3210 | ||
3211 | EXTRA is the number of bytes of padding to push in addition to SIZE. | |
3212 | BELOW nonzero means this padding comes at low addresses; | |
3213 | otherwise, the padding comes at high addresses. */ | |
3214 | ||
3215 | rtx | |
502b8322 | 3216 | push_block (rtx size, int extra, int below) |
bbf6f052 | 3217 | { |
b3694847 | 3218 | rtx temp; |
88f63c77 RK |
3219 | |
3220 | size = convert_modes (Pmode, ptr_mode, size, 1); | |
bbf6f052 RK |
3221 | if (CONSTANT_P (size)) |
3222 | anti_adjust_stack (plus_constant (size, extra)); | |
f8cfc6aa | 3223 | else if (REG_P (size) && extra == 0) |
bbf6f052 RK |
3224 | anti_adjust_stack (size); |
3225 | else | |
3226 | { | |
ce48579b | 3227 | temp = copy_to_mode_reg (Pmode, size); |
bbf6f052 | 3228 | if (extra != 0) |
906c4e36 | 3229 | temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra), |
bbf6f052 RK |
3230 | temp, 0, OPTAB_LIB_WIDEN); |
3231 | anti_adjust_stack (temp); | |
3232 | } | |
3233 | ||
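  /* The if (0) / if (1) pair below keeps both arms syntactically live,
     so each branch is parsed and type-checked on every target even
     though only one of them can ever execute.  */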
f73ad30e | 3234 | #ifndef STACK_GROWS_DOWNWARD |
f73ad30e | 3235 | if (0) |
f73ad30e JH |
3236 | #else |
3237 | if (1) | |
bbf6f052 | 3238 | #endif |
f73ad30e | 3239 | { |
f73ad30e JH |
3240 | temp = virtual_outgoing_args_rtx; |
3241 | if (extra != 0 && below) | |
3242 | temp = plus_constant (temp, extra); | |
3243 | } | |
3244 | else | |
3245 | { | |
3246 | if (GET_CODE (size) == CONST_INT) | |
3247 | temp = plus_constant (virtual_outgoing_args_rtx, | |
3a94c984 | 3248 | -INTVAL (size) - (below ? 0 : extra)); |
f73ad30e JH |
3249 | else if (extra != 0 && !below) |
3250 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
3bdf5ad1 | 3251 | negate_rtx (Pmode, plus_constant (size, extra))); |
f73ad30e JH |
3252 | else |
3253 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
3254 | negate_rtx (Pmode, size)); | |
3255 | } | |
bbf6f052 RK |
3256 | |
3257 | return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp); | |
3258 | } | |
3259 | ||
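/* A minimal usage sketch (sizes are illustrative): push a 64-byte block
   with 8 bytes of padding at low addresses, then address it as a MEM.  */
#if 0
  rtx addr = push_block (GEN_INT (64), 8, /*below=*/1);
  rtx blk = gen_rtx_MEM (BLKmode, addr);
#endif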
21d93687 RK |
3260 | #ifdef PUSH_ROUNDING |
3261 | ||
566aa174 | 3262 | /* Emit single push insn. */ |
21d93687 | 3263 | |
566aa174 | 3264 | static void |
502b8322 | 3265 | emit_single_push_insn (enum machine_mode mode, rtx x, tree type) |
566aa174 | 3266 | { |
566aa174 | 3267 | rtx dest_addr; |
918a6124 | 3268 | unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); |
566aa174 | 3269 | rtx dest; |
371b8fc0 JH |
3270 | enum insn_code icode; |
3271 | insn_operand_predicate_fn pred; | |
566aa174 | 3272 | |
371b8fc0 JH |
3273 | stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); |
3274 | /* If there is a push pattern, use it.  Otherwise fall back to the old |
3275 | way: hand a MEM representing the push operation to the move expander. */ |
3276 | icode = push_optab->handlers[(int) mode].insn_code; | |
3277 | if (icode != CODE_FOR_nothing) | |
3278 | { | |
3279 | if (((pred = insn_data[(int) icode].operand[0].predicate) | |
505ddab6 | 3280 | && !((*pred) (x, mode)))) |
371b8fc0 JH |
3281 | x = force_reg (mode, x); |
3282 | emit_insn (GEN_FCN (icode) (x)); | |
3283 | return; | |
3284 | } | |
566aa174 JH |
3285 | if (GET_MODE_SIZE (mode) == rounded_size) |
3286 | dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); | |
329d586f KH |
3287 | /* If we are to pad downward, adjust the stack pointer first and |
3288 | then store X into the stack location using an offset. This is | |
3289 | because emit_move_insn does not know how to pad; it does not have | |
3290 | access to type. */ | |
3291 | else if (FUNCTION_ARG_PADDING (mode, type) == downward) | |
3292 | { | |
3293 | unsigned padding_size = rounded_size - GET_MODE_SIZE (mode); | |
3294 | HOST_WIDE_INT offset; | |
3295 | ||
3296 | emit_move_insn (stack_pointer_rtx, | |
3297 | expand_binop (Pmode, | |
3298 | #ifdef STACK_GROWS_DOWNWARD | |
3299 | sub_optab, | |
3300 | #else | |
3301 | add_optab, | |
3302 | #endif | |
3303 | stack_pointer_rtx, | |
3304 | GEN_INT (rounded_size), | |
3305 | NULL_RTX, 0, OPTAB_LIB_WIDEN)); | |
3306 | ||
3307 | offset = (HOST_WIDE_INT) padding_size; | |
3308 | #ifdef STACK_GROWS_DOWNWARD | |
3309 | if (STACK_PUSH_CODE == POST_DEC) | |
3310 | /* We have already decremented the stack pointer, so get the | |
3311 | previous value. */ | |
3312 | offset += (HOST_WIDE_INT) rounded_size; | |
3313 | #else | |
3314 | if (STACK_PUSH_CODE == POST_INC) | |
3315 | /* We have already incremented the stack pointer, so get the | |
3316 | previous value. */ | |
3317 | offset -= (HOST_WIDE_INT) rounded_size; | |
3318 | #endif | |
3319 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset)); | |
3320 | } | |
566aa174 JH |
3321 | else |
3322 | { | |
3323 | #ifdef STACK_GROWS_DOWNWARD | |
329d586f | 3324 | /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */ |
566aa174 | 3325 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
505ddab6 | 3326 | GEN_INT (-(HOST_WIDE_INT) rounded_size)); |
566aa174 | 3327 | #else |
329d586f | 3328 | /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */ |
566aa174 JH |
3329 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
3330 | GEN_INT (rounded_size)); | |
3331 | #endif | |
3332 | dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); | |
3333 | } | |
3334 | ||
3335 | dest = gen_rtx_MEM (mode, dest_addr); | |
3336 | ||
566aa174 JH |
3337 | if (type != 0) |
3338 | { | |
3339 | set_mem_attributes (dest, type, 1); | |
c3d32120 RK |
3340 | |
3341 | if (flag_optimize_sibling_calls) | |
3342 | /* Function incoming arguments may overlap with sibling call | |
3343 | outgoing arguments and we cannot allow reordering of reads | |
3344 | from function arguments with stores to outgoing arguments | |
3345 | of sibling calls. */ | |
3346 | set_mem_alias_set (dest, 0); | |
566aa174 JH |
3347 | } |
3348 | emit_move_insn (dest, x); | |
566aa174 | 3349 | } |
21d93687 | 3350 | #endif |
566aa174 | 3351 | |
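/* Schematically, the fallback case above stores through
     (mem:M (pre_modify (reg sp) (plus (reg sp) (const_int -rounded_size))))
   on a downward-growing stack, while the exact-size case uses the
   simpler STACK_PUSH_CODE address form.  */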
bbf6f052 RK |
3352 | /* Generate code to push X onto the stack, assuming it has mode MODE and |
3353 | type TYPE. | |
3354 | MODE is redundant except when X is a CONST_INT (since they don't | |
3355 | carry mode info). | |
3356 | SIZE is an rtx for the size of data to be copied (in bytes), | |
3357 | needed only if X is BLKmode. | |
3358 | ||
f1eaaf73 | 3359 | ALIGN (in bits) is maximum alignment we can assume. |
bbf6f052 | 3360 | |
cd048831 RK |
3361 | If PARTIAL and REG are both nonzero, then copy that many of the first |
3362 | words of X into registers starting with REG, and push the rest of X. | |
bbf6f052 RK |
3363 | The amount of space pushed is decreased by PARTIAL words, |
3364 | rounded *down* to a multiple of PARM_BOUNDARY. | |
3365 | REG must be a hard register in this case. | |
cd048831 RK |
3366 | If REG is zero but PARTIAL is not, take all other actions for an |
3367 | argument partially in registers, but do not actually load any | |
3368 | registers. | |
bbf6f052 RK |
3369 | |
3370 | EXTRA is the amount in bytes of extra space to leave next to this arg. | |
6dc42e49 | 3371 | This is ignored if an argument block has already been allocated. |
bbf6f052 RK |
3372 | |
3373 | On a machine that lacks real push insns, ARGS_ADDR is the address of | |
3374 | the bottom of the argument block for this call. We use indexing off there | |
3375 | to store the arg. On machines with push insns, ARGS_ADDR is 0 when an |
3376 | argument block has not been preallocated. | |
3377 | ||
e5e809f4 JL |
3378 | ARGS_SO_FAR is the size of args previously pushed for this call. |
3379 | ||
3380 | REG_PARM_STACK_SPACE is nonzero if functions require stack space | |
3381 | for arguments passed in registers. If nonzero, it will be the number | |
3382 | of bytes required. */ | |
bbf6f052 RK |
3383 | |
3384 | void | |
502b8322 AJ |
3385 | emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size, |
3386 | unsigned int align, int partial, rtx reg, int extra, | |
3387 | rtx args_addr, rtx args_so_far, int reg_parm_stack_space, | |
3388 | rtx alignment_pad) | |
bbf6f052 RK |
3389 | { |
3390 | rtx xinner; | |
3391 | enum direction stack_direction | |
3392 | #ifdef STACK_GROWS_DOWNWARD | |
3393 | = downward; | |
3394 | #else | |
3395 | = upward; | |
3396 | #endif | |
3397 | ||
3398 | /* Decide where to pad the argument: `downward' for below, | |
3399 | `upward' for above, or `none' for don't pad it. | |
3400 | Default is below for small data on big-endian machines; else above. */ | |
3401 | enum direction where_pad = FUNCTION_ARG_PADDING (mode, type); | |
3402 | ||
0fb7aeda | 3403 | /* Invert direction if stack is post-decrement. |
9e0e11bf GK |
3404 | FIXME: why? */ |
3405 | if (STACK_PUSH_CODE == POST_DEC) | |
bbf6f052 RK |
3406 | if (where_pad != none) |
3407 | where_pad = (where_pad == downward ? upward : downward); | |
3408 | ||
3409 | xinner = x = protect_from_queue (x, 0); | |
3410 | ||
3411 | if (mode == BLKmode) | |
3412 | { | |
3413 | /* Copy a block into the stack, entirely or partially. */ | |
3414 | ||
b3694847 | 3415 | rtx temp; |
bbf6f052 | 3416 | int used = partial * UNITS_PER_WORD; |
531547e9 | 3417 | int offset; |
bbf6f052 | 3418 | int skip; |
3a94c984 | 3419 | |
531547e9 FJ |
3420 | if (reg && GET_CODE (reg) == PARALLEL) |
3421 | { | |
3422 | /* Use the size of the elt to compute offset. */ | |
3423 | rtx elt = XEXP (XVECEXP (reg, 0, 0), 0); | |
3424 | used = partial * GET_MODE_SIZE (GET_MODE (elt)); | |
3425 | offset = used % (PARM_BOUNDARY / BITS_PER_UNIT); | |
3426 | } | |
3427 | else | |
3428 | offset = used % (PARM_BOUNDARY / BITS_PER_UNIT); | |
3429 | ||
bbf6f052 RK |
3430 | if (size == 0) |
3431 | abort (); | |
3432 | ||
3433 | used -= offset; | |
3434 | ||
3435 | /* USED is now the # of bytes we need not copy to the stack | |
3436 | because registers will take care of them. */ | |
3437 | ||
3438 | if (partial != 0) | |
f4ef873c | 3439 | xinner = adjust_address (xinner, BLKmode, used); |
bbf6f052 RK |
3440 | |
3441 | /* If the partial register-part of the arg counts in its stack size, | |
3442 | skip the part of stack space corresponding to the registers. | |
3443 | Otherwise, start copying to the beginning of the stack space, | |
3444 | by setting SKIP to 0. */ | |
e5e809f4 | 3445 | skip = (reg_parm_stack_space == 0) ? 0 : used; |
bbf6f052 RK |
3446 | |
3447 | #ifdef PUSH_ROUNDING | |
3448 | /* Do it with several push insns if that doesn't take lots of insns | |
3449 | and if there is no difficulty with push insns that skip bytes | |
3450 | on the stack for alignment purposes. */ | |
3451 | if (args_addr == 0 | |
f73ad30e | 3452 | && PUSH_ARGS |
bbf6f052 RK |
3453 | && GET_CODE (size) == CONST_INT |
3454 | && skip == 0 | |
f26aca6d | 3455 | && MEM_ALIGN (xinner) >= align |
15914757 | 3456 | && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align)) |
bbf6f052 RK |
3457 | /* Here we avoid the case of a structure whose weak alignment |
3458 | forces many pushes of a small amount of data, | |
3459 | and such small pushes do rounding that causes trouble. */ | |
e1565e65 | 3460 | && ((! SLOW_UNALIGNED_ACCESS (word_mode, align)) |
19caa751 | 3461 | || align >= BIGGEST_ALIGNMENT |
f1eaaf73 DE |
3462 | || (PUSH_ROUNDING (align / BITS_PER_UNIT) |
3463 | == (align / BITS_PER_UNIT))) | |
bbf6f052 RK |
3464 | && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size)) |
3465 | { | |
3466 | /* Push padding now if padding above and stack grows down, | |
3467 | or if padding below and stack grows up. | |
3468 | But if space already allocated, this has already been done. */ | |
3469 | if (extra && args_addr == 0 | |
3470 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3471 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 | 3472 | |
8fd3cf4e | 3473 | move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0); |
bbf6f052 RK |
3474 | } |
3475 | else | |
3a94c984 | 3476 | #endif /* PUSH_ROUNDING */ |
bbf6f052 | 3477 | { |
7ab923cc JJ |
3478 | rtx target; |
3479 | ||
bbf6f052 RK |
3480 | /* Otherwise make space on the stack and copy the data |
3481 | to the address of that space. */ | |
3482 | ||
3483 | /* Deduct words put into registers from the size we must copy. */ | |
3484 | if (partial != 0) | |
3485 | { | |
3486 | if (GET_CODE (size) == CONST_INT) | |
906c4e36 | 3487 | size = GEN_INT (INTVAL (size) - used); |
bbf6f052 RK |
3488 | else |
3489 | size = expand_binop (GET_MODE (size), sub_optab, size, | |
906c4e36 RK |
3490 | GEN_INT (used), NULL_RTX, 0, |
3491 | OPTAB_LIB_WIDEN); | |
bbf6f052 RK |
3492 | } |
3493 | ||
3494 | /* Get the address of the stack space. | |
3495 | In this case, we do not deal with EXTRA separately. | |
3496 | A single stack adjust will do. */ | |
3497 | if (! args_addr) | |
3498 | { | |
3499 | temp = push_block (size, extra, where_pad == downward); | |
3500 | extra = 0; | |
3501 | } | |
3502 | else if (GET_CODE (args_so_far) == CONST_INT) | |
3503 | temp = memory_address (BLKmode, | |
3504 | plus_constant (args_addr, | |
3505 | skip + INTVAL (args_so_far))); | |
3506 | else | |
3507 | temp = memory_address (BLKmode, | |
38a448ca RH |
3508 | plus_constant (gen_rtx_PLUS (Pmode, |
3509 | args_addr, | |
3510 | args_so_far), | |
bbf6f052 | 3511 | skip)); |
4ca79136 RH |
3512 | |
3513 | if (!ACCUMULATE_OUTGOING_ARGS) | |
3514 | { | |
3515 | /* If the source is referenced relative to the stack pointer, | |
3516 | copy it to another register to stabilize it. We do not need | |
3517 | to do this if we know that we won't be changing sp. */ | |
3518 | ||
3519 | if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp) | |
3520 | || reg_mentioned_p (virtual_outgoing_args_rtx, temp)) | |
3521 | temp = copy_to_reg (temp); | |
3522 | } | |
3523 | ||
3a94c984 | 3524 | target = gen_rtx_MEM (BLKmode, temp); |
7ab923cc | 3525 | |
3a94c984 KH |
3526 | if (type != 0) |
3527 | { | |
3528 | set_mem_attributes (target, type, 1); | |
3529 | /* Function incoming arguments may overlap with sibling call | |
3530 | outgoing arguments and we cannot allow reordering of reads | |
3531 | from function arguments with stores to outgoing arguments | |
3532 | of sibling calls. */ | |
ba4828e0 | 3533 | set_mem_alias_set (target, 0); |
3a94c984 | 3534 | } |
4ca79136 | 3535 | |
44bb111a RH |
3536 | /* ALIGN may well be better aligned than TYPE, e.g. due to |
3537 | PARM_BOUNDARY. Assume the caller isn't lying. */ | |
3538 | set_mem_align (target, align); | |
4ca79136 | 3539 | |
44bb111a | 3540 | emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM); |
bbf6f052 RK |
3541 | } |
3542 | } | |
3543 | else if (partial > 0) | |
3544 | { | |
3545 | /* Scalar partly in registers. */ | |
3546 | ||
3547 | int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD; | |
3548 | int i; | |
3549 | int not_stack; | |
3550 | /* # words of start of argument | |
3551 | that we must make space for but need not store. */ | |
3552 | int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD); | |
3553 | int args_offset = INTVAL (args_so_far); | |
3554 | int skip; | |
3555 | ||
3556 | /* Push padding now if padding above and stack grows down, | |
3557 | or if padding below and stack grows up. | |
3558 | But if space already allocated, this has already been done. */ | |
3559 | if (extra && args_addr == 0 | |
3560 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3561 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 RK |
3562 | |
3563 | /* If we make space by pushing it, we might as well push | |
3564 | the real data. Otherwise, we can leave OFFSET nonzero | |
3565 | and leave the space uninitialized. */ | |
3566 | if (args_addr == 0) | |
3567 | offset = 0; | |
3568 | ||
3569 | /* Now NOT_STACK gets the number of words that we don't need to | |
3570 | allocate on the stack. */ | |
3571 | not_stack = partial - offset; | |
3572 | ||
3573 | /* If the partial register-part of the arg counts in its stack size, | |
3574 | skip the part of stack space corresponding to the registers. | |
3575 | Otherwise, start copying to the beginning of the stack space, | |
3576 | by setting SKIP to 0. */ | |
e5e809f4 | 3577 | skip = (reg_parm_stack_space == 0) ? 0 : not_stack; |
bbf6f052 RK |
3578 | |
3579 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) | |
3580 | x = validize_mem (force_const_mem (mode, x)); | |
3581 | ||
3582 | /* If X is a hard register in a non-integer mode, copy it into a pseudo; | |
3583 | SUBREGs of such registers are not allowed. */ | |
f8cfc6aa | 3584 | if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER |
bbf6f052 RK |
3585 | && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)) |
3586 | x = copy_to_reg (x); | |
3587 | ||
3588 | /* Loop over all the words allocated on the stack for this arg. */ | |
3589 | /* We can do it by words, because any scalar bigger than a word | |
3590 | has a size a multiple of a word. */ | |
3591 | #ifndef PUSH_ARGS_REVERSED | |
3592 | for (i = not_stack; i < size; i++) | |
3593 | #else | |
3594 | for (i = size - 1; i >= not_stack; i--) | |
3595 | #endif | |
3596 | if (i >= not_stack + offset) | |
3597 | emit_push_insn (operand_subword_force (x, i, mode), | |
906c4e36 RK |
3598 | word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX, |
3599 | 0, args_addr, | |
3600 | GEN_INT (args_offset + ((i - not_stack + skip) | |
e5e809f4 | 3601 | * UNITS_PER_WORD)), |
4fc026cd | 3602 | reg_parm_stack_space, alignment_pad); |
bbf6f052 RK |
3603 | } |
3604 | else | |
3605 | { | |
3606 | rtx addr; | |
3bdf5ad1 | 3607 | rtx dest; |
bbf6f052 RK |
3608 | |
3609 | /* Push padding now if padding above and stack grows down, | |
3610 | or if padding below and stack grows up. | |
3611 | But if space already allocated, this has already been done. */ | |
3612 | if (extra && args_addr == 0 | |
3613 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3614 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 RK |
3615 | |
3616 | #ifdef PUSH_ROUNDING | |
f73ad30e | 3617 | if (args_addr == 0 && PUSH_ARGS) |
566aa174 | 3618 | emit_single_push_insn (mode, x, type); |
bbf6f052 RK |
3619 | else |
3620 | #endif | |
921b3427 RK |
3621 | { |
3622 | if (GET_CODE (args_so_far) == CONST_INT) | |
3623 | addr | |
3624 | = memory_address (mode, | |
3a94c984 | 3625 | plus_constant (args_addr, |
921b3427 | 3626 | INTVAL (args_so_far))); |
3a94c984 | 3627 | else |
38a448ca RH |
3628 | addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr, |
3629 | args_so_far)); | |
566aa174 JH |
3630 | dest = gen_rtx_MEM (mode, addr); |
3631 | if (type != 0) | |
3632 | { | |
3633 | set_mem_attributes (dest, type, 1); | |
3634 | /* Function incoming arguments may overlap with sibling call | |
3635 | outgoing arguments and we cannot allow reordering of reads | |
3636 | from function arguments with stores to outgoing arguments | |
3637 | of sibling calls. */ | |
ba4828e0 | 3638 | set_mem_alias_set (dest, 0); |
566aa174 | 3639 | } |
bbf6f052 | 3640 | |
566aa174 | 3641 | emit_move_insn (dest, x); |
566aa174 | 3642 | } |
bbf6f052 RK |
3643 | } |
3644 | ||
bbf6f052 RK |
3645 | /* If part should go in registers, copy that part |
3646 | into the appropriate registers. Do this now, at the end, | |
3647 | since mem-to-mem copies above may do function calls. */ | |
cd048831 | 3648 | if (partial > 0 && reg != 0) |
fffa9c1d JW |
3649 | { |
3650 | /* Handle calls that pass values in multiple non-contiguous locations. | |
3651 | The Irix 6 ABI has examples of this. */ | |
3652 | if (GET_CODE (reg) == PARALLEL) | |
6e985040 | 3653 | emit_group_load (reg, x, type, -1); |
fffa9c1d JW |
3654 | else |
3655 | move_block_to_reg (REGNO (reg), x, partial, mode); | |
3656 | } | |
bbf6f052 RK |
3657 | |
3658 | if (extra && args_addr == 0 && where_pad == stack_direction) | |
906c4e36 | 3659 | anti_adjust_stack (GEN_INT (extra)); |
3a94c984 | 3660 | |
3ea2292a | 3661 | if (alignment_pad && args_addr == 0) |
4fc026cd | 3662 | anti_adjust_stack (alignment_pad); |
bbf6f052 RK |
3663 | } |
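/* A minimal usage sketch (arguments are illustrative; the real callers
   live in calls.c): push a word-mode scalar X with no partial registers,
   no preallocated argument block, and no extra padding.  */
#if 0
  emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX,
                  PARM_BOUNDARY, 0, NULL_RTX, 0,
                  NULL_RTX, const0_rtx, 0, NULL_RTX);
#endif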
3664 | \f | |
296b4ed9 RK |
3665 | /* Return X if X can be used as a subtarget in a sequence of arithmetic |
3666 | operations. */ | |
3667 | ||
3668 | static rtx | |
502b8322 | 3669 | get_subtarget (rtx x) |
296b4ed9 RK |
3670 | { |
3671 | return ((x == 0 | |
3672 | /* Only registers can be subtargets. */ | |
f8cfc6aa | 3673 | || !REG_P (x) |
296b4ed9 RK |
3674 | /* If the register is readonly, it can't be set more than once. */ |
3675 | || RTX_UNCHANGING_P (x) | |
3676 | /* Don't use hard regs to avoid extending their life. */ | |
3677 | || REGNO (x) < FIRST_PSEUDO_REGISTER | |
3678 | /* Avoid subtargets inside loops, | |
3679 | since they hide some invariant expressions. */ | |
3680 | || preserve_subexpressions_p ()) | |
3681 | ? 0 : x); | |
3682 | } | |
3683 | ||
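/* Typical use (illustrative): expression expanders in this file do
     subtarget = get_subtarget (target);
   and hand SUBTARGET to expand_expr for an operand, so that the operand
   may be computed directly into TARGET when that is safe.  */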
bbf6f052 RK |
3684 | /* Expand an assignment that stores the value of FROM into TO. |
3685 | If WANT_VALUE is nonzero, return an rtx for the value of TO. | |
709f5be1 RS |
3686 | (This may contain a QUEUED rtx; |
3687 | if the value is constant, this rtx is a constant.) | |
b90f141a | 3688 | Otherwise, the returned value is NULL_RTX. */ |
bbf6f052 RK |
3689 | |
3690 | rtx | |
b90f141a | 3691 | expand_assignment (tree to, tree from, int want_value) |
bbf6f052 | 3692 | { |
b3694847 | 3693 | rtx to_rtx = 0; |
bbf6f052 RK |
3694 | rtx result; |
3695 | ||
3696 | /* Don't crash if the lhs of the assignment was erroneous. */ | |
3697 | ||
3698 | if (TREE_CODE (to) == ERROR_MARK) | |
709f5be1 RS |
3699 | { |
3700 | result = expand_expr (from, NULL_RTX, VOIDmode, 0); | |
3701 | return want_value ? result : NULL_RTX; | |
3702 | } | |
bbf6f052 RK |
3703 | |
3704 | /* Assignment of a structure component needs special treatment | |
3705 | if the structure component's rtx is not simply a MEM. | |
6be58303 JW |
3706 | Assignment of an array element at a constant index, and assignment of |
3707 | an array element in an unaligned packed structure field, has the same | |
3708 | problem. */ | |
bbf6f052 | 3709 | |
08293add | 3710 | if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF |
7c02ae17 DE |
3711 | || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF |
3712 | || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE) | |
bbf6f052 RK |
3713 | { |
3714 | enum machine_mode mode1; | |
770ae6cc | 3715 | HOST_WIDE_INT bitsize, bitpos; |
a06ef755 | 3716 | rtx orig_to_rtx; |
7bb0943f | 3717 | tree offset; |
bbf6f052 RK |
3718 | int unsignedp; |
3719 | int volatilep = 0; | |
0088fcb1 RK |
3720 | tree tem; |
3721 | ||
3722 | push_temp_slots (); | |
839c4796 | 3723 | tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, |
a06ef755 | 3724 | &unsignedp, &volatilep); |
bbf6f052 RK |
3725 | |
3726 | /* If we are going to use store_bit_field and extract_bit_field, | |
3727 | make sure to_rtx will be safe for multiple use. */ | |
3728 | ||
3729 | if (mode1 == VOIDmode && want_value) | |
3730 | tem = stabilize_reference (tem); | |
3731 | ||
1ed1b4fb RK |
3732 | orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0); |
3733 | ||
7bb0943f RS |
3734 | if (offset != 0) |
3735 | { | |
e3c8ea67 | 3736 | rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM); |
7bb0943f | 3737 | |
3c0cb5de | 3738 | if (!MEM_P (to_rtx)) |
7bb0943f | 3739 | abort (); |
bd070e1a | 3740 | |
bd070e1a | 3741 | #ifdef POINTERS_EXTEND_UNSIGNED |
4b6c1672 | 3742 | if (GET_MODE (offset_rtx) != Pmode) |
267b28bd | 3743 | offset_rtx = convert_to_mode (Pmode, offset_rtx, 0); |
fa06ab5c RK |
3744 | #else |
3745 | if (GET_MODE (offset_rtx) != ptr_mode) | |
3746 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); | |
bd070e1a | 3747 | #endif |
bd070e1a | 3748 | |
9a7b9f4f JL |
3749 | /* A constant address in TO_RTX can have VOIDmode; we must not try |
3750 | to call force_reg in that case, so avoid it. */ |
3c0cb5de | 3751 | if (MEM_P (to_rtx) |
89752202 | 3752 | && GET_MODE (to_rtx) == BLKmode |
9a7b9f4f | 3753 | && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode |
a06ef755 | 3754 | && bitsize > 0 |
3a94c984 | 3755 | && (bitpos % bitsize) == 0 |
89752202 | 3756 | && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 |
a06ef755 | 3757 | && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1)) |
89752202 | 3758 | { |
e3c8ea67 | 3759 | to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT); |
89752202 HB |
3760 | bitpos = 0; |
3761 | } | |
3762 | ||
0d4903b8 | 3763 | to_rtx = offset_address (to_rtx, offset_rtx, |
d50a16c4 EB |
3764 | highest_pow2_factor_for_target (to, |
3765 | offset)); | |
7bb0943f | 3766 | } |
c5c76735 | 3767 | |
3c0cb5de | 3768 | if (MEM_P (to_rtx)) |
998d7deb | 3769 | { |
998d7deb RH |
3770 | /* If the field is at offset zero, we could have been given the |
3771 | DECL_RTX of the parent struct. Don't munge it. */ | |
3772 | to_rtx = shallow_copy_rtx (to_rtx); | |
3773 | ||
6f1087be | 3774 | set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos); |
998d7deb | 3775 | } |
effbcc6a | 3776 | |
a06ef755 RK |
3777 | /* Deal with volatile and readonly fields. The former is only done |
3778 | for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */ | |
3c0cb5de | 3779 | if (volatilep && MEM_P (to_rtx)) |
a06ef755 RK |
3780 | { |
3781 | if (to_rtx == orig_to_rtx) | |
3782 | to_rtx = copy_rtx (to_rtx); | |
3783 | MEM_VOLATILE_P (to_rtx) = 1; | |
bbf6f052 RK |
3784 | } |
3785 | ||
956d6950 | 3786 | if (TREE_CODE (to) == COMPONENT_REF |
d76bc29c EB |
3787 | && TREE_READONLY (TREE_OPERAND (to, 1)) |
3788 | /* We can't assert that a MEM won't be set more than once | |
3789 | if the component is not addressable because another | |
3790 | non-addressable component may be referenced by the same MEM. */ | |
3c0cb5de | 3791 | && ! (MEM_P (to_rtx) && ! can_address_p (to))) |
956d6950 | 3792 | { |
a06ef755 | 3793 | if (to_rtx == orig_to_rtx) |
956d6950 | 3794 | to_rtx = copy_rtx (to_rtx); |
956d6950 JL |
3795 | RTX_UNCHANGING_P (to_rtx) = 1; |
3796 | } | |
3797 | ||
3c0cb5de | 3798 | if (MEM_P (to_rtx) && ! can_address_p (to)) |
a06ef755 RK |
3799 | { |
3800 | if (to_rtx == orig_to_rtx) | |
3801 | to_rtx = copy_rtx (to_rtx); | |
3802 | MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; | |
3803 | } | |
3804 | ||
60ba25bf JJ |
3805 | while (mode1 == VOIDmode && !want_value |
3806 | && bitpos + bitsize <= BITS_PER_WORD | |
3807 | && bitsize < BITS_PER_WORD | |
3808 | && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD | |
3809 | && !TREE_SIDE_EFFECTS (to) | |
3810 | && !TREE_THIS_VOLATILE (to)) | |
df62f18a | 3811 | { |
60ba25bf | 3812 | tree src, op0, op1; |
df62f18a JJ |
3813 | rtx value; |
3814 | HOST_WIDE_INT count = bitpos; | |
60ba25bf JJ |
3815 | optab binop; |
3816 | ||
3817 | src = from; | |
3818 | STRIP_NOPS (src); | |
3819 | if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE | |
3820 | || TREE_CODE_CLASS (TREE_CODE (src)) != '2') | |
3821 | break; | |
3822 | ||
3823 | op0 = TREE_OPERAND (src, 0); | |
3824 | op1 = TREE_OPERAND (src, 1); | |
3825 | STRIP_NOPS (op0); | |
3826 | ||
3827 | if (! operand_equal_p (to, op0, 0)) | |
3828 | break; | |
df62f18a JJ |
3829 | |
3830 | if (BYTES_BIG_ENDIAN) | |
3831 | count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize; | |
3832 | ||
3833 | /* Special case some bitfield op= exp. */ | |
60ba25bf | 3834 | switch (TREE_CODE (src)) |
df62f18a JJ |
3835 | { |
3836 | case PLUS_EXPR: | |
3837 | case MINUS_EXPR: | |
3838 | if (count <= 0) | |
3839 | break; | |
3840 | ||
3841 | /* For now, just optimize the topmost bitfield, where we |
60ba25bf JJ |
3842 | don't need to do any masking, and also 1-bit bitfields |
3843 | where xor can be used. |
df62f18a JJ |
3844 | We might win by one instruction for the other bitfields |
3845 | too if insv/extv instructions aren't used, so that |
3846 | can be added later. */ |
60ba25bf JJ |
3847 | if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)) |
3848 | && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST)) | |
df62f18a | 3849 | break; |
60ba25bf | 3850 | value = expand_expr (op1, NULL_RTX, VOIDmode, 0); |
df62f18a JJ |
3851 | value = protect_from_queue (value, 0); |
3852 | to_rtx = protect_from_queue (to_rtx, 1); | |
60ba25bf JJ |
3853 | binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab; |
3854 | if (bitsize == 1 | |
3855 | && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))) | |
3856 | { | |
3857 | value = expand_and (GET_MODE (to_rtx), value, const1_rtx, | |
3858 | NULL_RTX); | |
3859 | binop = xor_optab; | |
3860 | } | |
df62f18a JJ |
3861 | value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx), |
3862 | value, build_int_2 (count, 0), | |
3863 | NULL_RTX, 1); | |
60ba25bf | 3864 | result = expand_binop (GET_MODE (to_rtx), binop, to_rtx, |
df62f18a JJ |
3865 | value, to_rtx, 1, OPTAB_WIDEN); |
3866 | if (result != to_rtx) | |
3867 | emit_move_insn (to_rtx, result); | |
3868 | free_temp_slots (); | |
3869 | pop_temp_slots (); | |
3870 | return NULL_RTX; | |
3871 | default: | |
3872 | break; | |
3873 | } | |
60ba25bf JJ |
3874 | |
3875 | break; | |
df62f18a JJ |
3876 | } |
3877 | ||
a06ef755 RK |
3878 | result = store_field (to_rtx, bitsize, bitpos, mode1, from, |
3879 | (want_value | |
3880 | /* Spurious cast for HPUX compiler. */ | |
3881 | ? ((enum machine_mode) | |
3882 | TYPE_MODE (TREE_TYPE (to))) | |
3883 | : VOIDmode), | |
3884 | unsignedp, TREE_TYPE (tem), get_alias_set (to)); | |
a69beca1 | 3885 | |
a06ef755 RK |
3886 | preserve_temp_slots (result); |
3887 | free_temp_slots (); | |
3888 | pop_temp_slots (); | |
a69beca1 | 3889 | |
a06ef755 RK |
3890 | /* If the value is meaningful, convert RESULT to the proper mode. |
3891 | Otherwise, return nothing. */ | |
3892 | return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)), | |
3893 | TYPE_MODE (TREE_TYPE (from)), | |
3894 | result, | |
8df83eae | 3895 | TYPE_UNSIGNED (TREE_TYPE (to))) |
a06ef755 | 3896 | : NULL_RTX); |
bbf6f052 RK |
3897 | } |
3898 | ||
cd1db108 RS |
3899 | /* If the rhs is a function call and its value is not an aggregate, |
3900 | call the function before we start to compute the lhs. | |
3901 | This is needed for correct code for cases such as | |
3902 | val = setjmp (buf) on machines where reference to val | |
1ad87b63 RK |
3903 | requires loading up part of an address in a separate insn. |
3904 | ||
1858863b JW |
3905 | Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG |
3906 | since it might be a promoted variable where the zero- or sign- extension | |
3907 | needs to be done. Handling this in the normal way is safe because no | |
3908 | computation is done before the call. */ | |
61f71b34 | 3909 | if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from) |
b35cd3c1 | 3910 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST |
1858863b | 3911 | && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL) |
f8cfc6aa | 3912 | && REG_P (DECL_RTL (to)))) |
cd1db108 | 3913 | { |
0088fcb1 RK |
3914 | rtx value; |
3915 | ||
3916 | push_temp_slots (); | |
3917 | value = expand_expr (from, NULL_RTX, VOIDmode, 0); | |
cd1db108 | 3918 | if (to_rtx == 0) |
37a08a29 | 3919 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); |
aaf87c45 | 3920 | |
fffa9c1d JW |
3921 | /* Handle calls that return values in multiple non-contiguous locations. |
3922 | The Irix 6 ABI has examples of this. */ | |
3923 | if (GET_CODE (to_rtx) == PARALLEL) | |
6e985040 AM |
3924 | emit_group_load (to_rtx, value, TREE_TYPE (from), |
3925 | int_size_in_bytes (TREE_TYPE (from))); | |
fffa9c1d | 3926 | else if (GET_MODE (to_rtx) == BLKmode) |
44bb111a | 3927 | emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL); |
aaf87c45 | 3928 | else |
6419e5b0 | 3929 | { |
5ae6cd0d | 3930 | if (POINTER_TYPE_P (TREE_TYPE (to))) |
6419e5b0 | 3931 | value = convert_memory_address (GET_MODE (to_rtx), value); |
6419e5b0 DT |
3932 | emit_move_insn (to_rtx, value); |
3933 | } | |
cd1db108 RS |
3934 | preserve_temp_slots (to_rtx); |
3935 | free_temp_slots (); | |
0088fcb1 | 3936 | pop_temp_slots (); |
709f5be1 | 3937 | return want_value ? to_rtx : NULL_RTX; |
cd1db108 RS |
3938 | } |
3939 | ||
bbf6f052 RK |
3940 | /* Ordinary treatment. Expand TO to get a REG or MEM rtx. |
3941 | Don't re-expand if it was expanded already (in COMPONENT_REF case). */ | |
3942 | ||
3943 | if (to_rtx == 0) | |
37a08a29 | 3944 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); |
bbf6f052 | 3945 | |
86d38d25 | 3946 | /* Don't move directly into a return register. */ |
14a774a9 | 3947 | if (TREE_CODE (to) == RESULT_DECL |
f8cfc6aa | 3948 | && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL)) |
86d38d25 | 3949 | { |
0088fcb1 RK |
3950 | rtx temp; |
3951 | ||
3952 | push_temp_slots (); | |
3953 | temp = expand_expr (from, 0, GET_MODE (to_rtx), 0); | |
14a774a9 RK |
3954 | |
3955 | if (GET_CODE (to_rtx) == PARALLEL) | |
6e985040 AM |
3956 | emit_group_load (to_rtx, temp, TREE_TYPE (from), |
3957 | int_size_in_bytes (TREE_TYPE (from))); | |
14a774a9 RK |
3958 | else |
3959 | emit_move_insn (to_rtx, temp); | |
3960 | ||
86d38d25 RS |
3961 | preserve_temp_slots (to_rtx); |
3962 | free_temp_slots (); | |
0088fcb1 | 3963 | pop_temp_slots (); |
709f5be1 | 3964 | return want_value ? to_rtx : NULL_RTX; |
86d38d25 RS |
3965 | } |
3966 | ||
bbf6f052 RK |
3967 | /* In case we are returning the contents of an object which overlaps |
3968 | the place the value is being stored, use a safe function when copying | |
3969 | a value through a pointer into a structure value return block. */ | |
3970 | if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF | |
3971 | && current_function_returns_struct | |
3972 | && !current_function_returns_pcc_struct) | |
3973 | { | |
0088fcb1 RK |
3974 | rtx from_rtx, size; |
3975 | ||
3976 | push_temp_slots (); | |
33a20d10 | 3977 | size = expr_size (from); |
37a08a29 | 3978 | from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0); |
bbf6f052 | 3979 | |
8f99553f JM |
3980 | emit_library_call (memmove_libfunc, LCT_NORMAL, |
3981 | VOIDmode, 3, XEXP (to_rtx, 0), Pmode, | |
3982 | XEXP (from_rtx, 0), Pmode, | |
3983 | convert_to_mode (TYPE_MODE (sizetype), | |
3984 | size, TYPE_UNSIGNED (sizetype)), | |
3985 | TYPE_MODE (sizetype)); | |
bbf6f052 RK |
3986 | |
3987 | preserve_temp_slots (to_rtx); | |
3988 | free_temp_slots (); | |
0088fcb1 | 3989 | pop_temp_slots (); |
709f5be1 | 3990 | return want_value ? to_rtx : NULL_RTX; |
bbf6f052 RK |
3991 | } |
3992 | ||
3993 | /* Compute FROM and store the value in the rtx we got. */ | |
3994 | ||
0088fcb1 | 3995 | push_temp_slots (); |
bbf6f052 RK |
3996 | result = store_expr (from, to_rtx, want_value); |
3997 | preserve_temp_slots (result); | |
3998 | free_temp_slots (); | |
0088fcb1 | 3999 | pop_temp_slots (); |
709f5be1 | 4000 | return want_value ? result : NULL_RTX; |
bbf6f052 RK |
4001 | } |
4002 | ||
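/* A minimal usage sketch (EXP and IGNORE are illustrative locals of a
   caller such as the MODIFY_EXPR case of expand_expr):  */
#if 0
  temp = expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                            /*want_value=*/! ignore);
#endif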
4003 | /* Generate code for computing expression EXP, | |
4004 | and storing the value into TARGET. | |
bbf6f052 RK |
4005 | TARGET may contain a QUEUED rtx. |
4006 | ||
8403445a | 4007 | If WANT_VALUE & 1 is nonzero, return a copy of the value |
709f5be1 RS |
4008 | not in TARGET, so that we can be sure to use the proper |
4009 | value in a containing expression even if TARGET has something | |
4010 | else stored in it. If possible, we copy the value through a pseudo | |
4011 | and return that pseudo. Or, if the value is constant, we try to | |
4012 | return the constant. In some cases, we return a pseudo | |
4013 | copied *from* TARGET. | |
4014 | ||
4015 | If the mode is BLKmode then we may return TARGET itself. | |
4016 | It turns out that in BLKmode it doesn't cause a problem, |
4017 | because C has no operators that could combine two different | |
4018 | assignments into the same BLKmode object with different values | |
4019 | with no sequence point. Will other languages need this to | |
4020 | be more thorough? | |
4021 | ||
8403445a | 4022 | If WANT_VALUE & 1 is 0, we return NULL, to make sure |
709f5be1 | 4023 | to catch quickly any cases where the caller uses the value |
8403445a AM |
4024 | and fails to set WANT_VALUE. |
4025 | ||
4026 | If WANT_VALUE & 2 is set, this is a store into a call param on the | |
4027 | stack, and block moves may need to be treated specially. */ | |
bbf6f052 RK |
4028 | |
4029 | rtx | |
502b8322 | 4030 | store_expr (tree exp, rtx target, int want_value) |
bbf6f052 | 4031 | { |
b3694847 | 4032 | rtx temp; |
0fab64a3 | 4033 | rtx alt_rtl = NULL_RTX; |
1bbd65cd | 4034 | rtx mark = mark_queue (); |
bbf6f052 | 4035 | int dont_return_target = 0; |
e5408e52 | 4036 | int dont_store_target = 0; |
bbf6f052 | 4037 | |
847311f4 AL |
4038 | if (VOID_TYPE_P (TREE_TYPE (exp))) |
4039 | { | |
4040 | /* C++ can generate ?: expressions with a throw expression in one | |
4041 | branch and an rvalue in the other. Here, we resolve attempts to | |
4d6922ee | 4042 | store the throw expression's nonexistent result. */ |
847311f4 AL |
4043 | if (want_value) |
4044 | abort (); | |
4045 | expand_expr (exp, const0_rtx, VOIDmode, 0); | |
4046 | return NULL_RTX; | |
4047 | } | |
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
    }
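  /* Illustration (hypothetical C source): with S, A and B all of the
     same struct (BLKmode) type,

       s = cond ? a : b;

     the branch above emits a test of COND followed by a direct store of
     either A or B into S, instead of building each arm in a temporary
     and copying it afterwards.  */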
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target),
                              (want_value & 2
                               ? EXPAND_STACK_PARM : EXPAND_NORMAL));
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
                            (want_value & 2
                             ? EXPAND_STACK_PARM : EXPAND_NORMAL));

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
        dont_return_target = 1;
    }
  else if ((want_value & 1) != 0
           && MEM_P (target)
           && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target),
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        {
          /* If TEMP is already in the desired TARGET, only copy it from
             memory and don't store it there again.  */
          if (temp == target
              || (rtx_equal_p (temp, target)
                  && ! side_effects_p (temp) && ! side_effects_p (target)))
            dont_store_target = 1;
          temp = copy_to_reg (temp);
        }
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if ((want_value & 1) == 0
          && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TYPE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp = convert
              (lang_hooks.types.signed_or_unsigned_type
               (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

          exp = convert (lang_hooks.types.type_for_mode
                         (GET_MODE (SUBREG_REG (target)),
                          SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a MEM and we want a result value, make the access
         now so it gets done only once.  Strictly speaking, this is
         only necessary if the MEM is volatile, or if the address
         overlaps TARGET.  But not performing the load twice also
         reduces the amount of rtl we generate and then have to CSE.  */
      if (MEM_P (temp) && (want_value & 1) != 0)
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
         target.  Otherwise, the caller might get confused by a result whose
         mode is larger than expected.  */

      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
        {
          if (GET_MODE (temp) != VOIDmode)
            {
              temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (temp,
                                            SUBREG_PROMOTED_UNSIGNED_P (target));
            }
          else
            temp = convert_modes (GET_MODE (target),
                                  GET_MODE (SUBREG_REG (target)),
                                  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      return want_value & 1 ? temp : NULL_RTX;
    }
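  /* Illustration (an assumed promoting target, not from the original
     comments): if a variable declared "short" is kept in a full-word
     register, TARGET is (subreg:HI (reg:SI n)) with
     SUBREG_PROMOTED_VAR_P set.  The branch above computes the rhs in
     HImode, then sign- or zero-extends it into the whole SImode
     register with convert_move.  */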
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
                               (want_value & 2
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
                               &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && REG_P (target)
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(MEM_P (target) && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || (want_value & 1) != 0))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
         but TARGET is not valid memory reference, TEMP will differ
         from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
         unless necessary, because some front-ends (C++) expr_size-hook
         aborts on objects that are not supposed to be bit-copied or
         bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      emit_insns_enqueued_after_mark (mark);
      target = protect_from_queue (target, 1);
      temp = protect_from_queue (temp, 0);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (want_value & 2
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (want_value & 2
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
                                               TYPE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (want_value & 2
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
                  if (GET_MODE (copy_size_rtx) != Pmode)
                    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
                                                     TYPE_UNSIGNED (sizetype));
#endif

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size);

              if (label)
                emit_label (label);
            }
        }
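      /* Illustration (hypothetical declaration): for

           char buf[8] = "abc";

         TREE_STRING_LENGTH is 4 ("abc" plus the terminating nul) while
         expr_size is 8, so the code above block-copies min (8, 4) = 4
         bytes into BUF and clears the remaining 4 with clear_storage.  */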
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, TREE_TYPE (exp),
                         int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (want_value & 2
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
        {
          temp = force_operand (temp, target);
          if (temp != target)
            emit_move_insn (target, temp);
        }
    }

  /* If we don't want a value, return NULL_RTX.  */
  if ((want_value & 1) == 0)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && !MEM_P (temp))
    return temp;

  /* Otherwise copy TARGET into a pseudo, except return TARGET itself
     when it is a hard register (or BLKmode, which copy_to_reg cannot
     handle).  */
  else if ((want_value & 1) != 0
           && GET_MODE (target) != BLKmode
           && ! (REG_P (target)
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
\f
/* Examine CTOR.  Discover how many scalar fields are set to nonzero
   values and place the count in *P_NZ_ELTS.  Discover how many scalar
   fields are set to non-constant values and place the count in
   *P_NC_ELTS.  */

static void
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
                            HOST_WIDE_INT *p_nc_elts)
{
  HOST_WIDE_INT nz_elts, nc_elts;
  tree list;

  nz_elts = 0;
  nc_elts = 0;

  for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
    {
      tree value = TREE_VALUE (list);
      tree purpose = TREE_PURPOSE (list);
      HOST_WIDE_INT mult;

      mult = 1;
      if (TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lo_index = TREE_OPERAND (purpose, 0);
          tree hi_index = TREE_OPERAND (purpose, 1);

          if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
            mult = (tree_low_cst (hi_index, 1)
                    - tree_low_cst (lo_index, 1) + 1);
        }

      switch (TREE_CODE (value))
        {
        case CONSTRUCTOR:
          {
            HOST_WIDE_INT nz = 0, nc = 0;
            categorize_ctor_elements_1 (value, &nz, &nc);
            nz_elts += mult * nz;
            nc_elts += mult * nc;
          }
          break;

        case INTEGER_CST:
        case REAL_CST:
          if (!initializer_zerop (value))
            nz_elts += mult;
          break;

        case COMPLEX_CST:
          if (!initializer_zerop (TREE_REALPART (value)))
            nz_elts += mult;
          if (!initializer_zerop (TREE_IMAGPART (value)))
            nz_elts += mult;
          break;

        case VECTOR_CST:
          {
            tree v;
            for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
              if (!initializer_zerop (TREE_VALUE (v)))
                nz_elts += mult;
          }
          break;

        default:
          nz_elts += mult;
          if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
            nc_elts += mult;
          break;
        }
    }

  *p_nz_elts += nz_elts;
  *p_nc_elts += nc_elts;
}

void
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
                          HOST_WIDE_INT *p_nc_elts)
{
  *p_nz_elts = 0;
  *p_nc_elts = 0;
  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
}

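/* Illustration (hypothetical initializer): for

     struct { int v[4]; int *p; } s = { { 0, 1, 2, 0 }, &x };

   the recursion above counts two nonzero array elements plus &x, giving
   *P_NZ_ELTS == 3; and if X is an automatic variable whose address is
   not a link-time constant, the default case also gives
   *P_NC_ELTS == 1.  */
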
/* Count the number of scalars in TYPE.  Return -1 if the count cannot
   be determined, either because it would overflow or because TYPE is
   variable-sized.  */

HOST_WIDE_INT
count_type_elements (tree type)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
        tree telts = array_type_nelts (type);
        if (telts && host_integerp (telts, 1))
          {
            /* array_type_nelts returns the highest index, so add one
               to get the element count.  */
            HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
            HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
            if (n == 0)
              return 0;
            /* Succeed only when the inner count is known and the
               multiplication cannot overflow.  */
            if (m >= 0 && max / n >= m)
              return n * m;
          }
        return -1;
      }

    case RECORD_TYPE:
      {
        HOST_WIDE_INT n = 0, t;
        tree f;

        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              t = count_type_elements (TREE_TYPE (f));
              if (t < 0)
                return -1;
              n += t;
            }

        return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        /* Ho hum.  How in the world do we guess here?  Clearly it isn't
           right to count the fields.  Guess based on the number of words.  */
        HOST_WIDE_INT n = int_size_in_bytes (type);
        if (n < 0)
          return -1;
        return n / UNITS_PER_WORD;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      /* ??? This is broken.  We should encode the vector width in the
         tree.  */
      return GET_MODE_NUNITS (TYPE_MODE (type));

    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    case VOID_TYPE:
    case METHOD_TYPE:
    case FILE_TYPE:
    case SET_TYPE:
    case FUNCTION_TYPE:
    case LANG_TYPE:
    default:
      abort ();
    }
}

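/* Illustration (hypothetical type): for

     struct { double c[2]; int n; }

   the RECORD_TYPE case sums the ARRAY_TYPE result (2 elements of 1
   scalar each) with 1 for the int, returning 3.  */
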
/* Return 1 if EXP contains mostly (more than 3/4) zeros.  */

int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, elts;

      /* If there are no ranges of true bits, it is all zero.  */
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts);
      elts = count_type_elements (TREE_TYPE (exp));

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
\f
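/* Note: store_constructor (below) uses mostly_zeros_p to pick the
   "clear everything first, then store only the nonzero fields"
   strategy; e.g. (hypothetical) "int a[16] = { 1 };" has one nonzero
   element out of 16, well under the 1/4 threshold.  */
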
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
                         tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
         bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
                 alias_set);
}

/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* If size is zero or the target is already cleared, do nothing.  */
      if (size == 0 || cleared)
        cleared = 1;
      /* We either clear the aggregate or indicate the value is dead.  */
      else if ((TREE_CODE (type) == UNION_TYPE
                || TREE_CODE (type) == QUAL_UNION_TYPE)
               && ! CONSTRUCTOR_ELTS (exp))
        /* If the constructor is empty, clear the union.  */
        {
          clear_storage (target, expr_size (exp));
          cleared = 1;
        }

      /* If we are building a static constructor into a register,
         set the initial value as zero so we can fold the value into
         a constant.  But if more than one register is involved,
         this probably loses.  */
      else if (REG_P (target) && TREE_STATIC (exp)
               && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
        {
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
          cleared = 1;
        }

      /* If the constructor has fewer fields than the structure
         or if we are initializing the structure to mostly zeros,
         clear the whole structure first.  Don't do this if TARGET is a
         register whose mode size isn't equal to SIZE since clear_storage
         can't handle this case.  */
      else if (size > 0
               && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
                   || mostly_zeros_p (exp))
               && (!REG_P (target)
                   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                       == size)))
        {
          rtx xtarget = target;

          if (readonly_fields_p (type))
            {
              xtarget = copy_rtx (xtarget);
              RTX_UNCHANGING_P (xtarget) = 1;
            }

          clear_storage (xtarget, GEN_INT (size));
          cleared = 1;
        }

      if (! cleared)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          tree field = TREE_PURPOSE (elt);
          tree value = TREE_VALUE (elt);
          enum machine_mode mode;
          HOST_WIDE_INT bitsize;
          HOST_WIDE_INT bitpos = 0;
          tree offset;
          rtx to_rtx = target;

          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
          if (field == 0)
            continue;

          if (cleared && initializer_zerop (value))
            continue;

          if (host_integerp (DECL_SIZE (field), 1))
            bitsize = tree_low_cst (DECL_SIZE (field), 1);
          else
            bitsize = -1;

          mode = DECL_MODE (field);
          if (DECL_BIT_FIELD (field))
            mode = VOIDmode;

          offset = DECL_FIELD_OFFSET (field);
          if (host_integerp (offset, 0)
              && host_integerp (bit_position (field), 0))
            {
              bitpos = int_bit_position (field);
              offset = 0;
            }
          else
            bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

          if (offset)
            {
              rtx offset_rtx;

              offset
                = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
                                                  make_tree (TREE_TYPE (exp),
                                                             target));

              offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
              if (!MEM_P (to_rtx))
                abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (offset_rtx) != Pmode)
                offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
              if (GET_MODE (offset_rtx) != ptr_mode)
                offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

              to_rtx = offset_address (to_rtx, offset_rtx,
                                       highest_pow2_factor (offset));
            }

          if (TREE_READONLY (field))
            {
              if (MEM_P (to_rtx))
                to_rtx = copy_rtx (to_rtx);

              RTX_UNCHANGING_P (to_rtx) = 1;
            }

#ifdef WORD_REGISTER_OPERATIONS
          /* If this initializes a field that is smaller than a word, at the
             start of a word, try to widen it to a full word.
             This special case allows us to output C++ member function
             initializations in a form that the optimizers can understand.  */
          if (REG_P (target)
              && bitsize < BITS_PER_WORD
              && bitpos % BITS_PER_WORD == 0
              && GET_MODE_CLASS (mode) == MODE_INT
              && TREE_CODE (value) == INTEGER_CST
              && exp_size >= 0
              && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
            {
              tree type = TREE_TYPE (value);

              if (TYPE_PRECISION (type) < BITS_PER_WORD)
                {
                  type = lang_hooks.types.type_for_size
                    (BITS_PER_WORD, TYPE_UNSIGNED (type));
                  value = convert (type, value);
                }

              if (BYTES_BIG_ENDIAN)
                value
                  = fold (build (LSHIFT_EXPR, type, value,
                                 build_int_2 (BITS_PER_WORD - bitsize, 0)));
              bitsize = BITS_PER_WORD;
              mode = word_mode;
            }
#endif

          if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
              && DECL_NONADDRESSABLE_P (field))
            {
              to_rtx = copy_rtx (to_rtx);
              MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
            }

          store_constructor_field (to_rtx, bitsize, bitpos, mode,
                                   value, type, cleared,
                                   get_alias_set (TREE_TYPE (field)));
        }
    }
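  /* Illustration (hypothetical initializer): for

       struct { int v[32]; int n; } s = { { 0 }, 5 };

     mostly_zeros_p is true, so the RECORD_TYPE code above clears all of
     S once with clear_storage and then stores only the nonzero field
     N.  */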
  else if (TREE_CODE (type) == ARRAY_TYPE
           || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain;
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;
      int icode = 0;
      rtx *vector = NULL;
      int elt_size = 0;
      unsigned n_elts = 0;

      if (TREE_CODE (type) == ARRAY_TYPE)
        domain = TYPE_DOMAIN (type);
      else
        /* Vectors do not have domains; look up the domain of
           the array embedded in the debug representation type.
           FIXME Would probably be more efficient to treat vectors
           separately from arrays.  */
        {
          domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
          domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
          if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
            {
              enum machine_mode mode = GET_MODE (target);

              icode = (int) vec_init_optab->handlers[mode].insn_code;
              if (icode != CODE_FOR_nothing)
                {
                  unsigned int i;

                  elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
                  n_elts = (GET_MODE_SIZE (mode) / elt_size);
                  /* Allocate room for N_ELTS rtx pointers, not just
                     N_ELTS bytes.  */
                  vector = alloca (n_elts * sizeof (rtx));
                  for (i = 0; i < n_elts; i++)
                    vector[i] = CONST0_RTX (GET_MODE_INNER (mode));
                }
            }
        }

      const_bounds_p = (TYPE_MIN_VALUE (domain)
                        && TYPE_MAX_VALUE (domain)
                        && host_integerp (TYPE_MIN_VALUE (domain), 0)
                        && host_integerp (TYPE_MAX_VALUE (domain), 0));

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
        {
          minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
          maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
        }

      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is a
         static constructor of a non-BLKmode object.  */
      if (cleared || (REG_P (target) && TREE_STATIC (exp)))
        need_to_clear = 1;
      else
        {
          HOST_WIDE_INT count = 0, zero_count = 0;
          need_to_clear = ! const_bounds_p;

          /* This loop is a more accurate version of the loop in
             mostly_zeros_p (it handles RANGE_EXPR in an index).
             It is also needed to check for missing elements.  */
          for (elt = CONSTRUCTOR_ELTS (exp);
               elt != NULL_TREE && ! need_to_clear;
               elt = TREE_CHAIN (elt))
            {
              tree index = TREE_PURPOSE (elt);
              HOST_WIDE_INT this_node_count;

              if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                {
                  tree lo_index = TREE_OPERAND (index, 0);
                  tree hi_index = TREE_OPERAND (index, 1);

                  if (! host_integerp (lo_index, 1)
                      || ! host_integerp (hi_index, 1))
                    {
                      need_to_clear = 1;
                      break;
                    }

                  this_node_count = (tree_low_cst (hi_index, 1)
                                     - tree_low_cst (lo_index, 1) + 1);
                }
              else
                this_node_count = 1;

              count += this_node_count;
              if (mostly_zeros_p (TREE_VALUE (elt)))
                zero_count += this_node_count;
            }

          /* Clear the entire array first if there are any missing elements,
             or if the incidence of zero elements is >= 75%.  */
          if (! need_to_clear
              && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
            need_to_clear = 1;
        }

      if (need_to_clear && size > 0 && !vector)
        {
          if (! cleared)
            {
              if (REG_P (target))
                emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
              else
                clear_storage (target, GEN_INT (size));
            }
          cleared = 1;
        }
      else if (REG_P (target))
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          enum machine_mode mode;
          HOST_WIDE_INT bitsize;
          HOST_WIDE_INT bitpos;
          int unsignedp;
          tree value = TREE_VALUE (elt);
          tree index = TREE_PURPOSE (elt);
          rtx xtarget = target;

          if (cleared && initializer_zerop (value))
            continue;

          unsignedp = TYPE_UNSIGNED (elttype);
          mode = TYPE_MODE (elttype);
          if (mode == BLKmode)
            bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
                       ? tree_low_cst (TYPE_SIZE (elttype), 1)
                       : -1);
          else
            bitsize = GET_MODE_BITSIZE (mode);

          if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
            {
              tree lo_index = TREE_OPERAND (index, 0);
              tree hi_index = TREE_OPERAND (index, 1);
              rtx index_r, pos_rtx;
              HOST_WIDE_INT lo, hi, count;
              tree position;

              if (vector)
                abort ();

              /* If the range is constant and "small", unroll the loop.  */
              if (const_bounds_p
                  && host_integerp (lo_index, 0)
                  && host_integerp (hi_index, 0)
                  && (lo = tree_low_cst (lo_index, 0),
                      hi = tree_low_cst (hi_index, 0),
                      count = hi - lo + 1,
                      (!MEM_P (target)
                       || count <= 2
                       || (host_integerp (TYPE_SIZE (elttype), 1)
                           && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
                               <= 40 * 8)))))
                {
                  lo -= minelt;  hi -= minelt;
                  for (; lo <= hi; lo++)
                    {
                      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

                      if (MEM_P (target)
                          && !MEM_KEEP_ALIAS_SET_P (target)
                          && TREE_CODE (type) == ARRAY_TYPE
                          && TYPE_NONALIASED_COMPONENT (type))
                        {
                          target = copy_rtx (target);
                          MEM_KEEP_ALIAS_SET_P (target) = 1;
                        }

                      store_constructor_field
                        (target, bitsize, bitpos, mode, value, type, cleared,
                         get_alias_set (elttype));
                    }
                }
              else
                {
                  rtx loop_start = gen_label_rtx ();
                  rtx loop_end = gen_label_rtx ();
                  tree exit_cond;

                  expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
                  unsignedp = TYPE_UNSIGNED (domain);

                  index = build_decl (VAR_DECL, NULL_TREE, domain);

                  index_r
                    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                 &unsignedp, 0));
                  SET_DECL_RTL (index, index_r);
                  store_expr (lo_index, index_r, 0);

                  /* Build the head of the loop.  */
                  do_pending_stack_adjust ();
                  emit_queue ();
                  emit_label (loop_start);

                  /* Assign value to element index.  */
                  position
                    = convert (ssizetype,
                               fold (build (MINUS_EXPR, TREE_TYPE (index),
                                            index, TYPE_MIN_VALUE (domain))));
                  position = size_binop (MULT_EXPR, position,
                                         convert (ssizetype,
                                                  TYPE_SIZE_UNIT (elttype)));

                  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
                  xtarget = offset_address (target, pos_rtx,
                                            highest_pow2_factor (position));
                  xtarget = adjust_address (xtarget, mode, 0);
                  if (TREE_CODE (value) == CONSTRUCTOR)
                    store_constructor (value, xtarget, cleared,
                                       bitsize / BITS_PER_UNIT);
                  else
                    store_expr (value, xtarget, 0);

                  /* Generate a conditional jump to exit the loop.  */
                  exit_cond = build (LT_EXPR, integer_type_node,
                                     index, hi_index);
                  jumpif (exit_cond, loop_end);

                  /* Update the loop counter, and jump to the head of
                     the loop.  */
                  expand_increment (build (PREINCREMENT_EXPR,
                                           TREE_TYPE (index),
                                           index, integer_one_node), 0, 0);
                  emit_jump (loop_start);

                  /* Build the end of the loop.  */
                  emit_label (loop_end);
                }
            }
          else if ((index != 0 && ! host_integerp (index, 0))
                   || ! host_integerp (TYPE_SIZE (elttype), 1))
            {
              tree position;

              if (vector)
                abort ();

              if (index == 0)
                index = ssize_int (1);

              if (minelt)
                index = convert (ssizetype,
                                 fold (build (MINUS_EXPR, TREE_TYPE (index),
                                              index,
                                              TYPE_MIN_VALUE (domain))));

              position = size_binop (MULT_EXPR, index,
                                     convert (ssizetype,
                                              TYPE_SIZE_UNIT (elttype)));
              xtarget = offset_address (target,
                                        expand_expr (position, 0, VOIDmode, 0),
                                        highest_pow2_factor (position));
              xtarget = adjust_address (xtarget, mode, 0);
              store_expr (value, xtarget, 0);
            }
          else if (vector)
            {
              int pos;

              if (index != 0)
                pos = tree_low_cst (index, 0) - minelt;
              else
                pos = i;
              vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
            }
          else
            {
              if (index != 0)
                bitpos = ((tree_low_cst (index, 0) - minelt)
                          * tree_low_cst (TYPE_SIZE (elttype), 1));
              else
                bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

              if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
                  && TREE_CODE (type) == ARRAY_TYPE
                  && TYPE_NONALIASED_COMPONENT (type))
                {
                  target = copy_rtx (target);
                  MEM_KEEP_ALIAS_SET_P (target) = 1;
                }
              store_constructor_field (target, bitsize, bitpos, mode, value,
                                       type, cleared, get_alias_set (elttype));
            }
        }
      if (vector)
        {
          emit_insn (GEN_FCN (icode)
                     (target,
                      gen_rtx_PARALLEL (GET_MODE (target),
                                        gen_rtvec_v (n_elts, vector))));
        }
    }

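  /* Illustration (hypothetical GNU C initializer): for

       int a[100] = { [0 ... 9] = 7 };

     the RANGE_EXPR handling above finds only 10 of 100 positions
     supplied, so the whole array is cleared first; the range itself is
     small (10 ints), so the per-element stores are unrolled rather
     than emitted as a loop.  */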
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset.
         Also, if a large set has just a single range, it may be better
         to first clear the whole set (using memset) and then set the
         bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
        {
          if (!cleared)
            clear_storage (target, GEN_INT (size));
          return;
        }

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
                              size_diffop (domain_max, domain_min),
                              ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
         are "complicated" (more than one range), initialize (the
         constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
          || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
        {
          unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
          enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
          char *bit_buffer = alloca (nbits);
          HOST_WIDE_INT word = 0;
          unsigned int bit_pos = 0;
          unsigned int ibit = 0;
          unsigned int offset = 0;  /* In bytes from beginning of set.  */

          elt = get_set_constructor_bits (exp, bit_buffer, nbits);
          for (;;)
            {
              if (bit_buffer[ibit])
                {
                  if (BYTES_BIG_ENDIAN)
                    word |= (1 << (set_word_size - 1 - bit_pos));
                  else
                    word |= 1 << bit_pos;
                }

              bit_pos++;  ibit++;
              if (bit_pos >= set_word_size || ibit == nbits)
                {
                  if (word != 0 || ! cleared)
                    {
                      rtx datum = gen_int_mode (word, mode);
                      rtx to_rtx;

                      /* The assumption here is that it is safe to use
                         XEXP if the set is multi-word, but not if
                         it's single-word.  */
                      if (MEM_P (target))
                        to_rtx = adjust_address (target, mode, offset);
                      else if (offset == 0)
                        to_rtx = target;
                      else
                        abort ();
                      emit_move_insn (to_rtx, datum);
                    }

                  if (ibit == nbits)
                    break;
                  word = 0;
                  bit_pos = 0;
                  offset += set_word_size / BITS_PER_UNIT;
                }
            }
        }
      else if (!cleared)
        /* Don't bother clearing storage if the set is all ones.  */
        if (TREE_CHAIN (elt) != NULL_TREE
            || (TREE_PURPOSE (elt) == NULL_TREE
                ? nbits != 1
                : ( ! host_integerp (TREE_VALUE (elt), 0)
                   || ! host_integerp (TREE_PURPOSE (elt), 0)
                   || (tree_low_cst (TREE_VALUE (elt), 0)
                       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
                       != (HOST_WIDE_INT) nbits))))
          clear_storage (target, expr_size (exp));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
        {
          /* Start of range of element or NULL.  */
          tree startbit = TREE_PURPOSE (elt);
          /* End of range of element, or element value.  */
          tree endbit = TREE_VALUE (elt);
          HOST_WIDE_INT startb, endb;
          rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

          bitlength_rtx = expand_expr (bitlength,
                                       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

          /* Handle non-range tuple element like [ expr ].  */
          if (startbit == NULL_TREE)
            {
              startbit = save_expr (endbit);
              endbit = startbit;
            }

          startbit = convert (sizetype, startbit);
          endbit = convert (sizetype, endbit);
          if (! integer_zerop (domain_min))
            {
              startbit = size_binop (MINUS_EXPR, startbit, domain_min);
              endbit = size_binop (MINUS_EXPR, endbit, domain_min);
            }
          startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
                                      EXPAND_CONST_ADDRESS);
          endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
                                    EXPAND_CONST_ADDRESS);

          if (REG_P (target))
            {
              targetx
                = assign_temp
                  ((build_qualified_type (lang_hooks.types.type_for_mode
                                          (GET_MODE (target), 0),
                                          TYPE_QUAL_CONST)),
                   0, 1, 1);
              emit_move_insn (targetx, target);
            }

          else if (MEM_P (target))
            targetx = target;
          else
            abort ();

          /* Optimization:  If startbit and endbit are constants divisible
             by BITS_PER_UNIT, call memset instead.  */
          if (TREE_CODE (startbit) == INTEGER_CST
              && TREE_CODE (endbit) == INTEGER_CST
              && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
              && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
            {
              emit_library_call (memset_libfunc, LCT_NORMAL,
                                 VOIDmode, 3,
                                 plus_constant (XEXP (targetx, 0),
                                                startb / BITS_PER_UNIT),
                                 Pmode,
                                 constm1_rtx, TYPE_MODE (integer_type_node),
                                 GEN_INT ((endb - startb) / BITS_PER_UNIT),
                                 TYPE_MODE (sizetype));
            }
          else
            emit_library_call (setbits_libfunc, LCT_NORMAL,
                               VOIDmode, 4, XEXP (targetx, 0),
                               Pmode, bitlength_rtx, TYPE_MODE (sizetype),
                               startbit_rtx, TYPE_MODE (sizetype),
                               endbit_rtx, TYPE_MODE (sizetype));

          if (REG_P (target))
            emit_move_insn (target, targetx);
        }
    }
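  /* Note: SET_TYPE constructors come from non-C front ends with set
     types.  As a hedged illustration, a small set value whose members
     are all compile-time constants is built above as one constant word
     from get_set_constructor_bits; only non-constant ranges fall back
     to the setbits_libfunc library call.  */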

  else
    abort ();
}

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

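/* Illustration (hypothetical struct): storing into F in

     struct { unsigned a : 3; unsigned f : 7; } s;

   reaches store_field with MODE == VOIDmode (a bit-field), BITSIZE == 7
   and BITPOS == 3 on a little-endian layout, and is handled by the
   store_bit_field path below.  */
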
5332 | static rtx | |
502b8322 AJ |
5333 | store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, |
5334 | enum machine_mode mode, tree exp, enum machine_mode value_mode, | |
5335 | int unsignedp, tree type, int alias_set) | |
bbf6f052 | 5336 | { |
906c4e36 | 5337 | HOST_WIDE_INT width_mask = 0; |
bbf6f052 | 5338 | |
e9a25f70 JL |
5339 | if (TREE_CODE (exp) == ERROR_MARK) |
5340 | return const0_rtx; | |
5341 | ||
2be6a7e9 RK |
5342 | /* If we have nothing to store, do nothing unless the expression has |
5343 | side-effects. */ | |
5344 | if (bitsize == 0) | |
5345 | return expand_expr (exp, const0_rtx, VOIDmode, 0); | |
6a87d634 | 5346 | else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT) |
906c4e36 | 5347 | width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1; |
bbf6f052 RK |
5348 | |
5349 | /* If we are storing into an unaligned field of an aligned union that is | |
5350 | in a register, we may have the mode of TARGET being an integer mode but | |
5351 | MODE == BLKmode. In that case, get an aligned object whose size and | |
5352 | alignment are the same as TARGET and store TARGET into it (we can avoid | |
5353 | the store if the field being stored is the entire width of TARGET). Then | |
5354 | call ourselves recursively to store the field into a BLKmode version of | |
5355 | that object. Finally, load from the object into TARGET. This is not | |
5356 | very efficient in general, but should only be slightly more expensive | |
5357 | than the otherwise-required unaligned accesses. Perhaps this can be | |
85a43a2f RK |
5358 | cleaned up later. It's tempting to make OBJECT readonly, but it's set |
5359 | twice, once with emit_move_insn and once via store_field. */ | |
bbf6f052 RK |
5360 | |
5361 | if (mode == BLKmode | |
f8cfc6aa | 5362 | && (REG_P (target) || GET_CODE (target) == SUBREG)) |
bbf6f052 | 5363 | { |
85a43a2f | 5364 | rtx object = assign_temp (type, 0, 1, 1); |
c4e59f51 | 5365 | rtx blk_object = adjust_address (object, BLKmode, 0); |
bbf6f052 | 5366 | |
8752c357 | 5367 | if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target))) |
bbf6f052 RK |
5368 | emit_move_insn (object, target); |
5369 | ||
a06ef755 RK |
5370 | store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type, |
5371 | alias_set); | |
bbf6f052 RK |
5372 | |
5373 | emit_move_insn (target, object); | |
5374 | ||
a06ef755 | 5375 | /* We want to return the BLKmode version of the data. */ |
46093b97 | 5376 | return blk_object; |
bbf6f052 | 5377 | } |
c3b247b4 JM |
5378 | |
5379 | if (GET_CODE (target) == CONCAT) | |
5380 | { | |
5381 | /* We're storing into a struct containing a single __complex. */ | |
5382 | ||
5383 | if (bitpos != 0) | |
5384 | abort (); | |
6de9cd9a | 5385 | return store_expr (exp, target, value_mode != VOIDmode); |
c3b247b4 | 5386 | } |
bbf6f052 RK |
5387 | |
5388 | /* If the structure is in a register or if the component | |
5389 | is a bit field, we cannot use addressing to access it. | |
5390 | Use bit-field techniques or SUBREG to store in it. */ | |
5391 | ||
4fa52007 | 5392 | if (mode == VOIDmode |
6ab06cbb JW |
5393 | || (mode != BLKmode && ! direct_store[(int) mode] |
5394 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
5395 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) | |
f8cfc6aa | 5396 | || REG_P (target) |
c980ac49 | 5397 | || GET_CODE (target) == SUBREG |
ccc98036 RS |
5398 | /* If the field isn't aligned enough to store as an ordinary memref, |
5399 | store it as a bit field. */ | |
15b19a7d | 5400 | || (mode != BLKmode |
9e5f281f OH |
5401 | && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)) |
5402 | || bitpos % GET_MODE_ALIGNMENT (mode)) | |
5403 | && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))) | |
502b8322 | 5404 | || (bitpos % BITS_PER_UNIT != 0))) |
14a774a9 RK |
5405 | /* If the RHS and the field both have constant size and the size |
5406 | of the RHS differs from that of the bit-field, we must use |
5407 | bit-field operations. */ |
05bccae2 RK |
5408 | || (bitsize >= 0 |
5409 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | |
5410 | && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)) | |
bbf6f052 | 5411 | { |
906c4e36 | 5412 | rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); |
bbd6cf73 | 5413 | |
ef19912d RK |
5414 | /* If BITSIZE is narrower than the size of the type of EXP |
5415 | we will be narrowing TEMP. Normally, what's wanted are the | |
5416 | low-order bits. However, if EXP's type is a record and this is a |
5417 | big-endian machine, we want the upper BITSIZE bits. */ | |
5418 | if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT | |
65a07688 | 5419 | && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp)) |
ef19912d RK |
5420 | && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE) |
5421 | temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp, | |
5422 | size_int (GET_MODE_BITSIZE (GET_MODE (temp)) | |
5423 | - bitsize), | |
c1853da7 | 5424 | NULL_RTX, 1); |
ef19912d | 5425 | |
bbd6cf73 RK |
5426 | /* Unless MODE is VOIDmode or BLKmode, convert TEMP to |
5427 | MODE. */ | |
5428 | if (mode != VOIDmode && mode != BLKmode | |
5429 | && mode != TYPE_MODE (TREE_TYPE (exp))) | |
5430 | temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1); | |
5431 | ||
a281e72d RK |
5432 | /* If the modes of TARGET and TEMP are both BLKmode, both |
5433 | must be in memory and BITPOS must be aligned on a byte | |
5434 | boundary. If so, we simply do a block copy. */ | |
5435 | if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode) | |
5436 | { | |
3c0cb5de | 5437 | if (!MEM_P (target) || !MEM_P (temp) |
a281e72d RK |
5438 | || bitpos % BITS_PER_UNIT != 0) |
5439 | abort (); | |
5440 | ||
f4ef873c | 5441 | target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); |
a281e72d | 5442 | emit_block_move (target, temp, |
a06ef755 | 5443 | GEN_INT ((bitsize + BITS_PER_UNIT - 1) |
44bb111a RH |
5444 | / BITS_PER_UNIT), |
5445 | BLOCK_OP_NORMAL); | |
a281e72d RK |
5446 | |
5447 | return value_mode == VOIDmode ? const0_rtx : target; | |
5448 | } | |
5449 | ||
bbf6f052 | 5450 | /* Store the value in the bitfield. */ |
a06ef755 RK |
5451 | store_bit_field (target, bitsize, bitpos, mode, temp, |
5452 | int_size_in_bytes (type)); | |
5453 | ||
bbf6f052 RK |
5454 | if (value_mode != VOIDmode) |
5455 | { | |
04050c69 RK |
5456 | /* The caller wants an rtx for the value. |
5457 | If possible, avoid refetching from the bitfield itself. */ | |
bbf6f052 | 5458 | if (width_mask != 0 |
3c0cb5de | 5459 | && ! (MEM_P (target) && MEM_VOLATILE_P (target))) |
5c4d7cfb | 5460 | { |
9074de27 | 5461 | tree count; |
5c4d7cfb | 5462 | enum machine_mode tmode; |
86a2c12a | 5463 | |
5c4d7cfb | 5464 | tmode = GET_MODE (temp); |
86a2c12a RS |
5465 | if (tmode == VOIDmode) |
5466 | tmode = value_mode; | |
22273300 JJ |
5467 | |
5468 | if (unsignedp) | |
5469 | return expand_and (tmode, temp, | |
2496c7bd | 5470 | gen_int_mode (width_mask, tmode), |
22273300 JJ |
5471 | NULL_RTX); |
5472 | ||
5c4d7cfb RS |
5473 | count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0); |
5474 | temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0); | |
5475 | return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0); | |
5476 | } | |
04050c69 | 5477 | |
bbf6f052 | 5478 | return extract_bit_field (target, bitsize, bitpos, unsignedp, |
04050c69 | 5479 | NULL_RTX, value_mode, VOIDmode, |
a06ef755 | 5480 | int_size_in_bytes (type)); |
bbf6f052 RK |
5481 | } |
5482 | return const0_rtx; | |
5483 | } | |
5484 | else | |
5485 | { | |
5486 | rtx addr = XEXP (target, 0); | |
a06ef755 | 5487 | rtx to_rtx = target; |
bbf6f052 RK |
5488 | |
5489 | /* If a value is wanted, it must be the lhs; | |
5490 | so make the address stable for multiple use. */ | |
5491 | ||
f8cfc6aa | 5492 | if (value_mode != VOIDmode && !REG_P (addr) |
bbf6f052 RK |
5493 | && ! CONSTANT_ADDRESS_P (addr) |
5494 | /* A frame-pointer reference is already stable. */ | |
5495 | && ! (GET_CODE (addr) == PLUS | |
5496 | && GET_CODE (XEXP (addr, 1)) == CONST_INT | |
5497 | && (XEXP (addr, 0) == virtual_incoming_args_rtx | |
5498 | || XEXP (addr, 0) == virtual_stack_vars_rtx))) | |
a06ef755 | 5499 | to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr)); |
bbf6f052 RK |
5500 | |
5501 | /* Now build a reference to just the desired component. */ | |
5502 | ||
a06ef755 RK |
5503 | to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT); |
5504 | ||
5505 | if (to_rtx == target) | |
5506 | to_rtx = copy_rtx (to_rtx); | |
792760b9 | 5507 | |
c6df88cb | 5508 | MEM_SET_IN_STRUCT_P (to_rtx, 1); |
10b76d73 | 5509 | if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0) |
a06ef755 | 5510 | set_mem_alias_set (to_rtx, alias_set); |
bbf6f052 RK |
5511 | |
5512 | return store_expr (exp, to_rtx, value_mode != VOIDmode); | |
5513 | } | |
5514 | } | |
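/* Illustrative sketch, not part of the original file: a plain-C analogue
   of the shift pair store_field uses above to hand back a signed
   bit-field's value without refetching it from memory.  The function
   name and the 32-bit word size are assumptions for this example.  */
#if 0
static int
example_sign_extend_bitfield (int temp, int bitsize)
{
  /* An unsigned field is simply masked with width_mask; a signed field
     is shifted to the top of the word and arithmetic-shifted back,
     matching the two expand_shift calls above.  */
  int count = 32 - bitsize;
  return (int) ((unsigned) temp << count) >> count; /* bitsize 3, temp 5 -> -3 */
}
#endif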
5515 | \f | |
5516 | /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF, | |
b4e3fabb RK |
5517 | an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these |
5518 | codes and find the ultimate containing object, which we return. | |
bbf6f052 RK |
5519 | |
5520 | We set *PBITSIZE to the size in bits that we want, *PBITPOS to the | |
5521 | bit position, and *PUNSIGNEDP to the signedness of the field. | |
7bb0943f RS |
5522 | If the position of the field is variable, we store a tree |
5523 | giving the variable offset (in units) in *POFFSET. | |
5524 | This offset is in addition to the bit position. | |
5525 | If the position is not variable, we store 0 in *POFFSET. | |
bbf6f052 RK |
5526 | |
5527 | If any of the extraction expressions is volatile, | |
5528 | we store 1 in *PVOLATILEP. Otherwise we don't change that. | |
5529 | ||
5530 | If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it | |
5531 | is a mode that can be used to access the field. In that case, *PBITSIZE | |
e7c33f54 RK |
5532 | is redundant. |
5533 | ||
5534 | If the field describes a variable-sized object, *PMODE is set to | |
5535 | VOIDmode and *PBITSIZE is set to -1. An access cannot be made in | |
6d2f8887 | 5536 | this case, but the address of the object can be found. */ |
bbf6f052 RK |
5537 | |
5538 | tree | |
502b8322 AJ |
5539 | get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize, |
5540 | HOST_WIDE_INT *pbitpos, tree *poffset, | |
5541 | enum machine_mode *pmode, int *punsignedp, | |
5542 | int *pvolatilep) | |
bbf6f052 RK |
5543 | { |
5544 | tree size_tree = 0; | |
5545 | enum machine_mode mode = VOIDmode; | |
fed3cef0 | 5546 | tree offset = size_zero_node; |
770ae6cc | 5547 | tree bit_offset = bitsize_zero_node; |
770ae6cc | 5548 | tree tem; |
bbf6f052 | 5549 | |
770ae6cc RK |
5550 | /* First get the mode, signedness, and size. We do this from just the |
5551 | outermost expression. */ | |
bbf6f052 RK |
5552 | if (TREE_CODE (exp) == COMPONENT_REF) |
5553 | { | |
5554 | size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); | |
5555 | if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) | |
5556 | mode = DECL_MODE (TREE_OPERAND (exp, 1)); | |
770ae6cc | 5557 | |
a150de29 | 5558 | *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1)); |
bbf6f052 RK |
5559 | } |
5560 | else if (TREE_CODE (exp) == BIT_FIELD_REF) | |
5561 | { | |
5562 | size_tree = TREE_OPERAND (exp, 1); | |
a150de29 | 5563 | *punsignedp = BIT_FIELD_REF_UNSIGNED (exp); |
bbf6f052 RK |
5564 | } |
5565 | else | |
5566 | { | |
5567 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
8df83eae | 5568 | *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); |
770ae6cc | 5569 | |
ab87f8c8 JL |
5570 | if (mode == BLKmode) |
5571 | size_tree = TYPE_SIZE (TREE_TYPE (exp)); | |
770ae6cc RK |
5572 | else |
5573 | *pbitsize = GET_MODE_BITSIZE (mode); | |
bbf6f052 | 5574 | } |
3a94c984 | 5575 | |
770ae6cc | 5576 | if (size_tree != 0) |
bbf6f052 | 5577 | { |
770ae6cc | 5578 | if (! host_integerp (size_tree, 1)) |
e7c33f54 RK |
5579 | mode = BLKmode, *pbitsize = -1; |
5580 | else | |
770ae6cc | 5581 | *pbitsize = tree_low_cst (size_tree, 1); |
bbf6f052 RK |
5582 | } |
5583 | ||
5584 | /* Compute cumulative bit-offset for nested component-refs and array-refs, | |
5585 | and find the ultimate containing object. */ | |
bbf6f052 RK |
5586 | while (1) |
5587 | { | |
770ae6cc RK |
5588 | if (TREE_CODE (exp) == BIT_FIELD_REF) |
5589 | bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2)); | |
5590 | else if (TREE_CODE (exp) == COMPONENT_REF) | |
bbf6f052 | 5591 | { |
770ae6cc | 5592 | tree field = TREE_OPERAND (exp, 1); |
44de5aeb | 5593 | tree this_offset = component_ref_field_offset (exp); |
bbf6f052 | 5594 | |
e7f3c83f RK |
5595 | /* If this field hasn't been filled in yet, don't go |
5596 | past it. This should only happen when folding expressions | |
5597 | made during type construction. */ | |
770ae6cc | 5598 | if (this_offset == 0) |
e7f3c83f RK |
5599 | break; |
5600 | ||
7156dead | 5601 | offset = size_binop (PLUS_EXPR, offset, this_offset); |
770ae6cc RK |
5602 | bit_offset = size_binop (PLUS_EXPR, bit_offset, |
5603 | DECL_FIELD_BIT_OFFSET (field)); | |
e6d8c385 | 5604 | |
a06ef755 | 5605 | /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */ |
bbf6f052 | 5606 | } |
7156dead | 5607 | |
b4e3fabb RK |
5608 | else if (TREE_CODE (exp) == ARRAY_REF |
5609 | || TREE_CODE (exp) == ARRAY_RANGE_REF) | |
bbf6f052 | 5610 | { |
742920c7 | 5611 | tree index = TREE_OPERAND (exp, 1); |
44de5aeb RK |
5612 | tree low_bound = array_ref_low_bound (exp); |
5613 | tree unit_size = array_ref_element_size (exp); | |
742920c7 | 5614 | |
770ae6cc RK |
5615 | /* We assume all arrays have sizes that are a multiple of a byte. |
5616 | First subtract the lower bound, if any, in the type of the | |
5617 | index, then convert to sizetype and multiply by the size of the | |
5618 | array element. */ | |
44de5aeb | 5619 | if (! integer_zerop (low_bound)) |
770ae6cc RK |
5620 | index = fold (build (MINUS_EXPR, TREE_TYPE (index), |
5621 | index, low_bound)); | |
f8dac6eb | 5622 | |
770ae6cc RK |
5623 | offset = size_binop (PLUS_EXPR, offset, |
5624 | size_binop (MULT_EXPR, | |
5625 | convert (sizetype, index), | |
7156dead | 5626 | unit_size)); |
bbf6f052 | 5627 | } |
7156dead | 5628 | |
c1853da7 RK |
5629 | /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal |
5630 | conversions that don't change the mode, and all view conversions | |
5631 | except those that need to "step up" the alignment. */ | |
bbf6f052 | 5632 | else if (TREE_CODE (exp) != NON_LVALUE_EXPR |
c1853da7 RK |
5633 | && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR |
5634 | && ! ((TYPE_ALIGN (TREE_TYPE (exp)) | |
5635 | > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
5636 | && STRICT_ALIGNMENT | |
5637 | && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))) | |
5638 | < BIGGEST_ALIGNMENT) | |
5639 | && (TYPE_ALIGN_OK (TREE_TYPE (exp)) | |
5640 | || TYPE_ALIGN_OK (TREE_TYPE | |
5641 | (TREE_OPERAND (exp, 0)))))) | |
bbf6f052 RK |
5642 | && ! ((TREE_CODE (exp) == NOP_EXPR |
5643 | || TREE_CODE (exp) == CONVERT_EXPR) | |
5644 | && (TYPE_MODE (TREE_TYPE (exp)) | |
5645 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))) | |
5646 | break; | |
7bb0943f RS |
5647 | |
5648 | /* If any reference in the chain is volatile, the effect is volatile. */ | |
5649 | if (TREE_THIS_VOLATILE (exp)) | |
5650 | *pvolatilep = 1; | |
839c4796 | 5651 | |
bbf6f052 RK |
5652 | exp = TREE_OPERAND (exp, 0); |
5653 | } | |
5654 | ||
770ae6cc RK |
5655 | /* If OFFSET is constant, see if we can return the whole thing as a |
5656 | constant bit position. Otherwise, split it up. */ | |
5657 | if (host_integerp (offset, 0) | |
5658 | && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset), | |
5659 | bitsize_unit_node)) | |
5660 | && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset)) | |
5661 | && host_integerp (tem, 0)) | |
5662 | *pbitpos = tree_low_cst (tem, 0), *poffset = 0; | |
5663 | else | |
5664 | *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset; | |
b50d17a1 | 5665 | |
bbf6f052 | 5666 | *pmode = mode; |
bbf6f052 RK |
5667 | return exp; |
5668 | } | |
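/* Illustrative usage sketch, not part of the original file; the variable
   names and the struct layout are assumptions.  For a reference s.f where
   f is declared "unsigned f : 3" after a 32-bit int member, a caller
   would typically see bitsize == 3, bitpos == 32, offset == 0 (constant
   position), mode == VOIDmode (bit-field), and unsignedp == 1:  */
#if 0
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;
  tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                    &mode1, &unsignedp, &volatilep);
#endif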
921b3427 | 5669 | |
44de5aeb RK |
5670 | /* Return a tree of sizetype representing the size, in bytes, of the element |
5671 | of EXP, an ARRAY_REF. */ | |
5672 | ||
5673 | tree | |
5674 | array_ref_element_size (tree exp) | |
5675 | { | |
5676 | tree aligned_size = TREE_OPERAND (exp, 3); | |
5677 | tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
5678 | ||
5679 | /* If a size was specified in the ARRAY_REF, it's the size measured | |
5680 | in alignment units of the element type. So multiply by that value. */ | |
5681 | if (aligned_size) | |
5682 | return size_binop (MULT_EXPR, aligned_size, | |
5683 | size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT)); | |
5684 | ||
5685 | /* Otherwise, take the size from that of the element type. Substitute | |
5686 | any PLACEHOLDER_EXPR that we have. */ | |
5687 | else | |
5688 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp); | |
5689 | } | |
5690 | ||
5691 | /* Return a tree representing the lower bound of the array mentioned in | |
5692 | EXP, an ARRAY_REF. */ | |
5693 | ||
5694 | tree | |
5695 | array_ref_low_bound (tree exp) | |
5696 | { | |
5697 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
5698 | ||
5699 | /* If a lower bound is specified in EXP, use it. */ | |
5700 | if (TREE_OPERAND (exp, 2)) | |
5701 | return TREE_OPERAND (exp, 2); | |
5702 | ||
5703 | /* Otherwise, if there is a domain type and it has a lower bound, use it, | |
5704 | substituting for a PLACEHOLDER_EXPR as needed. */ | |
5705 | if (domain_type && TYPE_MIN_VALUE (domain_type)) | |
5706 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp); | |
5707 | ||
5708 | /* Otherwise, return a zero of the appropriate type. */ | |
5709 | return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node); | |
5710 | } | |
5711 | ||
5712 | /* Return a tree representing the offset, in bytes, of the field referenced | |
5713 | by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */ | |
5714 | ||
5715 | tree | |
5716 | component_ref_field_offset (tree exp) | |
5717 | { | |
5718 | tree aligned_offset = TREE_OPERAND (exp, 2); | |
5719 | tree field = TREE_OPERAND (exp, 1); | |
5720 | ||
5721 | /* If an offset was specified in the COMPONENT_REF, it's the offset measured | |
5722 | in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that | |
5723 | value. */ | |
5724 | if (aligned_offset) | |
5725 | return size_binop (MULT_EXPR, aligned_offset, | |
5726 | size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT)); | |
5727 | ||
5728 | /* Otherwise, take the offset from that of the field. Substitute | |
5729 | any PLACEHOLDER_EXPR that we have. */ | |
5730 | else | |
5731 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp); | |
5732 | } | |
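/* Worked example, not part of the original file: if a COMPONENT_REF
   carries an aligned offset of 3 and the field's DECL_OFFSET_ALIGN is
   64 bits, the byte offset is 3 * (64 / BITS_PER_UNIT) = 24.  The same
   scaling applies in array_ref_element_size, whose operand 3 is measured
   in alignment units of the element type.  */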
5733 | ||
ed239f5a RK |
5734 | /* Return 1 if T is an expression that get_inner_reference handles. */ |
5735 | ||
5736 | int | |
502b8322 | 5737 | handled_component_p (tree t) |
ed239f5a RK |
5738 | { |
5739 | switch (TREE_CODE (t)) | |
5740 | { | |
5741 | case BIT_FIELD_REF: | |
5742 | case COMPONENT_REF: | |
5743 | case ARRAY_REF: | |
5744 | case ARRAY_RANGE_REF: | |
5745 | case NON_LVALUE_EXPR: | |
5746 | case VIEW_CONVERT_EXPR: | |
5747 | return 1; | |
5748 | ||
1a8c4ca6 EB |
5749 | /* ??? Sure they are handled, but get_inner_reference may return |
5750 | a different PBITSIZE, depending upon whether the expression is | |
5751 | wrapped up in a NOP_EXPR or not, e.g. for bitfields. */ | |
ed239f5a RK |
5752 | case NOP_EXPR: |
5753 | case CONVERT_EXPR: | |
5754 | return (TYPE_MODE (TREE_TYPE (t)) | |
5755 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0)))); | |
5756 | ||
5757 | default: | |
5758 | return 0; | |
5759 | } | |
5760 | } | |
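/* Illustrative idiom, not part of the original file: callers typically
   strip handled components to reach the base object, e.g.  */
#if 0
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  /* t is now the underlying object that get_inner_reference would
     also return as its result.  */
#endif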
bbf6f052 | 5761 | \f |
3fe44edd RK |
5762 | /* Given an rtx VALUE that may contain additions and multiplications, return |
5763 | an equivalent value that just refers to a register, memory, or constant. | |
5764 | This is done by generating instructions to perform the arithmetic and | |
5765 | returning a pseudo-register containing the value. | |
c45a13a6 RK |
5766 | |
5767 | The returned value may be a REG, SUBREG, MEM or constant. */ | |
bbf6f052 RK |
5768 | |
5769 | rtx | |
502b8322 | 5770 | force_operand (rtx value, rtx target) |
bbf6f052 | 5771 | { |
8a28dbcc | 5772 | rtx op1, op2; |
bbf6f052 | 5773 | /* Use subtarget as the target for operand 0 of a binary operation. */ |
b3694847 | 5774 | rtx subtarget = get_subtarget (target); |
8a28dbcc | 5775 | enum rtx_code code = GET_CODE (value); |
bbf6f052 | 5776 | |
50654f6c ZD |
5777 | /* Check for subreg applied to an expression produced by loop optimizer. */ |
5778 | if (code == SUBREG | |
f8cfc6aa | 5779 | && !REG_P (SUBREG_REG (value)) |
3c0cb5de | 5780 | && !MEM_P (SUBREG_REG (value))) |
50654f6c ZD |
5781 | { |
5782 | value = simplify_gen_subreg (GET_MODE (value), | |
5783 | force_reg (GET_MODE (SUBREG_REG (value)), | |
5784 | force_operand (SUBREG_REG (value), | |
5785 | NULL_RTX)), | |
5786 | GET_MODE (SUBREG_REG (value)), | |
5787 | SUBREG_BYTE (value)); | |
5788 | code = GET_CODE (value); | |
5789 | } | |
5790 | ||
8b015896 | 5791 | /* Check for a PIC address load. */ |
8a28dbcc | 5792 | if ((code == PLUS || code == MINUS) |
8b015896 RH |
5793 | && XEXP (value, 0) == pic_offset_table_rtx |
5794 | && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF | |
5795 | || GET_CODE (XEXP (value, 1)) == LABEL_REF | |
5796 | || GET_CODE (XEXP (value, 1)) == CONST)) | |
5797 | { | |
5798 | if (!subtarget) | |
5799 | subtarget = gen_reg_rtx (GET_MODE (value)); | |
5800 | emit_move_insn (subtarget, value); | |
5801 | return subtarget; | |
5802 | } | |
5803 | ||
8a28dbcc | 5804 | if (code == ZERO_EXTEND || code == SIGN_EXTEND) |
bbf6f052 | 5805 | { |
8a28dbcc JH |
5806 | if (!target) |
5807 | target = gen_reg_rtx (GET_MODE (value)); | |
ce0f3925 | 5808 | convert_move (target, force_operand (XEXP (value, 0), NULL), |
8a28dbcc JH |
5809 | code == ZERO_EXTEND); |
5810 | return target; | |
bbf6f052 RK |
5811 | } |
5812 | ||
ec8e098d | 5813 | if (ARITHMETIC_P (value)) |
bbf6f052 RK |
5814 | { |
5815 | op2 = XEXP (value, 1); | |
f8cfc6aa | 5816 | if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget)) |
bbf6f052 | 5817 | subtarget = 0; |
8a28dbcc | 5818 | if (code == MINUS && GET_CODE (op2) == CONST_INT) |
bbf6f052 | 5819 | { |
8a28dbcc | 5820 | code = PLUS; |
bbf6f052 RK |
5821 | op2 = negate_rtx (GET_MODE (value), op2); |
5822 | } | |
5823 | ||
5824 | /* Check for an addition with OP2 a constant integer and our first | |
8a28dbcc JH |
5825 | operand a PLUS of a virtual register and something else. In that |
5826 | case, we want to emit the sum of the virtual register and the | |
5827 | constant first and then add the other value. This allows virtual | |
5828 | register instantiation to simply modify the constant rather than | |
5829 | creating another one around this addition. */ | |
5830 | if (code == PLUS && GET_CODE (op2) == CONST_INT | |
bbf6f052 | 5831 | && GET_CODE (XEXP (value, 0)) == PLUS |
f8cfc6aa | 5832 | && REG_P (XEXP (XEXP (value, 0), 0)) |
bbf6f052 RK |
5833 | && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER |
5834 | && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER) | |
5835 | { | |
8a28dbcc JH |
5836 | rtx temp = expand_simple_binop (GET_MODE (value), code, |
5837 | XEXP (XEXP (value, 0), 0), op2, | |
5838 | subtarget, 0, OPTAB_LIB_WIDEN); | |
5839 | return expand_simple_binop (GET_MODE (value), code, temp, | |
5840 | force_operand (XEXP (XEXP (value, | |
5841 | 0), 1), 0), | |
5842 | target, 0, OPTAB_LIB_WIDEN); | |
bbf6f052 | 5843 | } |
3a94c984 | 5844 | |
8a28dbcc JH |
5845 | op1 = force_operand (XEXP (value, 0), subtarget); |
5846 | op2 = force_operand (op2, NULL_RTX); | |
5847 | switch (code) | |
5848 | { | |
5849 | case MULT: | |
5850 | return expand_mult (GET_MODE (value), op1, op2, target, 1); | |
5851 | case DIV: | |
5852 | if (!INTEGRAL_MODE_P (GET_MODE (value))) | |
5853 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
5854 | target, 1, OPTAB_LIB_WIDEN); | |
5855 | else | |
5856 | return expand_divmod (0, | |
5857 | FLOAT_MODE_P (GET_MODE (value)) | |
5858 | ? RDIV_EXPR : TRUNC_DIV_EXPR, | |
5859 | GET_MODE (value), op1, op2, target, 0); | |
5860 | break; | |
5861 | case MOD: | |
5862 | return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, | |
5863 | target, 0); | |
5864 | break; | |
5865 | case UDIV: | |
5866 | return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2, | |
5867 | target, 1); | |
5868 | break; | |
5869 | case UMOD: | |
5870 | return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, | |
5871 | target, 1); | |
5872 | break; | |
5873 | case ASHIFTRT: | |
5874 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
5875 | target, 0, OPTAB_LIB_WIDEN); | |
5876 | break; | |
5877 | default: | |
5878 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
5879 | target, 1, OPTAB_LIB_WIDEN); | |
5880 | } | |
5881 | } | |
ec8e098d | 5882 | if (UNARY_P (value)) |
8a28dbcc JH |
5883 | { |
5884 | op1 = force_operand (XEXP (value, 0), NULL_RTX); | |
5885 | return expand_simple_unop (GET_MODE (value), code, op1, target, 0); | |
bbf6f052 | 5886 | } |
34e81b5a RK |
5887 | |
5888 | #ifdef INSN_SCHEDULING | |
5889 | /* On machines that have insn scheduling, we want all memory references to be |
5890 | explicit, so we need to deal with such paradoxical SUBREGs. */ | |
3c0cb5de | 5891 | if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value)) |
34e81b5a RK |
5892 | && (GET_MODE_SIZE (GET_MODE (value)) |
5893 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value))))) | |
5894 | value | |
5895 | = simplify_gen_subreg (GET_MODE (value), | |
5896 | force_reg (GET_MODE (SUBREG_REG (value)), | |
5897 | force_operand (SUBREG_REG (value), | |
5898 | NULL_RTX)), | |
5899 | GET_MODE (SUBREG_REG (value)), | |
5900 | SUBREG_BYTE (value)); | |
5901 | #endif | |
5902 | ||
bbf6f052 RK |
5903 | return value; |
5904 | } | |
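/* Illustrative usage, not part of the original file: address arithmetic
   returned under EXPAND_SUM, e.g. (plus (mult (reg i) (const_int 4))
   (reg base)), is not a valid operand for most insns; forcing it emits
   the multiply and add and yields a pseudo register.  */
#if 0
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_SUM);
  rtx reg = force_operand (addr, NULL_RTX);
#endif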
5905 | \f | |
bbf6f052 | 5906 | /* Subroutine of expand_expr: return nonzero iff there is no way that |
e5e809f4 JL |
5907 | EXP can reference X, which is being modified. TOP_P is nonzero if this |
5908 | call is going to be used to determine whether we need a temporary | |
ff439b5f CB |
5909 | for EXP, as opposed to a recursive call to this function. |
5910 | ||
5911 | It is always safe for this routine to return zero since it merely | |
5912 | searches for optimization opportunities. */ | |
bbf6f052 | 5913 | |
8f17b5c5 | 5914 | int |
502b8322 | 5915 | safe_from_p (rtx x, tree exp, int top_p) |
bbf6f052 RK |
5916 | { |
5917 | rtx exp_rtl = 0; | |
5918 | int i, nops; | |
5919 | ||
6676e72f RK |
5920 | if (x == 0 |
5921 | /* If EXP has varying size, we MUST use a target since we currently | |
8f6562d0 PB |
5922 | have no way of allocating temporaries of variable size |
5923 | (except for arrays that have TYPE_ARRAY_MAX_SIZE set). | |
5924 | So we assume here that something at a higher level has prevented a | |
f4510f37 | 5925 | clash. This is somewhat bogus, but the best we can do. Only |
e5e809f4 | 5926 | do this when X is BLKmode and when we are at the top level. */ |
d0f062fb | 5927 | || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) |
f4510f37 | 5928 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST |
8f6562d0 PB |
5929 | && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE |
5930 | || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE | |
5931 | || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp))) | |
5932 | != INTEGER_CST) | |
1da68f56 RK |
5933 | && GET_MODE (x) == BLKmode) |
5934 | /* If X is in the outgoing argument area, it is always safe. */ | |
3c0cb5de | 5935 | || (MEM_P (x) |
1da68f56 RK |
5936 | && (XEXP (x, 0) == virtual_outgoing_args_rtx |
5937 | || (GET_CODE (XEXP (x, 0)) == PLUS | |
5938 | && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))) | |
bbf6f052 RK |
5939 | return 1; |
5940 | ||
5941 | /* If this is a subreg of a hard register, declare it unsafe, otherwise, | |
5942 | find the underlying pseudo. */ | |
5943 | if (GET_CODE (x) == SUBREG) | |
5944 | { | |
5945 | x = SUBREG_REG (x); | |
f8cfc6aa | 5946 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
bbf6f052 RK |
5947 | return 0; |
5948 | } | |
5949 | ||
1da68f56 | 5950 | /* Now look at our tree code and possibly recurse. */ |
bbf6f052 RK |
5951 | switch (TREE_CODE_CLASS (TREE_CODE (exp))) |
5952 | { | |
5953 | case 'd': | |
a9772b60 | 5954 | exp_rtl = DECL_RTL_IF_SET (exp); |
bbf6f052 RK |
5955 | break; |
5956 | ||
5957 | case 'c': | |
5958 | return 1; | |
5959 | ||
5960 | case 'x': | |
5961 | if (TREE_CODE (exp) == TREE_LIST) | |
f8d4be57 CE |
5962 | { |
5963 | while (1) | |
5964 | { | |
5965 | if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0)) | |
5966 | return 0; | |
5967 | exp = TREE_CHAIN (exp); | |
5968 | if (!exp) | |
5969 | return 1; | |
5970 | if (TREE_CODE (exp) != TREE_LIST) | |
5971 | return safe_from_p (x, exp, 0); | |
5972 | } | |
5973 | } | |
ff439b5f CB |
5974 | else if (TREE_CODE (exp) == ERROR_MARK) |
5975 | return 1; /* An already-visited SAVE_EXPR? */ | |
bbf6f052 RK |
5976 | else |
5977 | return 0; | |
5978 | ||
350fae66 RK |
5979 | case 's': |
5980 | /* The only case we look at here is the DECL_INITIAL inside a | |
5981 | DECL_EXPR. */ | |
5982 | return (TREE_CODE (exp) != DECL_EXPR | |
5983 | || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL | |
5984 | || !DECL_INITIAL (DECL_EXPR_DECL (exp)) | |
5985 | || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0)); | |
5986 | ||
bbf6f052 RK |
5987 | case '2': |
5988 | case '<': | |
f8d4be57 CE |
5989 | if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0)) |
5990 | return 0; | |
5d3cc252 | 5991 | /* Fall through. */ |
f8d4be57 CE |
5992 | |
5993 | case '1': | |
5994 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); | |
bbf6f052 RK |
5995 | |
5996 | case 'e': | |
5997 | case 'r': | |
5998 | /* Now do code-specific tests. EXP_RTL is set to any rtx we find in | |
5999 | the expression. If it is set, we conflict iff we are that rtx or | |
6000 | both are in memory. Otherwise, we check all operands of the | |
6001 | expression recursively. */ | |
6002 | ||
6003 | switch (TREE_CODE (exp)) | |
6004 | { | |
6005 | case ADDR_EXPR: | |
70072ed9 RK |
6006 | /* If the operand is static or we are static, we can't conflict. |
6007 | Likewise if we don't conflict with the operand at all. */ | |
6008 | if (staticp (TREE_OPERAND (exp, 0)) | |
6009 | || TREE_STATIC (exp) | |
6010 | || safe_from_p (x, TREE_OPERAND (exp, 0), 0)) | |
6011 | return 1; | |
6012 | ||
6013 | /* Otherwise, the only way this can conflict is if we are taking | |
6014 | the address of a DECL whose address is part of X, which is |
6015 | very rare. */ | |
6016 | exp = TREE_OPERAND (exp, 0); | |
6017 | if (DECL_P (exp)) | |
6018 | { | |
6019 | if (!DECL_RTL_SET_P (exp) | |
3c0cb5de | 6020 | || !MEM_P (DECL_RTL (exp))) |
70072ed9 RK |
6021 | return 0; |
6022 | else | |
6023 | exp_rtl = XEXP (DECL_RTL (exp), 0); | |
6024 | } | |
6025 | break; | |
bbf6f052 RK |
6026 | |
6027 | case INDIRECT_REF: | |
3c0cb5de | 6028 | if (MEM_P (x) |
1da68f56 RK |
6029 | && alias_sets_conflict_p (MEM_ALIAS_SET (x), |
6030 | get_alias_set (exp))) | |
bbf6f052 RK |
6031 | return 0; |
6032 | break; | |
6033 | ||
6034 | case CALL_EXPR: | |
f9808f81 MM |
6035 | /* Assume that the call will clobber all hard registers and |
6036 | all of memory. */ | |
f8cfc6aa | 6037 | if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
3c0cb5de | 6038 | || MEM_P (x)) |
f9808f81 | 6039 | return 0; |
bbf6f052 RK |
6040 | break; |
6041 | ||
bbf6f052 | 6042 | case WITH_CLEANUP_EXPR: |
6ad7895a | 6043 | exp_rtl = WITH_CLEANUP_EXPR_RTL (exp); |
bbf6f052 RK |
6044 | break; |
6045 | ||
5dab5552 | 6046 | case CLEANUP_POINT_EXPR: |
bbf6f052 | 6047 | case SAVE_EXPR: |
82c82743 | 6048 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); |
bbf6f052 | 6049 | |
8129842c RS |
6050 | case BIND_EXPR: |
6051 | /* The only operand we look at is operand 1. The rest aren't | |
6052 | part of the expression. */ | |
e5e809f4 | 6053 | return safe_from_p (x, TREE_OPERAND (exp, 1), 0); |
8129842c | 6054 | |
e9a25f70 JL |
6055 | default: |
6056 | break; | |
bbf6f052 RK |
6057 | } |
6058 | ||
6059 | /* If we have an rtx, we do not need to scan our operands. */ | |
6060 | if (exp_rtl) | |
6061 | break; | |
6062 | ||
8f17b5c5 | 6063 | nops = first_rtl_op (TREE_CODE (exp)); |
bbf6f052 RK |
6064 | for (i = 0; i < nops; i++) |
6065 | if (TREE_OPERAND (exp, i) != 0 | |
e5e809f4 | 6066 | && ! safe_from_p (x, TREE_OPERAND (exp, i), 0)) |
bbf6f052 | 6067 | return 0; |
8f17b5c5 MM |
6068 | |
6069 | /* If this is a language-specific tree code, it may require | |
6070 | special handling. */ | |
dbbbbf3b JDA |
6071 | if ((unsigned int) TREE_CODE (exp) |
6072 | >= (unsigned int) LAST_AND_UNUSED_TREE_CODE | |
ae2bcd98 | 6073 | && !lang_hooks.safe_from_p (x, exp)) |
8f17b5c5 | 6074 | return 0; |
bbf6f052 RK |
6075 | } |
6076 | ||
6077 | /* If we have an rtl, find any enclosed object. Then see if we conflict | |
6078 | with it. */ | |
6079 | if (exp_rtl) | |
6080 | { | |
6081 | if (GET_CODE (exp_rtl) == SUBREG) | |
6082 | { | |
6083 | exp_rtl = SUBREG_REG (exp_rtl); | |
f8cfc6aa | 6084 | if (REG_P (exp_rtl) |
bbf6f052 RK |
6085 | && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER) |
6086 | return 0; | |
6087 | } | |
6088 | ||
6089 | /* If the rtl is X, then it is not safe. Otherwise, it is unless both | |
1da68f56 | 6090 | are memory and they conflict. */ |
bbf6f052 | 6091 | return ! (rtx_equal_p (x, exp_rtl) |
3c0cb5de | 6092 | || (MEM_P (x) && MEM_P (exp_rtl) |
21117a17 | 6093 | && true_dependence (exp_rtl, VOIDmode, x, |
1da68f56 | 6094 | rtx_addr_varies_p))); |
bbf6f052 RK |
6095 | } |
6096 | ||
6097 | /* If we reach here, it is safe. */ | |
6098 | return 1; | |
6099 | } | |
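/* Illustrative usage, not part of the original file: expand_operands
   below applies exactly this check before letting operand 0 be computed
   into TARGET:

       if (! safe_from_p (target, exp1, 1))
         target = 0;

   i.e. TARGET is reused as a scratch only when expanding EXP1 cannot
   clobber it; returning zero here merely forfeits that optimization.  */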
6100 | ||
01c8a7c8 RK |
6101 | /* Subroutine of expand_expr: return rtx if EXP is a |
6102 | variable or parameter; else return 0. */ | |
6103 | ||
6104 | static rtx | |
502b8322 | 6105 | var_rtx (tree exp) |
01c8a7c8 RK |
6106 | { |
6107 | STRIP_NOPS (exp); | |
6108 | switch (TREE_CODE (exp)) | |
6109 | { | |
6110 | case PARM_DECL: | |
6111 | case VAR_DECL: | |
6112 | return DECL_RTL (exp); | |
6113 | default: | |
6114 | return 0; | |
6115 | } | |
6116 | } | |
14a774a9 | 6117 | \f |
0d4903b8 RK |
6118 | /* Return the highest power of two that EXP is known to be a multiple of. |
6119 | This is used in updating alignment of MEMs in array references. */ | |
6120 | ||
9ceca302 | 6121 | static unsigned HOST_WIDE_INT |
502b8322 | 6122 | highest_pow2_factor (tree exp) |
0d4903b8 | 6123 | { |
9ceca302 | 6124 | unsigned HOST_WIDE_INT c0, c1; |
0d4903b8 RK |
6125 | |
6126 | switch (TREE_CODE (exp)) | |
6127 | { | |
6128 | case INTEGER_CST: | |
e0f1be5c JJ |
6129 | /* We can find the lowest bit that's a one. If the low |
6130 | HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT. | |
6131 | We need to handle this case since we can find it in a COND_EXPR, | |
a98ebe2e | 6132 | a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an |
e0f1be5c | 6133 | erroneous program, so return BIGGEST_ALIGNMENT to avoid any |
3a531a8b | 6134 | later ICE. */ |
e0f1be5c | 6135 | if (TREE_CONSTANT_OVERFLOW (exp)) |
1ed1b4fb | 6136 | return BIGGEST_ALIGNMENT; |
e0f1be5c | 6137 | else |
0d4903b8 | 6138 | { |
e0f1be5c JJ |
6139 | /* Note: tree_low_cst is intentionally not used here, |
6140 | we don't care about the upper bits. */ | |
6141 | c0 = TREE_INT_CST_LOW (exp); | |
6142 | c0 &= -c0; | |
6143 | return c0 ? c0 : BIGGEST_ALIGNMENT; | |
0d4903b8 RK |
6144 | } |
6145 | break; | |
6146 | ||
65a07688 | 6147 | case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR: |
0d4903b8 RK |
6148 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); |
6149 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6150 | return MIN (c0, c1); | |
6151 | ||
6152 | case MULT_EXPR: | |
6153 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
6154 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6155 | return c0 * c1; | |
6156 | ||
6157 | case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR: | |
6158 | case CEIL_DIV_EXPR: | |
65a07688 RK |
6159 | if (integer_pow2p (TREE_OPERAND (exp, 1)) |
6160 | && host_integerp (TREE_OPERAND (exp, 1), 1)) | |
6161 | { | |
6162 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
6163 | c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1); | |
6164 | return MAX (1, c0 / c1); | |
6165 | } | |
6166 | break; | |
0d4903b8 RK |
6167 | |
6168 | case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR: | |
6fce44af | 6169 | case SAVE_EXPR: |
0d4903b8 RK |
6170 | return highest_pow2_factor (TREE_OPERAND (exp, 0)); |
6171 | ||
65a07688 RK |
6172 | case COMPOUND_EXPR: |
6173 | return highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6174 | ||
0d4903b8 RK |
6175 | case COND_EXPR: |
6176 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6177 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 2)); | |
6178 | return MIN (c0, c1); | |
6179 | ||
6180 | default: | |
6181 | break; | |
6182 | } | |
6183 | ||
6184 | return 1; | |
6185 | } | |
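/* Worked example, not part of the original file: for i * 12 + 8, the
   MULT case gives highest_pow2_factor (i) * highest_pow2_factor (12)
   = 1 * 4 = 4, and the PLUS case then gives MIN (4, 8) = 4, so a MEM
   addressed by this expression may be marked 4-byte aligned.  */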
818c0c94 | 6186 | |
d50a16c4 EB |
6187 | /* Similar, except that the alignment requirements of TARGET are |
6188 | taken into account. Assume it is at least as aligned as its | |
6189 | type, unless it is a COMPONENT_REF in which case the layout of | |
6190 | the structure gives the alignment. */ | |
818c0c94 | 6191 | |
9ceca302 | 6192 | static unsigned HOST_WIDE_INT |
d50a16c4 | 6193 | highest_pow2_factor_for_target (tree target, tree exp) |
818c0c94 | 6194 | { |
d50a16c4 | 6195 | unsigned HOST_WIDE_INT target_align, factor; |
818c0c94 RH |
6196 | |
6197 | factor = highest_pow2_factor (exp); | |
d50a16c4 EB |
6198 | if (TREE_CODE (target) == COMPONENT_REF) |
6199 | target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT; | |
6200 | else | |
6201 | target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT; | |
6202 | return MAX (factor, target_align); | |
818c0c94 | 6203 | } |
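/* Worked example, not part of the original file: if EXP is only known to
   be a multiple of 4 bytes but TARGET is a COMPONENT_REF whose field is
   declared with 8-byte alignment, the result is MAX (4, 8) = 8.  */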
0d4903b8 | 6204 | \f |
6de9cd9a DN |
6205 | /* Expands variable VAR. */ |
6206 | ||
6207 | void | |
6208 | expand_var (tree var) | |
6209 | { | |
6210 | if (DECL_EXTERNAL (var)) | |
6211 | return; | |
6212 | ||
6213 | if (TREE_STATIC (var)) | |
6214 | /* If this is an inlined copy of a static local variable, | |
6215 | look up the original decl. */ | |
6216 | var = DECL_ORIGIN (var); | |
6217 | ||
6218 | if (TREE_STATIC (var) | |
6219 | ? !TREE_ASM_WRITTEN (var) | |
6220 | : !DECL_RTL_SET_P (var)) | |
6221 | { | |
6222 | if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var)) | |
6223 | { | |
6224 | /* Prepare a mem & address for the decl. */ | |
6225 | rtx x; | |
6226 | ||
6227 | if (TREE_STATIC (var)) | |
6228 | abort (); | |
6229 | ||
6230 | x = gen_rtx_MEM (DECL_MODE (var), | |
6231 | gen_reg_rtx (Pmode)); | |
6232 | ||
6233 | set_mem_attributes (x, var, 1); | |
6234 | SET_DECL_RTL (var, x); | |
6235 | } | |
673fda6b | 6236 | else if (lang_hooks.expand_decl (var)) |
6de9cd9a DN |
6237 | /* OK. */; |
6238 | else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var)) | |
6239 | expand_decl (var); | |
6240 | else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var)) | |
6241 | rest_of_decl_compilation (var, NULL, 0, 0); | |
6242 | else if (TREE_CODE (var) == TYPE_DECL | |
6243 | || TREE_CODE (var) == CONST_DECL | |
6244 | || TREE_CODE (var) == FUNCTION_DECL | |
6245 | || TREE_CODE (var) == LABEL_DECL) | |
6246 | /* No expansion needed. */; | |
6247 | else | |
6248 | abort (); | |
6249 | } | |
6250 | } | |
6251 | ||
6252 | /* Expands declarations of variables in list VARS. */ | |
6253 | ||
6254 | static void | |
6255 | expand_vars (tree vars) | |
6256 | { | |
6257 | for (; vars; vars = TREE_CHAIN (vars)) | |
6258 | { | |
6259 | tree var = vars; | |
6260 | ||
6261 | if (DECL_EXTERNAL (var)) | |
6262 | continue; | |
6263 | ||
6264 | expand_var (var); | |
6265 | expand_decl_init (var); | |
6266 | } | |
6267 | } | |
6268 | ||
eb698c58 RS |
6269 | /* Subroutine of expand_expr. Expand the two operands of a binary |
6270 | expression EXP0 and EXP1 placing the results in OP0 and OP1. | |
6271 | The value may be stored in TARGET if TARGET is nonzero. The | |
6272 | MODIFIER argument is as documented by expand_expr. */ | |
6273 | ||
6274 | static void | |
6275 | expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1, | |
6276 | enum expand_modifier modifier) | |
6277 | { | |
6278 | if (! safe_from_p (target, exp1, 1)) | |
6279 | target = 0; | |
6280 | if (operand_equal_p (exp0, exp1, 0)) | |
6281 | { | |
6282 | *op0 = expand_expr (exp0, target, VOIDmode, modifier); | |
6283 | *op1 = copy_rtx (*op0); | |
6284 | } | |
6285 | else | |
6286 | { | |
c67e6e14 RS |
6287 | /* If we need to preserve evaluation order, copy exp0 into its own |
6288 | temporary variable so that it can't be clobbered by exp1. */ | |
6289 | if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1)) | |
6290 | exp0 = save_expr (exp0); | |
eb698c58 RS |
6291 | *op0 = expand_expr (exp0, target, VOIDmode, modifier); |
6292 | *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier); | |
6293 | } | |
6294 | } | |
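/* Illustrative note, not part of the original file: for an expression
   such as a + a, operand_equal_p detects the identical operands, so the
   tree is expanded once and the rtx merely copied.  */
#if 0
  rtx op0, op1;
  expand_operands (exp0, exp0, target, &op0, &op1, EXPAND_NORMAL);
  /* Here op1 is copy_rtx (op0); exp0 was expanded a single time.  */
#endif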
6295 | ||
f47e9b4e | 6296 | \f |
bbf6f052 RK |
6297 | /* expand_expr: generate code for computing expression EXP. |
6298 | An rtx for the computed value is returned. The value is never null. | |
6299 | In the case of a void EXP, const0_rtx is returned. | |
6300 | ||
6301 | The value may be stored in TARGET if TARGET is nonzero. | |
6302 | TARGET is just a suggestion; callers must assume that | |
6303 | the rtx returned may not be the same as TARGET. | |
6304 | ||
6305 | If TARGET is CONST0_RTX, it means that the value will be ignored. | |
6306 | ||
6307 | If TMODE is not VOIDmode, it suggests generating the | |
6308 | result in mode TMODE. But this is done only when convenient. | |
6309 | Otherwise, TMODE is ignored and the value is generated in its natural mode. |
6310 | TMODE is just a suggestion; callers must assume that | |
6311 | the rtx returned may not have mode TMODE. | |
6312 | ||
d6a5ac33 RK |
6313 | Note that TARGET may have neither TMODE nor MODE. In that case, it |
6314 | probably will not be used. | |
bbf6f052 RK |
6315 | |
6316 | If MODIFIER is EXPAND_SUM then when EXP is an addition | |
6317 | we can return an rtx of the form (MULT (REG ...) (CONST_INT ...)) | |
6318 | or a nest of (PLUS ...) and (MINUS ...) where the terms are | |
6319 | products as above, or REG or MEM, or constant. | |
6320 | Ordinarily in such cases we would output mul or add instructions | |
6321 | and then return a pseudo reg containing the sum. | |
6322 | ||
6323 | EXPAND_INITIALIZER is much like EXPAND_SUM except that | |
6324 | it also marks a label as absolutely required (it can't be dead). | |
26fcb35a | 6325 | It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns. |
d6a5ac33 RK |
6326 | This is used for outputting expressions used in initializers. |
6327 | ||
6328 | EXPAND_CONST_ADDRESS says that it is okay to return a MEM | |
6329 | with a constant address even if that address is not normally legitimate. | |
8403445a AM |
6330 | EXPAND_INITIALIZER and EXPAND_SUM also have this effect. |
6331 | ||
6332 | EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for | |
6333 | a call parameter. Such targets require special care as we haven't yet | |
6334 | marked TARGET so that it's safe from being trashed by libcalls. We | |
6335 | don't want to use TARGET for anything but the final result; | |
6336 | intermediate values must go elsewhere. Additionally, calls to |
0fab64a3 MM |
6337 | emit_block_move will be flagged with BLOCK_OP_CALL_PARM. |
6338 | ||
6339 | If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid | |
6340 | address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the | |
6341 | DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a | |
6342 | COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on | |
6343 | recursively. */ | |
bbf6f052 | 6344 | |
6de9cd9a DN |
6345 | static rtx expand_expr_real_1 (tree, rtx, enum machine_mode, |
6346 | enum expand_modifier, rtx *); | |
6347 | ||
bbf6f052 | 6348 | rtx |
0fab64a3 MM |
6349 | expand_expr_real (tree exp, rtx target, enum machine_mode tmode, |
6350 | enum expand_modifier modifier, rtx *alt_rtl) | |
6de9cd9a DN |
6351 | { |
6352 | int rn = -1; | |
6353 | rtx ret, last = NULL; | |
6354 | ||
6355 | /* Handle ERROR_MARK before anybody tries to access its type. */ | |
6356 | if (TREE_CODE (exp) == ERROR_MARK | |
6357 | || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK) | |
6358 | { | |
6359 | ret = CONST0_RTX (tmode); | |
6360 | return ret ? ret : const0_rtx; | |
6361 | } | |
6362 | ||
6363 | if (flag_non_call_exceptions) | |
6364 | { | |
6365 | rn = lookup_stmt_eh_region (exp); | |
6366 | /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */ | |
6367 | if (rn >= 0) | |
6368 | last = get_last_insn (); | |
6369 | } | |
6370 | ||
6371 | /* If this is an expression of some kind and it has an associated line | |
6372 | number, then emit the line number before expanding the expression. | |
6373 | ||
6374 | We need to save and restore the file and line information so that | |
6375 | errors discovered during expansion are emitted with the right | |
6376 | information. It would be better if the diagnostic routines |
6377 | used the file/line information embedded in the tree nodes rather | |
6378 | than globals. */ | |
6379 | if (cfun && EXPR_HAS_LOCATION (exp)) | |
6380 | { | |
6381 | location_t saved_location = input_location; | |
6382 | input_location = EXPR_LOCATION (exp); | |
6383 | emit_line_note (input_location); | |
6384 | ||
6385 | /* Record where the insns produced belong. */ | |
1ea463a2 | 6386 | record_block_change (TREE_BLOCK (exp)); |
6de9cd9a DN |
6387 | |
6388 | ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl); | |
6389 | ||
6390 | input_location = saved_location; | |
6391 | } | |
6392 | else | |
6393 | { | |
6394 | ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl); | |
6395 | } | |
6396 | ||
6397 | /* If using non-call exceptions, mark all insns that may trap. | |
6398 | expand_call() will mark CALL_INSNs before we get to this code, | |
6399 | but it doesn't handle libcalls, and these may trap. */ | |
6400 | if (rn >= 0) | |
6401 | { | |
6402 | rtx insn; | |
6403 | for (insn = next_real_insn (last); insn; | |
6404 | insn = next_real_insn (insn)) | |
6405 | { | |
6406 | if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) | |
6407 | /* If we want exceptions for non-call insns, any | |
6408 | may_trap_p instruction may throw. */ | |
6409 | && GET_CODE (PATTERN (insn)) != CLOBBER | |
6410 | && GET_CODE (PATTERN (insn)) != USE | |
6411 | && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn)))) | |
6412 | { | |
6413 | REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn), | |
6414 | REG_NOTES (insn)); | |
6415 | } | |
6416 | } | |
6417 | } | |
6418 | ||
6419 | return ret; | |
6420 | } | |
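/* Illustrative usage, not part of the original file: the common way to
   evaluate a tree for its value, letting the expander pick the mode and
   treating TARGET as a mere suggestion.  */
#if 0
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  /* For side effects only, pass const0_rtx as TARGET, as the "ignore"
     paths of expand_expr_real_1 below do.  */
#endif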
6421 | ||
6422 | static rtx | |
6423 | expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, | |
6424 | enum expand_modifier modifier, rtx *alt_rtl) | |
bbf6f052 | 6425 | { |
b3694847 | 6426 | rtx op0, op1, temp; |
bbf6f052 | 6427 | tree type = TREE_TYPE (exp); |
8df83eae | 6428 | int unsignedp; |
b3694847 SS |
6429 | enum machine_mode mode; |
6430 | enum tree_code code = TREE_CODE (exp); | |
bbf6f052 | 6431 | optab this_optab; |
68557e14 ML |
6432 | rtx subtarget, original_target; |
6433 | int ignore; | |
bbf6f052 RK |
6434 | tree context; |
6435 | ||
68557e14 | 6436 | mode = TYPE_MODE (type); |
8df83eae RK |
6437 | unsignedp = TYPE_UNSIGNED (type); |
6438 | ||
68557e14 | 6439 | /* Use subtarget as the target for operand 0 of a binary operation. */ |
296b4ed9 | 6440 | subtarget = get_subtarget (target); |
68557e14 ML |
6441 | original_target = target; |
6442 | ignore = (target == const0_rtx | |
6443 | || ((code == NON_LVALUE_EXPR || code == NOP_EXPR | |
6444 | || code == CONVERT_EXPR || code == REFERENCE_EXPR | |
ac79cd5a | 6445 | || code == COND_EXPR || code == VIEW_CONVERT_EXPR) |
68557e14 ML |
6446 | && TREE_CODE (type) == VOID_TYPE)); |
6447 | ||
dd27116b RK |
6448 | /* If we are going to ignore this result, we need only do something |
6449 | if there is a side-effect somewhere in the expression. If there | |
b50d17a1 RK |
6450 | is, short-circuit the most common cases here. Note that we must |
6451 | not call expand_expr with anything but const0_rtx in case this | |
6452 | is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */ | |
bbf6f052 | 6453 | |
dd27116b RK |
6454 | if (ignore) |
6455 | { | |
6456 | if (! TREE_SIDE_EFFECTS (exp)) | |
6457 | return const0_rtx; | |
6458 | ||
14a774a9 RK |
6459 | /* Ensure we reference a volatile object even if value is ignored, but |
6460 | don't do this if all we are doing is taking its address. */ | |
dd27116b RK |
6461 | if (TREE_THIS_VOLATILE (exp) |
6462 | && TREE_CODE (exp) != FUNCTION_DECL | |
14a774a9 RK |
6463 | && mode != VOIDmode && mode != BLKmode |
6464 | && modifier != EXPAND_CONST_ADDRESS) | |
dd27116b | 6465 | { |
37a08a29 | 6466 | temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier); |
3c0cb5de | 6467 | if (MEM_P (temp)) |
dd27116b RK |
6468 | temp = copy_to_reg (temp); |
6469 | return const0_rtx; | |
6470 | } | |
6471 | ||
14a774a9 RK |
6472 | if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF |
6473 | || code == INDIRECT_REF || code == BUFFER_REF) | |
37a08a29 RK |
6474 | return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
6475 | modifier); | |
6476 | ||
14a774a9 | 6477 | else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<' |
b4e3fabb | 6478 | || code == ARRAY_REF || code == ARRAY_RANGE_REF) |
dd27116b | 6479 | { |
37a08a29 RK |
6480 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
6481 | expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); | |
dd27116b RK |
6482 | return const0_rtx; |
6483 | } | |
6484 | else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR) | |
6485 | && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1))) | |
6486 | /* If the second operand has no side effects, just evaluate | |
0f41302f | 6487 | the first. */ |
37a08a29 RK |
6488 | return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
6489 | modifier); | |
14a774a9 RK |
6490 | else if (code == BIT_FIELD_REF) |
6491 | { | |
37a08a29 RK |
6492 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
6493 | expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); | |
6494 | expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier); | |
14a774a9 RK |
6495 | return const0_rtx; |
6496 | } | |
37a08a29 | 6497 | |
90764a87 | 6498 | target = 0; |
dd27116b | 6499 | } |
bbf6f052 | 6500 | |
e44842fe RK |
6501 | /* If we will do cse, generate all results into pseudo registers |
6502 | since 1) that allows cse to find more things | |
6503 | and 2) otherwise cse could produce an insn the machine | |
4977bab6 ZW |
6504 | cannot support. An exception is a CONSTRUCTOR into a multi-word |
6505 | MEM: that's much more likely to be most efficient into the MEM. | |
6506 | Another is a CALL_EXPR which must return in memory. */ | |
e44842fe | 6507 | |
bbf6f052 | 6508 | if (! cse_not_expected && mode != BLKmode && target |
f8cfc6aa | 6509 | && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER) |
4977bab6 | 6510 | && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD) |
61f71b34 | 6511 | && ! (code == CALL_EXPR && aggregate_value_p (exp, exp))) |
8403445a | 6512 | target = 0; |
bbf6f052 | 6513 | |
bbf6f052 RK |
6514 | switch (code) |
6515 | { | |
6516 | case LABEL_DECL: | |
b552441b RS |
6517 | { |
6518 | tree function = decl_function_context (exp); | |
c5c76735 | 6519 | |
6de9cd9a DN |
6520 | temp = label_rtx (exp); |
6521 | temp = gen_rtx_LABEL_REF (Pmode, temp); | |
6522 | ||
d0977240 | 6523 | if (function != current_function_decl |
6de9cd9a DN |
6524 | && function != 0) |
6525 | LABEL_REF_NONLOCAL_P (temp) = 1; | |
6526 | ||
6527 | temp = gen_rtx_MEM (FUNCTION_MODE, temp); | |
26fcb35a | 6528 | return temp; |
b552441b | 6529 | } |
bbf6f052 RK |
6530 | |
6531 | case PARM_DECL: | |
1877be45 | 6532 | if (!DECL_RTL_SET_P (exp)) |
bbf6f052 | 6533 | { |
ddd2d57e | 6534 | error ("%Jprior parameter's size depends on '%D'", exp, exp); |
4af3895e | 6535 | return CONST0_RTX (mode); |
bbf6f052 RK |
6536 | } |
6537 | ||
0f41302f | 6538 | /* ... fall through ... */ |
d6a5ac33 | 6539 | |
bbf6f052 | 6540 | case VAR_DECL: |
2dca20cd RS |
6541 | /* If a static var's type was incomplete when the decl was written, |
6542 | but the type is complete now, lay out the decl now. */ | |
ca06cfe6 RH |
6543 | if (DECL_SIZE (exp) == 0 |
6544 | && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp)) | |
2dca20cd | 6545 | && (TREE_STATIC (exp) || DECL_EXTERNAL (exp))) |
a46666a9 | 6546 | layout_decl (exp, 0); |
921b3427 | 6547 | |
0f41302f | 6548 | /* ... fall through ... */ |
d6a5ac33 | 6549 | |
2dca20cd | 6550 | case FUNCTION_DECL: |
bbf6f052 RK |
6551 | case RESULT_DECL: |
6552 | if (DECL_RTL (exp) == 0) | |
6553 | abort (); | |
d6a5ac33 | 6554 | |
e44842fe RK |
6555 | /* Ensure the variable is marked as used even if it doesn't go through |
6556 | a parser. If it hasn't been used yet, write out an external |
6557 | definition. */ | |
6558 | if (! TREE_USED (exp)) | |
6559 | { | |
6560 | assemble_external (exp); | |
6561 | TREE_USED (exp) = 1; | |
6562 | } | |
6563 | ||
dc6d66b3 RK |
6564 | /* Show we haven't gotten RTL for this yet. */ |
6565 | temp = 0; | |
6566 | ||
bbf6f052 RK |
6567 | /* Handle variables inherited from containing functions. */ |
6568 | context = decl_function_context (exp); | |
6569 | ||
bbf6f052 | 6570 | if (context != 0 && context != current_function_decl |
bbf6f052 | 6571 | /* If var is static, we don't need a static chain to access it. */ |
3c0cb5de | 6572 | && ! (MEM_P (DECL_RTL (exp)) |
bbf6f052 RK |
6573 | && CONSTANT_P (XEXP (DECL_RTL (exp), 0)))) |
6574 | { | |
6575 | rtx addr; | |
6576 | ||
6577 | /* Mark as non-local and addressable. */ | |
81feeecb | 6578 | DECL_NONLOCAL (exp) = 1; |
38ee6ed9 JM |
6579 | if (DECL_NO_STATIC_CHAIN (current_function_decl)) |
6580 | abort (); | |
ae2bcd98 | 6581 | lang_hooks.mark_addressable (exp); |
3c0cb5de | 6582 | if (!MEM_P (DECL_RTL (exp))) |
bbf6f052 RK |
6583 | abort (); |
6584 | addr = XEXP (DECL_RTL (exp), 0); | |
3c0cb5de | 6585 | if (MEM_P (addr)) |
792760b9 RK |
6586 | addr |
6587 | = replace_equiv_address (addr, | |
6588 | fix_lexical_addr (XEXP (addr, 0), exp)); | |
bbf6f052 RK |
6589 | else |
6590 | addr = fix_lexical_addr (addr, exp); | |
3bdf5ad1 | 6591 | |
792760b9 | 6592 | temp = replace_equiv_address (DECL_RTL (exp), addr); |
bbf6f052 | 6593 | } |
4af3895e | 6594 | |
bbf6f052 RK |
6595 | /* This is the case of an array whose size is to be determined |
6596 | from its initializer, while the initializer is still being parsed. | |
6597 | See expand_decl. */ | |
d6a5ac33 | 6598 | |
3c0cb5de | 6599 | else if (MEM_P (DECL_RTL (exp)) |
f8cfc6aa | 6600 | && REG_P (XEXP (DECL_RTL (exp), 0))) |
792760b9 | 6601 | temp = validize_mem (DECL_RTL (exp)); |
d6a5ac33 RK |
6602 | |
6603 | /* If DECL_RTL is memory, we are in the normal case and either | |
6604 | the address is not valid or it is not a register and -fforce-addr | |
6605 | is specified, get the address into a register. */ | |
6606 | ||
3c0cb5de | 6607 | else if (MEM_P (DECL_RTL (exp)) |
dc6d66b3 RK |
6608 | && modifier != EXPAND_CONST_ADDRESS |
6609 | && modifier != EXPAND_SUM | |
6610 | && modifier != EXPAND_INITIALIZER | |
6611 | && (! memory_address_p (DECL_MODE (exp), | |
6612 | XEXP (DECL_RTL (exp), 0)) | |
6613 | || (flag_force_addr | |
f8cfc6aa | 6614 | && !REG_P (XEXP (DECL_RTL (exp), 0))))) |
0fab64a3 MM |
6615 | { |
6616 | if (alt_rtl) | |
6617 | *alt_rtl = DECL_RTL (exp); | |
6618 | temp = replace_equiv_address (DECL_RTL (exp), | |
6619 | copy_rtx (XEXP (DECL_RTL (exp), 0))); | |
6620 | } | |
1499e0a8 | 6621 | |
dc6d66b3 | 6622 | /* If we got something, return it. But first, set the alignment |
04956a1a | 6623 | if the address is a register. */ |
dc6d66b3 RK |
6624 | if (temp != 0) |
6625 | { | |
3c0cb5de | 6626 | if (MEM_P (temp) && REG_P (XEXP (temp, 0))) |
bdb429a5 | 6627 | mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp)); |
dc6d66b3 RK |
6628 | |
6629 | return temp; | |
6630 | } | |
6631 | ||
1499e0a8 RK |
6632 | /* If the mode of DECL_RTL does not match that of the decl, it |
6633 | must be a promoted value. We return a SUBREG of the wanted mode, | |
6634 | but mark it so that we know that it was already extended. */ | |
6635 | ||
f8cfc6aa | 6636 | if (REG_P (DECL_RTL (exp)) |
7254c5fa | 6637 | && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp)) |
1499e0a8 | 6638 | { |
1499e0a8 RK |
6639 | /* Get the signedness used for this variable. Ensure we get the |
6640 | same mode we got when the variable was declared. */ | |
78911e8b | 6641 | if (GET_MODE (DECL_RTL (exp)) |
0fb7aeda | 6642 | != promote_mode (type, DECL_MODE (exp), &unsignedp, |
e8dcd824 | 6643 | (TREE_CODE (exp) == RESULT_DECL ? 1 : 0))) |
1499e0a8 RK |
6644 | abort (); |
6645 | ||
ddef6bc7 | 6646 | temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp)); |
1499e0a8 | 6647 | SUBREG_PROMOTED_VAR_P (temp) = 1; |
7879b81e | 6648 | SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); |
1499e0a8 RK |
6649 | return temp; |
6650 | } | |
6651 | ||
bbf6f052 RK |
6652 | return DECL_RTL (exp); |
6653 | ||
6654 | case INTEGER_CST: | |
d8a50944 | 6655 | temp = immed_double_const (TREE_INT_CST_LOW (exp), |
05bccae2 | 6656 | TREE_INT_CST_HIGH (exp), mode); |
bbf6f052 | 6657 | |
d8a50944 RH |
6658 | /* ??? If overflow is set, fold will have done an incomplete job, |
6659 | which can result in (plus xx (const_int 0)), which can get | |
6660 | simplified by validate_replace_rtx during virtual register | |
6661 | instantiation, which can result in unrecognizable insns. | |
6662 | Avoid this by forcing all overflows into registers. */ | |
c2e9dc85 RH |
6663 | if (TREE_CONSTANT_OVERFLOW (exp) |
6664 | && modifier != EXPAND_INITIALIZER) | |
d8a50944 RH |
6665 | temp = force_reg (mode, temp); |
6666 | ||
6667 | return temp; | |

    case VECTOR_CST:
      return const_vector_from_tree (exp);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (temp, 0))
	      || flag_force_addr))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;

    case SAVE_EXPR:
      {
	tree val = TREE_OPERAND (exp, 0);
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    if (GET_MODE (ret) == BLKmode)
	      abort ();

	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    TREE_OPERAND (exp, 0) = val;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }
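
      /* A small sketch of the contract here: after the first expansion
	 of a SAVE_EXPR, its operand has been replaced by an artificial
	 VAR_DECL whose DECL_RTL is the computed value, so a second

	     expand_expr (the_save_expr, ...)

	 simply re-reads that pseudo register (or constant) instead of
	 evaluating the operand, and its side effects, a second time.  */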

    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0)
	  = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

      /* These are lowered during gimplification, so we should never ever
	 see them here.  */
    case LOOP_EXPR:
    case EXIT_EXPR:
      abort ();

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case BIND_EXPR:
      {
	tree block = BIND_EXPR_BLOCK (exp);
	int mark_ends;

	/* If we're in functions-as-trees mode, this BIND_EXPR represents
	   the block, so we need to emit NOTE_INSN_BLOCK_* notes.  */
	mark_ends = (block != NULL_TREE);
	expand_start_bindings_and_block (mark_ends ? 0 : 2, block);

	/* If VARS have not yet been expanded, expand them now.  */
	expand_vars (BIND_EXPR_VARS (exp));

	/* TARGET was clobbered early in this function.  The correct
	   indicator of whether or not we need the value of this
	   expression is the IGNORE variable.  */
	temp = expand_expr (BIND_EXPR_BODY (exp),
			    ignore ? const0_rtx : target,
			    tmode, modifier);

	expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);

	return temp;
      }

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);

	if (modifier != EXPAND_WRITE)
	  {
	    tree t;

	    t = fold_read_from_constant_string (exp);
	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

    case ARRAY_REF:

#ifdef ENABLE_CHECKING
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();
#endif

      {
	tree array = TREE_OPERAND (exp, 0);
	tree low_bound = array_ref_low_bound (exp);
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	HOST_WIDE_INT i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */
	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */
	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
		 && targetm.binds_local_p (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
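
      /* For example, a sketch of the STRING_CST branch above: given

	     static const char foo[] = "bar";

	 a read of foo[2] finds the STRING_CST "bar" in DECL_INITIAL,
	 sees the index is within TREE_STRING_LENGTH, and folds the whole
	 reference to (const_int 114), i.e. 'r', with no load emitted.  */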

    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
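
      /* The masking above, as a plain-C sketch (assuming a 5-bit field
	 taken from a 32-bit constructor value w): an unsigned bitfield
	 is simply

	     w & ((1 << 5) - 1)

	 while a signed bitfield is sign-extended with a shift pair,

	     (w << (32 - 5)) >> (32 - 5)

	 where the right shift is the arithmetic one expand_shift emits
	 for a signed operation.  */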

    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET will have
	   to do.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	/* Otherwise, if this object is not in memory and we either have an
	   offset or a BLKmode result, put it there.  This case can't occur in
	   C, but can in Ada if we have unchecked conversion of an expression
	   from a scalar type to an array or record type or for an
	   ARRAY_RANGE_REF whose type is BLKmode.  */
	else if (!MEM_P (op0)
		 && (offset != 0
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
	  {
	    tree nt = build_qualified_type (TREE_TYPE (tem),
					    (TYPE_QUALS (TREE_TYPE (tem))
					     | TYPE_QUAL_CONST));
	    rtx memloc = assign_temp (nt, 1, 1, 1);

	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    if (!MEM_P (op0))
	      abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one-element arrays having the same mode as their element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (!MEM_P (op0)
		    || (target != 0 && !MEM_P (target))
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
	      {
		rtx new
		  = assign_stack_temp_for_type
		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low % bits_per_word);
	       the_word  = set [(index - rlo) / bits_per_word];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */

	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
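
      /* A minimal plain-C rendering of the algorithm described in the
	 comment above (a hypothetical helper, byte-granular to match the
	 BITS_PER_UNIT arithmetic, with the range checks left out):

	     static int
	     set_member (const unsigned char *set, int set_low, int index)
	     {
	       int rlo = set_low - (set_low % 8);
	       unsigned char the_word = set[(index - rlo) / 8];
	       int bit_index = index % 8;
	       return (the_word >> bit_index) & 1;
	     }

	 The expansion above additionally compares INDEX against the set
	 bounds and produces 0 for the out-of-range case.  */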

    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
				  CLEANUP_EH_ONLY (exp));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return lang_hooks.expand_expr (exp, original_target,
					   tmode, modifier,
					   alt_rtl);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM ? 2 : 0);

	  else if (REG_P (target))
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TYPE_UNSIGNED (inner_type));
	}

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are integral and within
	 a word, we can use gen_lowpart.  If neither is true, make sure the
	 operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	       && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;

    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
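
      /* For instance, a sketch of the case this enables: expanding
	 'p = &arr[3]' with 4-byte elements yields (symbol_ref "arr")
	 plus the constant 12, which plus_constant folds into the single
	 canonical form

	     (const (plus (symbol_ref "arr") (const_int 12)))

	 rather than an explicit add instruction, on machines where that
	 is a valid immediate address.  */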

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return simplify_gen_binary (PLUS, mode, op0, op1);

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle the case when both
	 operands are constant.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}

      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op1);
	}

      goto binop2;
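
      /* The conversion above reflects the RTL convention that a constant
	 subtrahend is never kept as a MINUS; e.g. 'x - 4' in SImode
	 becomes

	     (plus:SI (reg) (const_int -4))

	 so later passes need only match one shape of addition.  */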

    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1))));
	}
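
      /* E.g. for 'a[i]' with 4-byte elements expanded under EXPAND_SUM,
	 the index computation comes back as

	     (mult:SI (reg) (const_int 4))

	 (mode and scale illustrative), letting the caller fold it into a
	 scaled-index address such as
	 (plus (mult (reg) (const_int 4)) (symbol_ref "a")) rather than
	 emitting a separate multiply instruction.  */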

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE
				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE
				      (TREE_OPERAND
				       (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TYPE_UNSIGNED (TREE_TYPE
				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_UNSIGNED (TREE_TYPE
				     (TREE_OPERAND
				      (TREE_OPERAND (exp, 0), 0)))))))
	{
	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1),
				     NULL_RTX, &op0, &op1, 0);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return temp;
		}
	    }
	}
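
      /* The shape recognized above, in source terms (a hedged sketch,
	 with 'short' standing in for any half-width type):

	     short a, b;
	     int prod = (int) a * (int) b;

	 On a target with a 16x16->32 widening multiply this becomes one
	 mulhisi3-style instruction instead of two extensions plus a
	 full SImode multiply.  */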
eb698c58 RS |
7950 | expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
7951 | subtarget, &op0, &op1, 0); | |
bbf6f052 RK |
7952 | return expand_mult (mode, op0, op1, target, unsignedp); |
7953 | ||
7954 | case TRUNC_DIV_EXPR: | |
7955 | case FLOOR_DIV_EXPR: | |
7956 | case CEIL_DIV_EXPR: | |
7957 | case ROUND_DIV_EXPR: | |
7958 | case EXACT_DIV_EXPR: | |
8403445a AM |
7959 | if (modifier == EXPAND_STACK_PARM) |
7960 | target = 0; | |
bbf6f052 RK |
7961 | /* Possible optimization: compute the dividend with EXPAND_SUM |
7962 | then if the divisor is constant can optimize the case | |
7963 | where some terms of the dividend have coeffs divisible by it. */ | |
eb698c58 RS |
7964 | expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
7965 | subtarget, &op0, &op1, 0); | |
bbf6f052 RK |
7966 | return expand_divmod (0, code, mode, op0, op1, target, unsignedp); |
7967 | ||
7968 | case RDIV_EXPR: | |
b7e9703c JH |
7969 | /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal, |
7970 | saving an expensive divide. If not, combine will rebuild the |
7971 | original computation. */ |
7972 | if (flag_unsafe_math_optimizations && optimize && !optimize_size | |
ed7d44bc | 7973 | && TREE_CODE (type) == REAL_TYPE |
b7e9703c JH |
7974 | && !real_onep (TREE_OPERAND (exp, 0))) |
7975 | return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0), | |
7976 | build (RDIV_EXPR, type, | |
7977 | build_real (type, dconst1), | |
7978 | TREE_OPERAND (exp, 1))), | |
8e37cba8 | 7979 | target, tmode, modifier); |
ef89d648 | 7980 | this_optab = sdiv_optab; |
bbf6f052 RK |
7981 | goto binop; |
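/* An illustrative sketch (editorial addition, not original source): the
   a/b -> a*(1/b) rewrite above pays off when the reciprocal is loop
   invariant. With -funsafe-math-optimizations,

       for (i = 0; i < n; i++)
         out[i] = in[i] / scale;

   is expanded as out[i] = in[i] * (1.0 / scale), so CSE can compute
   1.0 / scale once; if no second use turns up, combine rebuilds the
   original division. */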
7982 | ||
7983 | case TRUNC_MOD_EXPR: | |
7984 | case FLOOR_MOD_EXPR: | |
7985 | case CEIL_MOD_EXPR: | |
7986 | case ROUND_MOD_EXPR: | |
8403445a AM |
7987 | if (modifier == EXPAND_STACK_PARM) |
7988 | target = 0; | |
eb698c58 RS |
7989 | expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
7990 | subtarget, &op0, &op1, 0); | |
bbf6f052 RK |
7991 | return expand_divmod (1, code, mode, op0, op1, target, unsignedp); |
7992 | ||
7993 | case FIX_ROUND_EXPR: | |
7994 | case FIX_FLOOR_EXPR: | |
7995 | case FIX_CEIL_EXPR: | |
7996 | abort (); /* Not used for C. */ | |
7997 | ||
7998 | case FIX_TRUNC_EXPR: | |
906c4e36 | 7999 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
8403445a | 8000 | if (target == 0 || modifier == EXPAND_STACK_PARM) |
bbf6f052 RK |
8001 | target = gen_reg_rtx (mode); |
8002 | expand_fix (target, op0, unsignedp); | |
8003 | return target; | |
8004 | ||
8005 | case FLOAT_EXPR: | |
906c4e36 | 8006 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
8403445a | 8007 | if (target == 0 || modifier == EXPAND_STACK_PARM) |
bbf6f052 RK |
8008 | target = gen_reg_rtx (mode); |
8009 | /* expand_float can't figure out what to do if FROM has VOIDmode. | |
8010 | So give it the correct mode. With -O, cse will optimize this. */ | |
8011 | if (GET_MODE (op0) == VOIDmode) | |
8012 | op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), | |
8013 | op0); | |
8014 | expand_float (target, op0, | |
8df83eae | 8015 | TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
bbf6f052 RK |
8016 | return target; |
8017 | ||
8018 | case NEGATE_EXPR: | |
5b22bee8 | 8019 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); |
8403445a AM |
8020 | if (modifier == EXPAND_STACK_PARM) |
8021 | target = 0; | |
91ce572a | 8022 | temp = expand_unop (mode, |
0fb7aeda KH |
8023 | ! unsignedp && flag_trapv |
8024 | && (GET_MODE_CLASS(mode) == MODE_INT) | |
8025 | ? negv_optab : neg_optab, op0, target, 0); | |
bbf6f052 RK |
8026 | if (temp == 0) |
8027 | abort (); | |
8028 | return temp; | |
8029 | ||
8030 | case ABS_EXPR: | |
8031 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
8403445a AM |
8032 | if (modifier == EXPAND_STACK_PARM) |
8033 | target = 0; | |
bbf6f052 | 8034 | |
11017cc7 | 8035 | /* ABS_EXPR is not valid for complex arguments. */ |
d6a5ac33 RK |
8036 | if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT |
8037 | || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT) | |
11017cc7 | 8038 | abort (); |
2d7050fd | 8039 | |
bbf6f052 RK |
8040 | /* Unsigned abs is simply the operand. Testing here means we don't |
8041 | risk generating incorrect code below. */ | |
8df83eae | 8042 | if (TYPE_UNSIGNED (type)) |
bbf6f052 RK |
8043 | return op0; |
8044 | ||
91ce572a | 8045 | return expand_abs (mode, op0, target, unsignedp, |
e5e809f4 | 8046 | safe_from_p (target, TREE_OPERAND (exp, 0), 1)); |
bbf6f052 RK |
8047 | |
8048 | case MAX_EXPR: | |
8049 | case MIN_EXPR: | |
8050 | target = original_target; | |
8403445a AM |
8051 | if (target == 0 |
8052 | || modifier == EXPAND_STACK_PARM | |
3c0cb5de | 8053 | || (MEM_P (target) && MEM_VOLATILE_P (target)) |
d6a5ac33 | 8054 | || GET_MODE (target) != mode |
f8cfc6aa | 8055 | || (REG_P (target) |
bbf6f052 RK |
8056 | && REGNO (target) < FIRST_PSEUDO_REGISTER)) |
8057 | target = gen_reg_rtx (mode); | |
eb698c58 RS |
8058 | expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
8059 | target, &op0, &op1, 0); | |
bbf6f052 RK |
8060 | |
8061 | /* First try to do it with a special MIN or MAX instruction. | |
8062 | If that does not win, use a conditional jump to select the proper | |
8063 | value. */ | |
288dc1ea | 8064 | this_optab = (unsignedp |
bbf6f052 RK |
8065 | ? (code == MIN_EXPR ? umin_optab : umax_optab) |
8066 | : (code == MIN_EXPR ? smin_optab : smax_optab)); | |
8067 | ||
8068 | temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, | |
8069 | OPTAB_WIDEN); | |
8070 | if (temp != 0) | |
8071 | return temp; | |
8072 | ||
fa2981d8 JW |
8073 | /* At this point, a MEM target is no longer useful; we will get better |
8074 | code without it. */ | |
3a94c984 | 8075 | |
3c0cb5de | 8076 | if (MEM_P (target)) |
fa2981d8 JW |
8077 | target = gen_reg_rtx (mode); |
8078 | ||
e3be1116 RS |
8079 | /* If op1 was placed in target, swap op0 and op1. */ |
8080 | if (target != op0 && target == op1) | |
8081 | { | |
8082 | rtx tem = op0; | |
8083 | op0 = op1; | |
8084 | op1 = tem; | |
8085 | } | |
8086 | ||
ee456b1c RK |
8087 | if (target != op0) |
8088 | emit_move_insn (target, op0); | |
d6a5ac33 | 8089 | |
bbf6f052 | 8090 | op0 = gen_label_rtx (); |
d6a5ac33 | 8091 | |
f81497d9 RS |
8092 | /* If this mode is an integer too wide to compare properly, |
8093 | compare word by word. Rely on cse to optimize constant cases. */ | |
1eb8759b RH |
8094 | if (GET_MODE_CLASS (mode) == MODE_INT |
8095 | && ! can_compare_p (GE, mode, ccp_jump)) | |
bbf6f052 | 8096 | { |
f81497d9 | 8097 | if (code == MAX_EXPR) |
288dc1ea EB |
8098 | do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1, |
8099 | NULL_RTX, op0); | |
bbf6f052 | 8100 | else |
288dc1ea EB |
8101 | do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target, |
8102 | NULL_RTX, op0); | |
bbf6f052 | 8103 | } |
f81497d9 RS |
8104 | else |
8105 | { | |
b30f05db | 8106 | do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE, |
288dc1ea | 8107 | unsignedp, mode, NULL_RTX, NULL_RTX, op0); |
f81497d9 | 8108 | } |
b30f05db | 8109 | emit_move_insn (target, op1); |
bbf6f052 RK |
8110 | emit_label (op0); |
8111 | return target; | |
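/* An illustrative sketch (editorial addition, not original source): when
   no min/max pattern matches, the fallback above emits, for MAX_EXPR,

       target = op0;
       if (target >= op1) goto done;
       target = op1;
     done:

   (LE instead of GE for MIN_EXPR), switching to the word-by-word
   comparison when the integer mode is too wide for can_compare_p. */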
8112 | ||
bbf6f052 RK |
8113 | case BIT_NOT_EXPR: |
8114 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
8403445a AM |
8115 | if (modifier == EXPAND_STACK_PARM) |
8116 | target = 0; | |
bbf6f052 RK |
8117 | temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); |
8118 | if (temp == 0) | |
8119 | abort (); | |
8120 | return temp; | |
8121 | ||
d6a5ac33 RK |
8122 | /* ??? Can optimize bitwise operations with one arg constant. |
8123 | Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b) | |
8124 | and (a bitwise1 b) bitwise2 b (etc) | |
8125 | but that is probably not worth while. */ | |
8126 | ||
8127 | /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two | |
8128 | boolean values when we want in all cases to compute both of them. In | |
8129 | general it is fastest to do TRUTH_AND_EXPR by computing both operands | |
8130 | as actual zero-or-1 values and then bitwise anding. In cases where | |
8131 | there cannot be any side effects, better code would be made by | |
8132 | treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is | |
8133 | how to recognize those cases. */ | |
8134 | ||
bbf6f052 RK |
8135 | case TRUTH_AND_EXPR: |
8136 | case BIT_AND_EXPR: | |
8137 | this_optab = and_optab; | |
8138 | goto binop; | |
8139 | ||
bbf6f052 RK |
8140 | case TRUTH_OR_EXPR: |
8141 | case BIT_IOR_EXPR: | |
8142 | this_optab = ior_optab; | |
8143 | goto binop; | |
8144 | ||
874726a8 | 8145 | case TRUTH_XOR_EXPR: |
bbf6f052 RK |
8146 | case BIT_XOR_EXPR: |
8147 | this_optab = xor_optab; | |
8148 | goto binop; | |
8149 | ||
8150 | case LSHIFT_EXPR: | |
8151 | case RSHIFT_EXPR: | |
8152 | case LROTATE_EXPR: | |
8153 | case RROTATE_EXPR: | |
e5e809f4 | 8154 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
bbf6f052 | 8155 | subtarget = 0; |
8403445a AM |
8156 | if (modifier == EXPAND_STACK_PARM) |
8157 | target = 0; | |
bbf6f052 RK |
8158 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); |
8159 | return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target, | |
8160 | unsignedp); | |
8161 | ||
d6a5ac33 RK |
8162 | /* Could determine the answer when only additive constants differ. Also, |
8163 | the addition of one can be handled by changing the condition. */ | |
bbf6f052 RK |
8164 | case LT_EXPR: |
8165 | case LE_EXPR: | |
8166 | case GT_EXPR: | |
8167 | case GE_EXPR: | |
8168 | case EQ_EXPR: | |
8169 | case NE_EXPR: | |
1eb8759b RH |
8170 | case UNORDERED_EXPR: |
8171 | case ORDERED_EXPR: | |
8172 | case UNLT_EXPR: | |
8173 | case UNLE_EXPR: | |
8174 | case UNGT_EXPR: | |
8175 | case UNGE_EXPR: | |
8176 | case UNEQ_EXPR: | |
d1a7edaf | 8177 | case LTGT_EXPR: |
8403445a AM |
8178 | temp = do_store_flag (exp, |
8179 | modifier != EXPAND_STACK_PARM ? target : NULL_RTX, | |
8180 | tmode != VOIDmode ? tmode : mode, 0); | |
bbf6f052 RK |
8181 | if (temp != 0) |
8182 | return temp; | |
d6a5ac33 | 8183 | |
0f41302f | 8184 | /* For foo != 0, load foo, and if it is nonzero load 1 instead. */ |
bbf6f052 RK |
8185 | if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1)) |
8186 | && original_target | |
f8cfc6aa | 8187 | && REG_P (original_target) |
bbf6f052 RK |
8188 | && (GET_MODE (original_target) |
8189 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) | |
8190 | { | |
d6a5ac33 RK |
8191 | temp = expand_expr (TREE_OPERAND (exp, 0), original_target, |
8192 | VOIDmode, 0); | |
8193 | ||
c0a3eeac UW |
8194 | /* If temp is constant, we can just compute the result. */ |
8195 | if (GET_CODE (temp) == CONST_INT) | |
8196 | { | |
8197 | if (INTVAL (temp) != 0) | |
8198 | emit_move_insn (target, const1_rtx); | |
8199 | else | |
8200 | emit_move_insn (target, const0_rtx); | |
8201 | ||
8202 | return target; | |
8203 | } | |
8204 | ||
bbf6f052 | 8205 | if (temp != original_target) |
c0a3eeac UW |
8206 | { |
8207 | enum machine_mode mode1 = GET_MODE (temp); | |
8208 | if (mode1 == VOIDmode) | |
8209 | mode1 = tmode != VOIDmode ? tmode : mode; | |
0fb7aeda | 8210 | |
c0a3eeac UW |
8211 | temp = copy_to_mode_reg (mode1, temp); |
8212 | } | |
d6a5ac33 | 8213 | |
bbf6f052 | 8214 | op1 = gen_label_rtx (); |
c5d5d461 | 8215 | emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX, |
a06ef755 | 8216 | GET_MODE (temp), unsignedp, op1); |
bbf6f052 RK |
8217 | emit_move_insn (temp, const1_rtx); |
8218 | emit_label (op1); | |
8219 | return temp; | |
8220 | } | |
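/* An illustrative sketch (editorial addition, not original source): when
   no store-flag instruction applies, the special case above expands
   r = (foo != 0), with r a suitable pseudo, as

       r = foo;
       if (r == 0) goto skip;
       r = 1;
     skip:

   since r already holds the desired value 0 when foo is zero. */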
d6a5ac33 | 8221 | |
bbf6f052 RK |
8222 | /* If no set-flag instruction, must generate a conditional |
8223 | store into a temporary variable. Drop through | |
8224 | and handle this like && and ||. */ | |
8225 | ||
8226 | case TRUTH_ANDIF_EXPR: | |
8227 | case TRUTH_ORIF_EXPR: | |
e44842fe | 8228 | if (! ignore |
8403445a AM |
8229 | && (target == 0 |
8230 | || modifier == EXPAND_STACK_PARM | |
8231 | || ! safe_from_p (target, exp, 1) | |
e44842fe RK |
8232 | /* Make sure we don't have a hard reg (such as function's return |
8233 | value) live across basic blocks, if not optimizing. */ | |
f8cfc6aa | 8234 | || (!optimize && REG_P (target) |
e44842fe | 8235 | && REGNO (target) < FIRST_PSEUDO_REGISTER))) |
bbf6f052 | 8236 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); |
e44842fe RK |
8237 | |
8238 | if (target) | |
8239 | emit_clr_insn (target); | |
8240 | ||
bbf6f052 RK |
8241 | op1 = gen_label_rtx (); |
8242 | jumpifnot (exp, op1); | |
e44842fe RK |
8243 | |
8244 | if (target) | |
8245 | emit_0_to_1_insn (target); | |
8246 | ||
bbf6f052 | 8247 | emit_label (op1); |
e44842fe | 8248 | return ignore ? const0_rtx : target; |
bbf6f052 RK |
8249 | |
8250 | case TRUTH_NOT_EXPR: | |
8403445a AM |
8251 | if (modifier == EXPAND_STACK_PARM) |
8252 | target = 0; | |
bbf6f052 RK |
8253 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0); |
8254 | /* The parser is careful to generate TRUTH_NOT_EXPR | |
8255 | only with operands that are always zero or one. */ | |
906c4e36 | 8256 | temp = expand_binop (mode, xor_optab, op0, const1_rtx, |
bbf6f052 RK |
8257 | target, 1, OPTAB_LIB_WIDEN); |
8258 | if (temp == 0) | |
8259 | abort (); | |
8260 | return temp; | |
8261 | ||
8262 | case COMPOUND_EXPR: | |
8263 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); | |
8264 | emit_queue (); | |
0fab64a3 MM |
8265 | return expand_expr_real (TREE_OPERAND (exp, 1), |
8266 | (ignore ? const0_rtx : target), | |
8267 | VOIDmode, modifier, alt_rtl); | |
bbf6f052 | 8268 | |
6de9cd9a DN |
8269 | case STATEMENT_LIST: |
8270 | { | |
8271 | tree_stmt_iterator iter; | |
8272 | ||
8273 | if (!ignore) | |
8274 | abort (); | |
8275 | ||
8276 | for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter)) | |
8277 | expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier); | |
8278 | } | |
8279 | return const0_rtx; | |
8280 | ||
bbf6f052 | 8281 | case COND_EXPR: |
6de9cd9a DN |
8282 | /* If it's void, we don't need to worry about computing a value. */ |
8283 | if (VOID_TYPE_P (TREE_TYPE (exp))) | |
8284 | { | |
8285 | tree pred = TREE_OPERAND (exp, 0); | |
8286 | tree then_ = TREE_OPERAND (exp, 1); | |
8287 | tree else_ = TREE_OPERAND (exp, 2); | |
8288 | ||
8289 | /* If we do not have any pending cleanups or stack_levels | |
8290 | to restore, and at least one arm of the COND_EXPR is a | |
8291 | GOTO_EXPR to a local label, then we can emit more efficient | |
8292 | code by using jumpif/jumpifnot instead of the 'if' machinery. */ | |
8293 | if (! optimize | |
8294 | || containing_blocks_have_cleanups_or_stack_level ()) | |
8295 | ; | |
8296 | else if (TREE_CODE (then_) == GOTO_EXPR | |
8297 | && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL) | |
8298 | { | |
8299 | jumpif (pred, label_rtx (GOTO_DESTINATION (then_))); | |
8300 | return expand_expr (else_, const0_rtx, VOIDmode, 0); | |
8301 | } | |
8302 | else if (TREE_CODE (else_) == GOTO_EXPR | |
8303 | && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL) | |
8304 | { | |
8305 | jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_))); | |
8306 | return expand_expr (then_, const0_rtx, VOIDmode, 0); | |
8307 | } | |
8308 | ||
8309 | /* Just use the 'if' machinery. */ | |
8310 | expand_start_cond (pred, 0); | |
8311 | start_cleanup_deferral (); | |
8312 | expand_expr (then_, const0_rtx, VOIDmode, 0); | |
8313 | ||
8314 | exp = else_; | |
8315 | ||
8316 | /* Iterate over 'else if's instead of recursing. */ | |
8317 | for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2)) | |
8318 | { | |
8319 | expand_start_else (); | |
8320 | if (EXPR_HAS_LOCATION (exp)) | |
8321 | { | |
8322 | emit_line_note (EXPR_LOCATION (exp)); | |
1ea463a2 | 8323 | record_block_change (TREE_BLOCK (exp)); |
6de9cd9a DN |
8324 | } |
8325 | expand_elseif (TREE_OPERAND (exp, 0)); | |
8326 | expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0); | |
8327 | } | |
8328 | /* Don't emit the jump and label if there's no 'else' clause. */ | |
8329 | if (TREE_SIDE_EFFECTS (exp)) | |
8330 | { | |
8331 | expand_start_else (); | |
8332 | expand_expr (exp, const0_rtx, VOIDmode, 0); | |
8333 | } | |
8334 | end_cleanup_deferral (); | |
8335 | expand_end_cond (); | |
8336 | return const0_rtx; | |
8337 | } | |
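/* An illustrative sketch (editorial addition, not original source): for
   a void COND_EXPR with a local-goto arm, e.g. the tree built for

       if (p) goto L;

   the shortcut above emits a single conditional jump via
   jumpif (p, L) instead of the full expand_start_cond machinery. */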
8338 | ||
ac01eace RK |
8339 | /* If we would have a "singleton" (see below) were it not for a |
8340 | conversion in each arm, bring that conversion back out. */ | |
8341 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR | |
8342 | && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR | |
8343 | && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) | |
8344 | == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0)))) | |
8345 | { | |
d6edb99e ZW |
8346 | tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0); |
8347 | tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0); | |
8348 | ||
8349 | if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2' | |
8350 | && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0)) | |
8351 | || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2' | |
8352 | && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)) | |
8353 | || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1' | |
8354 | && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0)) | |
8355 | || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1' | |
8356 | && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))) | |
ac01eace | 8357 | return expand_expr (build1 (NOP_EXPR, type, |
d6edb99e | 8358 | build (COND_EXPR, TREE_TYPE (iftrue), |
ac01eace | 8359 | TREE_OPERAND (exp, 0), |
d6edb99e | 8360 | iftrue, iffalse)), |
ac01eace RK |
8361 | target, tmode, modifier); |
8362 | } | |
8363 | ||
bbf6f052 RK |
8364 | { |
8365 | /* Note that COND_EXPRs whose type is a structure or union | |
8366 | are required to be constructed to contain assignments of | |
8367 | a temporary variable, so that we can evaluate them here | |
8368 | for side effect only. If type is void, we must do likewise. */ | |
8369 | ||
8370 | /* If an arm of the branch requires a cleanup, | |
8371 | only that cleanup is performed. */ | |
8372 | ||
8373 | tree singleton = 0; | |
8374 | tree binary_op = 0, unary_op = 0; | |
bbf6f052 RK |
8375 | |
8376 | /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and | |
8377 | convert it to our mode, if necessary. */ | |
8378 | if (integer_onep (TREE_OPERAND (exp, 1)) | |
8379 | && integer_zerop (TREE_OPERAND (exp, 2)) | |
8380 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<') | |
8381 | { | |
dd27116b RK |
8382 | if (ignore) |
8383 | { | |
8384 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, | |
37a08a29 | 8385 | modifier); |
dd27116b RK |
8386 | return const0_rtx; |
8387 | } | |
8388 | ||
8403445a AM |
8389 | if (modifier == EXPAND_STACK_PARM) |
8390 | target = 0; | |
37a08a29 | 8391 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier); |
bbf6f052 RK |
8392 | if (GET_MODE (op0) == mode) |
8393 | return op0; | |
d6a5ac33 | 8394 | |
bbf6f052 RK |
8395 | if (target == 0) |
8396 | target = gen_reg_rtx (mode); | |
8397 | convert_move (target, op0, unsignedp); | |
8398 | return target; | |
8399 | } | |
8400 | ||
ac01eace RK |
8401 | /* Check for X ? A + B : A. If we have this, we can copy A to the |
8402 | output and conditionally add B. Similarly for unary operations. | |
8403 | Don't do this if X has side-effects because those side effects | |
8404 | might affect A or B and the "?" operation is a sequence point in | |
8405 | ANSI. (operand_equal_p tests for side effects.) */ | |
bbf6f052 RK |
8406 | |
8407 | if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2' | |
8408 | && operand_equal_p (TREE_OPERAND (exp, 2), | |
8409 | TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0)) | |
8410 | singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1); | |
8411 | else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2' | |
8412 | && operand_equal_p (TREE_OPERAND (exp, 1), | |
8413 | TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0)) | |
8414 | singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2); | |
8415 | else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1' | |
8416 | && operand_equal_p (TREE_OPERAND (exp, 2), | |
8417 | TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0)) | |
8418 | singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1); | |
8419 | else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1' | |
8420 | && operand_equal_p (TREE_OPERAND (exp, 1), | |
8421 | TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0)) | |
8422 | singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2); | |
8423 | ||
01c8a7c8 RK |
8424 | /* If we are not to produce a result, we have no target. Otherwise, |
8425 | if a target was specified use it; it will not be used as an | |
3a94c984 | 8426 | intermediate target unless it is safe. If no target, use a |
01c8a7c8 RK |
8427 | temporary. */ |
8428 | ||
8429 | if (ignore) | |
8430 | temp = 0; | |
8403445a AM |
8431 | else if (modifier == EXPAND_STACK_PARM) |
8432 | temp = assign_temp (type, 0, 0, 1); | |
01c8a7c8 | 8433 | else if (original_target |
e5e809f4 | 8434 | && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1) |
f8cfc6aa | 8435 | || (singleton && REG_P (original_target) |
01c8a7c8 RK |
8436 | && REGNO (original_target) >= FIRST_PSEUDO_REGISTER |
8437 | && original_target == var_rtx (singleton))) | |
8438 | && GET_MODE (original_target) == mode | |
7c00d1fe RK |
8439 | #ifdef HAVE_conditional_move |
8440 | && (! can_conditionally_move_p (mode) | |
f8cfc6aa | 8441 | || REG_P (original_target) |
7c00d1fe RK |
8442 | || TREE_ADDRESSABLE (type)) |
8443 | #endif | |
3c0cb5de | 8444 | && (!MEM_P (original_target) |
8125d7e9 | 8445 | || TREE_ADDRESSABLE (type))) |
01c8a7c8 RK |
8446 | temp = original_target; |
8447 | else if (TREE_ADDRESSABLE (type)) | |
8448 | abort (); | |
8449 | else | |
8450 | temp = assign_temp (type, 0, 0, 1); | |
8451 | ||
ac01eace RK |
8452 | /* If we had X ? A + C : A, with C a constant power of 2, and we can |
8453 | do the test of X as a store-flag operation, do this as | |
8454 | A + ((X != 0) << log C). Similarly for other simple binary | |
8455 | operators. Only do this for C == 1 if BRANCH_COST is low. */ |
dd27116b | 8456 | if (temp && singleton && binary_op |
bbf6f052 RK |
8457 | && (TREE_CODE (binary_op) == PLUS_EXPR |
8458 | || TREE_CODE (binary_op) == MINUS_EXPR | |
8459 | || TREE_CODE (binary_op) == BIT_IOR_EXPR | |
9fbd9f58 | 8460 | || TREE_CODE (binary_op) == BIT_XOR_EXPR) |
ac01eace RK |
8461 | && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1)) |
8462 | : integer_onep (TREE_OPERAND (binary_op, 1))) | |
bbf6f052 RK |
8463 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<') |
8464 | { | |
8465 | rtx result; | |
61f6c84f | 8466 | tree cond; |
91ce572a | 8467 | optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR |
0fb7aeda KH |
8468 | ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op)) |
8469 | ? addv_optab : add_optab) | |
8470 | : TREE_CODE (binary_op) == MINUS_EXPR | |
8471 | ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op)) | |
8472 | ? subv_optab : sub_optab) | |
8473 | : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab | |
8474 | : xor_optab); | |
bbf6f052 | 8475 | |
61f6c84f | 8476 | /* If we had X ? A : A + 1, do this as A + (X == 0). */ |
bbf6f052 | 8477 | if (singleton == TREE_OPERAND (exp, 1)) |
61f6c84f JJ |
8478 | cond = invert_truthvalue (TREE_OPERAND (exp, 0)); |
8479 | else | |
8480 | cond = TREE_OPERAND (exp, 0); | |
bbf6f052 | 8481 | |
61f6c84f JJ |
8482 | result = do_store_flag (cond, (safe_from_p (temp, singleton, 1) |
8483 | ? temp : NULL_RTX), | |
bbf6f052 RK |
8484 | mode, BRANCH_COST <= 1); |
8485 | ||
ac01eace RK |
8486 | if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1))) |
8487 | result = expand_shift (LSHIFT_EXPR, mode, result, | |
8488 | build_int_2 (tree_log2 | |
8489 | (TREE_OPERAND | |
8490 | (binary_op, 1)), | |
8491 | 0), | |
e5e809f4 | 8492 | (safe_from_p (temp, singleton, 1) |
ac01eace RK |
8493 | ? temp : NULL_RTX), 0); |
8494 | ||
bbf6f052 RK |
8495 | if (result) |
8496 | { | |
906c4e36 | 8497 | op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
8498 | return expand_binop (mode, boptab, op1, result, temp, |
8499 | unsignedp, OPTAB_LIB_WIDEN); | |
8500 | } | |
bbf6f052 | 8501 | } |
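/* An illustrative sketch (editorial addition, not original source): for

       r = x ? a + 4 : a;

   the store-flag path above computes r = a + ((x != 0) << 2) with no
   branch; for x ? a : a + 4 the condition is inverted first so the
   same form applies. */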
3a94c984 | 8502 | |
dabf8373 | 8503 | do_pending_stack_adjust (); |
bbf6f052 RK |
8504 | NO_DEFER_POP; |
8505 | op0 = gen_label_rtx (); | |
8506 | ||
8507 | if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))) | |
8508 | { | |
8509 | if (temp != 0) | |
8510 | { | |
8511 | /* If the target conflicts with the other operand of the | |
8512 | binary op, we can't use it. Also, we can't use the target | |
8513 | if it is a hard register, because evaluating the condition | |
8514 | might clobber it. */ | |
8515 | if ((binary_op | |
e5e809f4 | 8516 | && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1)) |
f8cfc6aa | 8517 | || (REG_P (temp) |
bbf6f052 RK |
8518 | && REGNO (temp) < FIRST_PSEUDO_REGISTER)) |
8519 | temp = gen_reg_rtx (mode); | |
8403445a AM |
8520 | store_expr (singleton, temp, |
8521 | modifier == EXPAND_STACK_PARM ? 2 : 0); | |
bbf6f052 RK |
8522 | } |
8523 | else | |
906c4e36 | 8524 | expand_expr (singleton, |
2937cf87 | 8525 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
8526 | if (singleton == TREE_OPERAND (exp, 1)) |
8527 | jumpif (TREE_OPERAND (exp, 0), op0); | |
8528 | else | |
8529 | jumpifnot (TREE_OPERAND (exp, 0), op0); | |
8530 | ||
956d6950 | 8531 | start_cleanup_deferral (); |
bbf6f052 RK |
8532 | if (binary_op && temp == 0) |
8533 | /* Just touch the other operand. */ | |
8534 | expand_expr (TREE_OPERAND (binary_op, 1), | |
906c4e36 | 8535 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
8536 | else if (binary_op) |
8537 | store_expr (build (TREE_CODE (binary_op), type, | |
8538 | make_tree (type, temp), | |
8539 | TREE_OPERAND (binary_op, 1)), | |
8403445a | 8540 | temp, modifier == EXPAND_STACK_PARM ? 2 : 0); |
bbf6f052 RK |
8541 | else |
8542 | store_expr (build1 (TREE_CODE (unary_op), type, | |
8543 | make_tree (type, temp)), | |
8403445a | 8544 | temp, modifier == EXPAND_STACK_PARM ? 2 : 0); |
bbf6f052 | 8545 | op1 = op0; |
bbf6f052 | 8546 | } |
bbf6f052 RK |
8547 | /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any |
8548 | comparison operator. If we have one of these cases, set the | |
8549 | output to A, branch on A (cse will merge these two references), | |
8550 | then set the output to FOO. */ | |
8551 | else if (temp | |
8552 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<' | |
8553 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) | |
8554 | && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
8555 | TREE_OPERAND (exp, 1), 0) | |
e9a25f70 JL |
8556 | && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)) |
8557 | || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR) | |
e5e809f4 | 8558 | && safe_from_p (temp, TREE_OPERAND (exp, 2), 1)) |
bbf6f052 | 8559 | { |
f8cfc6aa | 8560 | if (REG_P (temp) |
3a94c984 | 8561 | && REGNO (temp) < FIRST_PSEUDO_REGISTER) |
bbf6f052 | 8562 | temp = gen_reg_rtx (mode); |
8403445a AM |
8563 | store_expr (TREE_OPERAND (exp, 1), temp, |
8564 | modifier == EXPAND_STACK_PARM ? 2 : 0); | |
bbf6f052 | 8565 | jumpif (TREE_OPERAND (exp, 0), op0); |
5dab5552 | 8566 | |
956d6950 | 8567 | start_cleanup_deferral (); |
c37b68d4 RS |
8568 | if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node) |
8569 | store_expr (TREE_OPERAND (exp, 2), temp, | |
8570 | modifier == EXPAND_STACK_PARM ? 2 : 0); | |
8571 | else | |
8572 | expand_expr (TREE_OPERAND (exp, 2), | |
8573 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); | |
bbf6f052 RK |
8574 | op1 = op0; |
8575 | } | |
8576 | else if (temp | |
8577 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<' | |
8578 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) | |
8579 | && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
8580 | TREE_OPERAND (exp, 2), 0) | |
e9a25f70 JL |
8581 | && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)) |
8582 | || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR) | |
e5e809f4 | 8583 | && safe_from_p (temp, TREE_OPERAND (exp, 1), 1)) |
bbf6f052 | 8584 | { |
f8cfc6aa | 8585 | if (REG_P (temp) |
3a94c984 | 8586 | && REGNO (temp) < FIRST_PSEUDO_REGISTER) |
bbf6f052 | 8587 | temp = gen_reg_rtx (mode); |
8403445a AM |
8588 | store_expr (TREE_OPERAND (exp, 2), temp, |
8589 | modifier == EXPAND_STACK_PARM ? 2 : 0); | |
bbf6f052 | 8590 | jumpifnot (TREE_OPERAND (exp, 0), op0); |
5dab5552 | 8591 | |
956d6950 | 8592 | start_cleanup_deferral (); |
c37b68d4 RS |
8593 | if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node) |
8594 | store_expr (TREE_OPERAND (exp, 1), temp, | |
8595 | modifier == EXPAND_STACK_PARM ? 2 : 0); | |
8596 | else | |
8597 | expand_expr (TREE_OPERAND (exp, 1), | |
8598 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); | |
bbf6f052 RK |
8599 | op1 = op0; |
8600 | } | |
8601 | else | |
8602 | { | |
8603 | op1 = gen_label_rtx (); | |
8604 | jumpifnot (TREE_OPERAND (exp, 0), op0); | |
5dab5552 | 8605 | |
956d6950 | 8606 | start_cleanup_deferral (); |
3a94c984 | 8607 | |
2ac84cfe | 8608 | /* One branch of the cond can be void, if it never returns. For |
3a94c984 | 8609 | example, A ? throw : E. */
2ac84cfe | 8610 | if (temp != 0 |
3a94c984 | 8611 | && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node) |
8403445a AM |
8612 | store_expr (TREE_OPERAND (exp, 1), temp, |
8613 | modifier == EXPAND_STACK_PARM ? 2 : 0); | |
bbf6f052 | 8614 | else |
906c4e36 RK |
8615 | expand_expr (TREE_OPERAND (exp, 1), |
8616 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); | |
956d6950 | 8617 | end_cleanup_deferral (); |
bbf6f052 RK |
8618 | emit_queue (); |
8619 | emit_jump_insn (gen_jump (op1)); | |
8620 | emit_barrier (); | |
8621 | emit_label (op0); | |
956d6950 | 8622 | start_cleanup_deferral (); |
2ac84cfe | 8623 | if (temp != 0 |
3a94c984 | 8624 | && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node) |
8403445a AM |
8625 | store_expr (TREE_OPERAND (exp, 2), temp, |
8626 | modifier == EXPAND_STACK_PARM ? 2 : 0); | |
bbf6f052 | 8627 | else |
906c4e36 RK |
8628 | expand_expr (TREE_OPERAND (exp, 2), |
8629 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); | |
bbf6f052 RK |
8630 | } |
8631 | ||
956d6950 | 8632 | end_cleanup_deferral (); |
bbf6f052 RK |
8633 | |
8634 | emit_queue (); | |
8635 | emit_label (op1); | |
8636 | OK_DEFER_POP; | |
5dab5552 | 8637 | |
bbf6f052 RK |
8638 | return temp; |
8639 | } | |
8640 | ||
8641 | case TARGET_EXPR: | |
8642 | { | |
8643 | /* Something needs to be initialized, but we didn't know | |
8644 | where that thing was when building the tree. For example, | |
8645 | it could be the return value of a function, or a parameter | |
8646 | to a function that is laid down in the stack, or a temporary |
8647 | variable which must be passed by reference. | |
8648 | ||
8649 | We guarantee that the expression will either be constructed | |
8650 | or copied into our original target. */ | |
8651 | ||
8652 | tree slot = TREE_OPERAND (exp, 0); | |
2a888d4c | 8653 | tree cleanups = NULL_TREE; |
5c062816 | 8654 | tree exp1; |
bbf6f052 RK |
8655 | |
8656 | if (TREE_CODE (slot) != VAR_DECL) | |
8657 | abort (); | |
8658 | ||
9c51f375 RK |
8659 | if (! ignore) |
8660 | target = original_target; | |
8661 | ||
6fbfac92 JM |
8662 | /* Set this here so that if we get a target that refers to a |
8663 | register variable that's already been used, put_reg_into_stack | |
3a94c984 | 8664 | knows that it should fix up those uses. */ |
6fbfac92 JM |
8665 | TREE_USED (slot) = 1; |
8666 | ||
bbf6f052 RK |
8667 | if (target == 0) |
8668 | { | |
19e7881c | 8669 | if (DECL_RTL_SET_P (slot)) |
ac993f4f MS |
8670 | { |
8671 | target = DECL_RTL (slot); | |
5c062816 | 8672 | /* If we have already expanded the slot, don't do |
ac993f4f | 8673 | it again. (mrs) */ |
5c062816 MS |
8674 | if (TREE_OPERAND (exp, 1) == NULL_TREE) |
8675 | return target; | |
ac993f4f | 8676 | } |
bbf6f052 RK |
8677 | else |
8678 | { | |
e9a25f70 | 8679 | target = assign_temp (type, 2, 0, 1); |
19e7881c | 8680 | SET_DECL_RTL (slot, target); |
bbf6f052 | 8681 | |
e287fd6e RK |
8682 | /* Since SLOT is not known to the called function |
8683 | to belong to its stack frame, we must build an explicit | |
8684 | cleanup. This case occurs when we must build up a reference | |
8685 | to pass the reference as an argument. In this case, | |
8686 | it is very likely that such a reference need not be | |
8687 | built here. */ | |
8688 | ||
8689 | if (TREE_OPERAND (exp, 2) == 0) | |
c88770e9 | 8690 | TREE_OPERAND (exp, 2) |
ae2bcd98 | 8691 | = lang_hooks.maybe_build_cleanup (slot); |
2a888d4c | 8692 | cleanups = TREE_OPERAND (exp, 2); |
e287fd6e | 8693 | } |
bbf6f052 RK |
8694 | } |
8695 | else | |
8696 | { | |
8697 | /* This case does occur, when expanding a parameter which | |
8698 | needs to be constructed on the stack. The target | |
8699 | is the actual stack address that we want to initialize. | |
8700 | The function we call will perform the cleanup in this case. */ | |
8701 | ||
8c042b47 RS |
8702 | /* If we have already assigned it space, use that space, |
8703 | not target that we were passed in, as our target | |
8704 | parameter is only a hint. */ | |
19e7881c | 8705 | if (DECL_RTL_SET_P (slot)) |
3a94c984 KH |
8706 | { |
8707 | target = DECL_RTL (slot); | |
8708 | /* If we have already expanded the slot, don't do |
8c042b47 | 8709 | it again. (mrs) */ |
3a94c984 KH |
8710 | if (TREE_OPERAND (exp, 1) == NULL_TREE) |
8711 | return target; | |
8c042b47 | 8712 | } |
21002281 | 8713 | else |
8fff4fc1 | 8714 | SET_DECL_RTL (slot, target); |
bbf6f052 RK |
8715 | } |
8716 | ||
4847c938 | 8717 | exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1); |
5c062816 MS |
8718 | /* Mark it as expanded. */ |
8719 | TREE_OPERAND (exp, 1) = NULL_TREE; | |
8720 | ||
2692eb7d JM |
8721 | if (VOID_TYPE_P (TREE_TYPE (exp1))) |
8722 | /* If the initializer is void, just expand it; it will initialize | |
8723 | the object directly. */ | |
8724 | expand_expr (exp1, const0_rtx, VOIDmode, 0); | |
8725 | else | |
8726 | store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0); | |
61d6b1cc | 8727 | |
659e5a7a | 8728 | expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp)); |
3a94c984 | 8729 | |
41531e5b | 8730 | return target; |
bbf6f052 RK |
8731 | } |
8732 | ||
8733 | case INIT_EXPR: | |
8734 | { | |
8735 | tree lhs = TREE_OPERAND (exp, 0); | |
8736 | tree rhs = TREE_OPERAND (exp, 1); | |
bbf6f052 | 8737 | |
b90f141a | 8738 | temp = expand_assignment (lhs, rhs, ! ignore); |
bbf6f052 RK |
8739 | return temp; |
8740 | } | |
8741 | ||
8742 | case MODIFY_EXPR: | |
8743 | { | |
8744 | /* If lhs is complex, expand calls in rhs before computing it. | |
6d0a3f67 NS |
8745 | That's so we don't compute a pointer and save it over a |
8746 | call. If lhs is simple, compute it first so we can give it | |
8747 | as a target if the rhs is just a call. This avoids an | |
8748 | extra temp and copy, and prevents a partial subsumption |
8749 | that makes bad code. Actually we could treat |
8750 | component_ref's of vars like vars. */ | |
bbf6f052 RK |
8751 | |
8752 | tree lhs = TREE_OPERAND (exp, 0); | |
8753 | tree rhs = TREE_OPERAND (exp, 1); | |
bbf6f052 RK |
8754 | |
8755 | temp = 0; | |
8756 | ||
bbf6f052 RK |
8757 | /* Check for |= or &= of a bitfield of size one into another bitfield |
8758 | of size 1. In this case, (unless we need the result of the | |
8759 | assignment) we can do this more efficiently with a | |
8760 | test followed by an assignment, if necessary. | |
8761 | ||
8762 | ??? At this point, we can't get a BIT_FIELD_REF here. But if | |
8763 | things change so we do, this code should be enhanced to | |
8764 | support it. */ | |
8765 | if (ignore | |
8766 | && TREE_CODE (lhs) == COMPONENT_REF | |
8767 | && (TREE_CODE (rhs) == BIT_IOR_EXPR | |
8768 | || TREE_CODE (rhs) == BIT_AND_EXPR) | |
8769 | && TREE_OPERAND (rhs, 0) == lhs | |
8770 | && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF | |
05bccae2 RK |
8771 | && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) |
8772 | && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) | |
bbf6f052 RK |
8773 | { |
8774 | rtx label = gen_label_rtx (); | |
8775 | ||
8776 | do_jump (TREE_OPERAND (rhs, 1), | |
8777 | TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0, | |
8778 | TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0); | |
8779 | expand_assignment (lhs, convert (TREE_TYPE (rhs), | |
8780 | (TREE_CODE (rhs) == BIT_IOR_EXPR | |
8781 | ? integer_one_node | |
8782 | : integer_zero_node)), | |
b90f141a | 8783 | 0); |
e7c33f54 | 8784 | do_pending_stack_adjust (); |
bbf6f052 RK |
8785 | emit_label (label); |
8786 | return const0_rtx; | |
8787 | } | |
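/* An illustrative sketch (editorial addition, not original source): with
   two one-bit bit-fields and the result unused,

       s.a |= s.b;   becomes   if (s.b) s.a = 1;
       s.a &= s.b;   becomes   if (!s.b) s.a = 0;

   replacing a read-modify-write of the containing word with a test and
   a conditional store, as the code above does. */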
8788 | ||
b90f141a | 8789 | temp = expand_assignment (lhs, rhs, ! ignore); |
0fb7aeda | 8790 | |
bbf6f052 RK |
8791 | return temp; |
8792 | } | |
8793 | ||
6e7f84a7 APB |
8794 | case RETURN_EXPR: |
8795 | if (!TREE_OPERAND (exp, 0)) | |
8796 | expand_null_return (); | |
8797 | else | |
8798 | expand_return (TREE_OPERAND (exp, 0)); | |
8799 | return const0_rtx; | |
8800 | ||
bbf6f052 RK |
8801 | case PREINCREMENT_EXPR: |
8802 | case PREDECREMENT_EXPR: | |
7b8b9722 | 8803 | return expand_increment (exp, 0, ignore); |
bbf6f052 RK |
8804 | |
8805 | case POSTINCREMENT_EXPR: | |
8806 | case POSTDECREMENT_EXPR: | |
8807 | /* Faster to treat as pre-increment if result is not used. */ | |
7b8b9722 | 8808 | return expand_increment (exp, ! ignore, ignore); |
bbf6f052 RK |
8809 | |
8810 | case ADDR_EXPR: | |
8403445a AM |
8811 | if (modifier == EXPAND_STACK_PARM) |
8812 | target = 0; | |
682ba3a6 RK |
8813 | /* If we are taking the address of something erroneous, just |
8814 | return a zero. */ | |
6de9cd9a | 8815 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK) |
682ba3a6 | 8816 | return const0_rtx; |
d6b6783b RK |
8817 | /* If we are taking the address of a constant and are at the |
8818 | top level, we have to use output_constant_def since we can't | |
8819 | call force_const_mem at top level. */ | |
8820 | else if (cfun == 0 | |
8821 | && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR | |
8822 | || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) | |
8823 | == 'c'))) | |
8824 | op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0); | |
bbf6f052 RK |
8825 | else |
8826 | { | |
e287fd6e RK |
8827 | /* We make sure to pass const0_rtx down if we came in with |
8828 | ignore set, to avoid doing the cleanups twice for something. */ | |
8829 | op0 = expand_expr (TREE_OPERAND (exp, 0), | |
8830 | ignore ? const0_rtx : NULL_RTX, VOIDmode, | |
bbf6f052 RK |
8831 | (modifier == EXPAND_INITIALIZER |
8832 | ? modifier : EXPAND_CONST_ADDRESS)); | |
896102d0 | 8833 | |
119af78a RK |
8834 | /* If we are going to ignore the result, OP0 will have been set |
8835 | to const0_rtx, so just return it. Don't get confused and | |
8836 | think we are taking the address of the constant. */ | |
8837 | if (ignore) | |
8838 | return op0; | |
8839 | ||
73b7f58c BS |
8840 | /* Pass 1 for MODIFY, so that protect_from_queue doesn't get |
8841 | clever and returns a REG when given a MEM. */ | |
8842 | op0 = protect_from_queue (op0, 1); | |
3539e816 | 8843 | |
c5c76735 JL |
8844 | /* We would like the object in memory. If it is a constant, we can |
8845 | have it be statically allocated into memory. For a non-constant, | |
8846 | we need to allocate some memory and store the value into it. */ | |
896102d0 RK |
8847 | |
8848 | if (CONSTANT_P (op0)) | |
8849 | op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), | |
8850 | op0); | |
f8cfc6aa | 8851 | else if (REG_P (op0) || GET_CODE (op0) == SUBREG |
8fff4fc1 RH |
8852 | || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL |
8853 | || GET_CODE (op0) == LO_SUM) | |
896102d0 | 8854 | { |
82c82743 RH |
8855 | /* If this object is in a register, it can't be BLKmode. */ |
8856 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
8857 | rtx memloc = assign_temp (inner_type, 1, 1, 1); | |
8858 | ||
8859 | if (GET_CODE (op0) == PARALLEL) | |
8860 | /* Handle calls that pass values in multiple | |
8861 | non-contiguous locations. The Irix 6 ABI has examples | |
8862 | of this. */ | |
8863 | emit_group_store (memloc, op0, inner_type, | |
8864 | int_size_in_bytes (inner_type)); | |
df6018fd | 8865 | else |
82c82743 | 8866 | emit_move_insn (memloc, op0); |
0fb7aeda | 8867 | |
82c82743 | 8868 | op0 = memloc; |
896102d0 RK |
8869 | } |
8870 | ||
3c0cb5de | 8871 | if (!MEM_P (op0)) |
bbf6f052 | 8872 | abort (); |
3a94c984 | 8873 | |
34e81b5a | 8874 | mark_temp_addr_taken (op0); |
bbf6f052 | 8875 | if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) |
88f63c77 | 8876 | { |
34e81b5a | 8877 | op0 = XEXP (op0, 0); |
5ae6cd0d | 8878 | if (GET_MODE (op0) == Pmode && mode == ptr_mode) |
34e81b5a | 8879 | op0 = convert_memory_address (ptr_mode, op0); |
34e81b5a | 8880 | return op0; |
88f63c77 | 8881 | } |
987c71d9 | 8882 | |
c952ff4b RK |
8883 | /* If OP0 is not aligned at least as much as the type requires, we |
8884 | need to make a temporary, copy OP0 to it, and take the address of | |
8885 | the temporary. We want to use the alignment of the type, not of | |
8886 | the operand. Note that this is incorrect for FUNCTION_TYPE, but | |
8887 | the test for BLKmode means that can't happen. The test for | |
8888 | BLKmode is because we never make mis-aligned MEMs with | |
8889 | non-BLKmode. | |
8890 | ||
8891 | We don't need to do this at all if the machine doesn't have | |
8892 | strict alignment. */ | |
8893 | if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode | |
8894 | && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))) | |
ed239f5a RK |
8895 | > MEM_ALIGN (op0)) |
8896 | && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT) | |
a06ef755 RK |
8897 | { |
8898 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
bdaa131b | 8899 | rtx new; |
a06ef755 | 8900 | |
c3d32120 RK |
8901 | if (TYPE_ALIGN_OK (inner_type)) |
8902 | abort (); | |
8903 | ||
bdaa131b JM |
8904 | if (TREE_ADDRESSABLE (inner_type)) |
8905 | { | |
8906 | /* We can't make a bitwise copy of this object, so fail. */ | |
8907 | error ("cannot take the address of an unaligned member"); | |
8908 | return const0_rtx; | |
8909 | } | |
8910 | ||
8911 | new = assign_stack_temp_for_type | |
8912 | (TYPE_MODE (inner_type), | |
8913 | MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0)) | |
8914 | : int_size_in_bytes (inner_type), | |
8915 | 1, build_qualified_type (inner_type, | |
8916 | (TYPE_QUALS (inner_type) | |
8917 | | TYPE_QUAL_CONST))); | |
8918 | ||
44bb111a | 8919 | emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)), |
8403445a AM |
8920 | (modifier == EXPAND_STACK_PARM |
8921 | ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); | |
bdaa131b | 8922 | |
a06ef755 RK |
8923 | op0 = new; |
8924 | } | |
8925 | ||
bbf6f052 RK |
8926 | op0 = force_operand (XEXP (op0, 0), target); |
8927 | } | |
987c71d9 | 8928 | |
05c8e58b | 8929 | if (flag_force_addr |
f8cfc6aa | 8930 | && !REG_P (op0) |
05c8e58b HPN |
8931 | && modifier != EXPAND_CONST_ADDRESS |
8932 | && modifier != EXPAND_INITIALIZER | |
8933 | && modifier != EXPAND_SUM) | |
987c71d9 RK |
8934 | op0 = force_reg (Pmode, op0); |
8935 | ||
f8cfc6aa | 8936 | if (REG_P (op0) |
dc6d66b3 | 8937 | && ! REG_USERVAR_P (op0)) |
bdb429a5 | 8938 | mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type))); |
987c71d9 | 8939 | |
5ae6cd0d | 8940 | if (GET_MODE (op0) == Pmode && mode == ptr_mode) |
9fcfcce7 | 8941 | op0 = convert_memory_address (ptr_mode, op0); |
88f63c77 | 8942 | |
bbf6f052 RK |
8943 | return op0; |
8944 | ||
8945 | case ENTRY_VALUE_EXPR: | |
8946 | abort (); | |
8947 | ||
7308a047 RS |
8948 | /* COMPLEX type for Extended Pascal & Fortran */ |
8949 | case COMPLEX_EXPR: | |
8950 | { | |
8951 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); | |
6551fa4d | 8952 | rtx insns; |
7308a047 RS |
8953 | |
8954 | /* Get the rtx code of the operands. */ | |
8955 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); | |
8956 | op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0); | |
8957 | ||
8958 | if (! target) | |
8959 | target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); | |
8960 | ||
6551fa4d | 8961 | start_sequence (); |
7308a047 RS |
8962 | |
8963 | /* Move the real (op0) and imaginary (op1) parts to their location. */ | |
2d7050fd RS |
8964 | emit_move_insn (gen_realpart (mode, target), op0); |
8965 | emit_move_insn (gen_imagpart (mode, target), op1); | |
7308a047 | 8966 | |
6551fa4d JW |
8967 | insns = get_insns (); |
8968 | end_sequence (); | |
8969 | ||
7308a047 | 8970 | /* Complex construction should appear as a single unit. */ |
6551fa4d JW |
8971 | /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS, |
8972 | each with a separate pseudo as destination. | |
8973 | It's not correct for flow to treat them as a unit. */ | |
6d6e61ce | 8974 | if (GET_CODE (target) != CONCAT) |
6551fa4d JW |
8975 | emit_no_conflict_block (insns, target, op0, op1, NULL_RTX); |
8976 | else | |
2f937369 | 8977 | emit_insn (insns); |
7308a047 RS |
8978 | |
8979 | return target; | |
8980 | } | |
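/* An illustrative sketch (editorial addition, not original source): a
   COMPLEX_EXPR with real part R and imaginary part I expands as

       emit_move_insn (gen_realpart (mode, target), R);
       emit_move_insn (gen_imagpart (mode, target), I);

   with the pair wrapped in emit_no_conflict_block (unless TARGET is a
   CONCAT) so later passes treat the construction as one unit. */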
8981 | ||
8982 | case REALPART_EXPR: | |
2d7050fd RS |
8983 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); |
8984 | return gen_realpart (mode, op0); | |
3a94c984 | 8985 | |
7308a047 | 8986 | case IMAGPART_EXPR: |
2d7050fd RS |
8987 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); |
8988 | return gen_imagpart (mode, op0); | |
7308a047 RS |
8989 | |
8990 | case CONJ_EXPR: | |
8991 | { | |
62acb978 | 8992 | enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); |
7308a047 | 8993 | rtx imag_t; |
6551fa4d | 8994 | rtx insns; |
3a94c984 KH |
8995 | |
8996 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); | |
7308a047 RS |
8997 | |
8998 | if (! target) | |
d6a5ac33 | 8999 | target = gen_reg_rtx (mode); |
3a94c984 | 9000 | |
6551fa4d | 9001 | start_sequence (); |
7308a047 RS |
9002 | |
9003 | /* Store the realpart and the negated imagpart to target. */ | |
62acb978 RK |
9004 | emit_move_insn (gen_realpart (partmode, target), |
9005 | gen_realpart (partmode, op0)); | |
7308a047 | 9006 | |
62acb978 | 9007 | imag_t = gen_imagpart (partmode, target); |
91ce572a | 9008 | temp = expand_unop (partmode, |
0fb7aeda KH |
9009 | ! unsignedp && flag_trapv |
9010 | && (GET_MODE_CLASS(partmode) == MODE_INT) | |
9011 | ? negv_optab : neg_optab, | |
3a94c984 | 9012 | gen_imagpart (partmode, op0), imag_t, 0); |
7308a047 RS |
9013 | if (temp != imag_t) |
9014 | emit_move_insn (imag_t, temp); | |
9015 | ||
6551fa4d JW |
9016 | insns = get_insns (); |
9017 | end_sequence (); | |
9018 | ||
3a94c984 | 9019 | /* Conjugate should appear as a single unit. |
d6a5ac33 | 9020 | If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS, |
6551fa4d JW |
9021 | each with a separate pseudo as destination. |
9022 | It's not correct for flow to treat them as a unit. */ | |
6d6e61ce | 9023 | if (GET_CODE (target) != CONCAT) |
6551fa4d JW |
9024 | emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX); |
9025 | else | |
2f937369 | 9026 | emit_insn (insns); |
7308a047 RS |
9027 | |
9028 | return target; | |
9029 | } | |
9030 | ||
6de9cd9a DN |
9031 | case RESX_EXPR: |
9032 | expand_resx_expr (exp); | |
9033 | return const0_rtx; | |
9034 | ||
e976b8b2 MS |
9035 | case TRY_CATCH_EXPR: |
9036 | { | |
9037 | tree handler = TREE_OPERAND (exp, 1); | |
9038 | ||
9039 | expand_eh_region_start (); | |
e976b8b2 | 9040 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); |
6de9cd9a | 9041 | expand_eh_handler (handler); |
e976b8b2 MS |
9042 | |
9043 | return op0; | |
9044 | } | |
9045 | ||
6de9cd9a DN |
9046 | case CATCH_EXPR: |
9047 | expand_start_catch (CATCH_TYPES (exp)); | |
9048 | expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0); | |
9049 | expand_end_catch (); | |
9050 | return const0_rtx; | |
9051 | ||
9052 | case EH_FILTER_EXPR: | |
9053 | /* Should have been handled in expand_eh_handler. */ | |
9054 | abort (); | |
9055 | ||
b335b813 PB |
9056 | case TRY_FINALLY_EXPR: |
9057 | { | |
9058 | tree try_block = TREE_OPERAND (exp, 0); | |
9059 | tree finally_block = TREE_OPERAND (exp, 1); | |
b335b813 | 9060 | |
6de9cd9a DN |
9061 | if ((!optimize && lang_protect_cleanup_actions == NULL) |
9062 | || unsafe_for_reeval (finally_block) > 1) | |
8943a0b4 RH |
9063 | { |
9064 | /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR | |
9065 | is not sufficient, so we cannot expand the block twice. | |
9066 | So we play games with GOTO_SUBROUTINE_EXPR to let us | |
9067 | expand the thing only once. */ | |
8ad8135a RH |
9068 | /* When not optimizing, we go ahead with this form since |
9069 | (1) user breakpoints operate more predictably without | |
9070 | code duplication, and | |
9071 | (2) we're not running any of the global optimizers | |
9072 | that would explode in time/space with the highly | |
9073 | connected CFG created by the indirect branching. */ | |
8943a0b4 RH |
9074 | |
9075 | rtx finally_label = gen_label_rtx (); | |
9076 | rtx done_label = gen_label_rtx (); | |
9077 | rtx return_link = gen_reg_rtx (Pmode); | |
9078 | tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node, | |
9079 | (tree) finally_label, (tree) return_link); | |
9080 | TREE_SIDE_EFFECTS (cleanup) = 1; | |
9081 | ||
9082 | /* Start a new binding layer that will keep track of all cleanup | |
9083 | actions to be performed. */ | |
9084 | expand_start_bindings (2); | |
9085 | target_temp_slot_level = temp_slot_level; | |
9086 | ||
9087 | expand_decl_cleanup (NULL_TREE, cleanup); | |
9088 | op0 = expand_expr (try_block, target, tmode, modifier); | |
9089 | ||
9090 | preserve_temp_slots (op0); | |
9091 | expand_end_bindings (NULL_TREE, 0, 0); | |
9092 | emit_jump (done_label); | |
9093 | emit_label (finally_label); | |
9094 | expand_expr (finally_block, const0_rtx, VOIDmode, 0); | |
9095 | emit_indirect_jump (return_link); | |
9096 | emit_label (done_label); | |
9097 | } | |
9098 | else | |
9099 | { | |
9100 | expand_start_bindings (2); | |
9101 | target_temp_slot_level = temp_slot_level; | |
b335b813 | 9102 | |
8943a0b4 RH |
9103 | expand_decl_cleanup (NULL_TREE, finally_block); |
9104 | op0 = expand_expr (try_block, target, tmode, modifier); | |
b335b813 | 9105 | |
8943a0b4 RH |
9106 | preserve_temp_slots (op0); |
9107 | expand_end_bindings (NULL_TREE, 0, 0); | |
9108 | } | |
b335b813 | 9109 | |
b335b813 PB |
9110 | return op0; |
9111 | } | |
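/* An illustrative sketch (editorial addition, not original source): the
   GOTO_SUBROUTINE_EXPR scheme above emits the finally block once, as an
   out-of-line subroutine, roughly:

       <try body>
       return_link = &&ret; goto finally;
     ret:
       goto done;
     finally:
       <finally body>
       goto *return_link;
     done:

   so the cleanup runs on the normal path without duplicating its code. */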
9112 | ||
3a94c984 | 9113 | case GOTO_SUBROUTINE_EXPR: |
b335b813 PB |
9114 | { |
9115 | rtx subr = (rtx) TREE_OPERAND (exp, 0); | |
9116 | rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1); | |
9117 | rtx return_address = gen_label_rtx (); | |
3a94c984 KH |
9118 | emit_move_insn (return_link, |
9119 | gen_rtx_LABEL_REF (Pmode, return_address)); | |
b335b813 PB |
9120 | emit_jump (subr); |
9121 | emit_label (return_address); | |
9122 | return const0_rtx; | |
9123 | } | |
9124 | ||
d3707adb RH |
9125 | case VA_ARG_EXPR: |
9126 | return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type); | |
9127 | ||
52a11cbf | 9128 | case EXC_PTR_EXPR: |
86c99549 | 9129 | return get_exception_pointer (cfun); |
52a11cbf | 9130 | |
6de9cd9a DN |
9131 | case FILTER_EXPR: |
9132 | return get_exception_filter (cfun); | |
9133 | ||
67231816 RH |
9134 | case FDESC_EXPR: |
9135 | /* Function descriptors are not valid except for as | |
9136 | initialization constants, and should not be expanded. */ | |
9137 | abort (); | |
9138 | ||
6de9cd9a DN |
9139 | case SWITCH_EXPR: |
9140 | expand_start_case (0, SWITCH_COND (exp), integer_type_node, | |
9141 | "switch"); | |
9142 | if (SWITCH_BODY (exp)) | |
9143 | expand_expr_stmt (SWITCH_BODY (exp)); | |
9144 | if (SWITCH_LABELS (exp)) | |
9145 | { | |
9146 | tree duplicate = 0; | |
9147 | tree vec = SWITCH_LABELS (exp); | |
9148 | size_t i, n = TREE_VEC_LENGTH (vec); | |
9149 | ||
9150 | for (i = 0; i < n; ++i) | |
9151 | { | |
9152 | tree elt = TREE_VEC_ELT (vec, i); | |
9153 | tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp)); | |
9154 | tree min_value = TYPE_MIN_VALUE (controlling_expr_type); | |
9155 | tree max_value = TYPE_MAX_VALUE (controlling_expr_type); | |
9156 | ||
9157 | tree case_low = CASE_LOW (elt); | |
9158 | tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low; | |
9159 | if (case_low && case_high) | |
9160 | { | |
9161 | /* Case label is less than minimum for type. */ | |
ebd5a208 RK |
9162 | if (TREE_CODE (min_value) == INTEGER_CST |
9163 | && tree_int_cst_compare (case_low, min_value) < 0 | |
9164 | && tree_int_cst_compare (case_high, min_value) < 0) | |
6de9cd9a DN |
9165 | { |
9166 | warning ("case label value %d is less than minimum value for type", | |
9167 | (int) TREE_INT_CST_LOW (case_low)); |
9168 | continue; | |
9169 | } | |
9170 | ||
9171 | /* Case value is greater than maximum for type. */ | |
ebd5a208 RK |
9172 | if (TREE_CODE (max_value) == INTEGER_CST |
9173 | && tree_int_cst_compare (case_low, max_value) > 0 | |
9174 | && tree_int_cst_compare (case_high, max_value) > 0) | |
6de9cd9a DN |
9175 | { |
9176 | warning ("case label value %d exceeds maximum value for type", | |
9177 | (int) TREE_INT_CST_LOW (case_high)); |
9178 | continue; | |
9179 | } | |
9180 | ||
9181 | /* Saturate lower case label value to minimum. */ | |
ebd5a208 RK |
9182 | if (TREE_CODE (min_value) == INTEGER_CST |
9183 | && tree_int_cst_compare (case_high, min_value) >= 0 | |
9184 | && tree_int_cst_compare (case_low, min_value) < 0) | |
6de9cd9a DN |
9185 | { |
9186 | warning ("lower value %d in case label range less than minimum value for type", | |
9187 | (int) TREE_INT_CST_LOW (case_low)); |
9188 | case_low = min_value; | |
9189 | } | |
9190 | ||
9191 | /* Saturate upper case label value to maximum. */ | |
ebd5a208 RK |
9192 | if (TREE_CODE (max_value) == INTEGER_CST |
9193 | && tree_int_cst_compare (case_low, max_value) <= 0 | |
9194 | && tree_int_cst_compare (case_high, max_value) > 0) | |
6de9cd9a DN |
9195 | { |
9196 | warning ("upper value %d in case label range exceeds maximum value for type", | |
9197 | (int) TREE_INT_CST_LOW (case_high)); |
9198 | case_high = max_value; | |
9199 | } | |
9200 | } | |
9201 | ||
9202 | add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true); | |
9203 | if (duplicate) | |
9204 | abort (); | |
9205 | } | |
9206 | } | |
9207 | expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp)); | |
9208 | return const0_rtx; | |
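/* An illustrative sketch (editorial addition, not original source): the
   range checks above clamp case labels to the controlling type, so with

       unsigned char c;
       switch (c) { case 200 ... 300: break; case 400: break; }

   the range's upper bound saturates to 255 with a warning, and case 400,
   lying entirely above the type's maximum, is warned about and dropped. */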
9209 | ||
9210 | case LABEL_EXPR: | |
9211 | expand_label (TREE_OPERAND (exp, 0)); | |
9212 | return const0_rtx; | |
9213 | ||
9214 | case CASE_LABEL_EXPR: | |
9215 | { | |
9216 | tree duplicate = 0; | |
9217 | add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp), | |
9218 | &duplicate, false); | |
9219 | if (duplicate) | |
9220 | abort (); | |
9221 | return const0_rtx; | |
9222 | } | |
9223 | ||
9224 | case ASM_EXPR: | |
9225 | expand_asm_expr (exp); | |
9226 | return const0_rtx; | |
9227 | ||
bbf6f052 | 9228 | default: |
673fda6b SB |
9229 | return lang_hooks.expand_expr (exp, original_target, tmode, |
9230 | modifier, alt_rtl); | |
bbf6f052 RK |
9231 | } |
9232 | ||
9233 | /* Here to do an ordinary binary operator, generating an instruction | |
9234 | from the optab already placed in `this_optab'. */ | |
9235 | binop: | |
eb698c58 RS |
9236 | expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
9237 | subtarget, &op0, &op1, 0); | |
bbf6f052 | 9238 | binop2: |
8403445a AM |
9239 | if (modifier == EXPAND_STACK_PARM) |
9240 | target = 0; | |
bbf6f052 RK |
9241 | temp = expand_binop (mode, this_optab, op0, op1, target, |
9242 | unsignedp, OPTAB_LIB_WIDEN); | |
9243 | if (temp == 0) | |
9244 | abort (); | |
9245 | return temp; | |
9246 | } | |
b93a436e | 9247 | \f |
1ce7f3c2 RK |
9248 | /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that |
9249 | when applied to the address of EXP produces an address known to be | |
9250 | aligned more than BIGGEST_ALIGNMENT. */ | |
9251 | ||
9252 | static int | |
502b8322 | 9253 | is_aligning_offset (tree offset, tree exp) |
1ce7f3c2 | 9254 | { |
6fce44af | 9255 | /* Strip off any conversions. */ |
1ce7f3c2 RK |
9256 | while (TREE_CODE (offset) == NON_LVALUE_EXPR |
9257 | || TREE_CODE (offset) == NOP_EXPR | |
6fce44af | 9258 | || TREE_CODE (offset) == CONVERT_EXPR) |
1ce7f3c2 RK |
9259 | offset = TREE_OPERAND (offset, 0); |
9260 | ||
9261 | /* We must now have a BIT_AND_EXPR with a constant that is one less than | |
9262 | a power of 2 and which is larger than BIGGEST_ALIGNMENT. */ |
9263 | if (TREE_CODE (offset) != BIT_AND_EXPR | |
9264 | || !host_integerp (TREE_OPERAND (offset, 1), 1) | |
c0cfc691 OH |
9265 | || compare_tree_int (TREE_OPERAND (offset, 1), |
9266 | BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0 | |
1ce7f3c2 RK |
9267 | || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0) |
9268 | return 0; | |
9269 | ||
9270 | /* Look at the first operand of BIT_AND_EXPR and strip any conversion. | |
9271 | It must be NEGATE_EXPR. Then strip any more conversions. */ | |
9272 | offset = TREE_OPERAND (offset, 0); | |
9273 | while (TREE_CODE (offset) == NON_LVALUE_EXPR | |
9274 | || TREE_CODE (offset) == NOP_EXPR | |
9275 | || TREE_CODE (offset) == CONVERT_EXPR) | |
9276 | offset = TREE_OPERAND (offset, 0); | |
9277 | ||
9278 | if (TREE_CODE (offset) != NEGATE_EXPR) | |
9279 | return 0; | |
9280 | ||
9281 | offset = TREE_OPERAND (offset, 0); | |
9282 | while (TREE_CODE (offset) == NON_LVALUE_EXPR | |
9283 | || TREE_CODE (offset) == NOP_EXPR | |
9284 | || TREE_CODE (offset) == CONVERT_EXPR) | |
9285 | offset = TREE_OPERAND (offset, 0); | |
9286 | ||
6fce44af RK |
9287 | /* This must now be the address of EXP. */ |
9288 | return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp; | |
1ce7f3c2 RK |
9289 | } |
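/* Illustrative sketch, not part of the compiler: the source-level aligning
   idiom whose offset tree the predicate above is meant to recognize.  The
   names ALIGN and align_up are hypothetical.  */
#if 0
#include <stdint.h>

#define ALIGN 64  /* assumed to exceed BIGGEST_ALIGNMENT / BITS_PER_UNIT */

/* Round BUF up to an ALIGN-byte boundary.  The offset added here is
   (-(uintptr_t) buf) & (ALIGN - 1): a NEGATE_EXPR of the object's address
   masked with a constant that is one less than a power of 2, which is
   exactly the shape checked for above.  */
static char *
align_up (char *buf)
{
  return buf + ((-(uintptr_t) buf) & (ALIGN - 1));
}
#endif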
9290 | \f | |
e0a2f705 | 9291 | /* Return the tree node if ARG corresponds to a string constant, or zero |
cc2902df | 9292 | if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset |
fed3cef0 RK |
9293 | in bytes within the string that ARG is accessing. The type of the |
9294 | offset will be `sizetype'. */ | |
b93a436e | 9295 | |
28f4ec01 | 9296 | tree |
502b8322 | 9297 | string_constant (tree arg, tree *ptr_offset) |
b93a436e JL |
9298 | { |
9299 | STRIP_NOPS (arg); | |
9300 | ||
9301 | if (TREE_CODE (arg) == ADDR_EXPR | |
9302 | && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) | |
9303 | { | |
fed3cef0 | 9304 | *ptr_offset = size_zero_node; |
b93a436e JL |
9305 | return TREE_OPERAND (arg, 0); |
9306 | } | |
6de9cd9a DN |
9307 | if (TREE_CODE (arg) == ADDR_EXPR |
9308 | && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF | |
9309 | && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST) | |
9310 | { | |
9311 | *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1)); | |
9312 | return TREE_OPERAND (TREE_OPERAND (arg, 0), 0); | |
9313 | } | |
b93a436e JL |
9314 | else if (TREE_CODE (arg) == PLUS_EXPR) |
9315 | { | |
9316 | tree arg0 = TREE_OPERAND (arg, 0); | |
9317 | tree arg1 = TREE_OPERAND (arg, 1); | |
9318 | ||
9319 | STRIP_NOPS (arg0); | |
9320 | STRIP_NOPS (arg1); | |
9321 | ||
9322 | if (TREE_CODE (arg0) == ADDR_EXPR | |
9323 | && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST) | |
bbf6f052 | 9324 | { |
fed3cef0 | 9325 | *ptr_offset = convert (sizetype, arg1); |
b93a436e | 9326 | return TREE_OPERAND (arg0, 0); |
bbf6f052 | 9327 | } |
b93a436e JL |
9328 | else if (TREE_CODE (arg1) == ADDR_EXPR |
9329 | && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST) | |
bbf6f052 | 9330 | { |
fed3cef0 | 9331 | *ptr_offset = convert (sizetype, arg0); |
b93a436e | 9332 | return TREE_OPERAND (arg1, 0); |
bbf6f052 | 9333 | } |
b93a436e | 9334 | } |
ca695ac9 | 9335 | |
b93a436e JL |
9336 | return 0; |
9337 | } | |
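/* Illustrative sketch (hypothetical, kept out of the build): source
   expressions that the matcher above can decompose into a STRING_CST plus
   a byte offset in *PTR_OFFSET.  */
#if 0
const char *p1 = &"hello world"[6];  /* ADDR_EXPR of ARRAY_REF: offset 6 */
const char *p2 = "hello world" + 6;  /* PLUS_EXPR of ADDR_EXPR and 6: same */
#endif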
ca695ac9 | 9338 | \f |
b93a436e JL |
9339 | /* Expand code for a post- or pre- increment or decrement |
9340 | and return the RTX for the result. | |
9341 | POST is 1 for postinc/decrements and 0 for preinc/decrements. */ | |
1499e0a8 | 9342 | |
b93a436e | 9343 | static rtx |
502b8322 | 9344 | expand_increment (tree exp, int post, int ignore) |
ca695ac9 | 9345 | { |
b3694847 SS |
9346 | rtx op0, op1; |
9347 | rtx temp, value; | |
9348 | tree incremented = TREE_OPERAND (exp, 0); | |
b93a436e JL |
9349 | optab this_optab = add_optab; |
9350 | int icode; | |
9351 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | |
9352 | int op0_is_copy = 0; | |
9353 | int single_insn = 0; | |
9354 | /* 1 means we can't store into OP0 directly, | |
9355 | because it is a subreg narrower than a word, | |
9356 | and we don't dare clobber the rest of the word. */ | |
9357 | int bad_subreg = 0; | |
1499e0a8 | 9358 | |
b93a436e JL |
9359 | /* Stabilize any component ref that might need to be |
9360 | evaluated more than once below. */ | |
9361 | if (!post | |
9362 | || TREE_CODE (incremented) == BIT_FIELD_REF | |
9363 | || (TREE_CODE (incremented) == COMPONENT_REF | |
9364 | && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF | |
9365 | || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1))))) | |
9366 | incremented = stabilize_reference (incremented); | |
9367 | /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost | |
9368 | ones into save exprs so that they don't accidentally get evaluated | |
9369 | more than once by the code below. */ | |
9370 | if (TREE_CODE (incremented) == PREINCREMENT_EXPR | |
9371 | || TREE_CODE (incremented) == PREDECREMENT_EXPR) | |
9372 | incremented = save_expr (incremented); | |
e9a25f70 | 9373 | |
b93a436e JL |
9374 | /* Compute the operands as RTX. |
9375 | Note whether OP0 is the actual lvalue or a copy of it: | |
9376 | I believe it is a copy iff it is a register or subreg | |
6d2f8887 | 9377 | and insns were generated in computing it. */ |
e9a25f70 | 9378 | |
b93a436e | 9379 | temp = get_last_insn (); |
37a08a29 | 9380 | op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0); |
e9a25f70 | 9381 | |
b93a436e JL |
9382 | /* If OP0 is a SUBREG made for a promoted variable, we cannot increment |
9383 | in place but instead must do sign- or zero-extension during assignment, | |
9384 | so we copy it into a new register and let the code below use it as | |
9385 | a copy. | |
e9a25f70 | 9386 | |
b93a436e JL |
9387 | Note that we can safely modify this SUBREG since it is known not to be |
9388 | shared (it was made by the expand_expr call above). */ | |
9389 | ||
9390 | if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0)) | |
9391 | { | |
9392 | if (post) | |
9393 | SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0)); | |
9394 | else | |
9395 | bad_subreg = 1; | |
9396 | } | |
9397 | else if (GET_CODE (op0) == SUBREG | |
9398 | && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD) | |
9399 | { | |
9400 | /* We cannot increment this SUBREG in place. If we are | |
9401 | post-incrementing, get a copy of the old value. Otherwise, | |
9402 | just mark that we cannot increment in place. */ | |
9403 | if (post) | |
9404 | op0 = copy_to_reg (op0); | |
9405 | else | |
9406 | bad_subreg = 1; | |
e9a25f70 JL |
9407 | } |
9408 | ||
f8cfc6aa | 9409 | op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0)) |
b93a436e | 9410 | && temp != get_last_insn ()); |
37a08a29 | 9411 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
1499e0a8 | 9412 | |
b93a436e JL |
9413 | /* Decide whether incrementing or decrementing. */ |
9414 | if (TREE_CODE (exp) == POSTDECREMENT_EXPR | |
9415 | || TREE_CODE (exp) == PREDECREMENT_EXPR) | |
9416 | this_optab = sub_optab; | |
9417 | ||
9418 | /* Convert decrement by a constant into a negative increment. */ | |
9419 | if (this_optab == sub_optab | |
9420 | && GET_CODE (op1) == CONST_INT) | |
ca695ac9 | 9421 | { |
3a94c984 | 9422 | op1 = GEN_INT (-INTVAL (op1)); |
b93a436e | 9423 | this_optab = add_optab; |
ca695ac9 | 9424 | } |
1499e0a8 | 9425 | |
91ce572a | 9426 | if (TYPE_TRAP_SIGNED (TREE_TYPE (exp))) |
505ddab6 | 9427 | this_optab = this_optab == add_optab ? addv_optab : subv_optab; |
91ce572a | 9428 | |
b93a436e JL |
9429 | /* For a preincrement, see if we can do this with a single instruction. */ |
9430 | if (!post) | |
9431 | { | |
9432 | icode = (int) this_optab->handlers[(int) mode].insn_code; | |
9433 | if (icode != (int) CODE_FOR_nothing | |
9434 | /* Make sure that OP0 is valid for operands 0 and 1 | |
9435 | of the insn we want to queue. */ | |
a995e389 RH |
9436 | && (*insn_data[icode].operand[0].predicate) (op0, mode) |
9437 | && (*insn_data[icode].operand[1].predicate) (op0, mode) | |
9438 | && (*insn_data[icode].operand[2].predicate) (op1, mode)) | |
b93a436e JL |
9439 | single_insn = 1; |
9440 | } | |
bbf6f052 | 9441 | |
b93a436e JL |
9442 | /* If OP0 is not the actual lvalue, but rather a copy in a register, |
9443 | then we cannot just increment OP0. We must therefore contrive to | |
9444 | increment the original value. Then, for postincrement, we can return | |
9445 | OP0 since it is a copy of the old value. For preincrement, expand here | |
9446 | unless we can do it with a single insn. | |
bbf6f052 | 9447 | |
b93a436e JL |
9448 | Likewise if storing directly into OP0 would clobber high bits |
9449 | we need to preserve (bad_subreg). */ | |
9450 | if (op0_is_copy || (!post && !single_insn) || bad_subreg) | |
a358cee0 | 9451 | { |
b93a436e JL |
9452 | /* This is the easiest way to increment the value wherever it is. |
9453 | Problems with multiple evaluation of INCREMENTED are prevented | |
9454 | because either (1) it is a component_ref or preincrement, | |
9455 | in which case it was stabilized above, or (2) it is an array_ref | |
9456 | with constant index in an array in a register, which is | |
9457 | safe to reevaluate. */ | |
9458 | tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR | |
9459 | || TREE_CODE (exp) == PREDECREMENT_EXPR) | |
9460 | ? MINUS_EXPR : PLUS_EXPR), | |
9461 | TREE_TYPE (exp), | |
9462 | incremented, | |
9463 | TREE_OPERAND (exp, 1)); | |
a358cee0 | 9464 | |
b93a436e JL |
9465 | while (TREE_CODE (incremented) == NOP_EXPR |
9466 | || TREE_CODE (incremented) == CONVERT_EXPR) | |
9467 | { | |
9468 | newexp = convert (TREE_TYPE (incremented), newexp); | |
9469 | incremented = TREE_OPERAND (incremented, 0); | |
9470 | } | |
bbf6f052 | 9471 | |
b90f141a | 9472 | temp = expand_assignment (incremented, newexp, ! post && ! ignore); |
b93a436e JL |
9473 | return post ? op0 : temp; |
9474 | } | |
bbf6f052 | 9475 | |
b93a436e JL |
9476 | if (post) |
9477 | { | |
9478 | /* We have a true reference to the value in OP0. | |
9479 | If there is an insn to add or subtract in this mode, queue it. | |
d91edf86 | 9480 | Queuing the increment insn avoids the register shuffling |
b93a436e JL |
9481 | that often results if we must increment now and first save |
9482 | the old value for subsequent use. */ | |
bbf6f052 | 9483 | |
b93a436e JL |
9484 | #if 0 /* Turned off to avoid making extra insn for indexed memref. */ |
9485 | op0 = stabilize (op0); | |
9486 | #endif | |
41dfd40c | 9487 | |
b93a436e JL |
9488 | icode = (int) this_optab->handlers[(int) mode].insn_code; |
9489 | if (icode != (int) CODE_FOR_nothing | |
9490 | /* Make sure that OP0 is valid for operands 0 and 1 | |
9491 | of the insn we want to queue. */ | |
a995e389 RH |
9492 | && (*insn_data[icode].operand[0].predicate) (op0, mode) |
9493 | && (*insn_data[icode].operand[1].predicate) (op0, mode)) | |
b93a436e | 9494 | { |
a995e389 | 9495 | if (! (*insn_data[icode].operand[2].predicate) (op1, mode)) |
b93a436e | 9496 | op1 = force_reg (mode, op1); |
bbf6f052 | 9497 | |
b93a436e JL |
9498 | return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1)); |
9499 | } | |
3c0cb5de | 9500 | if (icode != (int) CODE_FOR_nothing && MEM_P (op0)) |
b93a436e JL |
9501 | { |
9502 | rtx addr = (general_operand (XEXP (op0, 0), mode) | |
9503 | ? force_reg (Pmode, XEXP (op0, 0)) | |
9504 | : copy_to_reg (XEXP (op0, 0))); | |
9505 | rtx temp, result; | |
ca695ac9 | 9506 | |
792760b9 | 9507 | op0 = replace_equiv_address (op0, addr); |
b93a436e | 9508 | temp = force_reg (GET_MODE (op0), op0); |
a995e389 | 9509 | if (! (*insn_data[icode].operand[2].predicate) (op1, mode)) |
b93a436e | 9510 | op1 = force_reg (mode, op1); |
ca695ac9 | 9511 | |
b93a436e JL |
9512 | /* The increment queue is LIFO, thus we have to `queue' |
9513 | the instructions in reverse order. */ | |
9514 | enqueue_insn (op0, gen_move_insn (op0, temp)); | |
9515 | result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1)); | |
9516 | return result; | |
bbf6f052 RK |
9517 | } |
9518 | } | |
ca695ac9 | 9519 | |
b93a436e JL |
9520 | /* Preincrement, or we can't increment with one simple insn. */ |
9521 | if (post) | |
9522 | /* Save a copy of the value before inc or dec, to return it later. */ | |
9523 | temp = value = copy_to_reg (op0); | |
9524 | else | |
9525 | /* Arrange to return the incremented value. */ | |
9526 | /* Copy the rtx because expand_binop will protect from the queue, | |
9527 | and the results of that would be invalid for us to return | |
9528 | if our caller does emit_queue before using our result. */ | |
9529 | temp = copy_rtx (value = op0); | |
bbf6f052 | 9530 | |
b93a436e | 9531 | /* Increment however we can. */ |
37a08a29 | 9532 | op1 = expand_binop (mode, this_optab, value, op1, op0, |
8df83eae | 9533 | TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN); |
37a08a29 | 9534 | |
b93a436e JL |
9535 | /* Make sure the value is stored into OP0. */ |
9536 | if (op1 != op0) | |
9537 | emit_move_insn (op0, op1); | |
5718612f | 9538 | |
b93a436e JL |
9539 | return temp; |
9540 | } | |
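/* Illustrative sketch (hypothetical, not compiled): the semantics the
   expander above must preserve.  For a postincrement the old value is
   copied and returned; for a preincrement the updated value is returned.  */
#if 0
static int
increment_demo (void)
{
  int i = 5;
  int a, b;

  a = i++;	/* a == 5: old value saved before the add (the `post' path) */
  b = ++i;	/* b == 7: value updated first (the `pre' path) */
  return a + b;	/* 12 */
}
#endif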
9541 | \f | |
b93a436e JL |
9542 | /* Generate code to calculate EXP using a store-flag instruction |
9543 | and return an rtx for the result. EXP is either a comparison | |
9544 | or a TRUTH_NOT_EXPR whose operand is a comparison. | |
ca695ac9 | 9545 | |
b93a436e | 9546 | If TARGET is nonzero, store the result there if convenient. |
ca695ac9 | 9547 | |
cc2902df | 9548 | If ONLY_CHEAP is nonzero, only do this if it is likely to be very |
b93a436e | 9549 | cheap. |
ca695ac9 | 9550 | |
b93a436e JL |
9551 | Return zero if there is no suitable set-flag instruction |
9552 | available on this machine. | |
ca695ac9 | 9553 | |
b93a436e JL |
9554 | Once expand_expr has been called on the arguments of the comparison, |
9555 | we are committed to doing the store flag, since it is not safe to | |
9556 | re-evaluate the expression. We emit the store-flag insn by calling | |
9557 | emit_store_flag, but only expand the arguments if we have a reason | |
9558 | to believe that emit_store_flag will be successful. If we think that | |
9559 | it will, but it isn't, we have to simulate the store-flag with a | |
9560 | set/jump/set sequence. */ | |
ca695ac9 | 9561 | |
b93a436e | 9562 | static rtx |
502b8322 | 9563 | do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) |
b93a436e JL |
9564 | { |
9565 | enum rtx_code code; | |
9566 | tree arg0, arg1, type; | |
9567 | tree tem; | |
9568 | enum machine_mode operand_mode; | |
9569 | int invert = 0; | |
9570 | int unsignedp; | |
9571 | rtx op0, op1; | |
9572 | enum insn_code icode; | |
9573 | rtx subtarget = target; | |
381127e8 | 9574 | rtx result, label; |
ca695ac9 | 9575 | |
b93a436e JL |
9576 | /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the |
9577 | result at the end. We can't simply invert the test since it would | |
9578 | have already been inverted if it were valid. This case occurs for | |
9579 | some floating-point comparisons. */ | |
ca695ac9 | 9580 | |
b93a436e JL |
9581 | if (TREE_CODE (exp) == TRUTH_NOT_EXPR) |
9582 | invert = 1, exp = TREE_OPERAND (exp, 0); | |
ca695ac9 | 9583 | |
b93a436e JL |
9584 | arg0 = TREE_OPERAND (exp, 0); |
9585 | arg1 = TREE_OPERAND (exp, 1); | |
5129d2ce AH |
9586 | |
9587 | /* Don't crash if the comparison was erroneous. */ | |
9588 | if (arg0 == error_mark_node || arg1 == error_mark_node) | |
9589 | return const0_rtx; | |
9590 | ||
b93a436e JL |
9591 | type = TREE_TYPE (arg0); |
9592 | operand_mode = TYPE_MODE (type); | |
8df83eae | 9593 | unsignedp = TYPE_UNSIGNED (type); |
ca695ac9 | 9594 | |
b93a436e JL |
9595 | /* We won't bother with BLKmode store-flag operations because it would mean |
9596 | passing a lot of information to emit_store_flag. */ | |
9597 | if (operand_mode == BLKmode) | |
9598 | return 0; | |
ca695ac9 | 9599 | |
b93a436e JL |
9600 | /* We won't bother with store-flag operations involving function pointers |
9601 | when function pointers must be canonicalized before comparisons. */ | |
9602 | #ifdef HAVE_canonicalize_funcptr_for_compare | |
9603 | if (HAVE_canonicalize_funcptr_for_compare | |
9604 | && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE | |
9605 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
9606 | == FUNCTION_TYPE)) | |
9607 | || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE | |
9608 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
9609 | == FUNCTION_TYPE)))) | |
9610 | return 0; | |
ca695ac9 JB |
9611 | #endif |
9612 | ||
b93a436e JL |
9613 | STRIP_NOPS (arg0); |
9614 | STRIP_NOPS (arg1); | |
ca695ac9 | 9615 | |
b93a436e JL |
9616 | /* Get the rtx comparison code to use. We know that EXP is a comparison |
9617 | operation of some type. Some comparisons against 1 and -1 can be | |
9618 | converted to comparisons with zero. Do so here so that the tests | |
9619 | below will be aware that we have a comparison with zero. These | |
9620 | tests will not catch constants in the first operand, but constants | |
9621 | are rarely passed as the first operand. */ | |
ca695ac9 | 9622 | |
b93a436e JL |
9623 | switch (TREE_CODE (exp)) |
9624 | { | |
9625 | case EQ_EXPR: | |
9626 | code = EQ; | |
bbf6f052 | 9627 | break; |
b93a436e JL |
9628 | case NE_EXPR: |
9629 | code = NE; | |
bbf6f052 | 9630 | break; |
b93a436e JL |
9631 | case LT_EXPR: |
9632 | if (integer_onep (arg1)) | |
9633 | arg1 = integer_zero_node, code = unsignedp ? LEU : LE; | |
9634 | else | |
9635 | code = unsignedp ? LTU : LT; | |
ca695ac9 | 9636 | break; |
b93a436e JL |
9637 | case LE_EXPR: |
9638 | if (! unsignedp && integer_all_onesp (arg1)) | |
9639 | arg1 = integer_zero_node, code = LT; | |
9640 | else | |
9641 | code = unsignedp ? LEU : LE; | |
ca695ac9 | 9642 | break; |
b93a436e JL |
9643 | case GT_EXPR: |
9644 | if (! unsignedp && integer_all_onesp (arg1)) | |
9645 | arg1 = integer_zero_node, code = GE; | |
9646 | else | |
9647 | code = unsignedp ? GTU : GT; | |
9648 | break; | |
9649 | case GE_EXPR: | |
9650 | if (integer_onep (arg1)) | |
9651 | arg1 = integer_zero_node, code = unsignedp ? GTU : GT; | |
9652 | else | |
9653 | code = unsignedp ? GEU : GE; | |
ca695ac9 | 9654 | break; |
1eb8759b RH |
9655 | |
9656 | case UNORDERED_EXPR: | |
9657 | code = UNORDERED; | |
9658 | break; | |
9659 | case ORDERED_EXPR: | |
9660 | code = ORDERED; | |
9661 | break; | |
9662 | case UNLT_EXPR: | |
9663 | code = UNLT; | |
9664 | break; | |
9665 | case UNLE_EXPR: | |
9666 | code = UNLE; | |
9667 | break; | |
9668 | case UNGT_EXPR: | |
9669 | code = UNGT; | |
9670 | break; | |
9671 | case UNGE_EXPR: | |
9672 | code = UNGE; | |
9673 | break; | |
9674 | case UNEQ_EXPR: | |
9675 | code = UNEQ; | |
9676 | break; | |
d1a7edaf PB |
9677 | case LTGT_EXPR: |
9678 | code = LTGT; | |
9679 | break; | |
1eb8759b | 9680 | |
ca695ac9 | 9681 | default: |
b93a436e | 9682 | abort (); |
bbf6f052 | 9683 | } |
bbf6f052 | 9684 | |
b93a436e JL |
9685 | /* Put a constant second. */ |
9686 | if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST) | |
9687 | { | |
9688 | tem = arg0; arg0 = arg1; arg1 = tem; | |
9689 | code = swap_condition (code); | |
ca695ac9 | 9690 | } |
bbf6f052 | 9691 | |
b93a436e JL |
9692 | /* If this is an equality or inequality test of a single bit, we can |
9693 | do this by shifting the bit being tested to the low-order bit and | |
9694 | masking the result with the constant 1. If the condition was EQ, | |
9695 | we xor it with 1. This does not require an scc insn and is faster | |
7960bf22 JL |
9696 | than an scc insn even if we have it. |
9697 | ||
9698 | The code to make this transformation was moved into fold_single_bit_test, | |
9699 | so we just call into the folder and expand its result. */ | |
d39985fa | 9700 | |
b93a436e JL |
9701 | if ((code == NE || code == EQ) |
9702 | && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) | |
9703 | && integer_pow2p (TREE_OPERAND (arg0, 1))) | |
60cd4dae | 9704 | { |
ae2bcd98 | 9705 | tree type = lang_hooks.types.type_for_mode (mode, unsignedp); |
60cd4dae | 9706 | return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR, |
450b1728 | 9707 | arg0, arg1, type), |
60cd4dae JL |
9708 | target, VOIDmode, EXPAND_NORMAL); |
9709 | } | |
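  /* For instance (illustrative only): with code == NE, a test such as
     (x & 8) != 0 is rewritten by fold_single_bit_test into ((x >> 3) & 1),
     avoiding both the scc insn and a branch.  */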
bbf6f052 | 9710 | |
b93a436e | 9711 | /* Now see if we are likely to be able to do this. Return if not. */ |
1eb8759b | 9712 | if (! can_compare_p (code, operand_mode, ccp_store_flag)) |
b93a436e | 9713 | return 0; |
1eb8759b | 9714 | |
b93a436e JL |
9715 | icode = setcc_gen_code[(int) code]; |
9716 | if (icode == CODE_FOR_nothing | |
a995e389 | 9717 | || (only_cheap && insn_data[(int) icode].operand[0].mode != mode)) |
ca695ac9 | 9718 | { |
b93a436e JL |
9719 | /* We can only do this if it is one of the special cases that |
9720 | can be handled without an scc insn. */ | |
9721 | if ((code == LT && integer_zerop (arg1)) | |
9722 | || (! only_cheap && code == GE && integer_zerop (arg1))) | |
9723 | ; | |
9724 | else if (BRANCH_COST >= 0 | |
9725 | && ! only_cheap && (code == NE || code == EQ) | |
9726 | && TREE_CODE (type) != REAL_TYPE | |
9727 | && ((abs_optab->handlers[(int) operand_mode].insn_code | |
9728 | != CODE_FOR_nothing) | |
9729 | || (ffs_optab->handlers[(int) operand_mode].insn_code | |
9730 | != CODE_FOR_nothing))) | |
9731 | ; | |
9732 | else | |
9733 | return 0; | |
ca695ac9 | 9734 | } |
3a94c984 | 9735 | |
296b4ed9 | 9736 | if (! get_subtarget (target) |
e3be1116 | 9737 | || GET_MODE (subtarget) != operand_mode) |
b93a436e JL |
9738 | subtarget = 0; |
9739 | ||
eb698c58 | 9740 | expand_operands (arg0, arg1, subtarget, &op0, &op1, 0); |
b93a436e JL |
9741 | |
9742 | if (target == 0) | |
9743 | target = gen_reg_rtx (mode); | |
9744 | ||
9745 | /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe | |
9746 | because, if the emit_store_flag does anything it will succeed and | |
9747 | OP0 and OP1 will not be used subsequently. */ | |
ca695ac9 | 9748 | |
b93a436e JL |
9749 | result = emit_store_flag (target, code, |
9750 | queued_subexp_p (op0) ? copy_rtx (op0) : op0, | |
9751 | queued_subexp_p (op1) ? copy_rtx (op1) : op1, | |
9752 | operand_mode, unsignedp, 1); | |
ca695ac9 | 9753 | |
b93a436e JL |
9754 | if (result) |
9755 | { | |
9756 | if (invert) | |
9757 | result = expand_binop (mode, xor_optab, result, const1_rtx, | |
9758 | result, 0, OPTAB_LIB_WIDEN); | |
9759 | return result; | |
ca695ac9 | 9760 | } |
bbf6f052 | 9761 | |
b93a436e | 9762 | /* If this failed, we have to do this with set/compare/jump/set code. */ |
f8cfc6aa | 9763 | if (!REG_P (target) |
b93a436e JL |
9764 | || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1)) |
9765 | target = gen_reg_rtx (GET_MODE (target)); | |
9766 | ||
9767 | emit_move_insn (target, invert ? const0_rtx : const1_rtx); | |
9768 | result = compare_from_rtx (op0, op1, code, unsignedp, | |
a06ef755 | 9769 | operand_mode, NULL_RTX); |
b93a436e JL |
9770 | if (GET_CODE (result) == CONST_INT) |
9771 | return (((result == const0_rtx && ! invert) | |
9772 | || (result != const0_rtx && invert)) | |
9773 | ? const0_rtx : const1_rtx); | |
ca695ac9 | 9774 | |
8f08e8c0 JL |
9775 | /* The code of RESULT may not match CODE if compare_from_rtx |
9776 | decided to swap its operands and reverse the original code. | |
9777 | ||
9778 | We know that compare_from_rtx returns either a CONST_INT or | |
9779 | a new comparison code, so it is safe to just extract the | |
9780 | code from RESULT. */ | |
9781 | code = GET_CODE (result); | |
9782 | ||
b93a436e JL |
9783 | label = gen_label_rtx (); |
9784 | if (bcc_gen_fctn[(int) code] == 0) | |
9785 | abort (); | |
0f41302f | 9786 | |
b93a436e JL |
9787 | emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label)); |
9788 | emit_move_insn (target, invert ? const1_rtx : const0_rtx); | |
9789 | emit_label (label); | |
bbf6f052 | 9790 | |
b93a436e | 9791 | return target; |
ca695ac9 | 9792 | } |
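/* Illustrative sketch (hypothetical, not compiled): the two strategies
   above, pictured at the source level.  With a store-flag insn the whole
   comparison becomes a single instruction; otherwise the
   set/compare/jump/set fallback is emitted.  */
#if 0
static int
store_flag_fast (int a, int b)
{
  return a > b;		/* one scc insn: target = (a > b) */
}

static int
store_flag_fallback (int a, int b)
{
  int t = 1;		/* emit_move_insn (target, const1_rtx) */
  if (a > b)
    goto done;		/* branch taken when the comparison holds */
  t = 0;		/* emit_move_insn (target, const0_rtx) */
 done:
  return t;
}
#endif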
b93a436e | 9793 | \f |
b93a436e | 9794 | |
ad82abb8 ZW |
9795 | /* Stubs in case we haven't got a casesi insn. */ |
9796 | #ifndef HAVE_casesi | |
9797 | # define HAVE_casesi 0 | |
9798 | # define gen_casesi(a, b, c, d, e) (0) | |
9799 | # define CODE_FOR_casesi CODE_FOR_nothing | |
9800 | #endif | |
9801 | ||
9802 | /* If the machine does not have a case insn that compares the bounds, | |
9803 | this means extra overhead for dispatch tables, which raises the | |
9804 | threshold for using them. */ | |
9805 | #ifndef CASE_VALUES_THRESHOLD | |
9806 | #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5) | |
9807 | #endif /* CASE_VALUES_THRESHOLD */ | |
9808 | ||
9809 | unsigned int | |
502b8322 | 9810 | case_values_threshold (void) |
ad82abb8 ZW |
9811 | { |
9812 | return CASE_VALUES_THRESHOLD; | |
9813 | } | |
9814 | ||
9815 | /* Attempt to generate a casesi instruction. Returns 1 if successful, | |
9816 | 0 otherwise (i.e. if there is no casesi instruction). */ | |
9817 | int | |
502b8322 AJ |
9818 | try_casesi (tree index_type, tree index_expr, tree minval, tree range, |
9819 | rtx table_label ATTRIBUTE_UNUSED, rtx default_label) | |
ad82abb8 ZW |
9820 | { |
9821 | enum machine_mode index_mode = SImode; | |
9822 | int index_bits = GET_MODE_BITSIZE (index_mode); | |
9823 | rtx op1, op2, index; | |
9824 | enum machine_mode op_mode; | |
9825 | ||
9826 | if (! HAVE_casesi) | |
9827 | return 0; | |
9828 | ||
9829 | /* Convert the index to SImode. */ | |
9830 | if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode)) | |
9831 | { | |
9832 | enum machine_mode omode = TYPE_MODE (index_type); | |
9833 | rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0); | |
9834 | ||
9835 | /* We must handle the endpoints in the original mode. */ | |
9836 | index_expr = build (MINUS_EXPR, index_type, | |
9837 | index_expr, minval); | |
9838 | minval = integer_zero_node; | |
9839 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
9840 | emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX, | |
a06ef755 | 9841 | omode, 1, default_label); |
ad82abb8 ZW |
9842 | /* Now we can safely truncate. */ |
9843 | index = convert_to_mode (index_mode, index, 0); | |
9844 | } | |
9845 | else | |
9846 | { | |
9847 | if (TYPE_MODE (index_type) != index_mode) | |
9848 | { | |
ae2bcd98 | 9849 | index_expr = convert (lang_hooks.types.type_for_size |
b0c48229 | 9850 | (index_bits, 0), index_expr); |
ad82abb8 ZW |
9851 | index_type = TREE_TYPE (index_expr); |
9852 | } | |
9853 | ||
9854 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
9855 | } | |
9856 | emit_queue (); | |
9857 | index = protect_from_queue (index, 0); | |
9858 | do_pending_stack_adjust (); | |
9859 | ||
9860 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode; | |
9861 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate) | |
9862 | (index, op_mode)) | |
9863 | index = copy_to_mode_reg (op_mode, index); | |
e87b4f3f | 9864 | |
ad82abb8 ZW |
9865 | op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0); |
9866 | ||
9867 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode; | |
9868 | op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)), | |
8df83eae | 9869 | op1, TYPE_UNSIGNED (TREE_TYPE (minval))); |
ad82abb8 ZW |
9870 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate) |
9871 | (op1, op_mode)) | |
9872 | op1 = copy_to_mode_reg (op_mode, op1); | |
9873 | ||
9874 | op2 = expand_expr (range, NULL_RTX, VOIDmode, 0); | |
9875 | ||
9876 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode; | |
9877 | op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)), | |
8df83eae | 9878 | op2, TYPE_UNSIGNED (TREE_TYPE (range))); |
ad82abb8 ZW |
9879 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate) |
9880 | (op2, op_mode)) | |
9881 | op2 = copy_to_mode_reg (op_mode, op2); | |
9882 | ||
9883 | emit_jump_insn (gen_casesi (index, op1, op2, | |
9884 | table_label, default_label)); | |
9885 | return 1; | |
9886 | } | |
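/* For instance (illustrative): on a casesi target, a switch whose index
   expression is wider than SImode is first range-checked in its original
   mode and only then truncated to the SImode that the casesi pattern
   expects, as done above.  */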
9887 | ||
9888 | /* Attempt to generate a tablejump instruction; same concept. */ | |
9889 | #ifndef HAVE_tablejump | |
9890 | #define HAVE_tablejump 0 | |
9891 | #define gen_tablejump(x, y) (0) | |
9892 | #endif | |
9893 | ||
9894 | /* Subroutine of the next function. | |
9895 | ||
9896 | INDEX is the value being switched on, with the lowest value | |
b93a436e JL |
9897 | in the table already subtracted. |
9898 | MODE is its expected mode (needed if INDEX is constant). | |
9899 | RANGE is the length of the jump table. | |
9900 | TABLE_LABEL is a CODE_LABEL rtx for the table itself. | |
88d3b7f0 | 9901 | |
b93a436e JL |
9902 | DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the |
9903 | index value is out of range. */ | |
0f41302f | 9904 | |
ad82abb8 | 9905 | static void |
502b8322 AJ |
9906 | do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label, |
9907 | rtx default_label) | |
ca695ac9 | 9908 | { |
b3694847 | 9909 | rtx temp, vector; |
88d3b7f0 | 9910 | |
74f6d071 JH |
9911 | if (INTVAL (range) > cfun->max_jumptable_ents) |
9912 | cfun->max_jumptable_ents = INTVAL (range); | |
1877be45 | 9913 | |
b93a436e JL |
9914 | /* Do an unsigned comparison (in the proper mode) between the index |
9915 | expression and the value which represents the length of the range. | |
9916 | Since we just finished subtracting the lower bound of the range | |
9917 | from the index expression, this comparison allows us to simultaneously | |
9918 | check that the original index expression value is both greater than | |
9919 | or equal to the minimum value of the range and less than or equal to | |
9920 | the maximum value of the range. */ | |
709f5be1 | 9921 | |
c5d5d461 | 9922 | emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1, |
a06ef755 | 9923 | default_label); |
bbf6f052 | 9924 | |
b93a436e JL |
9925 | /* If index is in range, it must fit in Pmode. |
9926 | Convert to Pmode so we can index with it. */ | |
9927 | if (mode != Pmode) | |
9928 | index = convert_to_mode (Pmode, index, 1); | |
bbf6f052 | 9929 | |
ba228239 | 9930 | /* Don't let a MEM slip through, because then the INDEX that comes |
b93a436e JL |
9931 | out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, |
9932 | and break_out_memory_refs will go to work on it and mess it up. */ | |
9933 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
f8cfc6aa | 9934 | if (flag_pic && !REG_P (index)) |
b93a436e JL |
9935 | index = copy_to_mode_reg (Pmode, index); |
9936 | #endif | |
ca695ac9 | 9937 | |
b93a436e JL |
9938 | /* If flag_force_addr were to affect this address |
9939 | it could interfere with the tricky assumptions made | |
9940 | about addresses that contain label-refs, | |
9941 | which may be valid only very near the tablejump itself. */ | |
9942 | /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the | |
9943 | GET_MODE_SIZE, because this indicates how large insns are. The other | |
9944 | uses should all be Pmode, because they are addresses. This code | |
9945 | could fail if addresses and insns are not the same size. */ | |
9946 | index = gen_rtx_PLUS (Pmode, | |
9947 | gen_rtx_MULT (Pmode, index, | |
9948 | GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))), | |
9949 | gen_rtx_LABEL_REF (Pmode, table_label)); | |
9950 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
9951 | if (flag_pic) | |
9952 | index = PIC_CASE_VECTOR_ADDRESS (index); | |
9953 | else | |
bbf6f052 | 9954 | #endif |
b93a436e JL |
9955 | index = memory_address_noforce (CASE_VECTOR_MODE, index); |
9956 | temp = gen_reg_rtx (CASE_VECTOR_MODE); | |
9957 | vector = gen_rtx_MEM (CASE_VECTOR_MODE, index); | |
9958 | RTX_UNCHANGING_P (vector) = 1; | |
4da2eb6b | 9959 | MEM_NOTRAP_P (vector) = 1; |
b93a436e JL |
9960 | convert_move (temp, vector, 0); |
9961 | ||
9962 | emit_jump_insn (gen_tablejump (temp, table_label)); | |
9963 | ||
9964 | /* If we are generating PIC code or if the table is PC-relative, the | |
9965 | table and JUMP_INSN must be adjacent, so don't output a BARRIER. */ | |
9966 | if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic) | |
9967 | emit_barrier (); | |
bbf6f052 | 9968 | } |
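/* Illustrative sketch (hypothetical, not compiled): the single unsigned
   comparison used above.  Once the lower bound has been subtracted, one
   GTU test rejects values below MIN (they wrap around to huge unsigned
   values) as well as values above MAX.  */
#if 0
static int
in_range (int x, int min, int max)
{
  /* Equivalent to (x >= min && x <= max) whenever max >= min.  */
  return (unsigned int) x - (unsigned int) min
	 <= (unsigned int) max - (unsigned int) min;
}
#endif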
b93a436e | 9969 | |
ad82abb8 | 9970 | int |
502b8322 AJ |
9971 | try_tablejump (tree index_type, tree index_expr, tree minval, tree range, |
9972 | rtx table_label, rtx default_label) | |
ad82abb8 ZW |
9973 | { |
9974 | rtx index; | |
9975 | ||
9976 | if (! HAVE_tablejump) | |
9977 | return 0; | |
9978 | ||
9979 | index_expr = fold (build (MINUS_EXPR, index_type, | |
9980 | convert (index_type, index_expr), | |
9981 | convert (index_type, minval))); | |
9982 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
9983 | emit_queue (); | |
9984 | index = protect_from_queue (index, 0); | |
9985 | do_pending_stack_adjust (); | |
9986 | ||
9987 | do_tablejump (index, TYPE_MODE (index_type), | |
9988 | convert_modes (TYPE_MODE (index_type), | |
9989 | TYPE_MODE (TREE_TYPE (range)), | |
9990 | expand_expr (range, NULL_RTX, | |
9991 | VOIDmode, 0), | |
8df83eae | 9992 | TYPE_UNSIGNED (TREE_TYPE (range))), |
ad82abb8 ZW |
9993 | table_label, default_label); |
9994 | return 1; | |
9995 | } | |
e2500fed | 9996 | |
cb2a532e AH |
9997 | /* Nonzero if the mode is a valid vector mode for this architecture. |
9998 | This returns nonzero even if there is no hardware support for the | |
9999 | vector mode, but we can emulate with narrower modes. */ | |
10000 | ||
10001 | int | |
502b8322 | 10002 | vector_mode_valid_p (enum machine_mode mode) |
cb2a532e AH |
10003 | { |
10004 | enum mode_class class = GET_MODE_CLASS (mode); | |
10005 | enum machine_mode innermode; | |
10006 | ||
10007 | /* Doh! What's going on? */ | |
10008 | if (class != MODE_VECTOR_INT | |
10009 | && class != MODE_VECTOR_FLOAT) | |
10010 | return 0; | |
10011 | ||
10012 | /* Hardware support. Woo hoo! */ | |
10013 | if (VECTOR_MODE_SUPPORTED_P (mode)) | |
10014 | return 1; | |
10015 | ||
10016 | innermode = GET_MODE_INNER (mode); | |
10017 | ||
10018 | /* We should probably return 1 if requesting V4DI when we have no DI |
10019 | but do have V2DI, though that case is probably very unlikely. */ |
10020 | ||
10021 | /* If we have support for the inner mode, we can safely emulate it. | |
10022 | We may not have V2DI, but we can emulate with a pair of DIs. */ | |
10023 | return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing; | |
10024 | } | |
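/* Illustrative sketch (hypothetical, GNU C): even on a target without V2DI
   hardware, the following is accepted, because DImode support lets the
   vector be moved as a pair of DIs:

     typedef long long v2di __attribute__ ((vector_size (16)));
     v2di copy_v2di (v2di x) { return x; }  */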
10025 | ||
d744e06e AH |
10026 | /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */ |
10027 | static rtx | |
502b8322 | 10028 | const_vector_from_tree (tree exp) |
d744e06e AH |
10029 | { |
10030 | rtvec v; | |
10031 | int units, i; | |
10032 | tree link, elt; | |
10033 | enum machine_mode inner, mode; | |
10034 | ||
10035 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
10036 | ||
6de9cd9a | 10037 | if (initializer_zerop (exp)) |
d744e06e AH |
10038 | return CONST0_RTX (mode); |
10039 | ||
10040 | units = GET_MODE_NUNITS (mode); | |
10041 | inner = GET_MODE_INNER (mode); | |
10042 | ||
10043 | v = rtvec_alloc (units); | |
10044 | ||
10045 | link = TREE_VECTOR_CST_ELTS (exp); | |
10046 | for (i = 0; link; link = TREE_CHAIN (link), ++i) | |
10047 | { | |
10048 | elt = TREE_VALUE (link); | |
10049 | ||
10050 | if (TREE_CODE (elt) == REAL_CST) | |
10051 | RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt), | |
10052 | inner); | |
10053 | else | |
10054 | RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt), | |
10055 | TREE_INT_CST_HIGH (elt), | |
10056 | inner); | |
10057 | } | |
10058 | ||
5f6c070d AH |
10059 | /* Initialize remaining elements to 0. */ |
10060 | for (; i < units; ++i) | |
10061 | RTVEC_ELT (v, i) = CONST0_RTX (inner); | |
10062 | ||
d744e06e AH |
10063 | return gen_rtx_raw_CONST_VECTOR (mode, v); |
10064 | } | |
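/* Illustrative sketch (hypothetical, GNU C): for

     typedef int v4si __attribute__ ((vector_size (16)));
     static v4si v = { 1, 2 };

   the VECTOR_CST chain lists only the elements 1 and 2, so the loop above
   emits those two and the tail loop zero-fills the remaining lanes.  */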
e2500fed | 10065 | #include "gt-expr.h" |