/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
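
/* For illustration only (both values are target-specific): on a target
   where MOVE_MAX_PIECES is 8 and MOVE_RATIO is 3, a 16-byte copy with
   64-bit alignment takes two DImode moves, so MOVE_BY_PIECES_P (16, 64)
   is true and the copy is expanded inline rather than via memcpy.  */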

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
                  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
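
  /* For example, with a 32-bit HOST_WIDE_INT, converting (const_int -1)
     (all ones) to an unsigned 64-bit mode must yield a constant whose
     high-order word is zero, which a plain gen_lowpart would not give.  */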

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.   This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
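
/* For instance, on a host whose HOST_WIDE_INT is 64 bits, an immediate
   constant can describe at most 2 * 8 = 16 bytes, so this expands to
   MIN (MOVE_MAX_PIECES, 16).  */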

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

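  /* Normalize ALIGN: cap it at the alignment of the widest integer mode
     we can use, or, when the claimed alignment is smaller, raise it to
     the alignment of the widest mode that is still cheap to access at
     that alignment.  */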
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
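    /* FN is used only so REG_PARM_STACK_SPACE can inspect it; the cast
       below keeps the variable from looking unused when the macro
       ignores its argument.  */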
1200 | (void) fn; | |
1201 | if (REG_PARM_STACK_SPACE (fn) != 0) | |
1202 | return false; | |
1203 | } | |
44bb111a | 1204 | #endif |
44bb111a | 1205 | |
a357a6d4 GK |
1206 | /* If any argument goes in memory, then it might clobber an outgoing |
1207 | argument. */ | |
1208 | { | |
1209 | CUMULATIVE_ARGS args_so_far; | |
1210 | tree fn, arg; | |
450b1728 | 1211 | |
a357a6d4 | 1212 | fn = emit_block_move_libcall_fn (false); |
0f6937fe | 1213 | INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3); |
450b1728 | 1214 | |
a357a6d4 GK |
1215 | arg = TYPE_ARG_TYPES (TREE_TYPE (fn)); |
1216 | for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg)) | |
1217 | { | |
1218 | enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg)); | |
1219 | rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1); | |
1220 | if (!tmp || !REG_P (tmp)) | |
44bb111a | 1221 | return false; |
a357a6d4 GK |
1222 | if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, |
1223 | NULL_TREE, 1)) | |
1224 | return false; | |
a357a6d4 GK |
1225 | FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1); |
1226 | } | |
1227 | } | |
1228 | return true; | |
44bb111a RH |
1229 | } |
1230 | ||
70128ad9 | 1231 | /* A subroutine of emit_block_move. Expand a movmem pattern; |
4ca79136 | 1232 | return true if successful. */ |
3ef1eef4 | 1233 | |
4ca79136 | 1234 | static bool |
70128ad9 | 1235 | emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align) |
4ca79136 | 1236 | { |
4ca79136 | 1237 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); |
a5e9c810 | 1238 | int save_volatile_ok = volatile_ok; |
4ca79136 | 1239 | enum machine_mode mode; |
266007a7 | 1240 | |
4ca79136 RH |
1241 | /* Since this is a move insn, we don't care about volatility. */ |
1242 | volatile_ok = 1; | |
1243 | ||
ee960939 OH |
1244 | /* Try the most limited insn first, because there's no point |
1245 | including more than one in the machine description unless | |
1246 | the more limited one has some advantage. */ | |
1247 | ||
4ca79136 RH |
1248 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; |
1249 | mode = GET_MODE_WIDER_MODE (mode)) | |
1250 | { | |
70128ad9 | 1251 | enum insn_code code = movmem_optab[(int) mode]; |
4ca79136 RH |
1252 | insn_operand_predicate_fn pred; |
1253 | ||
1254 | if (code != CODE_FOR_nothing | |
1255 | /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT | |
1256 | here because if SIZE is less than the mode mask, as it is | |
1257 | returned by the macro, it will definitely be less than the | |
1258 | actual mode mask. */ | |
1259 | && ((GET_CODE (size) == CONST_INT | |
1260 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
1261 | <= (GET_MODE_MASK (mode) >> 1))) | |
1262 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) | |
1263 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 | |
1264 | || (*pred) (x, BLKmode)) | |
1265 | && ((pred = insn_data[(int) code].operand[1].predicate) == 0 | |
1266 | || (*pred) (y, BLKmode)) | |
1267 | && ((pred = insn_data[(int) code].operand[3].predicate) == 0 | |
1268 | || (*pred) (opalign, VOIDmode))) | |
1269 | { | |
1270 | rtx op2; | |
1271 | rtx last = get_last_insn (); | |
1272 | rtx pat; | |
1273 | ||
1274 | op2 = convert_to_mode (mode, size, 1); | |
1275 | pred = insn_data[(int) code].operand[2].predicate; | |
1276 | if (pred != 0 && ! (*pred) (op2, mode)) | |
1277 | op2 = copy_to_mode_reg (mode, op2); | |
1278 | ||
1279 | /* ??? When called via emit_block_move_for_call, it'd be | |
1280 | nice if there were some way to inform the backend, so | |
1281 | that it doesn't fail the expansion because it thinks | |
1282 | emitting the libcall would be more efficient. */ | |
1283 | ||
1284 | pat = GEN_FCN ((int) code) (x, y, op2, opalign); | |
1285 | if (pat) | |
1286 | { | |
1287 | emit_insn (pat); | |
a5e9c810 | 1288 | volatile_ok = save_volatile_ok; |
4ca79136 | 1289 | return true; |
bbf6f052 | 1290 | } |
4ca79136 RH |
1291 | else |
1292 | delete_insns_since (last); | |
bbf6f052 | 1293 | } |
4ca79136 | 1294 | } |
bbf6f052 | 1295 | |
a5e9c810 | 1296 | volatile_ok = save_volatile_ok; |
4ca79136 RH |
1297 | return false; |
1298 | } | |

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
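
/* For illustration, the CALL_EXPR built above is equivalent to expanding
   the source-level call (with hypothetical operand names)

     memcpy ((void *) dst_addr, (const void *) src_addr, (size_t) size);

   through the normal call machinery, so the rtx returned carries whatever
   value memcpy returns under the target's usual calling convention.  */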

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
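
/* A sketch of the ASMSPEC path above: given a user declaration along the
   lines of "void *memcpy (...) __asm__ ("__xyz_copy");" (a hypothetical
   symbol), the front end can call init_block_move_fn ("__xyz_copy"), and
   set_user_assembler_name then makes the decl above assemble to that
   symbol instead of "memcpy".  */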

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
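
/* The RTL emitted above behaves like this byte-copy loop (a sketch in C
   of the generated control flow, not code that appears anywhere):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter += 1;
   cmp:
     if (iter < size)
       goto top;

   Note that a SIZE of zero falls straight through to CMP_LABEL and
   copies nothing.  */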
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
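
/* Example: for a DImode value passed in two word-sized hard registers
   starting at (hypothetical) regno 4 on a 32-bit target,

     move_block_to_reg (4, x, 2, DImode);

   fills hard regs 4 and 5 from the two subwords of X, using a single
   load_multiple insn when the target provides one.  */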

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
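
/* For example (hypothetical modes and hard register numbers), a 16-byte
   value passed in two DImode registers might be described as

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   where each offset gives the byte position of that register's piece
   within the whole value; gen_group_rtx clones such a PARALLEL with
   fresh pseudos in place of the hard registers.  */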

/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  gcc_assert (GET_CODE (dst) == PARALLEL);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
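
/* A worked example of the trailing-fragment case above: loading a
   12-byte structure (SSIZE == 12) into a PARALLEL of two DImode
   registers at byte positions 0 and 8 leaves only 4 valid bytes for
   the second piece, so BYTELEN drops from 8 to 4; on a big-endian
   (or upward-padded) target the fragment is then shifted left by
   (8 - 4) * 8 == 32 bits so it lands in the register's most
   significant half.  */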

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  gcc_assert (GET_CODE (src) == PARALLEL);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
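
/* A small worked example of PADDING_CORRECTION: for a 2-byte structure
   returned in a 32-bit register on a big-endian target (hypothetical
   sizes), bytes % UNITS_PER_WORD == 2, so the correction is
   32 - 16 == 16 bits; extraction then starts 16 bits into the source
   word, skipping the left padding the ABI placed before the value.  */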

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
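
/* Each call prepends to the list, so after, say, use_regs (&fusage, 4, 2)
   on an initially empty FUSAGE the result looks like (hypothetical regno):

     (expr_list (use (reg 5))
        (expr_list (use (reg 4))
           (nil)))

   which is later recorded as the call insn's CALL_INSN_FUNCTION_USAGE.  */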
\f

/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
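
/* A sketch of the CONSTFUN contract (a hypothetical caller; the builtin
   expanders use callbacks of this shape, cf. c_readstr in builtins.c,
   to expand copies of string literals):

     static rtx
     str_piece (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
     {
       // Build an immediate of MODE from the bytes at DATA + OFFSET.
       return c_readstr ((const char *) data + offset, mode);
     }

     if (can_store_by_pieces (len, str_piece, (void *) str, align))
       store_by_pieces (dest_mem, len, str_piece, (void *) str, align, 0);

   The callback must return a constant rtx of MODE for the bytes at
   OFFSET and must be safe to call any number of times, since
   can_store_by_pieces dry-runs the same sequence first.  */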

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
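
/* Example of the mode walk above: storing 7 bytes with 4-byte alignment
   on a typical 32-bit target emits one SImode store, one HImode store
   and one QImode store, largest pieces first, so the residue shrinks
   at every step until DATA->LEN reaches zero.  */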

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      if (size == const0_rtx)
        ;
      else if (GET_CODE (size) == CONST_INT
               && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrmem (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
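
/* The dispatch above thus tries, in order: a single full-width zero move
   (when OBJECT isn't BLKmode), nothing at all for a zero SIZE, piecewise
   stores of zero, the target's clrmem pattern, and finally a real call
   to memset; only the memset path produces a return value, which is why
   RETVAL stays 0 on every other path.  */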

/* A subroutine of clear_storage.  Expand a clrmem pattern;
   return true if successful.  */

static bool
clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}

/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those new pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  size_tree = make_tree (sizetype, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}

/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
\f
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (MEM_P (y)
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
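
/* Note the REG_EQUAL bookkeeping above: if a constant Y had to be
   spilled to the constant pool, the final move loads from memory, yet
   the note still records the original constant so later passes (cse,
   for instance) can see the register's known value.  */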
2535 | ||
2536 | /* Low level part of emit_move_insn. | |
2537 | Called just like emit_move_insn, but assumes X and Y | |
2538 | are basically valid. */ | |
2539 | ||
2540 | rtx | |
502b8322 | 2541 | emit_move_insn_1 (rtx x, rtx y) |
261c4230 RS |
2542 | { |
2543 | enum machine_mode mode = GET_MODE (x); | |
2544 | enum machine_mode submode; | |
2545 | enum mode_class class = GET_MODE_CLASS (mode); | |
261c4230 | 2546 | |
5b0264cb | 2547 | gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE); |
76bbe028 | 2548 | |
bbf6f052 RK |
2549 | if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) |
2550 | return | |
2551 | emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y)); | |
2552 | ||
89742723 | 2553 | /* Expand complex moves by moving real part and imag part, if possible. */ |
7308a047 | 2554 | else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT) |
27e58a70 | 2555 | && BLKmode != (submode = GET_MODE_INNER (mode)) |
7308a047 RS |
2556 | && (mov_optab->handlers[(int) submode].insn_code |
2557 | != CODE_FOR_nothing)) | |
2558 | { | |
2559 | /* Don't split destination if it is a stack push. */ | |
2560 | int stack = push_operand (x, GET_MODE (x)); | |
7308a047 | 2561 | |
79ce92d7 | 2562 | #ifdef PUSH_ROUNDING |
0e9cbd11 KH |
2563 | /* In case we output to the stack, but the size is smaller than the |
2564 | machine can push exactly, we need to use move instructions. */ | |
1a06f5fe | 2565 | if (stack |
bb93b973 RK |
2566 | && (PUSH_ROUNDING (GET_MODE_SIZE (submode)) |
2567 | != GET_MODE_SIZE (submode))) | |
1a06f5fe JH |
2568 | { |
2569 | rtx temp; | |
bb93b973 | 2570 | HOST_WIDE_INT offset1, offset2; |
1a06f5fe JH |
2571 | |
2572 | /* Do not use anti_adjust_stack, since we don't want to update | |
2573 | stack_pointer_delta. */ | |
2574 | temp = expand_binop (Pmode, | |
2575 | #ifdef STACK_GROWS_DOWNWARD | |
2576 | sub_optab, | |
2577 | #else | |
2578 | add_optab, | |
2579 | #endif | |
2580 | stack_pointer_rtx, | |
2581 | GEN_INT | |
bb93b973 RK |
2582 | (PUSH_ROUNDING |
2583 | (GET_MODE_SIZE (GET_MODE (x)))), | |
2584 | stack_pointer_rtx, 0, OPTAB_LIB_WIDEN); | |
2585 | ||
1a06f5fe JH |
2586 | if (temp != stack_pointer_rtx) |
2587 | emit_move_insn (stack_pointer_rtx, temp); | |
bb93b973 | 2588 | |
1a06f5fe JH |
2589 | #ifdef STACK_GROWS_DOWNWARD |
2590 | offset1 = 0; | |
2591 | offset2 = GET_MODE_SIZE (submode); | |
2592 | #else | |
2593 | offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))); | |
2594 | offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))) | |
2595 | + GET_MODE_SIZE (submode)); | |
2596 | #endif | |
bb93b973 | 2597 | |
1a06f5fe JH |
2598 | emit_move_insn (change_address (x, submode, |
2599 | gen_rtx_PLUS (Pmode, | |
2600 | stack_pointer_rtx, | |
2601 | GEN_INT (offset1))), | |
2602 | gen_realpart (submode, y)); | |
2603 | emit_move_insn (change_address (x, submode, | |
2604 | gen_rtx_PLUS (Pmode, | |
2605 | stack_pointer_rtx, | |
2606 | GEN_INT (offset2))), | |
2607 | gen_imagpart (submode, y)); | |
2608 | } | |
e9c0bd54 | 2609 | else |
79ce92d7 | 2610 | #endif |
7308a047 RS |
2611 | /* If this is a stack, push the highpart first, so it |
2612 | will be in the argument order. | |
2613 | ||
2614 | In that case, change_address is used only to convert | |
2615 | the mode, not to change the address. */ | |
e9c0bd54 | 2616 | if (stack) |
c937357e | 2617 | { |
e33c0d66 RS |
2618 | /* Note that the real part always precedes the imag part in memory |
2619 | regardless of machine's endianness. */ | |
c937357e | 2620 | #ifdef STACK_GROWS_DOWNWARD |
a79b3dc7 RS |
2621 | emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), |
2622 | gen_imagpart (submode, y)); | |
2623 | emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), | |
2624 | gen_realpart (submode, y)); | |
c937357e | 2625 | #else |
a79b3dc7 RS |
2626 | emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), |
2627 | gen_realpart (submode, y)); | |
2628 | emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), | |
2629 | gen_imagpart (submode, y)); | |
c937357e RS |
2630 | #endif |
2631 | } | |
2632 | else | |
2633 | { | |
235ae7be DM |
2634 | rtx realpart_x, realpart_y; |
2635 | rtx imagpart_x, imagpart_y; | |
2636 | ||
405f63da MM |
2637 | /* If this is a complex value with each part being smaller than a |
2638 | word, the usual calling sequence will likely pack the pieces into | |
2639 | a single register. Unfortunately, SUBREG of hard registers only | |
2640 | deals in terms of words, so we have a problem converting input | |
2641 | arguments to the CONCAT of two registers that is used elsewhere | |
2642 | for complex values. If this is before reload, we can copy it into | |
2643 | memory and reload. FIXME, we should see about using extract and | |
2644 | insert on integer registers, but complex short and complex char | |
2645 | variables should be rarely used. */ | |
3a94c984 | 2646 | if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD |
405f63da MM |
2647 | && (reload_in_progress | reload_completed) == 0) |
2648 | { | |
bb93b973 RK |
2649 | int packed_dest_p |
2650 | = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER); | |
2651 | int packed_src_p | |
2652 | = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER); | |
405f63da MM |
2653 | |
2654 | if (packed_dest_p || packed_src_p) | |
2655 | { | |
2656 | enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT) | |
2657 | ? MODE_FLOAT : MODE_INT); | |
2658 | ||
1da68f56 RK |
2659 | enum machine_mode reg_mode |
2660 | = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1); | |
405f63da MM |
2661 | |
2662 | if (reg_mode != BLKmode) | |
2663 | { | |
2664 | rtx mem = assign_stack_temp (reg_mode, | |
2665 | GET_MODE_SIZE (mode), 0); | |
f4ef873c | 2666 | rtx cmem = adjust_address (mem, mode, 0); |
405f63da | 2667 | |
405f63da MM |
2668 | if (packed_dest_p) |
2669 | { | |
2670 | rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0); | |
bb93b973 | 2671 | |
405f63da MM |
2672 | emit_move_insn_1 (cmem, y); |
2673 | return emit_move_insn_1 (sreg, mem); | |
2674 | } | |
2675 | else | |
2676 | { | |
2677 | rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0); | |
bb93b973 | 2678 | |
405f63da MM |
2679 | emit_move_insn_1 (mem, sreg); |
2680 | return emit_move_insn_1 (x, cmem); | |
2681 | } | |
2682 | } | |
2683 | } | |
2684 | } | |
2685 | ||
235ae7be DM |
2686 | realpart_x = gen_realpart (submode, x); |
2687 | realpart_y = gen_realpart (submode, y); | |
2688 | imagpart_x = gen_imagpart (submode, x); | |
2689 | imagpart_y = gen_imagpart (submode, y); | |
2690 | ||
2691 | /* Show the output dies here. This is necessary for SUBREGs | |
2692 | of pseudos since we cannot track their lifetimes correctly; | |
c14c6529 RH |
2693 | hard regs shouldn't appear here except as return values. |
2694 | We never want to emit such a clobber after reload. */ | |
2695 | if (x != y | |
235ae7be DM |
2696 | && ! (reload_in_progress || reload_completed) |
2697 | && (GET_CODE (realpart_x) == SUBREG | |
2698 | || GET_CODE (imagpart_x) == SUBREG)) | |
bb93b973 | 2699 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); |
2638126a | 2700 | |
a79b3dc7 RS |
2701 | emit_move_insn (realpart_x, realpart_y); |
2702 | emit_move_insn (imagpart_x, imagpart_y); | |
c937357e | 2703 | } |
7308a047 | 2704 | |
7a1ab50a | 2705 | return get_last_insn (); |
7308a047 RS |
2706 | } |
2707 | ||
a3600c71 HPN |
2708 | /* Handle MODE_CC modes: If we don't have a special move insn for this mode, |
2709 | find a mode to do it in. If we have a movcc, use it. Otherwise, | |
2710 | find the MODE_INT mode of the same width. */ | |
  else if (GET_MODE_CLASS (mode) == MODE_CC
           && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    {
      enum insn_code insn_code;
      enum machine_mode tmode = VOIDmode;
      rtx x1 = x, y1 = y;

      if (mode != CCmode
          && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
        tmode = CCmode;
      else
        for (tmode = QImode; tmode != VOIDmode;
             tmode = GET_MODE_WIDER_MODE (tmode))
          if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
            break;

      gcc_assert (tmode != VOIDmode);

      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
         may call change_address which is not appropriate if we were
         called when a reload was in progress.  We don't have to worry
         about changing the address since the size in bytes is supposed to
         be the same.  Copy the MEM to change the mode and move any
         substitutions from the old MEM to the new one.  */

      if (reload_in_progress)
        {
          x = gen_lowpart_common (tmode, x1);
          if (x == 0 && MEM_P (x1))
            {
              x = adjust_address_nv (x1, tmode, 0);
              copy_replacements (x1, x);
            }

          y = gen_lowpart_common (tmode, y1);
          if (y == 0 && MEM_P (y1))
            {
              y = adjust_address_nv (y1, tmode, 0);
              copy_replacements (y1, y);
            }
        }
      else
        {
          x = gen_lowpart (tmode, x);
          y = gen_lowpart (tmode, y);
        }

      insn_code = mov_optab->handlers[(int) tmode].insn_code;
      return emit_insn (GEN_FCN (insn_code) (x, y));
    }

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
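  /* E.g. (a sketch of the intent): an SFmode value on a target without a
     movsf pattern can be moved through SImode, since simplify_gen_subreg
     folds an SFmode CONST_DOUBLE into the equivalent CONST_INT bit
     pattern that the movsi pattern can handle.  */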
  else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
           && (submode = int_mode_for_mode (mode)) != BLKmode
           && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
                      (simplify_gen_subreg (submode, x, mode, 0),
                       simplify_gen_subreg (submode, y, mode, 0)));

  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

      gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          rtx temp;
          enum rtx_code code;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                               (PUSH_ROUNDING
                                (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

          code = GET_CODE (XEXP (x, 0));

          /* Just hope that small offsets off SP are OK.  */
          if (code == POST_INC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (-((HOST_WIDE_INT)
                                            GET_MODE_SIZE (GET_MODE (x)))));
          else if (code == POST_DEC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          else
            temp = stack_pointer_rtx;

          x = change_address (x, VOIDmode, temp);
        }
#endif

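      /* E.g. (assuming 4-byte words): for a PRE_DEC push of a double-word
         value the code above emits an explicit "sp -= PUSH_ROUNDING (8)"
         and rewrites X as a plain MEM at the new stack pointer, so the
         word-by-word loop below can store into it directly.  */
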
      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && MEM_P (x)
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && MEM_P (y)
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          gcc_assert (xpart && ypart);

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
}

/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */
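/* E.g., the DFmode constant 1.5 truncates to SFmode without any loss, so
   a target with a direct SFmode-to-DFmode extension can load the narrower
   constant (possibly forced into the constant pool) and extend it.  */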

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
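/* E.g. (illustrative, assuming a downward-growing stack):
   push_block (GEN_INT (10), 2, 1) makes room for 12 bytes and returns an
   address 2 bytes above the low end of the new space, leaving the
   padding below the block.  */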

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
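  /* Worked example (assumed parameters): pushing one QImode byte when
     PUSH_ROUNDING rounds to 4 gives rounded_size == 4 and
     padding_size == 3, so on a downward-growing stack with PRE_DEC pushes
     the code below emits "sp -= 4" and stores the byte at sp + 3.  */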
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset;
      int skip;

      if (reg && GET_CODE (reg) == PARALLEL)
        {
          /* Use the size of the elt to compute offset.  */
          rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
          used = partial * GET_MODE_SIZE (GET_MODE (elt));
          offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
        }
      else
        offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);

      gcc_assert (size);

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */
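      /* E.g. (assuming 4-byte words and PARM_BOUNDARY == 64): partial == 3
         gives used == 12 and offset == 4; after the subtraction only 8
         bytes are skipped, so the odd register word is also copied to the
         stack, keeping the stacked part PARM_BOUNDARY-aligned.  */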

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;
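      /* E.g. (assuming 32-bit words and PARM_BOUNDARY == 64): partial == 3
         gives offset == 1 when an argument block was preallocated: one
         word gets stack space but is never stored, and the other
         not_stack == 2 words get no stack space at all.  */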

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
\f
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
          /* Only registers can be subtargets.  */
          || !REG_P (x)
          /* Don't use hard regs to avoid extending their life.  */
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}

/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (If the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.  */

rtx
expand_assignment (tree to, tree from, int want_value)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

          gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (MEM_P (to_rtx)
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_target (to,
                                                                   offset));
        }

      if (MEM_P (to_rtx))
        {
          /* If the field is at offset zero, we could have been given the
             DECL_RTL of the parent struct.  Don't munge it.  */
          to_rtx = shallow_copy_rtx (to_rtx);

          set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
        }

      /* Deal with volatile and readonly fields.  The former is only done
         for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && MEM_P (to_rtx))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_VOLATILE_P (to_rtx) = 1;
        }

      if (MEM_P (to_rtx) && ! can_address_p (to))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
        }

      /* Optimize bitfield op= val in certain cases.  */
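      /* E.g., "s.b += 1" on the topmost bitfield of its word needs no
         masking, and a 1-bit field can be flipped with xor; the loop
         below emits a shifted add/sub/xor on the containing word instead
         of an extract-modify-insert sequence.  */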
      while (mode1 == VOIDmode && !want_value
             && bitsize > 0 && bitsize < BITS_PER_WORD
             && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
             && !TREE_SIDE_EFFECTS (to)
             && !TREE_THIS_VOLATILE (to))
        {
          tree src, op0, op1;
          rtx value, str_rtx = to_rtx;
          HOST_WIDE_INT bitpos1 = bitpos;
          optab binop;

          src = from;
          STRIP_NOPS (src);
          if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
              || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
            break;

          op0 = TREE_OPERAND (src, 0);
          op1 = TREE_OPERAND (src, 1);
          STRIP_NOPS (op0);

          if (! operand_equal_p (to, op0, 0))
            break;

          if (MEM_P (str_rtx))
            {
              enum machine_mode mode = GET_MODE (str_rtx);
              HOST_WIDE_INT offset1;

              if (GET_MODE_BITSIZE (mode) == 0
                  || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
                mode = word_mode;
              mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
                                    mode, 0);
              if (mode == VOIDmode)
                break;

              offset1 = bitpos1;
              bitpos1 %= GET_MODE_BITSIZE (mode);
              offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
              str_rtx = adjust_address (str_rtx, mode, offset1);
            }
          else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
            break;

          /* If the bit field covers the whole REG/MEM, store_field
             will likely generate better code.  */
          if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
            break;

          /* We can't handle fields split across multiple entities.  */
          if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
            break;

          if (BYTES_BIG_ENDIAN)
            bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
                      - bitsize;

          /* Special case some bitfield op= exp.  */
          switch (TREE_CODE (src))
            {
            case PLUS_EXPR:
            case MINUS_EXPR:
              /* For now, just optimize the case of the topmost bitfield
                 where we don't need to do any masking and also
                 1 bit bitfields where xor can be used.
                 We might win by one instruction for the other bitfields
                 too if insv/extv instructions aren't used, so that
                 can be added later.  */
              if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
                  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
                break;
              value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
              value = convert_modes (GET_MODE (str_rtx),
                                     TYPE_MODE (TREE_TYPE (op1)), value,
                                     TYPE_UNSIGNED (TREE_TYPE (op1)));

              /* We may be accessing data outside the field, which means
                 we can alias adjacent data.  */
              if (MEM_P (str_rtx))
                {
                  str_rtx = shallow_copy_rtx (str_rtx);
                  set_mem_alias_set (str_rtx, 0);
                  set_mem_expr (str_rtx, 0);
                }

              binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
              if (bitsize == 1
                  && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
                {
                  value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
                                      NULL_RTX);
                  binop = xor_optab;
                }
              value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
                                    build_int_cst (NULL_TREE, bitpos1),
                                    NULL_RTX, 1);
              result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
                                     value, str_rtx, 1, OPTAB_WIDEN);
              if (result != str_rtx)
                emit_move_insn (str_rtx, result);
              free_temp_slots ();
              pop_temp_slots ();
              return NULL_RTX;

            default:
              break;
            }

          break;
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast for HPUX compiler.  */
                             ? ((enum machine_mode)
                                TYPE_MODE (TREE_TYPE (to)))
                             : VOIDmode),
                            unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TYPE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign-extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address (GET_MODE (to_rtx), value);
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TYPE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}

/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If WANT_VALUE & 1 is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE & 1 is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.

   If WANT_VALUE & 2 is set, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (tree exp, rtx target, int want_value)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      gcc_assert (!want_value);
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
    }
  else if ((want_value & 1) != 0
           && MEM_P (target)
           && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target),
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        {
          /* If TEMP is already in the desired TARGET, only copy it from
             memory and don't store it there again.  */
          if (temp == target
              || (rtx_equal_p (temp, target)
                  && ! side_effects_p (temp) && ! side_effects_p (target)))
            dont_store_target = 1;
          temp = copy_to_reg (temp);
        }
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
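    /* E.g., a signed char variable promoted into a word-sized pseudo
       (say SImode): the rhs is computed in QImode and then sign- or
       zero-extended into the wider SUBREG_REG, as directed by
       SUBREG_PROMOTED_UNSIGNED_P.  */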
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if ((want_value & 1) == 0
          && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0
          && (!lang_hooks.reduce_bit_field_operations
              || (GET_MODE_PRECISION (GET_MODE (target))
                  == TYPE_PRECISION (TREE_TYPE (exp)))))
        {
          if (TYPE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp = convert
              (lang_hooks.types.signed_or_unsigned_type
               (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

          exp = convert (lang_hooks.types.type_for_mode
                         (GET_MODE (SUBREG_REG (target)),
                          SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a MEM and we want a result value, make the access
         now so it gets done only once.  Strictly speaking, this is
         only necessary if the MEM is volatile, or if the address
         overlaps TARGET.  But not performing the load twice also
         reduces the amount of rtl we generate and then have to CSE.  */
      if (MEM_P (temp) && (want_value & 1) != 0)
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
         target.  Otherwise, the caller might get confused by a result whose
         mode is larger than expected.  */

      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
        {
          if (GET_MODE (temp) != VOIDmode)
            {
              temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (temp,
                                            SUBREG_PROMOTED_UNSIGNED_P (target));
            }
          else
            temp = convert_modes (GET_MODE (target),
                                  GET_MODE (SUBREG_REG (target)),
                                  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      return want_value & 1 ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
                               (want_value & 2
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
                               &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && REG_P (target)
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(MEM_P (target) && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || (want_value & 1) != 0))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

bbf6f052 | 3962 | /* If value was not generated in the target, store it there. |
1bbd65cd EB |
3963 | Convert the value to TARGET's type first if necessary and emit the |
3964 | pending incrementations that have been queued when expanding EXP. | |
3965 | Note that we cannot emit the whole queue blindly because this will | |
3966 | effectively disable the POST_INC optimization later. | |
3967 | ||
37a08a29 | 3968 | If TEMP and TARGET compare equal according to rtx_equal_p, but |
f3f2255a R |
3969 | one or both of them are volatile memory refs, we have to distinguish |
3970 | two cases: | |
3971 | - expand_expr has used TARGET. In this case, we must not generate | |
3972 | another copy. This can be detected by TARGET being equal according | |
3973 | to == . | |
3974 | - expand_expr has not used TARGET - that means that the source just | |
3975 | happens to have the same RTX form. Since temp will have been created | |
3976 | by expand_expr, it will compare unequal according to == . | |
3977 | We must generate a copy in this case, to reach the correct number | |
3978 | of volatile memory references. */ | |
bbf6f052 | 3979 | |
6036acbb | 3980 | if ((! rtx_equal_p (temp, target) |
f3f2255a R |
3981 | || (temp != target && (side_effects_p (temp) |
3982 | || side_effects_p (target)))) | |
e5408e52 | 3983 | && TREE_CODE (exp) != ERROR_MARK |
a9772b60 | 3984 | && ! dont_store_target |
9c5c5f2c MM |
3985 | /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET, |
3986 | but TARGET is not valid memory reference, TEMP will differ | |
3987 | from TARGET although it is really the same location. */ | |
0fab64a3 | 3988 | && !(alt_rtl && rtx_equal_p (alt_rtl, target)) |
e56fc090 HPN |
3989 | /* If there's nothing to copy, don't bother. Don't call expr_size |
3990 | unless necessary, because some front ends' (C++) expr_size hook |
3991 | aborts on objects that are not supposed to be bit-copied or | |
3992 | bit-initialized. */ | |
3993 | && expr_size (exp) != const0_rtx) | |
bbf6f052 | 3994 | { |
bbf6f052 | 3995 | if (GET_MODE (temp) != GET_MODE (target) |
f0348c25 | 3996 | && GET_MODE (temp) != VOIDmode) |
bbf6f052 | 3997 | { |
8df83eae | 3998 | int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); |
bbf6f052 RK |
3999 | if (dont_return_target) |
4000 | { | |
4001 | /* In this case, we will return TEMP, | |
4002 | so make sure it has the proper mode. | |
4003 | But don't forget to store the value into TARGET. */ | |
4004 | temp = convert_to_mode (GET_MODE (target), temp, unsignedp); | |
4005 | emit_move_insn (target, temp); | |
4006 | } | |
4007 | else | |
4008 | convert_move (target, temp, unsignedp); | |
4009 | } | |
4010 | ||
4011 | else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST) | |
4012 | { | |
c24ae149 RK |
4013 | /* Handle copying a string constant into an array. The string |
4014 | constant may be shorter than the array. So copy just the string's | |
4015 | actual length, and clear the rest. First get the size of the data | |
4016 | type of the string, which is actually the size of the target. */ | |
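/* An added illustration (assumed C source):

       char buf[16] = "hi";

   The string supplies only 3 bytes, counting the terminating NUL,
   so the remaining 13 bytes of BUF must be cleared.  */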
4017 | rtx size = expr_size (exp); | |
bbf6f052 | 4018 | |
e87b4f3f RS |
4019 | if (GET_CODE (size) == CONST_INT |
4020 | && INTVAL (size) < TREE_STRING_LENGTH (exp)) | |
8403445a AM |
4021 | emit_block_move (target, temp, size, |
4022 | (want_value & 2 | |
4023 | ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); | |
e87b4f3f | 4024 | else |
bbf6f052 | 4025 | { |
e87b4f3f RS |
4026 | /* Compute the size of the data to copy from the string. */ |
4027 | tree copy_size | |
c03b7665 | 4028 | = size_binop (MIN_EXPR, |
b50d17a1 | 4029 | make_tree (sizetype, size), |
fed3cef0 | 4030 | size_int (TREE_STRING_LENGTH (exp))); |
8403445a AM |
4031 | rtx copy_size_rtx |
4032 | = expand_expr (copy_size, NULL_RTX, VOIDmode, | |
4033 | (want_value & 2 | |
4034 | ? EXPAND_STACK_PARM : EXPAND_NORMAL)); | |
e87b4f3f RS |
4035 | rtx label = 0; |
4036 | ||
4037 | /* Copy that much. */ | |
267b28bd | 4038 | copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, |
8df83eae | 4039 | TYPE_UNSIGNED (sizetype)); |
8403445a AM |
4040 | emit_block_move (target, temp, copy_size_rtx, |
4041 | (want_value & 2 | |
4042 | ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); | |
e87b4f3f | 4043 | |
88f63c77 RK |
4044 | /* Figure out how much is left in TARGET that we have to clear. |
4045 | Do all calculations in ptr_mode. */ | |
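/* Continuing the illustrative BUF example above: SIZE is 16 and
   COPY_SIZE is 3, so 13 bytes starting at byte offset 3 of TARGET
   remain to be cleared.  */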
e87b4f3f RS |
4046 | if (GET_CODE (copy_size_rtx) == CONST_INT) |
4047 | { | |
c24ae149 RK |
4048 | size = plus_constant (size, -INTVAL (copy_size_rtx)); |
4049 | target = adjust_address (target, BLKmode, | |
4050 | INTVAL (copy_size_rtx)); | |
e87b4f3f RS |
4051 | } |
4052 | else | |
4053 | { | |
fa06ab5c | 4054 | size = expand_binop (TYPE_MODE (sizetype), sub_optab, size, |
906c4e36 RK |
4055 | copy_size_rtx, NULL_RTX, 0, |
4056 | OPTAB_LIB_WIDEN); | |
e87b4f3f | 4057 | |
c24ae149 RK |
4058 | #ifdef POINTERS_EXTEND_UNSIGNED |
4059 | if (GET_MODE (copy_size_rtx) != Pmode) | |
267b28bd | 4060 | copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx, |
8df83eae | 4061 | TYPE_UNSIGNED (sizetype)); |
c24ae149 RK |
4062 | #endif |
4063 | ||
4064 | target = offset_address (target, copy_size_rtx, | |
4065 | highest_pow2_factor (copy_size)); | |
e87b4f3f | 4066 | label = gen_label_rtx (); |
c5d5d461 | 4067 | emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX, |
a06ef755 | 4068 | GET_MODE (size), 0, label); |
e87b4f3f RS |
4069 | } |
4070 | ||
4071 | if (size != const0_rtx) | |
37a08a29 | 4072 | clear_storage (target, size); |
22619c3f | 4073 | |
e87b4f3f RS |
4074 | if (label) |
4075 | emit_label (label); | |
bbf6f052 RK |
4076 | } |
4077 | } | |
fffa9c1d JW |
4078 | /* Handle calls that return values in multiple non-contiguous locations. |
4079 | The Irix 6 ABI has examples of this. */ | |
4080 | else if (GET_CODE (target) == PARALLEL) | |
6e985040 AM |
4081 | emit_group_load (target, temp, TREE_TYPE (exp), |
4082 | int_size_in_bytes (TREE_TYPE (exp))); | |
bbf6f052 | 4083 | else if (GET_MODE (temp) == BLKmode) |
8403445a AM |
4084 | emit_block_move (target, temp, expr_size (exp), |
4085 | (want_value & 2 | |
4086 | ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); | |
bbf6f052 | 4087 | else |
b0dccb00 RH |
4088 | { |
4089 | temp = force_operand (temp, target); | |
4090 | if (temp != target) | |
4091 | emit_move_insn (target, temp); | |
4092 | } | |
bbf6f052 | 4093 | } |
709f5be1 | 4094 | |
766f36c7 | 4095 | /* If we don't want a value, return NULL_RTX. */ |
8403445a | 4096 | if ((want_value & 1) == 0) |
766f36c7 RK |
4097 | return NULL_RTX; |
4098 | ||
4099 | /* If we are supposed to return TEMP, do so as long as it isn't a MEM. | |
4100 | ??? The latter test doesn't seem to make sense. */ | |
3c0cb5de | 4101 | else if (dont_return_target && !MEM_P (temp)) |
bbf6f052 | 4102 | return temp; |
766f36c7 RK |
4103 | |
4104 | /* Return TARGET itself if it is a hard register. */ | |
8403445a AM |
4105 | else if ((want_value & 1) != 0 |
4106 | && GET_MODE (target) != BLKmode | |
f8cfc6aa | 4107 | && ! (REG_P (target) |
766f36c7 | 4108 | && REGNO (target) < FIRST_PSEUDO_REGISTER)) |
709f5be1 | 4109 | return copy_to_reg (target); |
3a94c984 | 4110 | |
766f36c7 | 4111 | else |
709f5be1 | 4112 | return target; |
bbf6f052 RK |
4113 | } |
4114 | \f | |
1ea7e6ad | 4115 | /* Examine CTOR. Discover how many scalar fields are set to nonzero |
6de9cd9a DN |
4116 | values and place the count in *P_NZ_ELTS. Discover how many scalar fields |
4117 | are set to non-constant values and place the count in *P_NC_ELTS. */ |
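/* An added illustration (assumed C source): for the local initializer

       extern int n;
       struct s { int a, b, c, d; } x = { 0, 1, 2, n };

   this computes *P_NZ_ELTS == 3 (the elements 1, 2 and N) and
   *P_NC_ELTS == 1 (N is the only non-constant element).  */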
9de08200 | 4118 | |
6de9cd9a DN |
4119 | static void |
4120 | categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts, | |
4121 | HOST_WIDE_INT *p_nc_elts) | |
9de08200 | 4122 | { |
6de9cd9a DN |
4123 | HOST_WIDE_INT nz_elts, nc_elts; |
4124 | tree list; | |
9de08200 | 4125 | |
6de9cd9a DN |
4126 | nz_elts = 0; |
4127 | nc_elts = 0; | |
caf93cb0 | 4128 | |
6de9cd9a | 4129 | for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list)) |
9de08200 | 4130 | { |
6de9cd9a DN |
4131 | tree value = TREE_VALUE (list); |
4132 | tree purpose = TREE_PURPOSE (list); | |
4133 | HOST_WIDE_INT mult; | |
9de08200 | 4134 | |
6de9cd9a DN |
4135 | mult = 1; |
4136 | if (TREE_CODE (purpose) == RANGE_EXPR) | |
4137 | { | |
4138 | tree lo_index = TREE_OPERAND (purpose, 0); | |
4139 | tree hi_index = TREE_OPERAND (purpose, 1); | |
9de08200 | 4140 | |
6de9cd9a DN |
4141 | if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1)) |
4142 | mult = (tree_low_cst (hi_index, 1) | |
4143 | - tree_low_cst (lo_index, 1) + 1); | |
4144 | } | |
9de08200 | 4145 | |
6de9cd9a DN |
4146 | switch (TREE_CODE (value)) |
4147 | { | |
4148 | case CONSTRUCTOR: | |
4149 | { | |
4150 | HOST_WIDE_INT nz = 0, nc = 0; | |
4151 | categorize_ctor_elements_1 (value, &nz, &nc); | |
4152 | nz_elts += mult * nz; | |
4153 | nc_elts += mult * nc; | |
4154 | } | |
4155 | break; | |
9de08200 | 4156 | |
6de9cd9a DN |
4157 | case INTEGER_CST: |
4158 | case REAL_CST: | |
4159 | if (!initializer_zerop (value)) | |
4160 | nz_elts += mult; | |
4161 | break; | |
4162 | case COMPLEX_CST: | |
4163 | if (!initializer_zerop (TREE_REALPART (value))) | |
4164 | nz_elts += mult; | |
4165 | if (!initializer_zerop (TREE_IMAGPART (value))) | |
4166 | nz_elts += mult; | |
4167 | break; | |
4168 | case VECTOR_CST: | |
4169 | { | |
4170 | tree v; | |
4171 | for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v)) | |
4172 | if (!initializer_zerop (TREE_VALUE (v))) | |
4173 | nz_elts += mult; | |
4174 | } | |
4175 | break; | |
69ef87e2 | 4176 | |
6de9cd9a DN |
4177 | default: |
4178 | nz_elts += mult; | |
4179 | if (!initializer_constant_valid_p (value, TREE_TYPE (value))) | |
4180 | nc_elts += mult; | |
4181 | break; | |
4182 | } | |
4183 | } | |
69ef87e2 | 4184 | |
6de9cd9a DN |
4185 | *p_nz_elts += nz_elts; |
4186 | *p_nc_elts += nc_elts; | |
4187 | } | |
4188 | ||
4189 | void | |
4190 | categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts, | |
4191 | HOST_WIDE_INT *p_nc_elts) | |
4192 | { | |
4193 | *p_nz_elts = 0; | |
4194 | *p_nc_elts = 0; | |
4195 | categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts); | |
4196 | } | |
4197 | ||
4198 | /* Count the number of scalars in TYPE. Return -1 on overflow or | |
4199 | if TYPE is variable-sized. */ |
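/* An added illustration: for

       struct p { int x, y; };
       struct t { struct p v[3]; double d; };

   the result is 3*2 + 1 == 7.  */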
4200 | ||
4201 | HOST_WIDE_INT | |
4202 | count_type_elements (tree type) | |
4203 | { | |
4204 | const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1)); | |
4205 | switch (TREE_CODE (type)) | |
4206 | { | |
4207 | case ARRAY_TYPE: | |
4208 | { | |
4209 | tree telts = array_type_nelts (type); | |
4210 | if (telts && host_integerp (telts, 1)) | |
4211 | { | |
5377d5ba | 4212 | HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1; |
6de9cd9a DN |
4213 | HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type)); |
4214 | if (n == 0) | |
4215 | return 0; | |
5377d5ba | 4216 | else if (max / n > m) |
6de9cd9a DN |
4217 | return n * m; |
4218 | } | |
4219 | return -1; | |
4220 | } | |
4221 | ||
4222 | case RECORD_TYPE: | |
4223 | { | |
4224 | HOST_WIDE_INT n = 0, t; | |
4225 | tree f; | |
4226 | ||
4227 | for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f)) | |
4228 | if (TREE_CODE (f) == FIELD_DECL) | |
4229 | { | |
4230 | t = count_type_elements (TREE_TYPE (f)); | |
4231 | if (t < 0) | |
4232 | return -1; | |
4233 | n += t; | |
4234 | } | |
4235 | ||
4236 | return n; | |
4237 | } | |
9de08200 | 4238 | |
6de9cd9a DN |
4239 | case UNION_TYPE: |
4240 | case QUAL_UNION_TYPE: | |
4241 | { | |
4242 | /* Ho hum. How in the world do we guess here? Clearly it isn't | |
4243 | right to count the fields. Guess based on the number of words. */ | |
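/* E.g. (added illustration) a 16-byte union on a target with 4-byte
   words is counted as 4 scalars.  */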
4244 | HOST_WIDE_INT n = int_size_in_bytes (type); | |
4245 | if (n < 0) | |
4246 | return -1; | |
4247 | return n / UNITS_PER_WORD; | |
4248 | } | |
4249 | ||
4250 | case COMPLEX_TYPE: | |
4251 | return 2; | |
4252 | ||
4253 | case VECTOR_TYPE: | |
3a021db2 | 4254 | return TYPE_VECTOR_SUBPARTS (type); |
6de9cd9a DN |
4255 | |
4256 | case INTEGER_TYPE: | |
4257 | case REAL_TYPE: | |
4258 | case ENUMERAL_TYPE: | |
4259 | case BOOLEAN_TYPE: | |
4260 | case CHAR_TYPE: | |
4261 | case POINTER_TYPE: | |
4262 | case OFFSET_TYPE: | |
4263 | case REFERENCE_TYPE: | |
9de08200 | 4264 | return 1; |
3a94c984 | 4265 | |
6de9cd9a DN |
4266 | case VOID_TYPE: |
4267 | case METHOD_TYPE: | |
4268 | case FILE_TYPE: | |
4269 | case SET_TYPE: | |
4270 | case FUNCTION_TYPE: | |
4271 | case LANG_TYPE: | |
e9a25f70 | 4272 | default: |
5b0264cb | 4273 | gcc_unreachable (); |
9de08200 | 4274 | } |
9de08200 RK |
4275 | } |
4276 | ||
4277 | /* Return 1 if EXP contains mostly (3/4) zeros. */ | |
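/* An added illustration (assumed C source): the initializer

       int a[8] = { 0, 0, 0, 0, 0, 0, 7, 0 };

   has 1 nonzero element out of 8 (1 < 8/4), so it qualifies, whereas
   { 1, 2, 3, 0 } does not.  */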
4278 | ||
40209195 | 4279 | int |
502b8322 | 4280 | mostly_zeros_p (tree exp) |
9de08200 | 4281 | { |
9de08200 | 4282 | if (TREE_CODE (exp) == CONSTRUCTOR) |
9de08200 | 4284 | { |
6de9cd9a DN |
4285 | HOST_WIDE_INT nz_elts, nc_elts, elts; |
4286 | ||
4287 | /* If there are no ranges of true bits, it is all zero. */ | |
e1a43f73 | 4288 | if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE) |
6de9cd9a DN |
4289 | return CONSTRUCTOR_ELTS (exp) == NULL_TREE; |
4290 | ||
4291 | categorize_ctor_elements (exp, &nz_elts, &nc_elts); | |
4292 | elts = count_type_elements (TREE_TYPE (exp)); | |
9de08200 | 4293 | |
6de9cd9a | 4294 | return nz_elts < elts / 4; |
9de08200 RK |
4295 | } |
4296 | ||
6de9cd9a | 4297 | return initializer_zerop (exp); |
9de08200 RK |
4298 | } |
4299 | \f | |
e1a43f73 PB |
4300 | /* Helper function for store_constructor. |
4301 | TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field. | |
4302 | TYPE is the type of the CONSTRUCTOR, not the element type. | |
04050c69 | 4303 | CLEARED is as for store_constructor. |
23cb1766 | 4304 | ALIAS_SET is the alias set to use for any stores. |
23ccec44 JW |
4305 | |
4306 | This provides a recursive shortcut back to store_constructor when it isn't | |
4307 | necessary to go through store_field. This is so that we can pass through | |
4308 | the cleared field to let store_constructor know that we may not have to | |
4309 | clear a substructure if the outer structure has already been cleared. */ | |
e1a43f73 PB |
4310 | |
4311 | static void | |
502b8322 AJ |
4312 | store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize, |
4313 | HOST_WIDE_INT bitpos, enum machine_mode mode, | |
4314 | tree exp, tree type, int cleared, int alias_set) | |
e1a43f73 PB |
4315 | { |
4316 | if (TREE_CODE (exp) == CONSTRUCTOR | |
6c89c39a RK |
4317 | /* We can only call store_constructor recursively if the size and |
4318 | bit position are on a byte boundary. */ | |
23ccec44 | 4319 | && bitpos % BITS_PER_UNIT == 0 |
6c89c39a | 4320 | && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0) |
cc2902df | 4321 | /* If we have a nonzero bitpos for a register target, then we just |
23ccec44 JW |
4322 | let store_field do the bitfield handling. This is unlikely to |
4323 | generate unnecessary clear instructions anyways. */ | |
3c0cb5de | 4324 | && (bitpos == 0 || MEM_P (target))) |
e1a43f73 | 4325 | { |
3c0cb5de | 4326 | if (MEM_P (target)) |
61cb205c RK |
4327 | target |
4328 | = adjust_address (target, | |
4329 | GET_MODE (target) == BLKmode | |
4330 | || 0 != (bitpos | |
4331 | % GET_MODE_ALIGNMENT (GET_MODE (target))) | |
4332 | ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT); | |
23cb1766 | 4333 | |
e0339ef7 | 4334 | |
04050c69 | 4335 | /* Update the alias set, if required. */ |
3c0cb5de | 4336 | if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target) |
10b76d73 | 4337 | && MEM_ALIAS_SET (target) != 0) |
70072ed9 RK |
4338 | { |
4339 | target = copy_rtx (target); | |
4340 | set_mem_alias_set (target, alias_set); | |
4341 | } | |
e0339ef7 | 4342 | |
dbb5c281 | 4343 | store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT); |
e1a43f73 PB |
4344 | } |
4345 | else | |
a06ef755 RK |
4346 | store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type, |
4347 | alias_set); | |
e1a43f73 PB |
4348 | } |
4349 | ||
bbf6f052 | 4350 | /* Store the value of constructor EXP into the rtx TARGET. |
04050c69 RK |
4351 | TARGET is either a REG or a MEM; we know it cannot conflict, since |
4352 | safe_from_p has been called. | |
dbb5c281 RK |
4353 | CLEARED is true if TARGET is known to have been zero'd. |
4354 | SIZE is the number of bytes of TARGET we are allowed to modify: this | |
b7010412 RK |
4355 | may not be the same as the size of EXP if we are assigning to a field |
4356 | which has been packed to exclude padding bits. */ | |
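/* An added illustration (assumed C source): for

       struct s { int a[6]; int b; } x = { .b = 5 };

   the constructor names fewer fields than the type has, so TARGET is
   cleared wholesale below and only the single store to B is emitted.  */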
bbf6f052 RK |
4357 | |
4358 | static void | |
502b8322 | 4359 | store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size) |
bbf6f052 | 4360 | { |
4af3895e | 4361 | tree type = TREE_TYPE (exp); |
a5efcd63 | 4362 | #ifdef WORD_REGISTER_OPERATIONS |
13eb1f7f | 4363 | HOST_WIDE_INT exp_size = int_size_in_bytes (type); |
a5efcd63 | 4364 | #endif |
4af3895e | 4365 | |
5b0264cb | 4366 | switch (TREE_CODE (type)) |
bbf6f052 | 4367 | { |
5b0264cb NS |
4368 | case RECORD_TYPE: |
4369 | case UNION_TYPE: | |
4370 | case QUAL_UNION_TYPE: | |
4371 | { | |
4372 | tree elt; | |
9de08200 | 4373 | |
5b0264cb NS |
4374 | /* If size is zero or the target is already cleared, do nothing. */ |
4375 | if (size == 0 || cleared) | |
9de08200 | 4376 | cleared = 1; |
5b0264cb NS |
4377 | /* We either clear the aggregate or indicate the value is dead. */ |
4378 | else if ((TREE_CODE (type) == UNION_TYPE | |
4379 | || TREE_CODE (type) == QUAL_UNION_TYPE) | |
4380 | && ! CONSTRUCTOR_ELTS (exp)) | |
4381 | /* If the constructor is empty, clear the union. */ | |
4382 | { | |
4383 | clear_storage (target, expr_size (exp)); | |
4384 | cleared = 1; | |
4385 | } | |
bbf6f052 | 4386 | |
5b0264cb NS |
4387 | /* If we are building a static constructor into a register, |
4388 | set the initial value as zero so we can fold the value into | |
4389 | a constant. But if more than one register is involved, | |
4390 | this probably loses. */ | |
4391 | else if (REG_P (target) && TREE_STATIC (exp) | |
4392 | && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD) | |
4393 | { | |
4394 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | |
4395 | cleared = 1; | |
4396 | } | |
3a94c984 | 4397 | |
5b0264cb NS |
4398 | /* If the constructor has fewer fields than the structure or |
4399 | if we are initializing the structure to mostly zeros, clear | |
4400 | the whole structure first. Don't do this if TARGET is a | |
4401 | register whose mode size isn't equal to SIZE since | |
4402 | clear_storage can't handle this case. */ | |
4403 | else if (size > 0 | |
4404 | && ((list_length (CONSTRUCTOR_ELTS (exp)) | |
4405 | != fields_length (type)) | |
4406 | || mostly_zeros_p (exp)) | |
4407 | && (!REG_P (target) | |
4408 | || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) | |
4409 | == size))) | |
4410 | { | |
4411 | clear_storage (target, GEN_INT (size)); | |
4412 | cleared = 1; | |
4413 | } | |
b50d17a1 | 4414 | |
5b0264cb NS |
4415 | if (! cleared) |
4416 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); | |
bbf6f052 | 4417 | |
5b0264cb NS |
4418 | /* Store each element of the constructor into the |
4419 | corresponding field of TARGET. */ | |
b50d17a1 | 4420 | |
5b0264cb NS |
4421 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) |
4422 | { | |
4423 | tree field = TREE_PURPOSE (elt); | |
4424 | tree value = TREE_VALUE (elt); | |
4425 | enum machine_mode mode; | |
4426 | HOST_WIDE_INT bitsize; | |
4427 | HOST_WIDE_INT bitpos = 0; | |
4428 | tree offset; | |
4429 | rtx to_rtx = target; | |
4430 | ||
4431 | /* Just ignore missing fields. We cleared the whole | |
4432 | structure, above, if any fields are missing. */ | |
4433 | if (field == 0) | |
4434 | continue; | |
4435 | ||
4436 | if (cleared && initializer_zerop (value)) | |
4437 | continue; | |
4438 | ||
4439 | if (host_integerp (DECL_SIZE (field), 1)) | |
4440 | bitsize = tree_low_cst (DECL_SIZE (field), 1); | |
4441 | else | |
4442 | bitsize = -1; | |
4443 | ||
4444 | mode = DECL_MODE (field); | |
4445 | if (DECL_BIT_FIELD (field)) | |
4446 | mode = VOIDmode; | |
4447 | ||
4448 | offset = DECL_FIELD_OFFSET (field); | |
4449 | if (host_integerp (offset, 0) | |
4450 | && host_integerp (bit_position (field), 0)) | |
4451 | { | |
4452 | bitpos = int_bit_position (field); | |
4453 | offset = 0; | |
4454 | } | |
4455 | else | |
4456 | bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0); | |
4457 | ||
4458 | if (offset) | |
4459 | { | |
4460 | rtx offset_rtx; | |
4461 | ||
4462 | offset | |
4463 | = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset, | |
4464 | make_tree (TREE_TYPE (exp), | |
4465 | target)); | |
4466 | ||
4467 | offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); | |
4468 | gcc_assert (MEM_P (to_rtx)); | |
4469 | ||
bd070e1a | 4470 | #ifdef POINTERS_EXTEND_UNSIGNED |
5b0264cb NS |
4471 | if (GET_MODE (offset_rtx) != Pmode) |
4472 | offset_rtx = convert_to_mode (Pmode, offset_rtx, 0); | |
fa06ab5c | 4473 | #else |
5b0264cb NS |
4474 | if (GET_MODE (offset_rtx) != ptr_mode) |
4475 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); | |
bd070e1a | 4476 | #endif |
bd070e1a | 4477 | |
5b0264cb NS |
4478 | to_rtx = offset_address (to_rtx, offset_rtx, |
4479 | highest_pow2_factor (offset)); | |
4480 | } | |
c5c76735 | 4481 | |
34c73909 | 4482 | #ifdef WORD_REGISTER_OPERATIONS |
5b0264cb NS |
4483 | /* If this initializes a field that is smaller than a |
4484 | word, at the start of a word, try to widen it to a full | |
4485 | word. This special case allows us to output C++ member | |
4486 | function initializations in a form that the optimizers | |
4487 | can understand. */ | |
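/* E.g. (added illustration) a constant byte-wide field at bit 0 of a
   register target becomes a full-word store; on big-endian targets the
   value is first shifted left by BITS_PER_WORD - BITSIZE, as below.  */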
4488 | if (REG_P (target) | |
4489 | && bitsize < BITS_PER_WORD | |
4490 | && bitpos % BITS_PER_WORD == 0 | |
4491 | && GET_MODE_CLASS (mode) == MODE_INT | |
4492 | && TREE_CODE (value) == INTEGER_CST | |
4493 | && exp_size >= 0 | |
4494 | && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT) | |
4495 | { | |
4496 | tree type = TREE_TYPE (value); | |
4497 | ||
4498 | if (TYPE_PRECISION (type) < BITS_PER_WORD) | |
4499 | { | |
4500 | type = lang_hooks.types.type_for_size | |
4501 | (BITS_PER_WORD, TYPE_UNSIGNED (type)); | |
4502 | value = convert (type, value); | |
4503 | } | |
4504 | ||
4505 | if (BYTES_BIG_ENDIAN) | |
4506 | value | |
4507 | = fold (build2 (LSHIFT_EXPR, type, value, | |
4508 | build_int_cst (NULL_TREE, | |
4509 | BITS_PER_WORD - bitsize))); | |
4510 | bitsize = BITS_PER_WORD; | |
4511 | mode = word_mode; | |
4512 | } | |
34c73909 | 4513 | #endif |
10b76d73 | 4514 | |
5b0264cb NS |
4515 | if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx) |
4516 | && DECL_NONADDRESSABLE_P (field)) | |
4517 | { | |
4518 | to_rtx = copy_rtx (to_rtx); | |
4519 | MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; | |
4520 | } | |
4521 | ||
4522 | store_constructor_field (to_rtx, bitsize, bitpos, mode, | |
4523 | value, type, cleared, | |
4524 | get_alias_set (TREE_TYPE (field))); | |
4525 | } | |
4526 | break; | |
4527 | } | |
4528 | case ARRAY_TYPE: | |
4529 | { | |
4530 | tree elt; | |
4531 | int i; | |
4532 | int need_to_clear; | |
4533 | tree domain; | |
4534 | tree elttype = TREE_TYPE (type); | |
4535 | int const_bounds_p; | |
4536 | HOST_WIDE_INT minelt = 0; | |
4537 | HOST_WIDE_INT maxelt = 0; | |
4538 | ||
4539 | domain = TYPE_DOMAIN (type); | |
4540 | const_bounds_p = (TYPE_MIN_VALUE (domain) | |
4541 | && TYPE_MAX_VALUE (domain) | |
4542 | && host_integerp (TYPE_MIN_VALUE (domain), 0) | |
4543 | && host_integerp (TYPE_MAX_VALUE (domain), 0)); | |
4544 | ||
4545 | /* If we have constant bounds for the range of the type, get them. */ | |
4546 | if (const_bounds_p) | |
4547 | { | |
4548 | minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); | |
4549 | maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); | |
4550 | } | |
3a021db2 | 4551 | |
5b0264cb NS |
4552 | /* If the constructor has fewer elements than the array, clear |
4553 | the whole array first. Similarly if this is a static |
4554 | constructor of a non-BLKmode object. */ | |
4555 | if (cleared) | |
4556 | need_to_clear = 0; | |
4557 | else if (REG_P (target) && TREE_STATIC (exp)) | |
4558 | need_to_clear = 1; | |
4559 | else | |
4560 | { | |
4561 | HOST_WIDE_INT count = 0, zero_count = 0; | |
4562 | need_to_clear = ! const_bounds_p; | |
4563 | ||
4564 | /* This loop is a more accurate version of the loop in | |
4565 | mostly_zeros_p (it handles RANGE_EXPR in an index). It | |
4566 | is also needed to check for missing elements. */ | |
4567 | for (elt = CONSTRUCTOR_ELTS (exp); | |
4568 | elt != NULL_TREE && ! need_to_clear; | |
4569 | elt = TREE_CHAIN (elt)) | |
4570 | { | |
4571 | tree index = TREE_PURPOSE (elt); | |
4572 | HOST_WIDE_INT this_node_count; | |
4573 | ||
4574 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) | |
4575 | { | |
4576 | tree lo_index = TREE_OPERAND (index, 0); | |
4577 | tree hi_index = TREE_OPERAND (index, 1); | |
4578 | ||
4579 | if (! host_integerp (lo_index, 1) | |
4580 | || ! host_integerp (hi_index, 1)) | |
4581 | { | |
4582 | need_to_clear = 1; | |
4583 | break; | |
4584 | } | |
4585 | ||
4586 | this_node_count = (tree_low_cst (hi_index, 1) | |
4587 | - tree_low_cst (lo_index, 1) + 1); | |
4588 | } | |
4589 | else | |
4590 | this_node_count = 1; | |
4591 | ||
4592 | count += this_node_count; | |
4593 | if (mostly_zeros_p (TREE_VALUE (elt))) | |
4594 | zero_count += this_node_count; | |
4595 | } | |
4596 | ||
4597 | /* Clear the entire array first if there are any missing | |
4598 | elements, or if the incidence of zero elements is >= | |
4599 | 75%. */ | |
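/* E.g. (added illustration) 6 zero elements among 8: 4*6 >= 3*8,
   so the whole array is cleared first.  */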
4600 | if (! need_to_clear | |
4601 | && (count < maxelt - minelt + 1 | |
4602 | || 4 * zero_count >= 3 * count)) | |
4603 | need_to_clear = 1; | |
4604 | } | |
4605 | ||
4606 | if (need_to_clear && size > 0) | |
4607 | { | |
4608 | if (REG_P (target)) | |
4609 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | |
4610 | else | |
4611 | clear_storage (target, GEN_INT (size)); | |
4612 | cleared = 1; | |
4613 | } | |
3a021db2 | 4614 | |
5b0264cb NS |
4615 | if (!cleared && REG_P (target)) |
4616 | /* Inform later passes that the old value is dead. */ | |
4617 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); | |
3a021db2 | 4618 | |
5b0264cb NS |
4619 | /* Store each element of the constructor into the |
4620 | corresponding element of TARGET, determined by counting the | |
4621 | elements. */ | |
4622 | for (elt = CONSTRUCTOR_ELTS (exp), i = 0; | |
4623 | elt; | |
4624 | elt = TREE_CHAIN (elt), i++) | |
4625 | { | |
4626 | enum machine_mode mode; | |
4627 | HOST_WIDE_INT bitsize; | |
4628 | HOST_WIDE_INT bitpos; | |
4629 | int unsignedp; | |
4630 | tree value = TREE_VALUE (elt); | |
4631 | tree index = TREE_PURPOSE (elt); | |
4632 | rtx xtarget = target; | |
4633 | ||
4634 | if (cleared && initializer_zerop (value)) | |
4635 | continue; | |
4636 | ||
4637 | unsignedp = TYPE_UNSIGNED (elttype); | |
4638 | mode = TYPE_MODE (elttype); | |
4639 | if (mode == BLKmode) | |
4640 | bitsize = (host_integerp (TYPE_SIZE (elttype), 1) | |
4641 | ? tree_low_cst (TYPE_SIZE (elttype), 1) | |
4642 | : -1); | |
4643 | else | |
4644 | bitsize = GET_MODE_BITSIZE (mode); | |
4645 | ||
4646 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) | |
4647 | { | |
4648 | tree lo_index = TREE_OPERAND (index, 0); | |
4649 | tree hi_index = TREE_OPERAND (index, 1); | |
4650 | rtx index_r, pos_rtx; | |
4651 | HOST_WIDE_INT lo, hi, count; | |
4652 | tree position; | |
4653 | ||
4654 | /* If the range is constant and "small", unroll the loop. */ | |
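/* Such ranges come from GNU C range designators, e.g. (added
   illustration)

       int a[6] = { [0 ... 5] = v };

   "Small" here is the 40*8-bit total-size test below.  */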
4655 | if (const_bounds_p | |
4656 | && host_integerp (lo_index, 0) | |
4657 | && host_integerp (hi_index, 0) | |
4658 | && (lo = tree_low_cst (lo_index, 0), | |
4659 | hi = tree_low_cst (hi_index, 0), | |
4660 | count = hi - lo + 1, | |
4661 | (!MEM_P (target) | |
4662 | || count <= 2 | |
4663 | || (host_integerp (TYPE_SIZE (elttype), 1) | |
4664 | && (tree_low_cst (TYPE_SIZE (elttype), 1) * count | |
4665 | <= 40 * 8))))) | |
4666 | { | |
4667 | lo -= minelt; hi -= minelt; | |
4668 | for (; lo <= hi; lo++) | |
4669 | { | |
4670 | bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0); | |
4671 | ||
4672 | if (MEM_P (target) | |
4673 | && !MEM_KEEP_ALIAS_SET_P (target) | |
4674 | && TREE_CODE (type) == ARRAY_TYPE | |
4675 | && TYPE_NONALIASED_COMPONENT (type)) | |
4676 | { | |
4677 | target = copy_rtx (target); | |
4678 | MEM_KEEP_ALIAS_SET_P (target) = 1; | |
4679 | } | |
4680 | ||
4681 | store_constructor_field | |
4682 | (target, bitsize, bitpos, mode, value, type, cleared, | |
4683 | get_alias_set (elttype)); | |
4684 | } | |
4685 | } | |
4686 | else | |
4687 | { | |
4688 | rtx loop_start = gen_label_rtx (); | |
4689 | rtx loop_end = gen_label_rtx (); | |
4690 | tree exit_cond; | |
4691 | ||
4692 | expand_expr (hi_index, NULL_RTX, VOIDmode, 0); | |
4693 | unsignedp = TYPE_UNSIGNED (domain); | |
4694 | ||
4695 | index = build_decl (VAR_DECL, NULL_TREE, domain); | |
4696 | ||
4697 | index_r | |
4698 | = gen_reg_rtx (promote_mode (domain, DECL_MODE (index), | |
4699 | &unsignedp, 0)); | |
4700 | SET_DECL_RTL (index, index_r); | |
4701 | store_expr (lo_index, index_r, 0); | |
4702 | ||
4703 | /* Build the head of the loop. */ | |
4704 | do_pending_stack_adjust (); | |
4705 | emit_label (loop_start); | |
4706 | ||
4707 | /* Assign value to element index. */ | |
4708 | position | |
4709 | = convert (ssizetype, | |
4710 | fold (build2 (MINUS_EXPR, TREE_TYPE (index), | |
4711 | index, TYPE_MIN_VALUE (domain)))); | |
4712 | position = size_binop (MULT_EXPR, position, | |
4713 | convert (ssizetype, | |
4714 | TYPE_SIZE_UNIT (elttype))); | |
4715 | ||
4716 | pos_rtx = expand_expr (position, 0, VOIDmode, 0); | |
4717 | xtarget = offset_address (target, pos_rtx, | |
4718 | highest_pow2_factor (position)); | |
4719 | xtarget = adjust_address (xtarget, mode, 0); | |
4720 | if (TREE_CODE (value) == CONSTRUCTOR) | |
4721 | store_constructor (value, xtarget, cleared, | |
4722 | bitsize / BITS_PER_UNIT); | |
4723 | else | |
4724 | store_expr (value, xtarget, 0); | |
4725 | ||
4726 | /* Generate a conditional jump to exit the loop. */ | |
4727 | exit_cond = build2 (LT_EXPR, integer_type_node, | |
4728 | index, hi_index); | |
4729 | jumpif (exit_cond, loop_end); | |
4730 | ||
4731 | /* Update the loop counter, and jump to the head of | |
4732 | the loop. */ | |
4733 | expand_assignment (index, | |
4734 | build2 (PLUS_EXPR, TREE_TYPE (index), | |
4735 | index, integer_one_node), 0); | |
4736 | ||
4737 | emit_jump (loop_start); | |
4738 | ||
4739 | /* Build the end of the loop. */ | |
4740 | emit_label (loop_end); | |
4741 | } | |
4742 | } | |
4743 | else if ((index != 0 && ! host_integerp (index, 0)) | |
4744 | || ! host_integerp (TYPE_SIZE (elttype), 1)) | |
4745 | { | |
4746 | tree position; | |
4747 | ||
4748 | if (index == 0) | |
4749 | index = ssize_int (1); | |
4750 | ||
4751 | if (minelt) | |
4752 | index = fold_convert (ssizetype, | |
4753 | fold (build2 (MINUS_EXPR, | |
4754 | TREE_TYPE (index), | |
4755 | index, | |
4756 | TYPE_MIN_VALUE (domain)))); | |
4757 | ||
4758 | position = size_binop (MULT_EXPR, index, | |
4759 | convert (ssizetype, | |
4760 | TYPE_SIZE_UNIT (elttype))); | |
4761 | xtarget = offset_address (target, | |
4762 | expand_expr (position, 0, VOIDmode, 0), | |
4763 | highest_pow2_factor (position)); | |
4764 | xtarget = adjust_address (xtarget, mode, 0); | |
4765 | store_expr (value, xtarget, 0); | |
4766 | } | |
4767 | else | |
4768 | { | |
4769 | if (index != 0) | |
4770 | bitpos = ((tree_low_cst (index, 0) - minelt) | |
4771 | * tree_low_cst (TYPE_SIZE (elttype), 1)); | |
4772 | else | |
4773 | bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1)); | |
4774 | ||
4775 | if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target) | |
4776 | && TREE_CODE (type) == ARRAY_TYPE | |
4777 | && TYPE_NONALIASED_COMPONENT (type)) | |
4778 | { | |
4779 | target = copy_rtx (target); | |
4780 | MEM_KEEP_ALIAS_SET_P (target) = 1; | |
4781 | } | |
4782 | store_constructor_field (target, bitsize, bitpos, mode, value, | |
4783 | type, cleared, get_alias_set (elttype)); | |
4784 | } | |
4785 | } | |
4786 | break; | |
4787 | } | |
3a021db2 | 4788 | |
5b0264cb NS |
4789 | case VECTOR_TYPE: |
4790 | { | |
4791 | tree elt; | |
4792 | int i; | |
4793 | int need_to_clear; | |
4794 | int icode = 0; | |
4795 | tree elttype = TREE_TYPE (type); | |
4796 | int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1); | |
4797 | enum machine_mode eltmode = TYPE_MODE (elttype); | |
4798 | HOST_WIDE_INT bitsize; | |
4799 | HOST_WIDE_INT bitpos; | |
4800 | rtx *vector = NULL; | |
4801 | unsigned n_elts; | |
4802 | ||
4803 | gcc_assert (eltmode != BLKmode); | |
4804 | ||
4805 | n_elts = TYPE_VECTOR_SUBPARTS (type); | |
4806 | if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target))) | |
4807 | { | |
4808 | enum machine_mode mode = GET_MODE (target); | |
4809 | ||
4810 | icode = (int) vec_init_optab->handlers[mode].insn_code; | |
4811 | if (icode != CODE_FOR_nothing) | |
4812 | { | |
4813 | unsigned int i; | |
4814 | ||
4815 | vector = alloca (n_elts); | |
4816 | for (i = 0; i < n_elts; i++) | |
4817 | vector [i] = CONST0_RTX (GET_MODE_INNER (mode)); | |
4818 | } | |
4819 | } | |
4820 | ||
4821 | /* If the constructor has fewer elements than the vector, | |
4822 | clear the whole vector first. Similarly if this is a static |
4823 | constructor of a non-BLKmode object. */ | |
4824 | if (cleared) | |
4825 | need_to_clear = 0; | |
4826 | else if (REG_P (target) && TREE_STATIC (exp)) | |
4827 | need_to_clear = 1; | |
4828 | else | |
4829 | { | |
4830 | unsigned HOST_WIDE_INT count = 0, zero_count = 0; | |
4831 | ||
4832 | for (elt = CONSTRUCTOR_ELTS (exp); | |
4833 | elt != NULL_TREE; | |
4834 | elt = TREE_CHAIN (elt)) | |
4835 | { | |
4836 | int n_elts_here = tree_low_cst | |
4837 | (int_const_binop (TRUNC_DIV_EXPR, | |
4838 | TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))), | |
4839 | TYPE_SIZE (elttype), 0), 1); | |
4840 | ||
4841 | count += n_elts_here; | |
4842 | if (mostly_zeros_p (TREE_VALUE (elt))) | |
4843 | zero_count += n_elts_here; | |
4844 | } | |
3a021db2 | 4845 | |
5b0264cb NS |
4846 | /* Clear the entire vector first if there are any missing elements, |
4847 | or if the incidence of zero elements is >= 75%. */ | |
4848 | need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count); | |
4849 | } | |
4850 | ||
4851 | if (need_to_clear && size > 0 && !vector) | |
4852 | { | |
4853 | if (REG_P (target)) | |
4854 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | |
4855 | else | |
4856 | clear_storage (target, GEN_INT (size)); | |
4857 | cleared = 1; | |
4858 | } | |
4859 | ||
4860 | if (!cleared && REG_P (target)) | |
4861 | /* Inform later passes that the old value is dead. */ | |
4862 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); | |
4863 | ||
4864 | /* Store each element of the constructor into the corresponding | |
4865 | element of TARGET, determined by counting the elements. */ | |
4866 | for (elt = CONSTRUCTOR_ELTS (exp), i = 0; | |
4867 | elt; | |
4868 | elt = TREE_CHAIN (elt), i += bitsize / elt_size) | |
4869 | { | |
4870 | tree value = TREE_VALUE (elt); | |
4871 | tree index = TREE_PURPOSE (elt); | |
4872 | HOST_WIDE_INT eltpos; | |
4873 | ||
4874 | bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1); | |
4875 | if (cleared && initializer_zerop (value)) | |
4876 | continue; | |
4877 | ||
4878 | if (index != 0) | |
4879 | eltpos = tree_low_cst (index, 1); | |
4880 | else | |
4881 | eltpos = i; | |
4882 | ||
4883 | if (vector) | |
4884 | { | |
4885 | /* Vector CONSTRUCTORs should only be built from smaller | |
4886 | vectors in the case of BLKmode vectors. */ | |
4887 | gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE); | |
4888 | vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0); | |
4889 | } | |
4890 | else | |
4891 | { | |
4892 | enum machine_mode value_mode = | |
4893 | TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE | |
3a021db2 PB |
4894 | ? TYPE_MODE (TREE_TYPE (value)) |
4895 | : eltmode; | |
5b0264cb NS |
4896 | bitpos = eltpos * elt_size; |
4897 | store_constructor_field (target, bitsize, bitpos, | |
4898 | value_mode, value, type, | |
4899 | cleared, get_alias_set (elttype)); | |
4900 | } | |
4901 | } | |
4902 | ||
4903 | if (vector) | |
4904 | emit_insn (GEN_FCN (icode) | |
4905 | (target, | |
4906 | gen_rtx_PARALLEL (GET_MODE (target), | |
4907 | gen_rtvec_v (n_elts, vector)))); | |
4908 | break; | |
4909 | } | |
19caa751 | 4910 | |
5b0264cb NS |
4911 | /* Set constructor assignments. */ |
4912 | case SET_TYPE: | |
4913 | { | |
4914 | tree elt = CONSTRUCTOR_ELTS (exp); | |
4915 | unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits; | |
4916 | tree domain = TYPE_DOMAIN (type); | |
4917 | tree domain_min, domain_max, bitlength; | |
4918 | ||
4919 | /* The default implementation strategy is to extract the | |
4920 | constant parts of the constructor, use that to initialize | |
4921 | the target, and then "or" in whatever non-constant ranges | |
4922 | we need in addition. | |
4923 | ||
4924 | If a large set is all zero or all ones, it is probably | |
4925 | better to set it using memset. Also, if a large set has | |
4926 | just a single range, it may be better to first clear the |
4927 | whole set (using memset), and then set the |
4928 | bits we want. */ |
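/* E.g. (added illustration) a set with domain 0..63 occupies 64 bits;
   on a 32-bit target its constant members are assembled and stored a
   word at a time by the loop below.  */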
4929 | ||
4930 | /* Check for all zeros. */ | |
4931 | if (elt == NULL_TREE && size > 0) | |
4932 | { | |
4933 | if (!cleared) | |
4934 | clear_storage (target, GEN_INT (size)); | |
4935 | return; | |
4936 | } | |
4937 | ||
4938 | domain_min = convert (sizetype, TYPE_MIN_VALUE (domain)); | |
4939 | domain_max = convert (sizetype, TYPE_MAX_VALUE (domain)); | |
4940 | bitlength = size_binop (PLUS_EXPR, | |
4941 | size_diffop (domain_max, domain_min), | |
4942 | ssize_int (1)); | |
4943 | ||
4944 | nbits = tree_low_cst (bitlength, 1); | |
4945 | ||
4946 | /* For "small" sets, or "medium-sized" (up to 32 bytes) sets | |
4947 | that are "complicated" (more than one range), initialize | |
4948 | (the constant parts) by copying from a constant. */ | |
4949 | if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD | |
4950 | || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE)) | |
4951 | { | |
4952 | unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp)); | |
4953 | enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1); | |
4954 | char *bit_buffer = alloca (nbits); | |
4955 | HOST_WIDE_INT word = 0; | |
4956 | unsigned int bit_pos = 0; | |
4957 | unsigned int ibit = 0; | |
4958 | unsigned int offset = 0; /* In bytes from beginning of set. */ | |
4959 | ||
4960 | elt = get_set_constructor_bits (exp, bit_buffer, nbits); | |
4961 | for (;;) | |
4962 | { | |
4963 | if (bit_buffer[ibit]) | |
4964 | { | |
4965 | if (BYTES_BIG_ENDIAN) | |
4966 | word |= (1 << (set_word_size - 1 - bit_pos)); | |
4967 | else | |
4968 | word |= 1 << bit_pos; | |
4969 | } | |
4970 | ||
4971 | bit_pos++; ibit++; | |
4972 | if (bit_pos >= set_word_size || ibit == nbits) | |
4973 | { | |
4974 | if (word != 0 || ! cleared) | |
4975 | { | |
4976 | rtx datum = gen_int_mode (word, mode); | |
4977 | rtx to_rtx; | |
4978 | ||
4979 | /* The assumption here is that it is safe to | |
4980 | use XEXP if the set is multi-word, but not | |
4981 | if it's single-word. */ | |
4982 | if (MEM_P (target)) | |
4983 | to_rtx = adjust_address (target, mode, offset); | |
4984 | else | |
4985 | { | |
4986 | gcc_assert (!offset); | |
4987 | to_rtx = target; | |
4988 | } | |
4989 | emit_move_insn (to_rtx, datum); | |
4990 | } | |
4991 | ||
4992 | if (ibit == nbits) | |
4993 | break; | |
4994 | word = 0; | |
4995 | bit_pos = 0; | |
4996 | offset += set_word_size / BITS_PER_UNIT; | |
4997 | } | |
4998 | } | |
4999 | } | |
5000 | else if (!cleared) | |
5001 | /* Don't bother clearing storage if the set is all ones. */ | |
5002 | if (TREE_CHAIN (elt) != NULL_TREE | |
5003 | || (TREE_PURPOSE (elt) == NULL_TREE | |
5004 | ? nbits != 1 | |
5005 | : ( ! host_integerp (TREE_VALUE (elt), 0) | |
5006 | || ! host_integerp (TREE_PURPOSE (elt), 0) | |
5007 | || (tree_low_cst (TREE_VALUE (elt), 0) | |
5008 | - tree_low_cst (TREE_PURPOSE (elt), 0) + 1 | |
5009 | != (HOST_WIDE_INT) nbits)))) | |
5010 | clear_storage (target, expr_size (exp)); | |
5011 | ||
5012 | for (; elt != NULL_TREE; elt = TREE_CHAIN (elt)) | |
5013 | { | |
5014 | /* Start of range of element or NULL. */ | |
5015 | tree startbit = TREE_PURPOSE (elt); | |
5016 | /* End of range of element, or element value. */ | |
5017 | tree endbit = TREE_VALUE (elt); | |
5018 | HOST_WIDE_INT startb, endb; | |
5019 | rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx; | |
5020 | ||
5021 | bitlength_rtx = expand_expr (bitlength, | |
5022 | NULL_RTX, MEM, EXPAND_CONST_ADDRESS); | |
5023 | ||
5024 | /* Handle non-range tuple element like [ expr ]. */ | |
5025 | if (startbit == NULL_TREE) | |
5026 | { | |
5027 | startbit = save_expr (endbit); | |
5028 | endbit = startbit; | |
5029 | } | |
5030 | ||
5031 | startbit = convert (sizetype, startbit); | |
5032 | endbit = convert (sizetype, endbit); | |
5033 | if (! integer_zerop (domain_min)) | |
5034 | { | |
5035 | startbit = size_binop (MINUS_EXPR, startbit, domain_min); | |
5036 | endbit = size_binop (MINUS_EXPR, endbit, domain_min); | |
5037 | } | |
5038 | startbit_rtx = expand_expr (startbit, NULL_RTX, MEM, | |
5039 | EXPAND_CONST_ADDRESS); | |
5040 | endbit_rtx = expand_expr (endbit, NULL_RTX, MEM, | |
071a6595 | 5041 | EXPAND_CONST_ADDRESS); |
5b0264cb NS |
5042 | |
5043 | if (REG_P (target)) | |
5044 | { | |
5045 | targetx | |
5046 | = assign_temp | |
ae2bcd98 | 5047 | ((build_qualified_type (lang_hooks.types.type_for_mode |
b0c48229 | 5048 | (GET_MODE (target), 0), |
1da68f56 RK |
5049 | TYPE_QUAL_CONST)), |
5050 | 0, 1, 1); | |
5b0264cb NS |
5051 | emit_move_insn (targetx, target); |
5052 | } | |
5053 | ||
5054 | else | |
5055 | { | |
5056 | gcc_assert (MEM_P (target)); | |
5057 | targetx = target; | |
5058 | } | |
19caa751 | 5059 | |
5b0264cb NS |
5060 | /* Optimization: If startbit and endbit are constants divisible |
5061 | by BITS_PER_UNIT, call memset instead. */ | |
5062 | if (TREE_CODE (startbit) == INTEGER_CST | |
5063 | && TREE_CODE (endbit) == INTEGER_CST | |
5064 | && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0 | |
5065 | && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0) | |
5066 | { | |
5067 | emit_library_call (memset_libfunc, LCT_NORMAL, | |
5068 | VOIDmode, 3, | |
5069 | plus_constant (XEXP (targetx, 0), | |
5070 | startb / BITS_PER_UNIT), | |
5071 | Pmode, | |
5072 | constm1_rtx, TYPE_MODE (integer_type_node), | |
5073 | GEN_INT ((endb - startb) / BITS_PER_UNIT), | |
5074 | TYPE_MODE (sizetype)); | |
5075 | } | |
5076 | else | |
5077 | emit_library_call (setbits_libfunc, LCT_NORMAL, | |
5078 | VOIDmode, 4, XEXP (targetx, 0), | |
5079 | Pmode, bitlength_rtx, TYPE_MODE (sizetype), | |
5080 | startbit_rtx, TYPE_MODE (sizetype), | |
5081 | endbit_rtx, TYPE_MODE (sizetype)); | |
5082 | ||
5083 | if (REG_P (target)) | |
5084 | emit_move_insn (target, targetx); | |
5085 | } | |
5086 | break; | |
5087 | } | |
5088 | default: | |
5089 | gcc_unreachable (); | |
071a6595 | 5090 | } |
bbf6f052 RK |
5091 | } |
5092 | ||
5093 | /* Store the value of EXP (an expression tree) | |
5094 | into a subfield of TARGET which has mode MODE and occupies | |
5095 | BITSIZE bits, starting BITPOS bits from the start of TARGET. | |
5096 | If MODE is VOIDmode, it means that we are storing into a bit-field. | |
5097 | ||
5098 | If VALUE_MODE is VOIDmode, return nothing in particular. | |
5099 | UNSIGNEDP is not used in this case. | |
5100 | ||
5101 | Otherwise, return an rtx for the value stored. This rtx | |
5102 | has mode VALUE_MODE if that is convenient to do. | |
5103 | In this case, UNSIGNEDP must be nonzero if the value is an unsigned type. | |
5104 | ||
a06ef755 | 5105 | TYPE is the type of the underlying object. |
ece32014 MM |
5106 | |
5107 | ALIAS_SET is the alias set for the destination. This value will | |
5108 | (in general) be different from that for TARGET, since TARGET is a | |
5109 | reference to the containing structure. */ | |
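/* An added illustration (assumed C source): an assignment such as

       struct f { unsigned hi : 4, lo : 12; } x;
       x.lo = 9;

   stores a 12-bit value at a bit position that no ordinary memory
   reference can express, so it is handled via store_bit_field.  */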
bbf6f052 RK |
5110 | |
5111 | static rtx | |
502b8322 AJ |
5112 | store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, |
5113 | enum machine_mode mode, tree exp, enum machine_mode value_mode, | |
5114 | int unsignedp, tree type, int alias_set) | |
bbf6f052 | 5115 | { |
906c4e36 | 5116 | HOST_WIDE_INT width_mask = 0; |
bbf6f052 | 5117 | |
e9a25f70 JL |
5118 | if (TREE_CODE (exp) == ERROR_MARK) |
5119 | return const0_rtx; | |
5120 | ||
2be6a7e9 RK |
5121 | /* If we have nothing to store, do nothing unless the expression has |
5122 | side-effects. */ | |
5123 | if (bitsize == 0) | |
5124 | return expand_expr (exp, const0_rtx, VOIDmode, 0); | |
6a87d634 | 5125 | else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT) |
906c4e36 | 5126 | width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1; |
bbf6f052 RK |
5127 | |
5128 | /* If we are storing into an unaligned field of an aligned union that is | |
5129 | in a register, we may have the mode of TARGET being an integer mode but | |
5130 | MODE == BLKmode. In that case, get an aligned object whose size and | |
5131 | alignment are the same as TARGET and store TARGET into it (we can avoid | |
5132 | the store if the field being stored is the entire width of TARGET). Then | |
5133 | call ourselves recursively to store the field into a BLKmode version of | |
5134 | that object. Finally, load from the object into TARGET. This is not | |
5135 | very efficient in general, but should only be slightly more expensive | |
5136 | than the otherwise-required unaligned accesses. Perhaps this can be | |
85a43a2f RK |
5137 | cleaned up later. It's tempting to make OBJECT readonly, but it's set |
5138 | twice, once with emit_move_insn and once via store_field. */ | |
bbf6f052 RK |
5139 | |
5140 | if (mode == BLKmode | |
f8cfc6aa | 5141 | && (REG_P (target) || GET_CODE (target) == SUBREG)) |
bbf6f052 | 5142 | { |
85a43a2f | 5143 | rtx object = assign_temp (type, 0, 1, 1); |
c4e59f51 | 5144 | rtx blk_object = adjust_address (object, BLKmode, 0); |
bbf6f052 | 5145 | |
8752c357 | 5146 | if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target))) |
bbf6f052 RK |
5147 | emit_move_insn (object, target); |
5148 | ||
a06ef755 RK |
5149 | store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type, |
5150 | alias_set); | |
bbf6f052 RK |
5151 | |
5152 | emit_move_insn (target, object); | |
5153 | ||
a06ef755 | 5154 | /* We want to return the BLKmode version of the data. */ |
46093b97 | 5155 | return blk_object; |
bbf6f052 | 5156 | } |
c3b247b4 JM |
5157 | |
5158 | if (GET_CODE (target) == CONCAT) | |
5159 | { | |
5160 | /* We're storing into a struct containing a single __complex. */ | |
5161 | ||
5b0264cb | 5162 | gcc_assert (!bitpos); |
6de9cd9a | 5163 | return store_expr (exp, target, value_mode != VOIDmode); |
c3b247b4 | 5164 | } |
bbf6f052 RK |
5165 | |
5166 | /* If the structure is in a register or if the component | |
5167 | is a bit field, we cannot use addressing to access it. | |
5168 | Use bit-field techniques or SUBREG to store in it. */ | |
5169 | ||
4fa52007 | 5170 | if (mode == VOIDmode |
6ab06cbb JW |
5171 | || (mode != BLKmode && ! direct_store[(int) mode] |
5172 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
5173 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) | |
f8cfc6aa | 5174 | || REG_P (target) |
c980ac49 | 5175 | || GET_CODE (target) == SUBREG |
ccc98036 RS |
5176 | /* If the field isn't aligned enough to store as an ordinary memref, |
5177 | store it as a bit field. */ | |
15b19a7d | 5178 | || (mode != BLKmode |
9e5f281f OH |
5179 | && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)) |
5180 | || bitpos % GET_MODE_ALIGNMENT (mode)) | |
5181 | && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))) | |
502b8322 | 5182 | || (bitpos % BITS_PER_UNIT != 0))) |
14a774a9 RK |
5183 | /* If the RHS and field are a constant size and the size of the |
5184 | RHS isn't the same size as the bitfield, we must use bitfield | |
5185 | operations. */ | |
05bccae2 RK |
5186 | || (bitsize >= 0 |
5187 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | |
5188 | && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)) | |
bbf6f052 | 5189 | { |
906c4e36 | 5190 | rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); |
bbd6cf73 | 5191 | |
ef19912d RK |
5192 | /* If BITSIZE is narrower than the size of the type of EXP |
5193 | we will be narrowing TEMP. Normally, what's wanted are the | |
5194 | low-order bits. However, if EXP's type is a record and this is | |
5195 | big-endian machine, we want the upper BITSIZE bits. */ | |
5196 | if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT | |
65a07688 | 5197 | && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp)) |
ef19912d RK |
5198 | && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE) |
5199 | temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp, | |
5200 | size_int (GET_MODE_BITSIZE (GET_MODE (temp)) | |
5201 | - bitsize), | |
c1853da7 | 5202 | NULL_RTX, 1); |
ef19912d | 5203 | |
bbd6cf73 RK |
5204 | /* Unless MODE is VOIDmode or BLKmode, convert TEMP to |
5205 | MODE. */ | |
5206 | if (mode != VOIDmode && mode != BLKmode | |
5207 | && mode != TYPE_MODE (TREE_TYPE (exp))) | |
5208 | temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1); | |
5209 | ||
a281e72d RK |
5210 | /* If the modes of TARGET and TEMP are both BLKmode, both |
5211 | must be in memory and BITPOS must be aligned on a byte | |
5212 | boundary. If so, we simply do a block copy. */ | |
5213 | if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode) | |
5214 | { | |
5b0264cb NS |
5215 | gcc_assert (MEM_P (target) && MEM_P (temp) |
5216 | && !(bitpos % BITS_PER_UNIT)); | |
a281e72d | 5217 | |
f4ef873c | 5218 | target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); |
a281e72d | 5219 | emit_block_move (target, temp, |
a06ef755 | 5220 | GEN_INT ((bitsize + BITS_PER_UNIT - 1) |
44bb111a RH |
5221 | / BITS_PER_UNIT), |
5222 | BLOCK_OP_NORMAL); | |
a281e72d RK |
5223 | |
5224 | return value_mode == VOIDmode ? const0_rtx : target; | |
5225 | } | |
5226 | ||
bbf6f052 | 5227 | /* Store the value in the bitfield. */ |
b3520980 | 5228 | store_bit_field (target, bitsize, bitpos, mode, temp); |
a06ef755 | 5229 | |
bbf6f052 RK |
5230 | if (value_mode != VOIDmode) |
5231 | { | |
04050c69 RK |
5232 | /* The caller wants an rtx for the value. |
5233 | If possible, avoid refetching from the bitfield itself. */ | |
bbf6f052 | 5234 | if (width_mask != 0 |
3c0cb5de | 5235 | && ! (MEM_P (target) && MEM_VOLATILE_P (target))) |
5c4d7cfb | 5236 | { |
9074de27 | 5237 | tree count; |
5c4d7cfb | 5238 | enum machine_mode tmode; |
86a2c12a | 5239 | |
5c4d7cfb | 5240 | tmode = GET_MODE (temp); |
86a2c12a RS |
5241 | if (tmode == VOIDmode) |
5242 | tmode = value_mode; | |
22273300 JJ |
5243 | |
5244 | if (unsignedp) | |
5245 | return expand_and (tmode, temp, | |
2496c7bd | 5246 | gen_int_mode (width_mask, tmode), |
22273300 JJ |
5247 | NULL_RTX); |
5248 | ||
4a90aeeb | 5249 | count = build_int_cst (NULL_TREE, |
7d60be94 | 5250 | GET_MODE_BITSIZE (tmode) - bitsize); |
5c4d7cfb RS |
5251 | temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0); |
5252 | return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0); | |
5253 | } | |
04050c69 | 5254 | |
bbf6f052 | 5255 | return extract_bit_field (target, bitsize, bitpos, unsignedp, |
b3520980 | 5256 | NULL_RTX, value_mode, VOIDmode); |
bbf6f052 RK |
5257 | } |
5258 | return const0_rtx; | |
5259 | } | |
5260 | else | |
5261 | { | |
5262 | rtx addr = XEXP (target, 0); | |
a06ef755 | 5263 | rtx to_rtx = target; |
bbf6f052 RK |
5264 | |
5265 | /* If a value is wanted, it must be the lhs; | |
5266 | so make the address stable for multiple use. */ | |
5267 | ||
f8cfc6aa | 5268 | if (value_mode != VOIDmode && !REG_P (addr) |
bbf6f052 RK |
5269 | && ! CONSTANT_ADDRESS_P (addr) |
5270 | /* A frame-pointer reference is already stable. */ | |
5271 | && ! (GET_CODE (addr) == PLUS | |
5272 | && GET_CODE (XEXP (addr, 1)) == CONST_INT | |
5273 | && (XEXP (addr, 0) == virtual_incoming_args_rtx | |
5274 | || XEXP (addr, 0) == virtual_stack_vars_rtx))) | |
a06ef755 | 5275 | to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr)); |
bbf6f052 RK |
5276 | |
5277 | /* Now build a reference to just the desired component. */ | |
5278 | ||
a06ef755 RK |
5279 | to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT); |
5280 | ||
5281 | if (to_rtx == target) | |
5282 | to_rtx = copy_rtx (to_rtx); | |
792760b9 | 5283 | |
c6df88cb | 5284 | MEM_SET_IN_STRUCT_P (to_rtx, 1); |
10b76d73 | 5285 | if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0) |
a06ef755 | 5286 | set_mem_alias_set (to_rtx, alias_set); |
bbf6f052 RK |
5287 | |
5288 | return store_expr (exp, to_rtx, value_mode != VOIDmode); | |
5289 | } | |
5290 | } | |
5291 | \f | |
5292 | /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF, | |
b4e3fabb RK |
5293 | an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these |
5294 | codes and find the ultimate containing object, which we return. | |
bbf6f052 RK |
5295 | |
5296 | We set *PBITSIZE to the size in bits that we want, *PBITPOS to the | |
5297 | bit position, and *PUNSIGNEDP to the signedness of the field. | |
7bb0943f RS |
5298 | If the position of the field is variable, we store a tree |
5299 | giving the variable offset (in units) in *POFFSET. | |
5300 | This offset is in addition to the bit position. | |
5301 | If the position is not variable, we store 0 in *POFFSET. | |
bbf6f052 RK |
5302 | |
5303 | If any of the extraction expressions is volatile, | |
5304 | we store 1 in *PVOLATILEP. Otherwise we don't change that. | |
5305 | ||
5306 | If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it | |
5307 | is a mode that can be used to access the field. In that case, *PBITSIZE | |
e7c33f54 RK |
5308 | is redundant. |
5309 | ||
5310 | If the field describes a variable-sized object, *PMODE is set to | |
5311 | VOIDmode and *PBITSIZE is set to -1. An access cannot be made in | |
6d2f8887 | 5312 | this case, but the address of the object can be found. */ |
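/* An added illustration (assumed C source): for an access like S.A[I].B,
   this returns the innermost object S, sets *PBITPOS to the constant
   part of B's position, and returns the I-dependent displacement, in
   units, in *POFFSET.  */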
bbf6f052 RK |
5313 | |
5314 | tree | |
502b8322 AJ |
5315 | get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize, |
5316 | HOST_WIDE_INT *pbitpos, tree *poffset, | |
5317 | enum machine_mode *pmode, int *punsignedp, | |
5318 | int *pvolatilep) | |
bbf6f052 RK |
5319 | { |
5320 | tree size_tree = 0; | |
5321 | enum machine_mode mode = VOIDmode; | |
fed3cef0 | 5322 | tree offset = size_zero_node; |
770ae6cc | 5323 | tree bit_offset = bitsize_zero_node; |
770ae6cc | 5324 | tree tem; |
bbf6f052 | 5325 | |
770ae6cc RK |
5326 | /* First get the mode, signedness, and size. We do this from just the |
5327 | outermost expression. */ | |
bbf6f052 RK |
5328 | if (TREE_CODE (exp) == COMPONENT_REF) |
5329 | { | |
5330 | size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); | |
5331 | if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) | |
5332 | mode = DECL_MODE (TREE_OPERAND (exp, 1)); | |
770ae6cc | 5333 | |
a150de29 | 5334 | *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1)); |
bbf6f052 RK |
5335 | } |
5336 | else if (TREE_CODE (exp) == BIT_FIELD_REF) | |
5337 | { | |
5338 | size_tree = TREE_OPERAND (exp, 1); | |
a150de29 | 5339 | *punsignedp = BIT_FIELD_REF_UNSIGNED (exp); |
bbf6f052 RK |
5340 | } |
5341 | else | |
5342 | { | |
5343 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
8df83eae | 5344 | *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); |
770ae6cc | 5345 | |
ab87f8c8 JL |
5346 | if (mode == BLKmode) |
5347 | size_tree = TYPE_SIZE (TREE_TYPE (exp)); | |
770ae6cc RK |
5348 | else |
5349 | *pbitsize = GET_MODE_BITSIZE (mode); | |
bbf6f052 | 5350 | } |
3a94c984 | 5351 | |
770ae6cc | 5352 | if (size_tree != 0) |
bbf6f052 | 5353 | { |
770ae6cc | 5354 | if (! host_integerp (size_tree, 1)) |
e7c33f54 RK |
5355 | mode = BLKmode, *pbitsize = -1; |
5356 | else | |
770ae6cc | 5357 | *pbitsize = tree_low_cst (size_tree, 1); |
bbf6f052 RK |
5358 | } |
5359 | ||
5360 | /* Compute cumulative bit-offset for nested component-refs and array-refs, | |
5361 | and find the ultimate containing object. */ | |
bbf6f052 RK |
5362 | while (1) |
5363 | { | |
770ae6cc RK |
5364 | if (TREE_CODE (exp) == BIT_FIELD_REF) |
5365 | bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2)); | |
5366 | else if (TREE_CODE (exp) == COMPONENT_REF) | |
bbf6f052 | 5367 | { |
770ae6cc | 5368 | tree field = TREE_OPERAND (exp, 1); |
44de5aeb | 5369 | tree this_offset = component_ref_field_offset (exp); |
bbf6f052 | 5370 | |
e7f3c83f RK |
5371 | /* If this field hasn't been filled in yet, don't go |
5372 | past it. This should only happen when folding expressions | |
5373 | made during type construction. */ | |
770ae6cc | 5374 | if (this_offset == 0) |
e7f3c83f RK |
5375 | break; |
5376 | ||
7156dead | 5377 | offset = size_binop (PLUS_EXPR, offset, this_offset); |
770ae6cc RK |
5378 | bit_offset = size_binop (PLUS_EXPR, bit_offset, |
5379 | DECL_FIELD_BIT_OFFSET (field)); | |
e6d8c385 | 5380 | |
a06ef755 | 5381 | /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */ |
bbf6f052 | 5382 | } |
7156dead | 5383 | |
b4e3fabb RK |
5384 | else if (TREE_CODE (exp) == ARRAY_REF |
5385 | || TREE_CODE (exp) == ARRAY_RANGE_REF) | |
bbf6f052 | 5386 | { |
742920c7 | 5387 | tree index = TREE_OPERAND (exp, 1); |
44de5aeb RK |
5388 | tree low_bound = array_ref_low_bound (exp); |
5389 | tree unit_size = array_ref_element_size (exp); | |
742920c7 | 5390 | |
770ae6cc RK |
5391 | /* We assume all arrays have sizes that are a multiple of a byte. |
5392 | First subtract the lower bound, if any, in the type of the | |
5393 | index, then convert to sizetype and multiply by the size of the | |
5394 | array element. */ | |
44de5aeb | 5395 | if (! integer_zerop (low_bound)) |
3244e67d RS |
5396 | index = fold (build2 (MINUS_EXPR, TREE_TYPE (index), |
5397 | index, low_bound)); | |
f8dac6eb | 5398 | |
770ae6cc RK |
5399 | offset = size_binop (PLUS_EXPR, offset, |
5400 | size_binop (MULT_EXPR, | |
5401 | convert (sizetype, index), | |
7156dead | 5402 | unit_size)); |
bbf6f052 | 5403 | } |
7156dead | 5404 | |
c1853da7 RK |
5405 | /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal |
5406 | conversions that don't change the mode, and all view conversions | |
5407 | except those that need to "step up" the alignment. */ | |
bbf6f052 | 5408 | else if (TREE_CODE (exp) != NON_LVALUE_EXPR |
c1853da7 RK |
5409 | && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR |
5410 | && ! ((TYPE_ALIGN (TREE_TYPE (exp)) | |
5411 | > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
5412 | && STRICT_ALIGNMENT | |
5413 | && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))) | |
5414 | < BIGGEST_ALIGNMENT) | |
5415 | && (TYPE_ALIGN_OK (TREE_TYPE (exp)) | |
5416 | || TYPE_ALIGN_OK (TREE_TYPE | |
5417 | (TREE_OPERAND (exp, 0)))))) | |
bbf6f052 RK |
5418 | && ! ((TREE_CODE (exp) == NOP_EXPR |
5419 | || TREE_CODE (exp) == CONVERT_EXPR) | |
5420 | && (TYPE_MODE (TREE_TYPE (exp)) | |
5421 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))) | |
5422 | break; | |
7bb0943f RS |
5423 | |
5424 | /* If any reference in the chain is volatile, the effect is volatile. */ | |
5425 | if (TREE_THIS_VOLATILE (exp)) | |
5426 | *pvolatilep = 1; | |
839c4796 | 5427 | |
bbf6f052 RK |
5428 | exp = TREE_OPERAND (exp, 0); |
5429 | } | |
5430 | ||
770ae6cc RK |
5431 | /* If OFFSET is constant, see if we can return the whole thing as a |
5432 | constant bit position. Otherwise, split it up. */ | |
5433 | if (host_integerp (offset, 0) | |
5434 | && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset), | |
5435 | bitsize_unit_node)) | |
5436 | && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset)) | |
5437 | && host_integerp (tem, 0)) | |
5438 | *pbitpos = tree_low_cst (tem, 0), *poffset = 0; | |
5439 | else | |
5440 | *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset; | |
b50d17a1 | 5441 | |
bbf6f052 | 5442 | *pmode = mode; |
bbf6f052 RK |
5443 | return exp; |
5444 | } | |
921b3427 | 5445 | |
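/* Editorial sketch, not part of GCC: the final step above folds a constant
   byte OFFSET and the residual BIT_OFFSET into a single bit position,
   bitpos = offset * BITS_PER_UNIT + bit_offset.  A minimal standalone
   analogue on host integers (combine_bit_position is a hypothetical name
   introduced here for illustration only):  */

#include <stdio.h>

/* Combine a constant byte offset and a residual bit offset into one bit
   position, mirroring what get_inner_reference stores in *PBITPOS.  */
static long
combine_bit_position (long byte_offset, long bit_offset)
{
  return byte_offset * 8 + bit_offset;
}

int
main (void)
{
  /* A field 5 bytes plus 3 bits into its containing object.  */
  printf ("%ld\n", combine_bit_position (5, 3));  /* prints 43 */
  return 0;
}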
44de5aeb RK |
5446 | /* Return a tree of sizetype representing the size, in bytes, of the element |
5447 | of EXP, an ARRAY_REF. */ | |
5448 | ||
5449 | tree | |
5450 | array_ref_element_size (tree exp) | |
5451 | { | |
5452 | tree aligned_size = TREE_OPERAND (exp, 3); | |
5453 | tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
5454 | ||
5455 | /* If a size was specified in the ARRAY_REF, it's the size measured | |
5456 | in alignment units of the element type. So multiply by that value. */ | |
5457 | if (aligned_size) | |
bc482be4 RH |
5458 | { |
5459 | /* ??? tree_ssa_useless_type_conversion will eliminate casts to | |
5460 | sizetype from another type of the same width and signedness. */ | |
5461 | if (TREE_TYPE (aligned_size) != sizetype) | |
5462 | aligned_size = fold_convert (sizetype, aligned_size); | |
5463 | return size_binop (MULT_EXPR, aligned_size, | |
a4e9ffe5 | 5464 | size_int (TYPE_ALIGN_UNIT (elmt_type))); |
bc482be4 | 5465 | } |
44de5aeb | 5466 | |
caf93cb0 | 5467 | /* Otherwise, take the size from that of the element type. Substitute |
44de5aeb RK |
5468 | any PLACEHOLDER_EXPR that we have. */ |
5469 | else | |
5470 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp); | |
5471 | } | |
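/* Editorial sketch, not part of GCC: when operand 3 is present it counts
   alignment units rather than bytes, so the byte size is that operand
   times the element alignment in bytes.  component_ref_field_offset below
   uses the same units convention for field offsets.  A standalone analogue
   (element_size_bytes is a hypothetical illustrative name):  */

#include <stdio.h>

static unsigned long
element_size_bytes (unsigned long aligned_size, unsigned long align_bits)
{
  /* ALIGNED_SIZE is measured in units of ALIGN_BITS / 8 bytes.  */
  return aligned_size * (align_bits / 8);
}

int
main (void)
{
  /* An element recorded as 3 units at 32-bit alignment is 12 bytes.  */
  printf ("%lu\n", element_size_bytes (3, 32));
  return 0;
}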
5472 | ||
5473 | /* Return a tree representing the lower bound of the array mentioned in | |
5474 | EXP, an ARRAY_REF. */ | |
5475 | ||
5476 | tree | |
5477 | array_ref_low_bound (tree exp) | |
5478 | { | |
5479 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
5480 | ||
5481 | /* If a lower bound is specified in EXP, use it. */ | |
5482 | if (TREE_OPERAND (exp, 2)) | |
5483 | return TREE_OPERAND (exp, 2); | |
5484 | ||
5485 | /* Otherwise, if there is a domain type and it has a lower bound, use it, | |
5486 | substituting for a PLACEHOLDER_EXPR as needed. */ | |
5487 | if (domain_type && TYPE_MIN_VALUE (domain_type)) | |
5488 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp); | |
5489 | ||
5490 | /* Otherwise, return a zero of the appropriate type. */ | |
5212068f | 5491 | return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0); |
44de5aeb RK |
5492 | } |
5493 | ||
a7e5372d ZD |
5494 | /* Return a tree representing the upper bound of the array mentioned in |
5495 | EXP, an ARRAY_REF. */ | |
5496 | ||
5497 | tree | |
5498 | array_ref_up_bound (tree exp) | |
5499 | { | |
5500 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
5501 | ||
5502 | /* If there is a domain type and it has an upper bound, use it, substituting | |
5503 | for a PLACEHOLDER_EXPR as needed. */ | |
5504 | if (domain_type && TYPE_MAX_VALUE (domain_type)) | |
5505 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp); | |
5506 | ||
5507 | /* Otherwise fail. */ | |
5508 | return NULL_TREE; | |
5509 | } | |
5510 | ||
44de5aeb RK |
5511 | /* Return a tree representing the offset, in bytes, of the field referenced |
5512 | by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */ | |
5513 | ||
5514 | tree | |
5515 | component_ref_field_offset (tree exp) | |
5516 | { | |
5517 | tree aligned_offset = TREE_OPERAND (exp, 2); | |
5518 | tree field = TREE_OPERAND (exp, 1); | |
5519 | ||
5520 | /* If an offset was specified in the COMPONENT_REF, it's the offset measured | |
5521 | in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that | |
5522 | value. */ | |
5523 | if (aligned_offset) | |
bc482be4 RH |
5524 | { |
5525 | /* ??? tree_ssa_useless_type_conversion will eliminate casts to | |
5526 | sizetype from another type of the same width and signedness. */ | |
5527 | if (TREE_TYPE (aligned_offset) != sizetype) | |
5528 | aligned_offset = fold_convert (sizetype, aligned_offset); | |
5529 | return size_binop (MULT_EXPR, aligned_offset, | |
5530 | size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT)); | |
5531 | } | |
44de5aeb | 5532 | |
caf93cb0 | 5533 | /* Otherwise, take the offset from that of the field. Substitute |
44de5aeb RK |
5534 | any PLACEHOLDER_EXPR that we have. */ |
5535 | else | |
5536 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp); | |
5537 | } | |
5538 | ||
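/* Editorial sketch, not part of GCC: DECL_FIELD_OFFSET counts units of
   DECL_OFFSET_ALIGN / BITS_PER_UNIT and DECL_FIELD_BIT_OFFSET carries the
   remainder, so a field's byte offset is units * (align / 8) + bits / 8.
   Cross-checked here against offsetof on an ordinary host struct, assuming
   an ABI where double is 8-byte aligned (true on x86-64, not universal):  */

#include <stdio.h>
#include <stddef.h>

struct s { int a; double b; };

int
main (void)
{
  /* If b's offset were recorded as 1 unit at 64-bit alignment with a
     zero bit offset, the byte offset would be 1 * (64 / 8) = 8.  */
  unsigned long units = 1, align_bits = 64, bit_off = 0;
  unsigned long bytes = units * (align_bits / 8) + bit_off / 8;
  printf ("%lu == %lu\n", bytes, (unsigned long) offsetof (struct s, b));
  return 0;
}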
ed239f5a RK |
5539 | /* Return 1 if T is an expression that get_inner_reference handles. */ |
5540 | ||
5541 | int | |
502b8322 | 5542 | handled_component_p (tree t) |
ed239f5a RK |
5543 | { |
5544 | switch (TREE_CODE (t)) | |
5545 | { | |
5546 | case BIT_FIELD_REF: | |
5547 | case COMPONENT_REF: | |
5548 | case ARRAY_REF: | |
5549 | case ARRAY_RANGE_REF: | |
5550 | case NON_LVALUE_EXPR: | |
5551 | case VIEW_CONVERT_EXPR: | |
5552 | return 1; | |
5553 | ||
1a8c4ca6 EB |
5554 | /* ??? Sure they are handled, but get_inner_reference may return |
5555 | a different PBITSIZE, depending upon whether the expression is | |
5556 | wrapped up in a NOP_EXPR or not, e.g. for bitfields. */ | |
ed239f5a RK |
5557 | case NOP_EXPR: |
5558 | case CONVERT_EXPR: | |
5559 | return (TYPE_MODE (TREE_TYPE (t)) | |
5560 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0)))); | |
5561 | ||
5562 | default: | |
5563 | return 0; | |
5564 | } | |
5565 | } | |
bbf6f052 | 5566 | \f |
3fe44edd RK |
5567 | /* Given an rtx VALUE that may contain additions and multiplications, return |
5568 | an equivalent value that just refers to a register, memory, or constant. | |
5569 | This is done by generating instructions to perform the arithmetic and | |
5570 | returning a pseudo-register containing the value. | |
c45a13a6 RK |
5571 | |
5572 | The returned value may be a REG, SUBREG, MEM or constant. */ | |
bbf6f052 RK |
5573 | |
5574 | rtx | |
502b8322 | 5575 | force_operand (rtx value, rtx target) |
bbf6f052 | 5576 | { |
8a28dbcc | 5577 | rtx op1, op2; |
bbf6f052 | 5578 | /* Use subtarget as the target for operand 0 of a binary operation. */ |
b3694847 | 5579 | rtx subtarget = get_subtarget (target); |
8a28dbcc | 5580 | enum rtx_code code = GET_CODE (value); |
bbf6f052 | 5581 | |
50654f6c ZD |
5582 | /* Check for a subreg applied to an expression produced by the loop optimizer. */
5583 | if (code == SUBREG | |
f8cfc6aa | 5584 | && !REG_P (SUBREG_REG (value)) |
3c0cb5de | 5585 | && !MEM_P (SUBREG_REG (value))) |
50654f6c ZD |
5586 | { |
5587 | value = simplify_gen_subreg (GET_MODE (value), | |
5588 | force_reg (GET_MODE (SUBREG_REG (value)), | |
5589 | force_operand (SUBREG_REG (value), | |
5590 | NULL_RTX)), | |
5591 | GET_MODE (SUBREG_REG (value)), | |
5592 | SUBREG_BYTE (value)); | |
5593 | code = GET_CODE (value); | |
5594 | } | |
5595 | ||
8b015896 | 5596 | /* Check for a PIC address load. */ |
8a28dbcc | 5597 | if ((code == PLUS || code == MINUS) |
8b015896 RH |
5598 | && XEXP (value, 0) == pic_offset_table_rtx |
5599 | && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF | |
5600 | || GET_CODE (XEXP (value, 1)) == LABEL_REF | |
5601 | || GET_CODE (XEXP (value, 1)) == CONST)) | |
5602 | { | |
5603 | if (!subtarget) | |
5604 | subtarget = gen_reg_rtx (GET_MODE (value)); | |
5605 | emit_move_insn (subtarget, value); | |
5606 | return subtarget; | |
5607 | } | |
5608 | ||
8a28dbcc | 5609 | if (code == ZERO_EXTEND || code == SIGN_EXTEND) |
bbf6f052 | 5610 | { |
8a28dbcc JH |
5611 | if (!target) |
5612 | target = gen_reg_rtx (GET_MODE (value)); | |
ce0f3925 | 5613 | convert_move (target, force_operand (XEXP (value, 0), NULL), |
8a28dbcc JH |
5614 | code == ZERO_EXTEND); |
5615 | return target; | |
bbf6f052 RK |
5616 | } |
5617 | ||
ec8e098d | 5618 | if (ARITHMETIC_P (value)) |
bbf6f052 RK |
5619 | { |
5620 | op2 = XEXP (value, 1); | |
f8cfc6aa | 5621 | if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget)) |
bbf6f052 | 5622 | subtarget = 0; |
8a28dbcc | 5623 | if (code == MINUS && GET_CODE (op2) == CONST_INT) |
bbf6f052 | 5624 | { |
8a28dbcc | 5625 | code = PLUS; |
bbf6f052 RK |
5626 | op2 = negate_rtx (GET_MODE (value), op2); |
5627 | } | |
5628 | ||
5629 | /* Check for an addition with OP2 a constant integer and our first | |
8a28dbcc JH |
5630 | operand a PLUS of a virtual register and something else. In that |
5631 | case, we want to emit the sum of the virtual register and the | |
5632 | constant first and then add the other value. This allows virtual | |
5633 | register instantiation to simply modify the constant rather than | |
5634 | creating another one around this addition. */ | |
5635 | if (code == PLUS && GET_CODE (op2) == CONST_INT | |
bbf6f052 | 5636 | && GET_CODE (XEXP (value, 0)) == PLUS |
f8cfc6aa | 5637 | && REG_P (XEXP (XEXP (value, 0), 0)) |
bbf6f052 RK |
5638 | && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER |
5639 | && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER) | |
5640 | { | |
8a28dbcc JH |
5641 | rtx temp = expand_simple_binop (GET_MODE (value), code, |
5642 | XEXP (XEXP (value, 0), 0), op2, | |
5643 | subtarget, 0, OPTAB_LIB_WIDEN); | |
5644 | return expand_simple_binop (GET_MODE (value), code, temp, | |
5645 | force_operand (XEXP (XEXP (value, | |
5646 | 0), 1), 0), | |
5647 | target, 0, OPTAB_LIB_WIDEN); | |
bbf6f052 | 5648 | } |
3a94c984 | 5649 | |
8a28dbcc JH |
5650 | op1 = force_operand (XEXP (value, 0), subtarget); |
5651 | op2 = force_operand (op2, NULL_RTX); | |
5652 | switch (code) | |
5653 | { | |
5654 | case MULT: | |
5655 | return expand_mult (GET_MODE (value), op1, op2, target, 1); | |
5656 | case DIV: | |
5657 | if (!INTEGRAL_MODE_P (GET_MODE (value))) | |
5658 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
5659 | target, 1, OPTAB_LIB_WIDEN); | |
5660 | else | |
5661 | return expand_divmod (0, | |
5662 | FLOAT_MODE_P (GET_MODE (value)) | |
5663 | ? RDIV_EXPR : TRUNC_DIV_EXPR, | |
5664 | GET_MODE (value), op1, op2, target, 0); | |
5665 | break; | |
5666 | case MOD: | |
5667 | return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, | |
5668 | target, 0); | |
5669 | break; | |
5670 | case UDIV: | |
5671 | return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2, | |
5672 | target, 1); | |
5673 | break; | |
5674 | case UMOD: | |
5675 | return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, | |
5676 | target, 1); | |
5677 | break; | |
5678 | case ASHIFTRT: | |
5679 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
5680 | target, 0, OPTAB_LIB_WIDEN); | |
5681 | break; | |
5682 | default: | |
5683 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
5684 | target, 1, OPTAB_LIB_WIDEN); | |
5685 | } | |
5686 | } | |
ec8e098d | 5687 | if (UNARY_P (value)) |
8a28dbcc JH |
5688 | { |
5689 | op1 = force_operand (XEXP (value, 0), NULL_RTX); | |
5690 | return expand_simple_unop (GET_MODE (value), code, op1, target, 0); | |
bbf6f052 | 5691 | } |
34e81b5a RK |
5692 | |
5693 | #ifdef INSN_SCHEDULING | |
5694 | /* On machines that have insn scheduling, we want all memory references to be
5695 | explicit, so we need to deal with such paradoxical SUBREGs. */ | |
3c0cb5de | 5696 | if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value)) |
34e81b5a RK |
5697 | && (GET_MODE_SIZE (GET_MODE (value)) |
5698 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value))))) | |
5699 | value | |
5700 | = simplify_gen_subreg (GET_MODE (value), | |
5701 | force_reg (GET_MODE (SUBREG_REG (value)), | |
5702 | force_operand (SUBREG_REG (value), | |
5703 | NULL_RTX)), | |
5704 | GET_MODE (SUBREG_REG (value)), | |
5705 | SUBREG_BYTE (value)); | |
5706 | #endif | |
5707 | ||
bbf6f052 RK |
5708 | return value; |
5709 | } | |
5710 | \f | |
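/* Editorial sketch, not part of GCC: two of the rewrites above are plain
   integer identities.  A MINUS with a constant becomes a PLUS of the
   negated constant, and adding a constant to (vreg + c1) keeps the
   constant adjacent so virtual register instantiation can later patch a
   single immediate.  Demonstrated on host integers:  */

#include <stdio.h>

int
main (void)
{
  long x = 100, c1 = 8, c2 = 5;

  /* x - c2 rewritten as x + (-c2).  */
  printf ("%ld %ld\n", x - c2, x + (-c2));

  /* (x + c1) + c2 reassociated to x + (c1 + c2).  */
  printf ("%ld %ld\n", (x + c1) + c2, x + (c1 + c2));
  return 0;
}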
bbf6f052 | 5711 | /* Subroutine of expand_expr: return nonzero iff there is no way that |
e5e809f4 JL |
5712 | EXP can reference X, which is being modified. TOP_P is nonzero if this |
5713 | call is going to be used to determine whether we need a temporary | |
ff439b5f CB |
5714 | for EXP, as opposed to a recursive call to this function. |
5715 | ||
5716 | It is always safe for this routine to return zero since it merely | |
5717 | searches for optimization opportunities. */ | |
bbf6f052 | 5718 | |
8f17b5c5 | 5719 | int |
502b8322 | 5720 | safe_from_p (rtx x, tree exp, int top_p) |
bbf6f052 RK |
5721 | { |
5722 | rtx exp_rtl = 0; | |
5723 | int i, nops; | |
5724 | ||
6676e72f RK |
5725 | if (x == 0 |
5726 | /* If EXP has varying size, we MUST use a target since we currently | |
8f6562d0 PB |
5727 | have no way of allocating temporaries of variable size |
5728 | (except for arrays that have TYPE_ARRAY_MAX_SIZE set). | |
5729 | So we assume here that something at a higher level has prevented a | |
f4510f37 | 5730 | clash. This is somewhat bogus, but the best we can do. Only |
e5e809f4 | 5731 | do this when X is BLKmode and when we are at the top level. */ |
d0f062fb | 5732 | || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) |
f4510f37 | 5733 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST |
8f6562d0 PB |
5734 | && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE |
5735 | || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE | |
5736 | || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp))) | |
5737 | != INTEGER_CST) | |
1da68f56 RK |
5738 | && GET_MODE (x) == BLKmode) |
5739 | /* If X is in the outgoing argument area, it is always safe. */ | |
3c0cb5de | 5740 | || (MEM_P (x) |
1da68f56 RK |
5741 | && (XEXP (x, 0) == virtual_outgoing_args_rtx |
5742 | || (GET_CODE (XEXP (x, 0)) == PLUS | |
5743 | && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))) | |
bbf6f052 RK |
5744 | return 1; |
5745 | ||
5746 | /* If this is a subreg of a hard register, declare it unsafe, otherwise, | |
5747 | find the underlying pseudo. */ | |
5748 | if (GET_CODE (x) == SUBREG) | |
5749 | { | |
5750 | x = SUBREG_REG (x); | |
f8cfc6aa | 5751 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
bbf6f052 RK |
5752 | return 0; |
5753 | } | |
5754 | ||
1da68f56 | 5755 | /* Now look at our tree code and possibly recurse. */ |
bbf6f052 RK |
5756 | switch (TREE_CODE_CLASS (TREE_CODE (exp))) |
5757 | { | |
5758 | case 'd': | |
a9772b60 | 5759 | exp_rtl = DECL_RTL_IF_SET (exp); |
bbf6f052 RK |
5760 | break; |
5761 | ||
5762 | case 'c': | |
5763 | return 1; | |
5764 | ||
5765 | case 'x': | |
5766 | if (TREE_CODE (exp) == TREE_LIST) | |
f8d4be57 CE |
5767 | { |
5768 | while (1) | |
5769 | { | |
5770 | if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0)) | |
5771 | return 0; | |
5772 | exp = TREE_CHAIN (exp); | |
5773 | if (!exp) | |
5774 | return 1; | |
5775 | if (TREE_CODE (exp) != TREE_LIST) | |
5776 | return safe_from_p (x, exp, 0); | |
5777 | } | |
5778 | } | |
ff439b5f CB |
5779 | else if (TREE_CODE (exp) == ERROR_MARK) |
5780 | return 1; /* An already-visited SAVE_EXPR? */ | |
bbf6f052 RK |
5781 | else |
5782 | return 0; | |
5783 | ||
350fae66 RK |
5784 | case 's': |
5785 | /* The only case we look at here is the DECL_INITIAL inside a | |
5786 | DECL_EXPR. */ | |
5787 | return (TREE_CODE (exp) != DECL_EXPR | |
5788 | || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL | |
5789 | || !DECL_INITIAL (DECL_EXPR_DECL (exp)) | |
5790 | || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0)); | |
5791 | ||
bbf6f052 RK |
5792 | case '2': |
5793 | case '<': | |
f8d4be57 CE |
5794 | if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0)) |
5795 | return 0; | |
5d3cc252 | 5796 | /* Fall through. */ |
f8d4be57 CE |
5797 | |
5798 | case '1': | |
5799 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); | |
bbf6f052 RK |
5800 | |
5801 | case 'e': | |
5802 | case 'r': | |
5803 | /* Now do code-specific tests. EXP_RTL is set to any rtx we find in | |
5804 | the expression. If it is set, we conflict iff we are that rtx or | |
5805 | both are in memory. Otherwise, we check all operands of the | |
5806 | expression recursively. */ | |
5807 | ||
5808 | switch (TREE_CODE (exp)) | |
5809 | { | |
5810 | case ADDR_EXPR: | |
70072ed9 RK |
5811 | /* If the operand is static or we are static, we can't conflict. |
5812 | Likewise if we don't conflict with the operand at all. */ | |
5813 | if (staticp (TREE_OPERAND (exp, 0)) | |
5814 | || TREE_STATIC (exp) | |
5815 | || safe_from_p (x, TREE_OPERAND (exp, 0), 0)) | |
5816 | return 1; | |
5817 | ||
5818 | /* Otherwise, the only way this can conflict is if we are taking | |
5819 | the address of a DECL whose address is part of X, which is
5820 | very rare. */ | |
5821 | exp = TREE_OPERAND (exp, 0); | |
5822 | if (DECL_P (exp)) | |
5823 | { | |
5824 | if (!DECL_RTL_SET_P (exp) | |
3c0cb5de | 5825 | || !MEM_P (DECL_RTL (exp))) |
70072ed9 RK |
5826 | return 0; |
5827 | else | |
5828 | exp_rtl = XEXP (DECL_RTL (exp), 0); | |
5829 | } | |
5830 | break; | |
bbf6f052 RK |
5831 | |
5832 | case INDIRECT_REF: | |
3c0cb5de | 5833 | if (MEM_P (x) |
1da68f56 RK |
5834 | && alias_sets_conflict_p (MEM_ALIAS_SET (x), |
5835 | get_alias_set (exp))) | |
bbf6f052 RK |
5836 | return 0; |
5837 | break; | |
5838 | ||
5839 | case CALL_EXPR: | |
f9808f81 MM |
5840 | /* Assume that the call will clobber all hard registers and |
5841 | all of memory. */ | |
f8cfc6aa | 5842 | if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
3c0cb5de | 5843 | || MEM_P (x)) |
f9808f81 | 5844 | return 0; |
bbf6f052 RK |
5845 | break; |
5846 | ||
bbf6f052 | 5847 | case WITH_CLEANUP_EXPR: |
5dab5552 | 5848 | case CLEANUP_POINT_EXPR: |
ac45df5d | 5849 | /* Lowered by gimplify.c. */ |
5b0264cb | 5850 | gcc_unreachable (); |
ac45df5d | 5851 | |
bbf6f052 | 5852 | case SAVE_EXPR: |
82c82743 | 5853 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); |
bbf6f052 | 5854 | |
e9a25f70 JL |
5855 | default: |
5856 | break; | |
bbf6f052 RK |
5857 | } |
5858 | ||
5859 | /* If we have an rtx, we do not need to scan our operands. */ | |
5860 | if (exp_rtl) | |
5861 | break; | |
5862 | ||
8f17b5c5 | 5863 | nops = first_rtl_op (TREE_CODE (exp)); |
bbf6f052 RK |
5864 | for (i = 0; i < nops; i++) |
5865 | if (TREE_OPERAND (exp, i) != 0 | |
e5e809f4 | 5866 | && ! safe_from_p (x, TREE_OPERAND (exp, i), 0)) |
bbf6f052 | 5867 | return 0; |
8f17b5c5 MM |
5868 | |
5869 | /* If this is a language-specific tree code, it may require | |
5870 | special handling. */ | |
dbbbbf3b JDA |
5871 | if ((unsigned int) TREE_CODE (exp) |
5872 | >= (unsigned int) LAST_AND_UNUSED_TREE_CODE | |
ae2bcd98 | 5873 | && !lang_hooks.safe_from_p (x, exp)) |
8f17b5c5 | 5874 | return 0; |
bbf6f052 RK |
5875 | } |
5876 | ||
5877 | /* If we have an rtl, find any enclosed object. Then see if we conflict | |
5878 | with it. */ | |
5879 | if (exp_rtl) | |
5880 | { | |
5881 | if (GET_CODE (exp_rtl) == SUBREG) | |
5882 | { | |
5883 | exp_rtl = SUBREG_REG (exp_rtl); | |
f8cfc6aa | 5884 | if (REG_P (exp_rtl) |
bbf6f052 RK |
5885 | && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER) |
5886 | return 0; | |
5887 | } | |
5888 | ||
5889 | /* If the rtl is X, then it is not safe. Otherwise, it is unless both | |
1da68f56 | 5890 | are memory and they conflict. */ |
bbf6f052 | 5891 | return ! (rtx_equal_p (x, exp_rtl) |
3c0cb5de | 5892 | || (MEM_P (x) && MEM_P (exp_rtl) |
21117a17 | 5893 | && true_dependence (exp_rtl, VOIDmode, x, |
1da68f56 | 5894 | rtx_addr_varies_p))); |
bbf6f052 RK |
5895 | } |
5896 | ||
5897 | /* If we reach here, it is safe. */ | |
5898 | return 1; | |
5899 | } | |
5900 | ||
14a774a9 | 5901 | \f |
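/* Editorial sketch, not part of GCC: the hazard safe_from_p guards
   against is storing an intermediate result into a target that a
   still-unexpanded operand will read.  With ordinary variables standing
   in for the target register, the wrong and right expansions of
   a*10 + a (with a == 2) look like this:  */

#include <stdio.h>

int
main (void)
{
  int target = 2, result;

  /* Unsafe: the intermediate a*10 is stored into TARGET, so the later
     read of a through TARGET sees 20 instead of 2.  */
  target = target * 10;
  result = target + target;          /* 40, not the intended 22 */
  printf ("unsafe: %d\n", result);

  /* Safe: when safe_from_p fails, the intermediate goes elsewhere.  */
  target = 2;
  int tmp = target * 10;
  result = tmp + target;             /* 22 */
  printf ("safe: %d\n", result);
  return 0;
}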
0d4903b8 RK |
5902 | /* Return the highest power of two that EXP is known to be a multiple of. |
5903 | This is used in updating alignment of MEMs in array references. */ | |
5904 | ||
9ceca302 | 5905 | static unsigned HOST_WIDE_INT |
502b8322 | 5906 | highest_pow2_factor (tree exp) |
0d4903b8 | 5907 | { |
9ceca302 | 5908 | unsigned HOST_WIDE_INT c0, c1; |
0d4903b8 RK |
5909 | |
5910 | switch (TREE_CODE (exp)) | |
5911 | { | |
5912 | case INTEGER_CST: | |
e0f1be5c JJ |
5913 | /* We can find the lowest bit that's a one. If the low |
5914 | HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT. | |
5915 | We need to handle this case since we can find it in a COND_EXPR, | |
a98ebe2e | 5916 | a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an |
e0f1be5c | 5917 | erroneous program, so return BIGGEST_ALIGNMENT to avoid any |
3a531a8b | 5918 | later ICE. */ |
e0f1be5c | 5919 | if (TREE_CONSTANT_OVERFLOW (exp)) |
1ed1b4fb | 5920 | return BIGGEST_ALIGNMENT; |
e0f1be5c | 5921 | else |
0d4903b8 | 5922 | { |
e0f1be5c JJ |
5923 | /* Note: tree_low_cst is intentionally not used here, |
5924 | we don't care about the upper bits. */ | |
5925 | c0 = TREE_INT_CST_LOW (exp); | |
5926 | c0 &= -c0; | |
5927 | return c0 ? c0 : BIGGEST_ALIGNMENT; | |
0d4903b8 RK |
5928 | } |
5929 | break; | |
5930 | ||
65a07688 | 5931 | case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR: |
0d4903b8 RK |
5932 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); |
5933 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
5934 | return MIN (c0, c1); | |
5935 | ||
5936 | case MULT_EXPR: | |
5937 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
5938 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
5939 | return c0 * c1; | |
5940 | ||
5941 | case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR: | |
5942 | case CEIL_DIV_EXPR: | |
65a07688 RK |
5943 | if (integer_pow2p (TREE_OPERAND (exp, 1)) |
5944 | && host_integerp (TREE_OPERAND (exp, 1), 1)) | |
5945 | { | |
5946 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
5947 | c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1); | |
5948 | return MAX (1, c0 / c1); | |
5949 | } | |
5950 | break; | |
0d4903b8 RK |
5951 | |
5952 | case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR: | |
6fce44af | 5953 | case SAVE_EXPR: |
0d4903b8 RK |
5954 | return highest_pow2_factor (TREE_OPERAND (exp, 0)); |
5955 | ||
65a07688 RK |
5956 | case COMPOUND_EXPR: |
5957 | return highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
5958 | ||
0d4903b8 RK |
5959 | case COND_EXPR: |
5960 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
5961 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 2)); | |
5962 | return MIN (c0, c1); | |
5963 | ||
5964 | default: | |
5965 | break; | |
5966 | } | |
5967 | ||
5968 | return 1; | |
5969 | } | |
818c0c94 | 5970 | |
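/* Editorial sketch, not part of GCC: the INTEGER_CST arm above isolates
   the lowest set bit with c & -c, and the recursive arms combine factors
   with MIN for addition and a product for MULT_EXPR;
   highest_pow2_factor_for_target then simply takes the MAX of that factor
   and the target's own alignment.  On host integers (pow2_factor is a
   hypothetical illustrative name):  */

#include <stdio.h>

static unsigned long
pow2_factor (unsigned long c)
{
  /* Lowest set bit of C; zero would mean "any alignment", which the
     real function caps at BIGGEST_ALIGNMENT.  */
  return c ? (c & -c) : ~0UL;
}

int
main (void)
{
  unsigned long a = 24, b = 40;            /* both multiples of 8 */
  printf ("%lu\n", pow2_factor (a));       /* 8 */
  /* a + b is a multiple of MIN (8, 8) = 8; a * b of 8 * 8 = 64.  */
  printf ("%lu %lu\n", (a + b) % 8, (a * b) % 64);   /* 0 0 */
  return 0;
}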
d50a16c4 EB |
5971 | /* Similar, except that the alignment requirements of TARGET are |
5972 | taken into account. Assume it is at least as aligned as its | |
5973 | type, unless it is a COMPONENT_REF in which case the layout of | |
5974 | the structure gives the alignment. */ | |
818c0c94 | 5975 | |
9ceca302 | 5976 | static unsigned HOST_WIDE_INT |
d50a16c4 | 5977 | highest_pow2_factor_for_target (tree target, tree exp) |
818c0c94 | 5978 | { |
d50a16c4 | 5979 | unsigned HOST_WIDE_INT target_align, factor; |
818c0c94 RH |
5980 | |
5981 | factor = highest_pow2_factor (exp); | |
d50a16c4 | 5982 | if (TREE_CODE (target) == COMPONENT_REF) |
a4e9ffe5 | 5983 | target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1)); |
d50a16c4 | 5984 | else |
a4e9ffe5 | 5985 | target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target)); |
d50a16c4 | 5986 | return MAX (factor, target_align); |
818c0c94 | 5987 | } |
0d4903b8 | 5988 | \f |
6de9cd9a DN |
5989 | /* Expands variable VAR. */ |
5990 | ||
5991 | void | |
5992 | expand_var (tree var) | |
5993 | { | |
5994 | if (DECL_EXTERNAL (var)) | |
5995 | return; | |
5996 | ||
5997 | if (TREE_STATIC (var)) | |
5998 | /* If this is an inlined copy of a static local variable, | |
5999 | look up the original decl. */ | |
6000 | var = DECL_ORIGIN (var); | |
6001 | ||
6002 | if (TREE_STATIC (var) | |
6003 | ? !TREE_ASM_WRITTEN (var) | |
6004 | : !DECL_RTL_SET_P (var)) | |
6005 | { | |
1a186ec5 RH |
6006 | if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var)) |
6007 | /* Should be ignored. */; | |
673fda6b | 6008 | else if (lang_hooks.expand_decl (var)) |
6de9cd9a DN |
6009 | /* OK. */; |
6010 | else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var)) | |
6011 | expand_decl (var); | |
6012 | else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var)) | |
0e6df31e | 6013 | rest_of_decl_compilation (var, 0, 0); |
6de9cd9a | 6014 | else |
5b0264cb NS |
6015 | /* No expansion needed. */ |
6016 | gcc_assert (TREE_CODE (var) == TYPE_DECL | |
6017 | || TREE_CODE (var) == CONST_DECL | |
6018 | || TREE_CODE (var) == FUNCTION_DECL | |
6019 | || TREE_CODE (var) == LABEL_DECL); | |
6de9cd9a DN |
6020 | } |
6021 | } | |
6022 | ||
eb698c58 RS |
6023 | /* Subroutine of expand_expr. Expand the two operands of a binary |
6024 | expression EXP0 and EXP1, placing the results in OP0 and OP1.
6025 | The value may be stored in TARGET if TARGET is nonzero. The | |
6026 | MODIFIER argument is as documented by expand_expr. */ | |
6027 | ||
6028 | static void | |
6029 | expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1, | |
6030 | enum expand_modifier modifier) | |
6031 | { | |
6032 | if (! safe_from_p (target, exp1, 1)) | |
6033 | target = 0; | |
6034 | if (operand_equal_p (exp0, exp1, 0)) | |
6035 | { | |
6036 | *op0 = expand_expr (exp0, target, VOIDmode, modifier); | |
6037 | *op1 = copy_rtx (*op0); | |
6038 | } | |
6039 | else | |
6040 | { | |
c67e6e14 RS |
6041 | /* If we need to preserve evaluation order, copy exp0 into its own |
6042 | temporary variable so that it can't be clobbered by exp1. */ | |
6043 | if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1)) | |
6044 | exp0 = save_expr (exp0); | |
eb698c58 RS |
6045 | *op0 = expand_expr (exp0, target, VOIDmode, modifier); |
6046 | *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier); | |
6047 | } | |
6048 | } | |
6049 | ||
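/* Editorial sketch, not part of GCC: the save_expr above exists because
   expanding EXP1 can change state that EXP0's value depends on, so EXP0's
   value is captured first.  In source terms (bump is a hypothetical
   stand-in for an exp1 with a side effect):  */

#include <stdio.h>

static int i;

static int
bump (void)
{
  return ++i;
}

int
main (void)
{
  i = 0;
  int op0 = i;          /* saved copy of exp0's value, taken first */
  int op1 = bump ();    /* exp1 may now clobber i freely */
  printf ("%d %d\n", op0, op1);   /* 0 1, whatever bump did to i */
  return 0;
}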
f47e9b4e | 6050 | \f |
70bb498a | 6051 | /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP. |
6377bb9a RH |
6052 | The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */ |
6053 | ||
6054 | static rtx | |
70bb498a RH |
6055 | expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode, |
6056 | enum expand_modifier modifier) | |
6377bb9a RH |
6057 | { |
6058 | rtx result, subtarget; | |
6059 | tree inner, offset; | |
6060 | HOST_WIDE_INT bitsize, bitpos; | |
6061 | int volatilep, unsignedp; | |
6062 | enum machine_mode mode1; | |
6063 | ||
6064 | /* If we are taking the address of a constant and are at the top level, | |
6065 | we have to use output_constant_def since we can't call force_const_mem | |
6066 | at top level. */ | |
6067 | /* ??? This should be considered a front-end bug. We should not be | |
6068 | generating ADDR_EXPR of something that isn't an LVALUE. The only | |
6069 | exception here is STRING_CST. */ | |
6070 | if (TREE_CODE (exp) == CONSTRUCTOR | |
6071 | || TREE_CODE_CLASS (TREE_CODE (exp)) == 'c') | |
6072 | return XEXP (output_constant_def (exp, 0), 0); | |
6073 | ||
6074 | /* Everything must be something allowed by is_gimple_addressable. */ | |
6075 | switch (TREE_CODE (exp)) | |
6076 | { | |
6077 | case INDIRECT_REF: | |
6078 | /* This case will happen via recursion for &a->b. */ | |
6079 | return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL); | |
6080 | ||
6081 | case CONST_DECL: | |
6082 | /* Recurse and make the output_constant_def clause above handle this. */ | |
70bb498a | 6083 | return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target, |
6377bb9a RH |
6084 | tmode, modifier); |
6085 | ||
6086 | case REALPART_EXPR: | |
6087 | /* The real part of the complex number is always first, therefore | |
6088 | the address is the same as the address of the parent object. */ | |
6089 | offset = 0; | |
6090 | bitpos = 0; | |
6091 | inner = TREE_OPERAND (exp, 0); | |
6092 | break; | |
6093 | ||
6094 | case IMAGPART_EXPR: | |
6095 | /* The imaginary part of the complex number is always second. | |
2a7e31df | 6096 | The expression is therefore always offset by the size of the |
6377bb9a RH |
6097 | scalar type. */ |
6098 | offset = 0; | |
6099 | bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp))); | |
6100 | inner = TREE_OPERAND (exp, 0); | |
6101 | break; | |
6102 | ||
6103 | default: | |
6104 | /* If the object is a DECL, then expand it for its rtl. Don't bypass | |
6105 | expand_expr, as that can have various side effects; LABEL_DECLs for | |
6106 | example, may not have their DECL_RTL set yet. Assume language | |
6107 | specific tree nodes can be expanded in some interesting way. */ | |
6108 | if (DECL_P (exp) | |
6109 | || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE) | |
6110 | { | |
6111 | result = expand_expr (exp, target, tmode, | |
6112 | modifier == EXPAND_INITIALIZER | |
6113 | ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS); | |
6114 | ||
6115 | /* If the DECL isn't in memory, then the DECL wasn't properly | |
6116 | marked TREE_ADDRESSABLE, which will be either a front-end | |
6117 | or a tree optimizer bug. */ | |
5b0264cb | 6118 | gcc_assert (GET_CODE (result) == MEM); |
6377bb9a RH |
6119 | result = XEXP (result, 0); |
6120 | ||
6121 | /* ??? Is this needed anymore? */ | |
6122 | if (! TREE_USED (exp))
6123 | { | |
6124 | assemble_external (exp); | |
6125 | TREE_USED (exp) = 1; | |
6126 | } | |
6127 | ||
6128 | if (modifier != EXPAND_INITIALIZER | |
6129 | && modifier != EXPAND_CONST_ADDRESS) | |
6130 | result = force_operand (result, target); | |
6131 | return result; | |
6132 | } | |
6133 | ||
6134 | inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, | |
6135 | &mode1, &unsignedp, &volatilep); | |
6136 | break; | |
6137 | } | |
6138 | ||
6139 | /* We must have made progress. */ | |
5b0264cb | 6140 | gcc_assert (inner != exp); |
6377bb9a RH |
6141 | |
6142 | subtarget = offset || bitpos ? NULL_RTX : target; | |
70bb498a | 6143 | result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier); |
6377bb9a RH |
6144 | |
6145 | if (tmode == VOIDmode) | |
6146 | { | |
6147 | tmode = GET_MODE (result); | |
6148 | if (tmode == VOIDmode) | |
6149 | tmode = Pmode; | |
6150 | } | |
6151 | ||
6152 | if (offset) | |
6153 | { | |
6154 | rtx tmp; | |
6155 | ||
6156 | if (modifier != EXPAND_NORMAL) | |
6157 | result = force_operand (result, NULL); | |
6158 | tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL); | |
6159 | ||
6160 | if (modifier == EXPAND_SUM) | |
6161 | result = gen_rtx_PLUS (tmode, result, tmp); | |
6162 | else | |
6163 | { | |
6164 | subtarget = bitpos ? NULL_RTX : target; | |
6165 | result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget, | |
6166 | 1, OPTAB_LIB_WIDEN); | |
6167 | } | |
6168 | } | |
6169 | ||
6170 | if (bitpos) | |
6171 | { | |
6172 | /* Someone beforehand should have rejected taking the address | |
6173 | of such an object. */ | |
5b0264cb | 6174 | gcc_assert (!(bitpos % BITS_PER_UNIT)); |
6377bb9a RH |
6175 | |
6176 | result = plus_constant (result, bitpos / BITS_PER_UNIT); | |
6177 | if (modifier < EXPAND_SUM) | |
6178 | result = force_operand (result, target); | |
6179 | } | |
6180 | ||
6181 | return result; | |
6182 | } | |
6183 | ||
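/* Editorial sketch, not part of GCC: the REALPART_EXPR/IMAGPART_EXPR arms
   above say the real part lives at offset 0 and the imaginary part one
   scalar further on.  C99 guarantees a complex type has the layout of a
   two-element array of the corresponding real type, so standard C exposes
   the same offsets:  */

#include <stdio.h>
#include <complex.h>

int
main (void)
{
  double _Complex z = 1.0 + 2.0 * I;
  double *parts = (double *) &z;   /* valid per the C99 layout rule */
  /* &real == (char *) &z + 0; &imag == (char *) &z + sizeof (double).  */
  printf ("%g %g\n", parts[0], parts[1]);   /* 1 2 */
  return 0;
}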
70bb498a RH |
6184 | /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR. |
6185 | The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */ | |
6186 | ||
6187 | static rtx | |
6188 | expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode, | |
6189 | enum expand_modifier modifier) | |
6190 | { | |
6191 | enum machine_mode rmode; | |
6192 | rtx result; | |
6193 | ||
6194 | result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target, | |
6195 | tmode, modifier); | |
6196 | ||
6197 | /* Despite expand_expr's claim that TMODE may be ignored when not
6198 | strictly convenient, things break if we don't honor it. */
6199 | if (tmode == VOIDmode) | |
6200 | tmode = TYPE_MODE (TREE_TYPE (exp)); | |
6201 | rmode = GET_MODE (result); | |
6202 | if (rmode == VOIDmode) | |
6203 | rmode = tmode; | |
6204 | if (rmode != tmode) | |
6205 | result = convert_memory_address (tmode, result); | |
6206 | ||
6207 | return result; | |
6208 | } | |
6209 | ||
6210 | ||
bbf6f052 RK |
6211 | /* expand_expr: generate code for computing expression EXP. |
6212 | An rtx for the computed value is returned. The value is never null. | |
6213 | In the case of a void EXP, const0_rtx is returned. | |
6214 | ||
6215 | The value may be stored in TARGET if TARGET is nonzero. | |
6216 | TARGET is just a suggestion; callers must assume that | |
6217 | the rtx returned may not be the same as TARGET. | |
6218 | ||
6219 | If TARGET is CONST0_RTX, it means that the value will be ignored. | |
6220 | ||
6221 | If TMODE is not VOIDmode, it suggests generating the | |
6222 | result in mode TMODE. But this is done only when convenient. | |
6223 | Otherwise, TMODE is ignored and the value is generated in its natural mode.
6224 | TMODE is just a suggestion; callers must assume that | |
6225 | the rtx returned may not have mode TMODE. | |
6226 | ||
d6a5ac33 RK |
6227 | Note that TARGET may have neither TMODE nor MODE. In that case, it |
6228 | probably will not be used. | |
bbf6f052 RK |
6229 | |
6230 | If MODIFIER is EXPAND_SUM then when EXP is an addition | |
6231 | we can return an rtx of the form (MULT (REG ...) (CONST_INT ...)) | |
6232 | or a nest of (PLUS ...) and (MINUS ...) where the terms are | |
6233 | products as above, or REG or MEM, or constant. | |
6234 | Ordinarily in such cases we would output mul or add instructions | |
6235 | and then return a pseudo reg containing the sum. | |
6236 | ||
6237 | EXPAND_INITIALIZER is much like EXPAND_SUM except that | |
6238 | it also marks a label as absolutely required (it can't be dead). | |
26fcb35a | 6239 | It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns. |
d6a5ac33 RK |
6240 | This is used for outputting expressions used in initializers. |
6241 | ||
6242 | EXPAND_CONST_ADDRESS says that it is okay to return a MEM | |
6243 | with a constant address even if that address is not normally legitimate. | |
8403445a AM |
6244 | EXPAND_INITIALIZER and EXPAND_SUM also have this effect. |
6245 | ||
6246 | EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for | |
6247 | a call parameter. Such targets require special care as we haven't yet | |
6248 | marked TARGET so that it's safe from being trashed by libcalls. We | |
6249 | don't want to use TARGET for anything but the final result; | |
6250 | Intermediate values must go elsewhere. Additionally, calls to | |
caf93cb0 | 6251 | emit_block_move will be flagged with BLOCK_OP_CALL_PARM. |
0fab64a3 MM |
6252 | |
6253 | If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid | |
6254 | address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the | |
6255 | DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a | |
6256 | COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on | |
6257 | recursively. */ | |
bbf6f052 | 6258 | |
6de9cd9a DN |
6259 | static rtx expand_expr_real_1 (tree, rtx, enum machine_mode, |
6260 | enum expand_modifier, rtx *); | |
6261 | ||
bbf6f052 | 6262 | rtx |
0fab64a3 MM |
6263 | expand_expr_real (tree exp, rtx target, enum machine_mode tmode, |
6264 | enum expand_modifier modifier, rtx *alt_rtl) | |
6de9cd9a DN |
6265 | { |
6266 | int rn = -1; | |
6267 | rtx ret, last = NULL; | |
6268 | ||
6269 | /* Handle ERROR_MARK before anybody tries to access its type. */ | |
6270 | if (TREE_CODE (exp) == ERROR_MARK | |
6271 | || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK) | |
6272 | { | |
6273 | ret = CONST0_RTX (tmode); | |
6274 | return ret ? ret : const0_rtx; | |
6275 | } | |
6276 | ||
6277 | if (flag_non_call_exceptions) | |
6278 | { | |
6279 | rn = lookup_stmt_eh_region (exp); | |
6280 | /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */ | |
6281 | if (rn >= 0) | |
6282 | last = get_last_insn (); | |
6283 | } | |
6284 | ||
6285 | /* If this is an expression of some kind and it has an associated line | |
caf93cb0 | 6286 | number, then emit the line number before expanding the expression. |
6de9cd9a DN |
6287 | |
6288 | We need to save and restore the file and line information so that | |
6289 | errors discovered during expansion are emitted with the right | |
caf93cb0 | 6290 | information. It would be better of the diagnostic routines |
6de9cd9a DN |
6291 | used the file/line information embedded in the tree nodes rather |
6292 | than globals. */ | |
6293 | if (cfun && EXPR_HAS_LOCATION (exp)) | |
6294 | { | |
6295 | location_t saved_location = input_location; | |
6296 | input_location = EXPR_LOCATION (exp); | |
6297 | emit_line_note (input_location); | |
caf93cb0 | 6298 | |
6de9cd9a | 6299 | /* Record where the insns produced belong. */ |
1ea463a2 | 6300 | record_block_change (TREE_BLOCK (exp)); |
6de9cd9a DN |
6301 | |
6302 | ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl); | |
6303 | ||
6304 | input_location = saved_location; | |
6305 | } | |
6306 | else | |
6307 | { | |
6308 | ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl); | |
6309 | } | |
6310 | ||
6311 | /* If using non-call exceptions, mark all insns that may trap. | |
6312 | expand_call() will mark CALL_INSNs before we get to this code, | |
6313 | but it doesn't handle libcalls, and these may trap. */ | |
6314 | if (rn >= 0) | |
caf93cb0 | 6315 | { |
6de9cd9a | 6316 | rtx insn; |
caf93cb0 | 6317 | for (insn = next_real_insn (last); insn; |
6de9cd9a DN |
6318 | insn = next_real_insn (insn)) |
6319 | { | |
6320 | if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) | |
6321 | /* If we want exceptions for non-call insns, any | |
6322 | may_trap_p instruction may throw. */ | |
6323 | && GET_CODE (PATTERN (insn)) != CLOBBER | |
6324 | && GET_CODE (PATTERN (insn)) != USE | |
4b4bf941 | 6325 | && (CALL_P (insn) || may_trap_p (PATTERN (insn)))) |
6de9cd9a DN |
6326 | { |
6327 | REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn), | |
6328 | REG_NOTES (insn)); | |
6329 | } | |
6330 | } | |
6331 | } | |
6332 | ||
6333 | return ret; | |
6334 | } | |
6335 | ||
6336 | static rtx | |
6337 | expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, | |
6338 | enum expand_modifier modifier, rtx *alt_rtl) | |
bbf6f052 | 6339 | { |
b3694847 | 6340 | rtx op0, op1, temp; |
bbf6f052 | 6341 | tree type = TREE_TYPE (exp); |
8df83eae | 6342 | int unsignedp; |
b3694847 SS |
6343 | enum machine_mode mode; |
6344 | enum tree_code code = TREE_CODE (exp); | |
bbf6f052 | 6345 | optab this_optab; |
68557e14 ML |
6346 | rtx subtarget, original_target; |
6347 | int ignore; | |
bbf6f052 | 6348 | tree context; |
bc15d0ef JM |
6349 | bool reduce_bit_field = false; |
6350 | #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \ | |
6351 | ? reduce_to_bit_field_precision ((expr), \ | |
6352 | target, \ | |
6353 | type) \ | |
6354 | : (expr)) | |
bbf6f052 | 6355 | |
68557e14 | 6356 | mode = TYPE_MODE (type); |
8df83eae | 6357 | unsignedp = TYPE_UNSIGNED (type); |
bc15d0ef JM |
6358 | if (lang_hooks.reduce_bit_field_operations |
6359 | && TREE_CODE (type) == INTEGER_TYPE | |
6360 | && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type)) | |
6361 | { | |
6362 | /* An operation in what may be a bit-field type needs the | |
6363 | result to be reduced to the precision of the bit-field type, | |
6364 | which is narrower than that of the type's mode. */ | |
6365 | reduce_bit_field = true; | |
6366 | if (modifier == EXPAND_STACK_PARM) | |
6367 | target = 0; | |
6368 | } | |
8df83eae | 6369 | |
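/* Editorial sketch, not part of GCC: "reducing to the precision of the
   bit-field type" means masking a result down to N bits and, for signed
   types, sign-extending from bit N-1.  A standalone analogue of what
   reduce_to_bit_field_precision must do for the signed case (reduce_signed
   is a hypothetical illustrative name):  */

#include <stdio.h>

static long
reduce_signed (long v, int prec)
{
  long mask = ((long) 1 << prec) - 1;
  v &= mask;                           /* keep the low PREC bits */
  if (v & ((long) 1 << (prec - 1)))    /* field's sign bit set? */
    v |= ~mask;                        /* extend it through the word */
  return v;
}

int
main (void)
{
  /* 7 + 1 in a signed 4-bit field wraps to -8.  */
  printf ("%ld\n", reduce_signed (7 + 1, 4));
  return 0;
}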
68557e14 | 6370 | /* Use subtarget as the target for operand 0 of a binary operation. */ |
296b4ed9 | 6371 | subtarget = get_subtarget (target); |
68557e14 ML |
6372 | original_target = target; |
6373 | ignore = (target == const0_rtx | |
6374 | || ((code == NON_LVALUE_EXPR || code == NOP_EXPR | |
3a18db48 AP |
6375 | || code == CONVERT_EXPR || code == COND_EXPR |
6376 | || code == VIEW_CONVERT_EXPR) | |
68557e14 ML |
6377 | && TREE_CODE (type) == VOID_TYPE)); |
6378 | ||
dd27116b RK |
6379 | /* If we are going to ignore this result, we need only do something |
6380 | if there is a side-effect somewhere in the expression. If there | |
b50d17a1 RK |
6381 | is, short-circuit the most common cases here. Note that we must |
6382 | not call expand_expr with anything but const0_rtx in case this | |
6383 | is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */ | |
bbf6f052 | 6384 | |
dd27116b RK |
6385 | if (ignore) |
6386 | { | |
6387 | if (! TREE_SIDE_EFFECTS (exp)) | |
6388 | return const0_rtx; | |
6389 | ||
14a774a9 RK |
6390 | /* Ensure we reference a volatile object even if value is ignored, but |
6391 | don't do this if all we are doing is taking its address. */ | |
dd27116b RK |
6392 | if (TREE_THIS_VOLATILE (exp) |
6393 | && TREE_CODE (exp) != FUNCTION_DECL | |
14a774a9 RK |
6394 | && mode != VOIDmode && mode != BLKmode |
6395 | && modifier != EXPAND_CONST_ADDRESS) | |
dd27116b | 6396 | { |
37a08a29 | 6397 | temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier); |
3c0cb5de | 6398 | if (MEM_P (temp)) |
dd27116b RK |
6399 | temp = copy_to_reg (temp); |
6400 | return const0_rtx; | |
6401 | } | |
6402 | ||
14a774a9 | 6403 | if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF |
3a18db48 | 6404 | || code == INDIRECT_REF) |
37a08a29 RK |
6405 | return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
6406 | modifier); | |
6407 | ||
14a774a9 | 6408 | else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<' |
b4e3fabb | 6409 | || code == ARRAY_REF || code == ARRAY_RANGE_REF) |
dd27116b | 6410 | { |
37a08a29 RK |
6411 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
6412 | expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); | |
dd27116b RK |
6413 | return const0_rtx; |
6414 | } | |
6415 | else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR) | |
6416 | && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1))) | |
6417 | /* If the second operand has no side effects, just evaluate | |
0f41302f | 6418 | the first. */ |
37a08a29 RK |
6419 | return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
6420 | modifier); | |
14a774a9 RK |
6421 | else if (code == BIT_FIELD_REF) |
6422 | { | |
37a08a29 RK |
6423 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
6424 | expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); | |
6425 | expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier); | |
14a774a9 RK |
6426 | return const0_rtx; |
6427 | } | |
37a08a29 | 6428 | |
90764a87 | 6429 | target = 0; |
dd27116b | 6430 | } |
bbf6f052 | 6431 | |
e44842fe RK |
6432 | /* If we will do cse, generate all results into pseudo registers
6433 | since 1) that allows cse to find more things | |
6434 | and 2) otherwise cse could produce an insn the machine | |
4977bab6 ZW |
6435 | cannot support. An exception is a CONSTRUCTOR into a multi-word |
6436 | MEM: storing directly into the MEM is much more likely to be efficient.
6437 | Another is a CALL_EXPR which must return in memory. */ | |
e44842fe | 6438 | |
bbf6f052 | 6439 | if (! cse_not_expected && mode != BLKmode && target |
f8cfc6aa | 6440 | && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER) |
4977bab6 | 6441 | && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD) |
61f71b34 | 6442 | && ! (code == CALL_EXPR && aggregate_value_p (exp, exp))) |
8403445a | 6443 | target = 0; |
bbf6f052 | 6444 | |
bbf6f052 RK |
6445 | switch (code) |
6446 | { | |
6447 | case LABEL_DECL: | |
b552441b RS |
6448 | { |
6449 | tree function = decl_function_context (exp); | |
c5c76735 | 6450 | |
6de9cd9a DN |
6451 | temp = label_rtx (exp); |
6452 | temp = gen_rtx_LABEL_REF (Pmode, temp); | |
6453 | ||
d0977240 | 6454 | if (function != current_function_decl |
6de9cd9a DN |
6455 | && function != 0) |
6456 | LABEL_REF_NONLOCAL_P (temp) = 1; | |
6457 | ||
6458 | temp = gen_rtx_MEM (FUNCTION_MODE, temp); | |
26fcb35a | 6459 | return temp; |
b552441b | 6460 | } |
bbf6f052 | 6461 | |
8b11a64c ZD |
6462 | case SSA_NAME: |
6463 | return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, | |
6464 | NULL); | |
6465 | ||
bbf6f052 | 6466 | case PARM_DECL: |
bbf6f052 | 6467 | case VAR_DECL: |
2dca20cd RS |
6468 | /* If a static var's type was incomplete when the decl was written, |
6469 | but the type is complete now, lay out the decl now. */ | |
ca06cfe6 RH |
6470 | if (DECL_SIZE (exp) == 0 |
6471 | && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp)) | |
2dca20cd | 6472 | && (TREE_STATIC (exp) || DECL_EXTERNAL (exp))) |
a46666a9 | 6473 | layout_decl (exp, 0); |
921b3427 | 6474 | |
0f41302f | 6475 | /* ... fall through ... */ |
d6a5ac33 | 6476 | |
2dca20cd | 6477 | case FUNCTION_DECL: |
bbf6f052 | 6478 | case RESULT_DECL: |
5b0264cb | 6479 | gcc_assert (DECL_RTL (exp)); |
d6a5ac33 | 6480 | |
e44842fe RK |
6481 | /* Ensure the variable is marked as used even if it doesn't go through
6482 | a parser. If it hasn't been used yet, write out an external
6483 | definition. */ | |
6484 | if (! TREE_USED (exp)) | |
6485 | { | |
6486 | assemble_external (exp); | |
6487 | TREE_USED (exp) = 1; | |
6488 | } | |
6489 | ||
dc6d66b3 RK |
6490 | /* Show we haven't gotten RTL for this yet. */ |
6491 | temp = 0; | |
6492 | ||
ab8907ef RH |
6493 | /* Variables inherited from containing functions should have |
6494 | been lowered by this point. */ | |
bbf6f052 | 6495 | context = decl_function_context (exp); |
5b0264cb NS |
6496 | gcc_assert (!context |
6497 | || context == current_function_decl | |
6498 | || TREE_STATIC (exp) | |
6499 | /* ??? C++ creates functions that are not TREE_STATIC. */ | |
6500 | || TREE_CODE (exp) == FUNCTION_DECL); | |
4af3895e | 6501 | |
bbf6f052 RK |
6502 | /* This is the case of an array whose size is to be determined |
6503 | from its initializer, while the initializer is still being parsed. | |
6504 | See expand_decl. */ | |
d6a5ac33 | 6505 | |
5b0264cb | 6506 | if (MEM_P (DECL_RTL (exp)) |
f8cfc6aa | 6507 | && REG_P (XEXP (DECL_RTL (exp), 0))) |
792760b9 | 6508 | temp = validize_mem (DECL_RTL (exp)); |
d6a5ac33 RK |
6509 | |
6510 | /* If DECL_RTL is memory, we are in the normal case; if either
6511 | the address is not valid, or it is not a register and -fforce-addr
6512 | is specified, get the address into a register. */
6513 | ||
3c0cb5de | 6514 | else if (MEM_P (DECL_RTL (exp)) |
dc6d66b3 RK |
6515 | && modifier != EXPAND_CONST_ADDRESS |
6516 | && modifier != EXPAND_SUM | |
6517 | && modifier != EXPAND_INITIALIZER | |
6518 | && (! memory_address_p (DECL_MODE (exp), | |
6519 | XEXP (DECL_RTL (exp), 0)) | |
6520 | || (flag_force_addr | |
f8cfc6aa | 6521 | && !REG_P (XEXP (DECL_RTL (exp), 0))))) |
0fab64a3 MM |
6522 | { |
6523 | if (alt_rtl) | |
6524 | *alt_rtl = DECL_RTL (exp); | |
6525 | temp = replace_equiv_address (DECL_RTL (exp), | |
6526 | copy_rtx (XEXP (DECL_RTL (exp), 0))); | |
6527 | } | |
1499e0a8 | 6528 | |
dc6d66b3 | 6529 | /* If we got something, return it. But first, set the alignment |
04956a1a | 6530 | if the address is a register. */ |
dc6d66b3 RK |
6531 | if (temp != 0) |
6532 | { | |
3c0cb5de | 6533 | if (MEM_P (temp) && REG_P (XEXP (temp, 0))) |
bdb429a5 | 6534 | mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp)); |
dc6d66b3 RK |
6535 | |
6536 | return temp; | |
6537 | } | |
6538 | ||
1499e0a8 RK |
6539 | /* If the mode of DECL_RTL does not match that of the decl, it |
6540 | must be a promoted value. We return a SUBREG of the wanted mode, | |
6541 | but mark it so that we know that it was already extended. */ | |
6542 | ||
f8cfc6aa | 6543 | if (REG_P (DECL_RTL (exp)) |
7254c5fa | 6544 | && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp)) |
1499e0a8 | 6545 | { |
5b0264cb NS |
6546 | enum machine_mode pmode; |
6547 | ||
1499e0a8 RK |
6548 | /* Get the signedness used for this variable. Ensure we get the |
6549 | same mode we got when the variable was declared. */ | |
5b0264cb NS |
6550 | pmode = promote_mode (type, DECL_MODE (exp), &unsignedp, |
6551 | (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)); | |
6552 | gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode); | |
1499e0a8 | 6553 | |
ddef6bc7 | 6554 | temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp)); |
1499e0a8 | 6555 | SUBREG_PROMOTED_VAR_P (temp) = 1; |
7879b81e | 6556 | SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); |
1499e0a8 RK |
6557 | return temp; |
6558 | } | |
6559 | ||
bbf6f052 RK |
6560 | return DECL_RTL (exp); |
6561 | ||
6562 | case INTEGER_CST: | |
d8a50944 | 6563 | temp = immed_double_const (TREE_INT_CST_LOW (exp), |
05bccae2 | 6564 | TREE_INT_CST_HIGH (exp), mode); |
bbf6f052 | 6565 | |
d8a50944 RH |
6566 | /* ??? If overflow is set, fold will have done an incomplete job, |
6567 | which can result in (plus xx (const_int 0)), which can get | |
6568 | simplified by validate_replace_rtx during virtual register | |
6569 | instantiation, which can result in unrecognizable insns. | |
6570 | Avoid this by forcing all overflows into registers. */ | |
c2e9dc85 RH |
6571 | if (TREE_CONSTANT_OVERFLOW (exp) |
6572 | && modifier != EXPAND_INITIALIZER) | |
d8a50944 RH |
6573 | temp = force_reg (mode, temp); |
6574 | ||
6575 | return temp; | |
6576 | ||
d744e06e | 6577 | case VECTOR_CST: |
3a021db2 PB |
6578 | if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT |
6579 | || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT) | |
6580 | return const_vector_from_tree (exp); | |
caf93cb0 | 6581 | else |
3a021db2 PB |
6582 | return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp), |
6583 | TREE_VECTOR_CST_ELTS (exp)), | |
6584 | ignore ? const0_rtx : target, tmode, modifier); | |
d744e06e | 6585 | |
bbf6f052 | 6586 | case CONST_DECL: |
8403445a | 6587 | return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier); |
bbf6f052 RK |
6588 | |
6589 | case REAL_CST: | |
6590 | /* If optimized, generate immediate CONST_DOUBLE | |
3a94c984 KH |
6591 | which will be turned into memory by reload if necessary. |
6592 | ||
bbf6f052 RK |
6593 | We used to force a register so that loop.c could see it. But |
6594 | this does not allow gen_* patterns to perform optimizations with | |
6595 | the constants. It also produces two insns in cases like "x = 1.0;". | |
6596 | On most machines, floating-point constants are not permitted in | |
6597 | many insns, so we'd end up copying it to a register in any case. | |
6598 | ||
6599 | Now, we do the copying in expand_binop, if appropriate. */ | |
5692c7bc ZW |
6600 | return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp), |
6601 | TYPE_MODE (TREE_TYPE (exp))); | |
bbf6f052 RK |
6602 | |
6603 | case COMPLEX_CST: | |
9ad58e09 RS |
6604 | /* Handle evaluating a complex constant in a CONCAT target. */ |
6605 | if (original_target && GET_CODE (original_target) == CONCAT) | |
6606 | { | |
6607 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); | |
6608 | rtx rtarg, itarg; | |
6609 | ||
6610 | rtarg = XEXP (original_target, 0); | |
6611 | itarg = XEXP (original_target, 1); | |
6612 | ||
6613 | /* Move the real and imaginary parts separately. */ | |
6614 | op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0); | |
6615 | op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0); | |
6616 | ||
6617 | if (op0 != rtarg) | |
6618 | emit_move_insn (rtarg, op0); | |
6619 | if (op1 != itarg) | |
6620 | emit_move_insn (itarg, op1); | |
6621 | ||
6622 | return original_target; | |
6623 | } | |
6624 | ||
71c0e7fc | 6625 | /* ... fall through ... */ |
9ad58e09 | 6626 | |
bbf6f052 | 6627 | case STRING_CST: |
afc6aaab | 6628 | temp = output_constant_def (exp, 1); |
bbf6f052 | 6629 | |
afc6aaab | 6630 | /* temp contains a constant address. |
bbf6f052 RK |
6631 | On RISC machines where a constant address isn't valid, |
6632 | make some insns to get that address into a register. */ | |
afc6aaab | 6633 | if (modifier != EXPAND_CONST_ADDRESS |
bbf6f052 RK |
6634 | && modifier != EXPAND_INITIALIZER |
6635 | && modifier != EXPAND_SUM | |
afc6aaab ZW |
6636 | && (! memory_address_p (mode, XEXP (temp, 0)) |
6637 | || flag_force_addr)) | |
6638 | return replace_equiv_address (temp, | |
6639 | copy_rtx (XEXP (temp, 0))); | |
6640 | return temp; | |
bbf6f052 RK |
6641 | |
6642 | case SAVE_EXPR: | |
82c82743 RH |
6643 | { |
6644 | tree val = TREE_OPERAND (exp, 0); | |
6645 | rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl); | |
e5e809f4 | 6646 | |
7f5e6307 | 6647 | if (!SAVE_EXPR_RESOLVED_P (exp)) |
82c82743 RH |
6648 | { |
6649 | /* We can indeed still hit this case, typically via builtin | |
6650 | expanders calling save_expr immediately before expanding | |
6651 | something. Assume this means that we only have to deal | |
6652 | with non-BLKmode values. */ | |
5b0264cb | 6653 | gcc_assert (GET_MODE (ret) != BLKmode); |
1499e0a8 | 6654 | |
82c82743 RH |
6655 | val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp)); |
6656 | DECL_ARTIFICIAL (val) = 1; | |
7f5e6307 | 6657 | DECL_IGNORED_P (val) = 1; |
82c82743 | 6658 | TREE_OPERAND (exp, 0) = val; |
7f5e6307 | 6659 | SAVE_EXPR_RESOLVED_P (exp) = 1; |
1499e0a8 | 6660 | |
82c82743 RH |
6661 | if (!CONSTANT_P (ret)) |
6662 | ret = copy_to_reg (ret); | |
6663 | SET_DECL_RTL (val, ret); | |
6664 | } | |
1499e0a8 | 6665 | |
82c82743 RH |
6666 | return ret; |
6667 | } | |

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
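
      /* To illustrate the routes above: a static, fully constant
	 aggregate such as {1, 2, 3} is laid out once in static storage
	 by output_constant_def, while anything else is built up field
	 by field with store_constructor, into the caller's target when
	 that is safe or into a fresh temporary otherwise.  */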

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree orig;

	if (modifier != EXPAND_WRITE)
	  {
	    tree t;

	    t = fold_read_from_constant_string (exp);
	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);

	orig = REF_ORIGINAL (exp);
	if (!orig)
	  orig = exp;
	set_mem_attributes (temp, orig, 0);

	return temp;
      }

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (exp, 0);
	tree low_bound = array_ref_low_bound (exp);
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	HOST_WIDE_INT i;

	gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
		 && targetm.binds_local_p (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
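
      /* The folding above means that, for instance, "foo"[2] expands
	 directly to the constant 'o', and a constant index into a
	 read-only local array whose initializer is known expands to the
	 corresponding initializer element, with no memory reference
	 emitted at all.  */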

    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_cst (NULL_TREE,
					   GET_MODE_BITSIZE (imode) - bitsize);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
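
      /* The bitfield correction above is needed because the value kept
	 in the CONSTRUCTOR is not truncated to the field width.  For a
	 3-bit field, say, the unsigned case masks with (1 << 3) - 1 = 7,
	 while the signed case shifts left and then arithmetically right
	 by mode_bits - 3, so that, e.g., the bit pattern 111 reads back
	 as -1.  */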

    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;

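	/* Roughly, get_inner_reference peels the reference down to its
	   base object: for a nest such as s.a[i].b it yields the
	   innermost object in TEM, the field's width and constant bit
	   position in BITSIZE and BITPOS, and any variable part of the
	   position (here the i * element-size term) as the tree
	   OFFSET.  */
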
	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to suffice.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	/* Otherwise, if this object is not in memory and we either have an
	   offset or a BLKmode result, put it there.  This case can't occur in
	   C, but can in Ada if we have unchecked conversion of an expression
	   from a scalar type to an array or record type or for an
	   ARRAY_RANGE_REF whose type is BLKmode.  */
	else if (!MEM_P (op0)
		 && (offset != 0
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
	  {
	    tree nt = build_qualified_type (TREE_TYPE (tem),
					    (TYPE_QUALS (TREE_TYPE (tem))
					     | TYPE_QUAL_CONST));
	    rtx memloc = assign_temp (nt, 1, 1, 1);

	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one-element arrays having the same mode as their element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    gcc_assert (bitpos == 0
			&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
	    return op0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
	      {
		rtx new
		  = assign_stack_temp_for_type
		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return lang_hooks.expand_expr (exp, original_target,
					   tmode, modifier,
					   alt_rtl);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attributes.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM ? 2 : 0);

	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE
						    (TREE_OPERAND (exp, 0)))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			   VOIDmode, 0, type, 0);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      op0 = REDUCE_BIT_FIELD (op0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TYPE_UNSIGNED (inner_type));
	}

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

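      /* So a widening conversion such as (int) c, with c a signed char
	 already expanded into a QImode rtx, typically ends up in
	 convert_to_mode/convert_move, which emit a sign_extend (or a
	 zero_extend for an unsigned source type) to the wider mode.  */
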
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are integral and within
	 a word, we can use gen_lowpart.  If neither is true, make sure the
	 operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	       && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;

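      /* VIEW_CONVERT_EXPR reinterprets bits rather than converting the
	 value.  Between two integral modes that fit in a word, the
	 gen_lowpart path above suffices; for something like viewing a
	 float as a 32-bit int, the value is instead bounced through a
	 stack temporary so the MEM can simply be re-read in the new
	 mode.  */
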
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
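
      /* The plus_constant paths above are what let an address such as
	 &arr[4], with arr an array of 4-byte ints, expand under
	 EXPAND_SUM to a single symbol-plus-16 constant rather than an
	 explicit add insn, leaving the caller to decide how the address
	 is materialized.  */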

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;

    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			   gen_int_mode (tree_low_cst (exp1, 0),
					 TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE
				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE
				      (TREE_OPERAND
				       (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TYPE_UNSIGNED (TREE_TYPE
				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_UNSIGNED (TREE_TYPE
				     (TREE_OPERAND
				      (TREE_OPERAND (exp, 0), 0)))))))
	{
	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1),
				     NULL_RTX, &op0, &op1, 0);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, 0);
		  goto binop3;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
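
      /* The widening case above matches trees such as
	 (int) (short) a * (int) (short) b: when the target provides a
	 16x16->32 multiply (via smul_widen_optab or umul_widen_optab),
	 it is used directly instead of extending both operands and
	 doing a full-width multiply.  */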

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coefficients divisible
	 by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
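
      /* expand_divmod does the real work here; in particular, for a
	 constant divisor it can typically replace the division with a
	 multiply by a precomputed constant plus shifts, so that no
	 divide instruction is issued at all on targets where that is a
	 win.  */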

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later CSE may manage to eliminate the
	 reciprocal, saving an expensive divide.  If not, combine will
	 rebuild the original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				    build2 (RDIV_EXPR, type,
					    build_real (type, dconst1),
					    TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);

      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
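
      /* expand_abs can usually avoid a branch: besides a native abs
	 pattern, it knows the shift trick in which, for a 32-bit value,
	 t = x >> 31 (arithmetic) and abs = (x ^ t) - t.  Only when no
	 branch-free form is available does it fall back to comparing
	 against zero and negating.  */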

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (MEM_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
					  NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
					  NULL_RTX, op0);
	}
      else
	{
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX, op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
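
      /* When no min/max instruction matches, the fallback above is a
	 small diamond: copy op0 into the target, compare it with op1,
	 and conditionally skip over a move of op1, so for MAX_EXPR the
	 move is skipped exactly when target >= op1 already holds.  */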

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;
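
      /* The fall-throughs above rely on the non-short-circuiting truth
	 operations coinciding with the bitwise ones on operands known
	 to be 0 or 1, so the tree code is simply rewritten to its
	 bitwise counterpart and shares the common binop path.  */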

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */

      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
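
      /* On a machine with a store-flag instruction, do_store_flag above
	 yields the result directly; the sequence just emitted is the
	 generic fallback, computing e.g. x != 0 roughly as
	 "target = 0; if (!(x != 0)) goto over; target = 1; over:".  */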
7948 | ||
bbf6f052 | 7949 | case TRUTH_NOT_EXPR: |
8403445a AM |
7950 | if (modifier == EXPAND_STACK_PARM) |
7951 | target = 0; | |
bbf6f052 RK |
7952 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0); |
7953 | /* The parser is careful to generate TRUTH_NOT_EXPR | |
7954 | only with operands that are always zero or one. */ | |
906c4e36 | 7955 | temp = expand_binop (mode, xor_optab, op0, const1_rtx, |
bbf6f052 | 7956 | target, 1, OPTAB_LIB_WIDEN); |
5b0264cb | 7957 | gcc_assert (temp); |
bbf6f052 RK |
7958 | return temp; |
7959 | ||
6de9cd9a DN |
7960 | case STATEMENT_LIST: |
7961 | { | |
7962 | tree_stmt_iterator iter; | |
7963 | ||
5b0264cb | 7964 | gcc_assert (ignore); |
6de9cd9a DN |
7965 | |
7966 | for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter)) | |
7967 | expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier); | |
7968 | } | |
7969 | return const0_rtx; | |
7970 | ||
bbf6f052 | 7971 | case COND_EXPR: |
6de9cd9a DN |
7972 | /* If it's void, we don't need to worry about computing a value. */ |
7973 | if (VOID_TYPE_P (TREE_TYPE (exp))) | |
7974 | { | |
7975 | tree pred = TREE_OPERAND (exp, 0); | |
e5bacf32 PB |
7976 | tree then_ = TREE_OPERAND (exp, 1); |
7977 | tree else_ = TREE_OPERAND (exp, 2); | |
f676971a | 7978 | |
5b0264cb NS |
7979 | gcc_assert (TREE_CODE (then_) == GOTO_EXPR |
7980 | && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL | |
7981 | && TREE_CODE (else_) == GOTO_EXPR | |
7982 | && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL); | |
f676971a | 7983 | |
e5bacf32 PB |
7984 | jumpif (pred, label_rtx (GOTO_DESTINATION (then_))); |
7985 | return expand_expr (else_, const0_rtx, VOIDmode, 0); | |
7986 | } | |
f676971a | 7987 | |
e5bacf32 PB |
7988 | /* Note that COND_EXPRs whose type is a structure or union |
7989 | are required to be constructed to contain assignments to | |
7990 | a temporary variable, so that we can evaluate them here | |
7991 | for side effect only. If type is void, we must do likewise. */ | |
7992 | ||
5b0264cb NS |
7993 | gcc_assert (!TREE_ADDRESSABLE (type) |
7994 | && !ignore | |
7995 | && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node | |
7996 | && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node); | |
f676971a | 7997 | |
e5bacf32 PB |
7998 | /* If we are not to produce a result, we have no target. Otherwise, |
7999 | if a target was specified use it; it will not be used as an | |
8000 | intermediate target unless it is safe. If no target, use a | |
8001 | temporary. */ | |
f676971a | 8002 | |
e5bacf32 PB |
8003 | if (modifier != EXPAND_STACK_PARM |
8004 | && original_target | |
8005 | && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1) | |
8006 | && GET_MODE (original_target) == mode | |
7c00d1fe | 8007 | #ifdef HAVE_conditional_move |
e5bacf32 PB |
8008 | && (! can_conditionally_move_p (mode) |
8009 | || REG_P (original_target)) | |
7c00d1fe | 8010 | #endif |
e5bacf32 PB |
8011 | && !MEM_P (original_target)) |
8012 | temp = original_target; | |
8013 | else | |
8014 | temp = assign_temp (type, 0, 0, 1); | |
f676971a | 8015 | |
e5bacf32 PB |
8016 | do_pending_stack_adjust (); |
8017 | NO_DEFER_POP; | |
8018 | op0 = gen_label_rtx (); | |
8019 | op1 = gen_label_rtx (); | |
8020 | jumpifnot (TREE_OPERAND (exp, 0), op0); | |
8021 | store_expr (TREE_OPERAND (exp, 1), temp, | |
8022 | modifier == EXPAND_STACK_PARM ? 2 : 0); | |
f676971a | 8023 | |
e5bacf32 PB |
8024 | emit_jump_insn (gen_jump (op1)); |
8025 | emit_barrier (); | |
8026 | emit_label (op0); | |
8027 | store_expr (TREE_OPERAND (exp, 2), temp, | |
8028 | modifier == EXPAND_STACK_PARM ? 2 : 0); | |
f676971a | 8029 | |
e5bacf32 PB |
8030 | emit_label (op1); |
8031 | OK_DEFER_POP; | |
8032 | return temp; | |
f676971a | 8033 | |
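/* An illustrative sketch of the diamond emitted above (label names
   are ours, not from the source):

       if (! cond)
         goto op0;
       temp = then_value;
       goto op1;
     op0:
       temp = else_value;
     op1: ;
*/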
bbf6f052 RK |
8034 | case MODIFY_EXPR: |
8035 | { | |
8036 | /* If lhs is complex, expand calls in rhs before computing it. | |
6d0a3f67 NS |
8037 | That's so we don't compute a pointer and save it over a |
8038 | call. If lhs is simple, compute it first so we can give it | |
8039 | as a target if the rhs is just a call. This avoids an | |
8040 | extra temp and copy, and prevents a partial subsumption | |
8041 | that would make bad code. Actually we could treat | |
8042 | component_ref's of vars like vars. */ | |
bbf6f052 RK |
8043 | |
8044 | tree lhs = TREE_OPERAND (exp, 0); | |
8045 | tree rhs = TREE_OPERAND (exp, 1); | |
bbf6f052 RK |
8046 | |
8047 | temp = 0; | |
8048 | ||
bbf6f052 RK |
8049 | /* Check for |= or &= of a bitfield of size one into another bitfield |
8050 | of size 1. In this case, (unless we need the result of the | |
8051 | assignment) we can do this more efficiently with a | |
8052 | test followed by an assignment, if necessary. | |
8053 | ||
8054 | ??? At this point, we can't get a BIT_FIELD_REF here. But if | |
8055 | things change so we do, this code should be enhanced to | |
8056 | support it. */ | |
8057 | if (ignore | |
8058 | && TREE_CODE (lhs) == COMPONENT_REF | |
8059 | && (TREE_CODE (rhs) == BIT_IOR_EXPR | |
8060 | || TREE_CODE (rhs) == BIT_AND_EXPR) | |
8061 | && TREE_OPERAND (rhs, 0) == lhs | |
8062 | && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF | |
05bccae2 RK |
8063 | && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) |
8064 | && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) | |
bbf6f052 RK |
8065 | { |
8066 | rtx label = gen_label_rtx (); | |
8067 | ||
8068 | do_jump (TREE_OPERAND (rhs, 1), | |
8069 | TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0, | |
8070 | TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0); | |
8071 | expand_assignment (lhs, convert (TREE_TYPE (rhs), | |
8072 | (TREE_CODE (rhs) == BIT_IOR_EXPR | |
8073 | ? integer_one_node | |
8074 | : integer_zero_node)), | |
b90f141a | 8075 | 0); |
e7c33f54 | 8076 | do_pending_stack_adjust (); |
bbf6f052 RK |
8077 | emit_label (label); |
8078 | return const0_rtx; | |
8079 | } | |
8080 | ||
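/* For example, with one-bit bitfields and an unused result, a statement
   such as "s.a |= s.b;" is emitted as the test-plus-conditional-store
   "if (s.b) s.a = 1;", and "s.a &= s.b;" as "if (! s.b) s.a = 0;".  */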
b90f141a | 8081 | temp = expand_assignment (lhs, rhs, ! ignore); |
0fb7aeda | 8082 | |
bbf6f052 RK |
8083 | return temp; |
8084 | } | |
8085 | ||
6e7f84a7 APB |
8086 | case RETURN_EXPR: |
8087 | if (!TREE_OPERAND (exp, 0)) | |
8088 | expand_null_return (); | |
8089 | else | |
8090 | expand_return (TREE_OPERAND (exp, 0)); | |
8091 | return const0_rtx; | |
8092 | ||
bbf6f052 | 8093 | case ADDR_EXPR: |
70bb498a | 8094 | return expand_expr_addr_expr (exp, target, tmode, modifier); |
bbf6f052 | 8095 | |
7308a047 RS |
8096 | /* COMPLEX type for Extended Pascal & Fortran */ |
8097 | case COMPLEX_EXPR: | |
8098 | { | |
8099 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); | |
6551fa4d | 8100 | rtx insns; |
7308a047 RS |
8101 | |
8102 | /* Get the rtx code of the operands. */ | |
8103 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); | |
8104 | op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0); | |
8105 | ||
8106 | if (! target) | |
8107 | target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); | |
8108 | ||
6551fa4d | 8109 | start_sequence (); |
7308a047 RS |
8110 | |
8111 | /* Move the real (op0) and imaginary (op1) parts to their location. */ | |
2d7050fd RS |
8112 | emit_move_insn (gen_realpart (mode, target), op0); |
8113 | emit_move_insn (gen_imagpart (mode, target), op1); | |
7308a047 | 8114 | |
6551fa4d JW |
8115 | insns = get_insns (); |
8116 | end_sequence (); | |
8117 | ||
7308a047 | 8118 | /* Complex construction should appear as a single unit. */ |
6551fa4d JW |
8119 | /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS, |
8120 | each with a separate pseudo as destination. | |
8121 | It's not correct for flow to treat them as a unit. */ | |
6d6e61ce | 8122 | if (GET_CODE (target) != CONCAT) |
6551fa4d JW |
8123 | emit_no_conflict_block (insns, target, op0, op1, NULL_RTX); |
8124 | else | |
2f937369 | 8125 | emit_insn (insns); |
7308a047 RS |
8126 | |
8127 | return target; | |
8128 | } | |
8129 | ||
8130 | case REALPART_EXPR: | |
2d7050fd RS |
8131 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); |
8132 | return gen_realpart (mode, op0); | |
3a94c984 | 8133 | |
7308a047 | 8134 | case IMAGPART_EXPR: |
2d7050fd RS |
8135 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); |
8136 | return gen_imagpart (mode, op0); | |
7308a047 | 8137 | |
6de9cd9a DN |
8138 | case RESX_EXPR: |
8139 | expand_resx_expr (exp); | |
8140 | return const0_rtx; | |
8141 | ||
e976b8b2 | 8142 | case TRY_CATCH_EXPR: |
6de9cd9a | 8143 | case CATCH_EXPR: |
6de9cd9a | 8144 | case EH_FILTER_EXPR: |
b335b813 | 8145 | case TRY_FINALLY_EXPR: |
ac45df5d | 8146 | /* Lowered by tree-eh.c. */ |
5b0264cb | 8147 | gcc_unreachable (); |
b335b813 | 8148 | |
ac45df5d RH |
8149 | case WITH_CLEANUP_EXPR: |
8150 | case CLEANUP_POINT_EXPR: | |
8151 | case TARGET_EXPR: | |
165b54c3 | 8152 | case CASE_LABEL_EXPR: |
77c9db77 | 8153 | case VA_ARG_EXPR: |
caf93cb0 | 8154 | case BIND_EXPR: |
e5bacf32 PB |
8155 | case INIT_EXPR: |
8156 | case CONJ_EXPR: | |
8157 | case COMPOUND_EXPR: | |
8158 | case PREINCREMENT_EXPR: | |
8159 | case PREDECREMENT_EXPR: | |
8160 | case POSTINCREMENT_EXPR: | |
8161 | case POSTDECREMENT_EXPR: | |
8162 | case LOOP_EXPR: | |
8163 | case EXIT_EXPR: | |
8164 | case LABELED_BLOCK_EXPR: | |
8165 | case EXIT_BLOCK_EXPR: | |
8166 | case TRUTH_ANDIF_EXPR: | |
8167 | case TRUTH_ORIF_EXPR: | |
ac45df5d | 8168 | /* Lowered by gimplify.c. */ |
5b0264cb | 8169 | gcc_unreachable (); |
b335b813 | 8170 | |
52a11cbf | 8171 | case EXC_PTR_EXPR: |
86c99549 | 8172 | return get_exception_pointer (cfun); |
52a11cbf | 8173 | |
6de9cd9a DN |
8174 | case FILTER_EXPR: |
8175 | return get_exception_filter (cfun); | |
8176 | ||
67231816 RH |
8177 | case FDESC_EXPR: |
8178 | /* Function descriptors are not valid except as | |
8179 | initialization constants, and should not be expanded. */ | |
5b0264cb | 8180 | gcc_unreachable (); |
67231816 | 8181 | |
6de9cd9a | 8182 | case SWITCH_EXPR: |
7efcb746 | 8183 | expand_case (exp); |
6de9cd9a DN |
8184 | return const0_rtx; |
8185 | ||
8186 | case LABEL_EXPR: | |
8187 | expand_label (TREE_OPERAND (exp, 0)); | |
8188 | return const0_rtx; | |
8189 | ||
6de9cd9a DN |
8190 | case ASM_EXPR: |
8191 | expand_asm_expr (exp); | |
8192 | return const0_rtx; | |
8193 | ||
d25cee4d RH |
8194 | case WITH_SIZE_EXPR: |
8195 | /* WITH_SIZE_EXPR expands to its first argument. The caller should | |
8196 | have pulled out the size to use in whatever context it needed. */ | |
8197 | return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode, | |
8198 | modifier, alt_rtl); | |
8199 | ||
bbf6f052 | 8200 | default: |
673fda6b SB |
8201 | return lang_hooks.expand_expr (exp, original_target, tmode, |
8202 | modifier, alt_rtl); | |
bbf6f052 RK |
8203 | } |
8204 | ||
c4d70ce3 | 8205 | /* Here to do an ordinary binary operator. */ |
bbf6f052 | 8206 | binop: |
eb698c58 RS |
8207 | expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
8208 | subtarget, &op0, &op1, 0); | |
bbf6f052 | 8209 | binop2: |
c4d70ce3 PB |
8210 | this_optab = optab_for_tree_code (code, type); |
8211 | binop3: | |
8403445a AM |
8212 | if (modifier == EXPAND_STACK_PARM) |
8213 | target = 0; | |
bbf6f052 RK |
8214 | temp = expand_binop (mode, this_optab, op0, op1, target, |
8215 | unsignedp, OPTAB_LIB_WIDEN); | |
5b0264cb | 8216 | gcc_assert (temp); |
bc15d0ef JM |
8217 | return REDUCE_BIT_FIELD (temp); |
8218 | } | |
8219 | #undef REDUCE_BIT_FIELD | |
8220 | \f | |
8221 | /* Subroutine of above: reduce EXP to the precision of TYPE (in the | |
8222 | signedness of TYPE), possibly returning the result in TARGET. */ | |
8223 | static rtx | |
8224 | reduce_to_bit_field_precision (rtx exp, rtx target, tree type) | |
8225 | { | |
8226 | HOST_WIDE_INT prec = TYPE_PRECISION (type); | |
8227 | if (target && GET_MODE (target) != GET_MODE (exp)) | |
8228 | target = 0; | |
8229 | if (TYPE_UNSIGNED (type)) | |
8230 | { | |
8231 | rtx mask; | |
8232 | if (prec < HOST_BITS_PER_WIDE_INT) | |
8233 | mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0, | |
8234 | GET_MODE (exp)); | |
8235 | else | |
8236 | mask = immed_double_const ((unsigned HOST_WIDE_INT) -1, | |
8237 | ((unsigned HOST_WIDE_INT) 1 | |
8238 | << (prec - HOST_BITS_PER_WIDE_INT)) - 1, | |
8239 | GET_MODE (exp)); | |
8240 | return expand_and (GET_MODE (exp), exp, mask, target); | |
8241 | } | |
8242 | else | |
8243 | { | |
4a90aeeb | 8244 | tree count = build_int_cst (NULL_TREE, |
7d60be94 | 8245 | GET_MODE_BITSIZE (GET_MODE (exp)) - prec); |
bc15d0ef JM |
8246 | exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0); |
8247 | return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0); | |
8248 | } | |
bbf6f052 | 8249 | } |
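/* An illustrative sketch of the two cases above, for a value X kept in
   a word of BITS bits (the names are ours):

       unsigned: X & ((1 << PREC) - 1)
       signed:   (X << (BITS - PREC)) >> (BITS - PREC)

   where the signed case relies on an arithmetic right shift to
   propagate the sign bit of the bit-field.  */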
b93a436e | 8250 | \f |
1ce7f3c2 RK |
8251 | /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that |
8252 | when applied to the address of EXP produces an address known to be | |
8253 | aligned more than BIGGEST_ALIGNMENT. */ | |
8254 | ||
8255 | static int | |
502b8322 | 8256 | is_aligning_offset (tree offset, tree exp) |
1ce7f3c2 | 8257 | { |
6fce44af | 8258 | /* Strip off any conversions. */ |
1ce7f3c2 RK |
8259 | while (TREE_CODE (offset) == NON_LVALUE_EXPR |
8260 | || TREE_CODE (offset) == NOP_EXPR | |
6fce44af | 8261 | || TREE_CODE (offset) == CONVERT_EXPR) |
1ce7f3c2 RK |
8262 | offset = TREE_OPERAND (offset, 0); |
8263 | ||
8264 | /* We must now have a BIT_AND_EXPR with a constant that is one less than | |
8265 | a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */ | |
8266 | if (TREE_CODE (offset) != BIT_AND_EXPR | |
8267 | || !host_integerp (TREE_OPERAND (offset, 1), 1) | |
caf93cb0 | 8268 | || compare_tree_int (TREE_OPERAND (offset, 1), |
c0cfc691 | 8269 | BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0 |
1ce7f3c2 RK |
8270 | || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0) | |
8271 | return 0; | |
8272 | ||
8273 | /* Look at the first operand of BIT_AND_EXPR and strip any conversion. | |
8274 | It must be NEGATE_EXPR. Then strip any more conversions. */ | |
8275 | offset = TREE_OPERAND (offset, 0); | |
8276 | while (TREE_CODE (offset) == NON_LVALUE_EXPR | |
8277 | || TREE_CODE (offset) == NOP_EXPR | |
8278 | || TREE_CODE (offset) == CONVERT_EXPR) | |
8279 | offset = TREE_OPERAND (offset, 0); | |
8280 | ||
8281 | if (TREE_CODE (offset) != NEGATE_EXPR) | |
8282 | return 0; | |
8283 | ||
8284 | offset = TREE_OPERAND (offset, 0); | |
8285 | while (TREE_CODE (offset) == NON_LVALUE_EXPR | |
8286 | || TREE_CODE (offset) == NOP_EXPR | |
8287 | || TREE_CODE (offset) == CONVERT_EXPR) | |
8288 | offset = TREE_OPERAND (offset, 0); | |
8289 | ||
6fce44af RK |
8290 | /* This must now be the address of EXP. */ |
8291 | return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp; | |
1ce7f3c2 RK |
8292 | } |
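/* For example, this recognizes offsets of the form

       (- (intptr_t) &exp) & (ALIGN - 1)

   where ALIGN - 1 is a constant mask larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT (the intptr_t cast and the name
   ALIGN are illustrative only); adding such an offset to the address
   of EXP rounds it up to an ALIGN-byte boundary.  */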
8293 | \f | |
e0a2f705 | 8294 | /* Return the tree node if ARG corresponds to a string constant, or zero |
cc2902df | 8295 | if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset |
fed3cef0 RK |
8296 | in bytes within the string that ARG is accessing. The type of the |
8297 | offset will be `sizetype'. */ | |
b93a436e | 8298 | |
28f4ec01 | 8299 | tree |
502b8322 | 8300 | string_constant (tree arg, tree *ptr_offset) |
b93a436e | 8301 | { |
a45f71f5 | 8302 | tree array, offset; |
b93a436e JL |
8303 | STRIP_NOPS (arg); |
8304 | ||
a45f71f5 | 8305 | if (TREE_CODE (arg) == ADDR_EXPR) |
b93a436e | 8306 | { |
a45f71f5 JJ |
8307 | if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) |
8308 | { | |
8309 | *ptr_offset = size_zero_node; | |
8310 | return TREE_OPERAND (arg, 0); | |
8311 | } | |
8312 | else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL) | |
8313 | { | |
8314 | array = TREE_OPERAND (arg, 0); | |
8315 | offset = size_zero_node; | |
8316 | } | |
8317 | else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF) | |
8318 | { | |
8319 | array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); | |
8320 | offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1); | |
8321 | if (TREE_CODE (array) != STRING_CST | |
8322 | && TREE_CODE (array) != VAR_DECL) | |
8323 | return 0; | |
8324 | } | |
8325 | else | |
8326 | return 0; | |
6de9cd9a | 8327 | } |
b93a436e JL |
8328 | else if (TREE_CODE (arg) == PLUS_EXPR) |
8329 | { | |
8330 | tree arg0 = TREE_OPERAND (arg, 0); | |
8331 | tree arg1 = TREE_OPERAND (arg, 1); | |
8332 | ||
8333 | STRIP_NOPS (arg0); | |
8334 | STRIP_NOPS (arg1); | |
8335 | ||
8336 | if (TREE_CODE (arg0) == ADDR_EXPR | |
a45f71f5 JJ |
8337 | && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST |
8338 | || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL)) | |
bbf6f052 | 8339 | { |
a45f71f5 JJ |
8340 | array = TREE_OPERAND (arg0, 0); |
8341 | offset = arg1; | |
bbf6f052 | 8342 | } |
b93a436e | 8343 | else if (TREE_CODE (arg1) == ADDR_EXPR |
a45f71f5 JJ |
8344 | && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST |
8345 | || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL)) | |
bbf6f052 | 8346 | { |
a45f71f5 JJ |
8347 | array = TREE_OPERAND (arg1, 0); |
8348 | offset = arg0; | |
bbf6f052 | 8349 | } |
a45f71f5 JJ |
8350 | else |
8351 | return 0; | |
8352 | } | |
8353 | else | |
8354 | return 0; | |
8355 | ||
8356 | if (TREE_CODE (array) == STRING_CST) | |
8357 | { | |
8358 | *ptr_offset = convert (sizetype, offset); | |
8359 | return array; | |
8360 | } | |
8361 | else if (TREE_CODE (array) == VAR_DECL) | |
8362 | { | |
8363 | int length; | |
8364 | ||
8365 | /* Variables initialized to string literals can be handled too. */ | |
8366 | if (DECL_INITIAL (array) == NULL_TREE | |
8367 | || TREE_CODE (DECL_INITIAL (array)) != STRING_CST) | |
8368 | return 0; | |
8369 | ||
8370 | /* Only handle arrays that are read-only, non-volatile and bind locally. */ | |
8371 | if (! TREE_READONLY (array) | |
8372 | || TREE_SIDE_EFFECTS (array) | |
8373 | || ! targetm.binds_local_p (array)) | |
8374 | return 0; | |
8375 | ||
8376 | /* Avoid const char foo[4] = "abcde"; */ | |
8377 | if (DECL_SIZE_UNIT (array) == NULL_TREE | |
8378 | || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST | |
8379 | || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0 | |
8380 | || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0) | |
8381 | return 0; | |
8382 | ||
8383 | /* If the variable is bigger than the string literal, OFFSET must be constant | |
8384 | and within the bounds of the string literal. */ | |
8385 | offset = convert (sizetype, offset); | |
8386 | if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0 | |
8387 | && (! host_integerp (offset, 1) | |
8388 | || compare_tree_int (offset, length) >= 0)) | |
8389 | return 0; | |
8390 | ||
8391 | *ptr_offset = offset; | |
8392 | return DECL_INITIAL (array); | |
b93a436e | 8393 | } |
ca695ac9 | 8394 | |
b93a436e JL |
8395 | return 0; |
8396 | } | |
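/* For example, given the declaration static const char buf[] = "hello",
   both &"hello"[2] and buf + 2 yield the STRING_CST "hello" with
   *PTR_OFFSET set to 2; buf qualifies because it is read-only, binds
   locally and is initialized from a string literal.  */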
ca695ac9 | 8397 | \f |
b93a436e JL |
8398 | /* Generate code to calculate EXP using a store-flag instruction |
8399 | and return an rtx for the result. EXP is either a comparison | |
8400 | or a TRUTH_NOT_EXPR whose operand is a comparison. | |
ca695ac9 | 8401 | |
b93a436e | 8402 | If TARGET is nonzero, store the result there if convenient. |
ca695ac9 | 8403 | |
cc2902df | 8404 | If ONLY_CHEAP is nonzero, only do this if it is likely to be very |
b93a436e | 8405 | cheap. |
ca695ac9 | 8406 | |
b93a436e JL |
8407 | Return zero if there is no suitable set-flag instruction |
8408 | available on this machine. | |
ca695ac9 | 8409 | |
b93a436e JL |
8410 | Once expand_expr has been called on the arguments of the comparison, |
8411 | we are committed to doing the store flag, since it is not safe to | |
8412 | re-evaluate the expression. We emit the store-flag insn by calling | |
8413 | emit_store_flag, but only expand the arguments if we have a reason | |
8414 | to believe that emit_store_flag will be successful. If we think that | |
8415 | it will, but it isn't, we have to simulate the store-flag with a | |
8416 | set/jump/set sequence. */ | |
ca695ac9 | 8417 | |
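/* The simulated sequence is, roughly (the label name is ours):

       target = 1;
       if (op0 <cond> op1)
         goto label;
       target = 0;
     label: ;

   with the two constants swapped when the result must be inverted.  */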
b93a436e | 8418 | static rtx |
502b8322 | 8419 | do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) |
b93a436e JL |
8420 | { |
8421 | enum rtx_code code; | |
8422 | tree arg0, arg1, type; | |
8423 | tree tem; | |
8424 | enum machine_mode operand_mode; | |
8425 | int invert = 0; | |
8426 | int unsignedp; | |
8427 | rtx op0, op1; | |
8428 | enum insn_code icode; | |
8429 | rtx subtarget = target; | |
381127e8 | 8430 | rtx result, label; |
ca695ac9 | 8431 | |
b93a436e JL |
8432 | /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the |
8433 | result at the end. We can't simply invert the test since it would | |
8434 | have already been inverted if it were valid. This case occurs for | |
8435 | some floating-point comparisons. */ | |
ca695ac9 | 8436 | |
b93a436e JL |
8437 | if (TREE_CODE (exp) == TRUTH_NOT_EXPR) |
8438 | invert = 1, exp = TREE_OPERAND (exp, 0); | |
ca695ac9 | 8439 | |
b93a436e JL |
8440 | arg0 = TREE_OPERAND (exp, 0); |
8441 | arg1 = TREE_OPERAND (exp, 1); | |
5129d2ce AH |
8442 | |
8443 | /* Don't crash if the comparison was erroneous. */ | |
8444 | if (arg0 == error_mark_node || arg1 == error_mark_node) | |
8445 | return const0_rtx; | |
8446 | ||
b93a436e JL |
8447 | type = TREE_TYPE (arg0); |
8448 | operand_mode = TYPE_MODE (type); | |
8df83eae | 8449 | unsignedp = TYPE_UNSIGNED (type); |
ca695ac9 | 8450 | |
b93a436e JL |
8451 | /* We won't bother with BLKmode store-flag operations because it would mean |
8452 | passing a lot of information to emit_store_flag. */ | |
8453 | if (operand_mode == BLKmode) | |
8454 | return 0; | |
ca695ac9 | 8455 | |
b93a436e JL |
8456 | /* We won't bother with store-flag operations involving function pointers |
8457 | when function pointers must be canonicalized before comparisons. */ | |
8458 | #ifdef HAVE_canonicalize_funcptr_for_compare | |
8459 | if (HAVE_canonicalize_funcptr_for_compare | |
8460 | && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE | |
8461 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
8462 | == FUNCTION_TYPE)) | |
8463 | || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE | |
8464 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
8465 | == FUNCTION_TYPE)))) | |
8466 | return 0; | |
ca695ac9 JB |
8467 | #endif |
8468 | ||
b93a436e JL |
8469 | STRIP_NOPS (arg0); |
8470 | STRIP_NOPS (arg1); | |
ca695ac9 | 8471 | |
b93a436e JL |
8472 | /* Get the rtx comparison code to use. We know that EXP is a comparison |
8473 | operation of some type. Some comparisons against 1 and -1 can be | |
8474 | converted to comparisons with zero. Do so here so that the tests | |
8475 | below will be aware that we have a comparison with zero. These | |
8476 | tests will not catch constants in the first operand, but constants | |
8477 | are rarely passed as the first operand. */ | |
ca695ac9 | 8478 | |
b93a436e JL |
8479 | switch (TREE_CODE (exp)) |
8480 | { | |
8481 | case EQ_EXPR: | |
8482 | code = EQ; | |
bbf6f052 | 8483 | break; |
b93a436e JL |
8484 | case NE_EXPR: |
8485 | code = NE; | |
bbf6f052 | 8486 | break; |
b93a436e JL |
8487 | case LT_EXPR: |
8488 | if (integer_onep (arg1)) | |
8489 | arg1 = integer_zero_node, code = unsignedp ? LEU : LE; | |
8490 | else | |
8491 | code = unsignedp ? LTU : LT; | |
ca695ac9 | 8492 | break; |
b93a436e JL |
8493 | case LE_EXPR: |
8494 | if (! unsignedp && integer_all_onesp (arg1)) | |
8495 | arg1 = integer_zero_node, code = LT; | |
8496 | else | |
8497 | code = unsignedp ? LEU : LE; | |
ca695ac9 | 8498 | break; |
b93a436e JL |
8499 | case GT_EXPR: |
8500 | if (! unsignedp && integer_all_onesp (arg1)) | |
8501 | arg1 = integer_zero_node, code = GE; | |
8502 | else | |
8503 | code = unsignedp ? GTU : GT; | |
8504 | break; | |
8505 | case GE_EXPR: | |
8506 | if (integer_onep (arg1)) | |
8507 | arg1 = integer_zero_node, code = unsignedp ? GTU : GT; | |
8508 | else | |
8509 | code = unsignedp ? GEU : GE; | |
ca695ac9 | 8510 | break; |
1eb8759b RH |
8511 | |
8512 | case UNORDERED_EXPR: | |
8513 | code = UNORDERED; | |
8514 | break; | |
8515 | case ORDERED_EXPR: | |
8516 | code = ORDERED; | |
8517 | break; | |
8518 | case UNLT_EXPR: | |
8519 | code = UNLT; | |
8520 | break; | |
8521 | case UNLE_EXPR: | |
8522 | code = UNLE; | |
8523 | break; | |
8524 | case UNGT_EXPR: | |
8525 | code = UNGT; | |
8526 | break; | |
8527 | case UNGE_EXPR: | |
8528 | code = UNGE; | |
8529 | break; | |
8530 | case UNEQ_EXPR: | |
8531 | code = UNEQ; | |
8532 | break; | |
d1a7edaf PB |
8533 | case LTGT_EXPR: |
8534 | code = LTGT; | |
8535 | break; | |
1eb8759b | 8536 | |
ca695ac9 | 8537 | default: |
5b0264cb | 8538 | gcc_unreachable (); |
bbf6f052 | 8539 | } |
bbf6f052 | 8540 | |
b93a436e JL |
8541 | /* Put a constant second. */ |
8542 | if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST) | |
8543 | { | |
8544 | tem = arg0; arg0 = arg1; arg1 = tem; | |
8545 | code = swap_condition (code); | |
ca695ac9 | 8546 | } |
bbf6f052 | 8547 | |
b93a436e JL |
8548 | /* If this is an equality or inequality test of a single bit, we can |
8549 | do this by shifting the bit being tested to the low-order bit and | |
8550 | masking the result with the constant 1. If the condition was EQ, | |
8551 | we xor it with 1. This does not require an scc insn and is faster | |
7960bf22 JL |
8552 | than an scc insn even if we have it. |
8553 | ||
8554 | The code to make this transformation was moved into fold_single_bit_test, | |
8555 | so we just call into the folder and expand its result. */ | |
d39985fa | 8556 | |
b93a436e JL |
8557 | if ((code == NE || code == EQ) |
8558 | && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) | |
8559 | && integer_pow2p (TREE_OPERAND (arg0, 1))) | |
60cd4dae | 8560 | { |
ae2bcd98 | 8561 | tree type = lang_hooks.types.type_for_mode (mode, unsignedp); |
60cd4dae | 8562 | return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR, |
450b1728 | 8563 | arg0, arg1, type), |
60cd4dae JL |
8564 | target, VOIDmode, EXPAND_NORMAL); |
8565 | } | |
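/* For example, "(x & 8) != 0" expands as "(x >> 3) & 1", and
   "(x & 8) == 0" as "((x >> 3) & 1) ^ 1".  */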
bbf6f052 | 8566 | |
b93a436e | 8567 | /* Now see if we are likely to be able to do this. Return if not. */ |
1eb8759b | 8568 | if (! can_compare_p (code, operand_mode, ccp_store_flag)) |
b93a436e | 8569 | return 0; |
1eb8759b | 8570 | |
b93a436e JL |
8571 | icode = setcc_gen_code[(int) code]; |
8572 | if (icode == CODE_FOR_nothing | |
a995e389 | 8573 | || (only_cheap && insn_data[(int) icode].operand[0].mode != mode)) |
ca695ac9 | 8574 | { |
b93a436e JL |
8575 | /* We can only do this if it is one of the special cases that |
8576 | can be handled without an scc insn. */ | |
8577 | if ((code == LT && integer_zerop (arg1)) | |
8578 | || (! only_cheap && code == GE && integer_zerop (arg1))) | |
8579 | ; | |
8580 | else if (BRANCH_COST >= 0 | |
8581 | && ! only_cheap && (code == NE || code == EQ) | |
8582 | && TREE_CODE (type) != REAL_TYPE | |
8583 | && ((abs_optab->handlers[(int) operand_mode].insn_code | |
8584 | != CODE_FOR_nothing) | |
8585 | || (ffs_optab->handlers[(int) operand_mode].insn_code | |
8586 | != CODE_FOR_nothing))) | |
8587 | ; | |
8588 | else | |
8589 | return 0; | |
ca695ac9 | 8590 | } |
3a94c984 | 8591 | |
296b4ed9 | 8592 | if (! get_subtarget (target) |
e3be1116 | 8593 | || GET_MODE (subtarget) != operand_mode) |
b93a436e JL |
8594 | subtarget = 0; |
8595 | ||
eb698c58 | 8596 | expand_operands (arg0, arg1, subtarget, &op0, &op1, 0); |
b93a436e JL |
8597 | |
8598 | if (target == 0) | |
8599 | target = gen_reg_rtx (mode); | |
8600 | ||
ad76cef8 | 8601 | result = emit_store_flag (target, code, op0, op1, |
b93a436e | 8602 | operand_mode, unsignedp, 1); |
ca695ac9 | 8603 | |
b93a436e JL |
8604 | if (result) |
8605 | { | |
8606 | if (invert) | |
8607 | result = expand_binop (mode, xor_optab, result, const1_rtx, | |
8608 | result, 0, OPTAB_LIB_WIDEN); | |
8609 | return result; | |
ca695ac9 | 8610 | } |
bbf6f052 | 8611 | |
b93a436e | 8612 | /* If this failed, we have to do this with set/compare/jump/set code. */ |
f8cfc6aa | 8613 | if (!REG_P (target) |
b93a436e JL |
8614 | || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1)) |
8615 | target = gen_reg_rtx (GET_MODE (target)); | |
8616 | ||
8617 | emit_move_insn (target, invert ? const0_rtx : const1_rtx); | |
8618 | result = compare_from_rtx (op0, op1, code, unsignedp, | |
a06ef755 | 8619 | operand_mode, NULL_RTX); |
b93a436e JL |
8620 | if (GET_CODE (result) == CONST_INT) |
8621 | return (((result == const0_rtx && ! invert) | |
8622 | || (result != const0_rtx && invert)) | |
8623 | ? const0_rtx : const1_rtx); | |
ca695ac9 | 8624 | |
8f08e8c0 JL |
8625 | /* The code of RESULT may not match CODE if compare_from_rtx |
8626 | decided to swap its operands and reverse the original code. | |
8627 | ||
8628 | We know that compare_from_rtx returns either a CONST_INT or | |
8629 | a new comparison code, so it is safe to just extract the | |
8630 | code from RESULT. */ | |
8631 | code = GET_CODE (result); | |
8632 | ||
b93a436e | 8633 | label = gen_label_rtx (); |
5b0264cb | 8634 | gcc_assert (bcc_gen_fctn[(int) code]); |
0f41302f | 8635 | |
b93a436e JL |
8636 | emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label)); |
8637 | emit_move_insn (target, invert ? const1_rtx : const0_rtx); | |
8638 | emit_label (label); | |
bbf6f052 | 8639 | |
b93a436e | 8640 | return target; |
ca695ac9 | 8641 | } |
b93a436e | 8642 | \f |
b93a436e | 8643 | |
ad82abb8 ZW |
8644 | /* Stubs in case we haven't got a casesi insn. */ |
8645 | #ifndef HAVE_casesi | |
8646 | # define HAVE_casesi 0 | |
8647 | # define gen_casesi(a, b, c, d, e) (0) | |
8648 | # define CODE_FOR_casesi CODE_FOR_nothing | |
8649 | #endif | |
8650 | ||
8651 | /* If the machine does not have a case insn that compares the bounds, | |
8652 | this means extra overhead for dispatch tables, which raises the | |
8653 | threshold for using them. */ | |
8654 | #ifndef CASE_VALUES_THRESHOLD | |
8655 | #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5) | |
8656 | #endif /* CASE_VALUES_THRESHOLD */ | |
8657 | ||
8658 | unsigned int | |
502b8322 | 8659 | case_values_threshold (void) |
ad82abb8 ZW |
8660 | { |
8661 | return CASE_VALUES_THRESHOLD; | |
8662 | } | |
8663 | ||
8664 | /* Attempt to generate a casesi instruction. Returns 1 if successful, | |
8665 | 0 otherwise (i.e. if there is no casesi instruction). */ | |
8666 | int | |
502b8322 AJ |
8667 | try_casesi (tree index_type, tree index_expr, tree minval, tree range, |
8668 | rtx table_label ATTRIBUTE_UNUSED, rtx default_label) | |
ad82abb8 ZW |
8669 | { |
8670 | enum machine_mode index_mode = SImode; | |
8671 | int index_bits = GET_MODE_BITSIZE (index_mode); | |
8672 | rtx op1, op2, index; | |
8673 | enum machine_mode op_mode; | |
8674 | ||
8675 | if (! HAVE_casesi) | |
8676 | return 0; | |
8677 | ||
8678 | /* Convert the index to SImode. */ | |
8679 | if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode)) | |
8680 | { | |
8681 | enum machine_mode omode = TYPE_MODE (index_type); | |
8682 | rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0); | |
8683 | ||
8684 | /* We must handle the endpoints in the original mode. */ | |
3244e67d RS |
8685 | index_expr = build2 (MINUS_EXPR, index_type, |
8686 | index_expr, minval); | |
ad82abb8 ZW |
8687 | minval = integer_zero_node; |
8688 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
8689 | emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX, | |
a06ef755 | 8690 | omode, 1, default_label); |
ad82abb8 ZW |
8691 | /* Now we can safely truncate. */ |
8692 | index = convert_to_mode (index_mode, index, 0); | |
8693 | } | |
8694 | else | |
8695 | { | |
8696 | if (TYPE_MODE (index_type) != index_mode) | |
8697 | { | |
ae2bcd98 | 8698 | index_expr = convert (lang_hooks.types.type_for_size |
b0c48229 | 8699 | (index_bits, 0), index_expr); |
ad82abb8 ZW |
8700 | index_type = TREE_TYPE (index_expr); |
8701 | } | |
8702 | ||
8703 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
8704 | } | |
ad76cef8 | 8705 | |
ad82abb8 ZW |
8706 | do_pending_stack_adjust (); |
8707 | ||
8708 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode; | |
8709 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate) | |
8710 | (index, op_mode)) | |
8711 | index = copy_to_mode_reg (op_mode, index); | |
e87b4f3f | 8712 | |
ad82abb8 ZW |
8713 | op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0); |
8714 | ||
8715 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode; | |
8716 | op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)), | |
8df83eae | 8717 | op1, TYPE_UNSIGNED (TREE_TYPE (minval))); |
ad82abb8 ZW |
8718 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate) |
8719 | (op1, op_mode)) | |
8720 | op1 = copy_to_mode_reg (op_mode, op1); | |
8721 | ||
8722 | op2 = expand_expr (range, NULL_RTX, VOIDmode, 0); | |
8723 | ||
8724 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode; | |
8725 | op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)), | |
8df83eae | 8726 | op2, TYPE_UNSIGNED (TREE_TYPE (range))); |
ad82abb8 ZW |
8727 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate) |
8728 | (op2, op_mode)) | |
8729 | op2 = copy_to_mode_reg (op_mode, op2); | |
8730 | ||
8731 | emit_jump_insn (gen_casesi (index, op1, op2, | |
8732 | table_label, default_label)); | |
8733 | return 1; | |
8734 | } | |
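/* Note the ordering above for a wide index: the MINUS and the range
   check are done in the index's original mode before truncating to
   SImode, so the truncation cannot make an out-of-range index appear
   to be in range.  */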
8735 | ||
8736 | /* Attempt to generate a tablejump instruction; same concept. */ | |
8737 | #ifndef HAVE_tablejump | |
8738 | #define HAVE_tablejump 0 | |
8739 | #define gen_tablejump(x, y) (0) | |
8740 | #endif | |
8741 | ||
8742 | /* Subroutine of the next function. | |
8743 | ||
8744 | INDEX is the value being switched on, with the lowest value | |
b93a436e JL |
8745 | in the table already subtracted. |
8746 | MODE is its expected mode (needed if INDEX is constant). | |
8747 | RANGE is the length of the jump table. | |
8748 | TABLE_LABEL is a CODE_LABEL rtx for the table itself. | |
88d3b7f0 | 8749 | |
b93a436e JL |
8750 | DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the |
8751 | index value is out of range. */ | |
0f41302f | 8752 | |
ad82abb8 | 8753 | static void |
502b8322 AJ |
8754 | do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label, |
8755 | rtx default_label) | |
ca695ac9 | 8756 | { |
b3694847 | 8757 | rtx temp, vector; |
88d3b7f0 | 8758 | |
74f6d071 JH |
8759 | if (INTVAL (range) > cfun->max_jumptable_ents) |
8760 | cfun->max_jumptable_ents = INTVAL (range); | |
1877be45 | 8761 | |
b93a436e JL |
8762 | /* Do an unsigned comparison (in the proper mode) between the index |
8763 | expression and the value which represents the length of the range. | |
8764 | Since we just finished subtracting the lower bound of the range | |
8765 | from the index expression, this comparison allows us to simultaneously | |
8766 | check that the original index expression value is both greater than | |
8767 | or equal to the minimum value of the range and less than or equal to | |
8768 | the maximum value of the range. */ | |
709f5be1 | 8769 | |
c5d5d461 | 8770 | emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1, |
a06ef755 | 8771 | default_label); |
bbf6f052 | 8772 | |
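/* I.e., with the lower bound already subtracted, the single unsigned
   comparison above amounts to the C-level check

       if ((unsigned) (index - low) > (unsigned) (high - low))
         goto default_label;

   because values below LOW wrap around to large unsigned numbers.  */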
b93a436e JL |
8773 | /* If index is in range, it must fit in Pmode. |
8774 | Convert to Pmode so we can index with it. */ | |
8775 | if (mode != Pmode) | |
8776 | index = convert_to_mode (Pmode, index, 1); | |
bbf6f052 | 8777 | |
ba228239 | 8778 | /* Don't let a MEM slip through, because then INDEX that comes |
b93a436e JL |
8779 | out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, |
8780 | and break_out_memory_refs will go to work on it and mess it up. */ | |
8781 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
f8cfc6aa | 8782 | if (flag_pic && !REG_P (index)) |
b93a436e JL |
8783 | index = copy_to_mode_reg (Pmode, index); |
8784 | #endif | |
ca695ac9 | 8785 | |
b93a436e JL |
8786 | /* If flag_force_addr were to affect this address |
8787 | it could interfere with the tricky assumptions made | |
8788 | about addresses that contain label-refs, | |
8789 | which may be valid only very near the tablejump itself. */ | |
8790 | /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the | |
8791 | GET_MODE_SIZE, because this indicates how large insns are. The other | |
8792 | uses should all be Pmode, because they are addresses. This code | |
8793 | could fail if addresses and insns are not the same size. */ | |
8794 | index = gen_rtx_PLUS (Pmode, | |
8795 | gen_rtx_MULT (Pmode, index, | |
8796 | GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))), | |
8797 | gen_rtx_LABEL_REF (Pmode, table_label)); | |
8798 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
8799 | if (flag_pic) | |
8800 | index = PIC_CASE_VECTOR_ADDRESS (index); | |
8801 | else | |
bbf6f052 | 8802 | #endif |
b93a436e JL |
8803 | index = memory_address_noforce (CASE_VECTOR_MODE, index); |
8804 | temp = gen_reg_rtx (CASE_VECTOR_MODE); | |
542a8afa | 8805 | vector = gen_const_mem (CASE_VECTOR_MODE, index); |
b93a436e JL |
8806 | convert_move (temp, vector, 0); |
8807 | ||
8808 | emit_jump_insn (gen_tablejump (temp, table_label)); | |
8809 | ||
8810 | /* If we are generating PIC code or if the table is PC-relative, the | |
8811 | table and JUMP_INSN must be adjacent, so don't output a BARRIER. */ | |
8812 | if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic) | |
8813 | emit_barrier (); | |
bbf6f052 | 8814 | } |
b93a436e | 8815 | |
ad82abb8 | 8816 | int |
502b8322 AJ |
8817 | try_tablejump (tree index_type, tree index_expr, tree minval, tree range, |
8818 | rtx table_label, rtx default_label) | |
ad82abb8 ZW |
8819 | { |
8820 | rtx index; | |
8821 | ||
8822 | if (! HAVE_tablejump) | |
8823 | return 0; | |
8824 | ||
3244e67d RS |
8825 | index_expr = fold (build2 (MINUS_EXPR, index_type, |
8826 | convert (index_type, index_expr), | |
8827 | convert (index_type, minval))); | |
ad82abb8 | 8828 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); |
ad82abb8 ZW |
8829 | do_pending_stack_adjust (); |
8830 | ||
8831 | do_tablejump (index, TYPE_MODE (index_type), | |
8832 | convert_modes (TYPE_MODE (index_type), | |
8833 | TYPE_MODE (TREE_TYPE (range)), | |
8834 | expand_expr (range, NULL_RTX, | |
8835 | VOIDmode, 0), | |
8df83eae | 8836 | TYPE_UNSIGNED (TREE_TYPE (range))), |
ad82abb8 ZW |
8837 | table_label, default_label); |
8838 | return 1; | |
8839 | } | |
e2500fed | 8840 | |
cb2a532e AH |
8841 | /* Nonzero if the mode is a valid vector mode for this architecture. |
8842 | This returns nonzero even if there is no hardware support for the | |
8843 | vector mode, but we can emulate with narrower modes. */ | |
8844 | ||
8845 | int | |
502b8322 | 8846 | vector_mode_valid_p (enum machine_mode mode) |
cb2a532e AH |
8847 | { |
8848 | enum mode_class class = GET_MODE_CLASS (mode); | |
8849 | enum machine_mode innermode; | |
8850 | ||
8851 | /* Doh! What's going on? */ | |
8852 | if (class != MODE_VECTOR_INT | |
8853 | && class != MODE_VECTOR_FLOAT) | |
8854 | return 0; | |
8855 | ||
8856 | /* Hardware support. Woo hoo! */ | |
f676971a | 8857 | if (targetm.vector_mode_supported_p (mode)) |
cb2a532e AH |
8858 | return 1; |
8859 | ||
8860 | innermode = GET_MODE_INNER (mode); | |
8861 | ||
8862 | /* We should probably return 1 if requesting V4DI and we have no DI | |
8863 | but do have V2DI; that case, however, is probably very unlikely. */ | |
8864 | ||
8865 | /* If we have support for the inner mode, we can safely emulate it. | |
8866 | We may not have V2DI, but we can emulate with a pair of DIs. */ | |
6dd53648 | 8867 | return targetm.scalar_mode_supported_p (innermode); |
cb2a532e AH |
8868 | } |
8869 | ||
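/* For example, V2DImode counts as valid on a target with DImode
   arithmetic but no vector hardware, since a V2DI addition can be
   open-coded as two DImode additions.  */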
d744e06e AH |
8870 | /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */ |
8871 | static rtx | |
502b8322 | 8872 | const_vector_from_tree (tree exp) |
d744e06e AH |
8873 | { |
8874 | rtvec v; | |
8875 | int units, i; | |
8876 | tree link, elt; | |
8877 | enum machine_mode inner, mode; | |
8878 | ||
8879 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
8880 | ||
6de9cd9a | 8881 | if (initializer_zerop (exp)) |
d744e06e AH |
8882 | return CONST0_RTX (mode); |
8883 | ||
8884 | units = GET_MODE_NUNITS (mode); | |
8885 | inner = GET_MODE_INNER (mode); | |
8886 | ||
8887 | v = rtvec_alloc (units); | |
8888 | ||
8889 | link = TREE_VECTOR_CST_ELTS (exp); | |
8890 | for (i = 0; link; link = TREE_CHAIN (link), ++i) | |
8891 | { | |
8892 | elt = TREE_VALUE (link); | |
8893 | ||
8894 | if (TREE_CODE (elt) == REAL_CST) | |
8895 | RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt), | |
8896 | inner); | |
8897 | else | |
8898 | RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt), | |
8899 | TREE_INT_CST_HIGH (elt), | |
8900 | inner); | |
8901 | } | |
8902 | ||
5f6c070d AH |
8903 | /* Initialize remaining elements to 0. */ |
8904 | for (; i < units; ++i) | |
8905 | RTVEC_ELT (v, i) = CONST0_RTX (inner); | |
8906 | ||
a73b091d | 8907 | return gen_rtx_CONST_VECTOR (mode, v); |
d744e06e | 8908 | } |
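/* For example, a V4SImode VECTOR_CST listing only the elements 1 and 2
   becomes the CONST_VECTOR [1, 2, 0, 0]: the trailing elements are
   zero-filled by the loop above.  */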
e2500fed | 8909 | #include "gt-expr.h" |