/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

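/* Illustrative sketch, not part of the original file: how the heuristic
   above plays out.  On a typical 32-bit target, copying a 32-byte block
   with word-aligned operands needs 32/4 = 8 word moves; 8 is below the
   default MOVE_RATIO of 15, so MOVE_BY_PIECES_P is true and the copy is
   expanded inline instead of through a movstr pattern or a libcall.
   DST and SRC are hypothetical BLKmode MEMs.  */
#if 0
  if (MOVE_BY_PIECES_P (32, 32))	/* 8 insns < 15, so move inline.  */
    move_by_pieces (dst, src, 32, 32);
#endif
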
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
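
/* Illustrative sketch, not from this file, of the protocol described
   above: wrap every operand that might contain a QUEUED through
   protect_from_queue before emitting an insn, then flush the queue.
   OP0 and TARGET are hypothetical rtx values.  */
#if 0
  op0 = protect_from_queue (op0, 0);		/* Read-only use.  */
  target = protect_from_queue (target, 1);	/* Will be written.  */
  emit_move_insn (target, op0);			/* Safe: no QUEUED inside.  */
  emit_queue ();				/* Perform pending increments.  */
#endif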
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

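/* Illustrative sketch, not from this file: widening a QImode value into
   an SImode register.  convert_move picks a direct extendqisi2-style
   pattern if the target has one, otherwise an intermediate mode or a
   shift pair.  The two pseudos here are hypothetical.  */
#if 0
  rtx byte_reg = gen_reg_rtx (QImode);
  rtx word_reg = gen_reg_rtx (SImode);
  convert_move (word_reg, byte_reg, 1);	/* 1 = zero-extend (unsigned).  */
#endif
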
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
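
/* Illustrative sketch, not from this file: unlike convert_move,
   convert_modes returns an rtx instead of storing into a target, so a
   constant can be re-interpreted without emitting any insns.  Narrowing
   300 to unsigned QImode yields (const_int 44) here.  */
#if 0
  /* OLDMODE is VOIDmode because a CONST_INT carries no mode of its own.  */
  rtx narrowed = convert_modes (QImode, VOIDmode, GEN_INT (300), 1);
#endif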
\f
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

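/* Illustrative sketch, not from this file: copying an 8-byte block
   between two BLKmode MEMs that are known 32-bit aligned expands to two
   SImode moves on a typical 32-bit target.  SRC_ADDR and DST_ADDR are
   hypothetical pseudos holding the two addresses.  */
#if 0
  rtx src = gen_rtx_MEM (BLKmode, src_addr);
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  move_by_pieces (dst, src, 8, 32);	/* LEN = 8 bytes, ALIGN = 32 bits.  */
#endif
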
1504 | /* Return number of insns required to move L bytes by pieces. | |
f1eaaf73 | 1505 | ALIGN (in bits) is maximum alignment we can assume. */ |
bbf6f052 | 1506 | |
3bdf5ad1 | 1507 | static unsigned HOST_WIDE_INT |
bbf6f052 | 1508 | move_by_pieces_ninsns (l, align) |
3bdf5ad1 | 1509 | unsigned HOST_WIDE_INT l; |
729a2125 | 1510 | unsigned int align; |
bbf6f052 | 1511 | { |
3bdf5ad1 RK |
1512 | unsigned HOST_WIDE_INT n_insns = 0; |
1513 | unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1; | |
bbf6f052 | 1514 | |
e1565e65 | 1515 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) |
19caa751 | 1516 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) |
14c78e9b | 1517 | align = MOVE_MAX * BITS_PER_UNIT; |
bbf6f052 RK |
1518 | |
1519 | while (max_size > 1) | |
1520 | { | |
1521 | enum machine_mode mode = VOIDmode, tmode; | |
1522 | enum insn_code icode; | |
1523 | ||
e7c33f54 RK |
1524 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
1525 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
1526 | if (GET_MODE_SIZE (tmode) < max_size) | |
bbf6f052 RK |
1527 | mode = tmode; |
1528 | ||
1529 | if (mode == VOIDmode) | |
1530 | break; | |
1531 | ||
1532 | icode = mov_optab->handlers[(int) mode].insn_code; | |
19caa751 | 1533 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
bbf6f052 RK |
1534 | n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode); |
1535 | ||
1536 | max_size = GET_MODE_SIZE (mode); | |
1537 | } | |
1538 | ||
13c6f0d5 NS |
1539 | if (l) |
1540 | abort (); | |
bbf6f052 RK |
1541 | return n_insns; |
1542 | } | |
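/* Editor's illustrative sketch (not part of expr.c): the greedy
   insn-counting logic of move_by_pieces_ninsns, modelled with plain
   integers so it can be compiled standalone.  The mode-size table and
   "move_max" are hypothetical stand-ins for GET_MODE_SIZE and
   MOVE_MAX, and the per-mode alignment check is omitted.  */

#include <stdio.h>

static unsigned int
ninsns_sketch (unsigned int len, unsigned int move_max)
{
  /* Hypothetical integer mode sizes in bytes (QI, HI, SI, DI).  */
  static const unsigned int mode_size[] = { 1, 2, 4, 8 };
  unsigned int n_insns = 0;
  unsigned int max_size = move_max + 1;

  while (max_size > 1)
    {
      unsigned int best = 0, i;

      /* Find the widest mode narrower than MAX_SIZE, as the
	 GET_MODE_WIDER_MODE loop above does.  */
      for (i = 0; i < sizeof mode_size / sizeof mode_size[0]; i++)
	if (mode_size[i] < max_size)
	  best = mode_size[i];

      if (best == 0)
	break;

      n_insns += len / best, len %= best;
      max_size = best;
    }

  return n_insns;
}

int
main (void)
{
  /* 11 bytes with move_max == 8: one 8-byte, one 2-byte and one
     1-byte piece, so 3 insns.  */
  printf ("%u\n", ninsns_sketch (11, 8));
  return 0;
}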
1543 | ||
1544 | /* Subroutine of move_by_pieces. Move as many bytes as appropriate | |
1545 | with move instructions for mode MODE. GENFUN is the gen_... function | |
1546 | to make a move insn for that mode. DATA has all the other info. */ | |
1547 | ||
1548 | static void | |
1549 | move_by_pieces_1 (genfun, mode, data) | |
711d877c | 1550 | rtx (*genfun) PARAMS ((rtx, ...)); |
bbf6f052 RK |
1551 | enum machine_mode mode; |
1552 | struct move_by_pieces *data; | |
1553 | { | |
3bdf5ad1 | 1554 | unsigned int size = GET_MODE_SIZE (mode); |
ae0ed63a | 1555 | rtx to1 = NULL_RTX, from1; |
bbf6f052 RK |
1556 | |
1557 | while (data->len >= size) | |
1558 | { | |
3bdf5ad1 RK |
1559 | if (data->reverse) |
1560 | data->offset -= size; | |
1561 | ||
566aa174 | 1562 | if (data->to) |
3bdf5ad1 | 1563 | { |
566aa174 | 1564 | if (data->autinc_to) |
630036c6 JJ |
1565 | to1 = adjust_automodify_address (data->to, mode, data->to_addr, |
1566 | data->offset); | |
566aa174 | 1567 | else |
f4ef873c | 1568 | to1 = adjust_address (data->to, mode, data->offset); |
3bdf5ad1 | 1569 | } |
3bdf5ad1 RK |
1570 | |
1571 | if (data->autinc_from) | |
630036c6 JJ |
1572 | from1 = adjust_automodify_address (data->from, mode, data->from_addr, |
1573 | data->offset); | |
3bdf5ad1 | 1574 | else |
f4ef873c | 1575 | from1 = adjust_address (data->from, mode, data->offset); |
bbf6f052 | 1576 | |
940da324 | 1577 | if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) |
3d709fd3 RH |
1578 | emit_insn (gen_add2_insn (data->to_addr, |
1579 | GEN_INT (-(HOST_WIDE_INT)size))); | |
940da324 | 1580 | if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0) |
3d709fd3 RH |
1581 | emit_insn (gen_add2_insn (data->from_addr, |
1582 | GEN_INT (-(HOST_WIDE_INT)size))); | |
bbf6f052 | 1583 | |
566aa174 JH |
1584 | if (data->to) |
1585 | emit_insn ((*genfun) (to1, from1)); | |
1586 | else | |
21d93687 RK |
1587 | { |
1588 | #ifdef PUSH_ROUNDING | |
1589 | emit_single_push_insn (mode, from1, NULL); | |
1590 | #else | |
1591 | abort (); | |
1592 | #endif | |
1593 | } | |
3bdf5ad1 | 1594 | |
940da324 | 1595 | if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) |
906c4e36 | 1596 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); |
940da324 | 1597 | if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0) |
906c4e36 | 1598 | emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size))); |
bbf6f052 | 1599 | |
3bdf5ad1 RK |
1600 | if (! data->reverse) |
1601 | data->offset += size; | |
bbf6f052 RK |
1602 | |
1603 | data->len -= size; | |
1604 | } | |
1605 | } | |
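/* Editor's illustrative sketch (not part of expr.c): the offset
   bookkeeping of move_by_pieces_1 for a single piece size, with
   memcpy standing in for the move insn.  "reverse" plays the role of
   data->reverse: decrement the offset before each piece, or increment
   it after.  Any residue smaller than SIZE is left over, just as the
   real loop leaves it for a narrower mode.  */

#include <stdio.h>
#include <string.h>

static void
copy_pieces_sketch (char *to, const char *from,
		    unsigned int len, unsigned int size, int reverse)
{
  unsigned int offset = reverse ? len : 0;

  while (len >= size)
    {
      if (reverse)
	offset -= size;

      memcpy (to + offset, from + offset, size);

      if (!reverse)
	offset += size;

      len -= size;
    }
}

int
main (void)
{
  char src[9] = "abcdefgh", dst[9] = "........";

  /* Two 4-byte pieces, highest addresses first.  */
  copy_pieces_sketch (dst, src, 8, 4, 1);
  puts (dst);   /* abcdefgh */
  return 0;
}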
1606 | \f | |
1607 | /* Emit code to move a block Y to a block X. | |
1608 | This may be done with string-move instructions, | |
1609 | with multiple scalar move instructions, or with a library call. | |
1610 | ||
1611 | Both X and Y must be MEM rtx's (perhaps inside VOLATILE) | |
1612 | with mode BLKmode. | |
1613 | SIZE is an rtx that says how long they are. | |
19caa751 | 1614 | Alignment is taken to be the minimum of the MEM_ALIGN of X and Y. |
bbf6f052 | 1615 | |
e9a25f70 JL |
1616 | Return the address of the new block, if memcpy is called and returns it, |
1617 | 0 otherwise. */ | |
1618 | ||
1619 | rtx | |
8ac61af7 | 1620 | emit_block_move (x, y, size) |
bbf6f052 RK |
1621 | rtx x, y; |
1622 | rtx size; | |
bbf6f052 | 1623 | { |
e9a25f70 | 1624 | rtx retval = 0; |
52cf7115 JL |
1625 | #ifdef TARGET_MEM_FUNCTIONS |
1626 | static tree fn; | |
1627 | tree call_expr, arg_list; | |
1628 | #endif | |
8ac61af7 | 1629 | unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y)); |
e9a25f70 | 1630 | |
bbf6f052 RK |
1631 | if (GET_MODE (x) != BLKmode) |
1632 | abort (); | |
1633 | ||
1634 | if (GET_MODE (y) != BLKmode) | |
1635 | abort (); | |
1636 | ||
1637 | x = protect_from_queue (x, 1); | |
1638 | y = protect_from_queue (y, 0); | |
5d901c31 | 1639 | size = protect_from_queue (size, 0); |
bbf6f052 RK |
1640 | |
1641 | if (GET_CODE (x) != MEM) | |
1642 | abort (); | |
1643 | if (GET_CODE (y) != MEM) | |
1644 | abort (); | |
1645 | if (size == 0) | |
1646 | abort (); | |
1647 | ||
fbe1758d | 1648 | if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align)) |
bbf6f052 RK |
1649 | move_by_pieces (x, y, INTVAL (size), align); |
1650 | else | |
1651 | { | |
1652 | /* Try the most limited insn first, because there's no point | |
1653 | including more than one in the machine description unless | |
1654 | the more limited one has some advantage. */ | |
266007a7 | 1655 | |
19caa751 | 1656 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); |
266007a7 RK |
1657 | enum machine_mode mode; |
1658 | ||
3ef1eef4 RK |
1659 | /* Since this is a move insn, we don't care about volatility. */ |
1660 | volatile_ok = 1; | |
1661 | ||
266007a7 RK |
1662 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; |
1663 | mode = GET_MODE_WIDER_MODE (mode)) | |
bbf6f052 | 1664 | { |
266007a7 | 1665 | enum insn_code code = movstr_optab[(int) mode]; |
a995e389 | 1666 | insn_operand_predicate_fn pred; |
266007a7 RK |
1667 | |
1668 | if (code != CODE_FOR_nothing | |
803090c4 RK |
1669 | /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT |
1670 | here because if SIZE is less than the mode mask, as it is | |
8008b228 | 1671 | returned by the macro, it will definitely be less than the |
803090c4 | 1672 | actual mode mask. */ |
8ca00751 RK |
1673 | && ((GET_CODE (size) == CONST_INT |
1674 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
e5e809f4 | 1675 | <= (GET_MODE_MASK (mode) >> 1))) |
8ca00751 | 1676 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) |
a995e389 RH |
1677 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 |
1678 | || (*pred) (x, BLKmode)) | |
1679 | && ((pred = insn_data[(int) code].operand[1].predicate) == 0 | |
1680 | || (*pred) (y, BLKmode)) | |
1681 | && ((pred = insn_data[(int) code].operand[3].predicate) == 0 | |
1682 | || (*pred) (opalign, VOIDmode))) | |
bbf6f052 | 1683 | { |
1ba1e2a8 | 1684 | rtx op2; |
266007a7 RK |
1685 | rtx last = get_last_insn (); |
1686 | rtx pat; | |
1687 | ||
1ba1e2a8 | 1688 | op2 = convert_to_mode (mode, size, 1); |
a995e389 RH |
1689 | pred = insn_data[(int) code].operand[2].predicate; |
1690 | if (pred != 0 && ! (*pred) (op2, mode)) | |
266007a7 RK |
1691 | op2 = copy_to_mode_reg (mode, op2); |
1692 | ||
1693 | pat = GEN_FCN ((int) code) (x, y, op2, opalign); | |
1694 | if (pat) | |
1695 | { | |
1696 | emit_insn (pat); | |
3ef1eef4 | 1697 | volatile_ok = 0; |
e9a25f70 | 1698 | return 0; |
266007a7 RK |
1699 | } |
1700 | else | |
1701 | delete_insns_since (last); | |
bbf6f052 RK |
1702 | } |
1703 | } | |
bbf6f052 | 1704 | |
3ef1eef4 RK |
1705 | volatile_ok = 0; |
1706 | ||
4bc973ae JL |
1707 | /* X, Y, or SIZE may have been passed through protect_from_queue. |
1708 | ||
1709 | It is unsafe to save the value generated by protect_from_queue | |
1710 | and reuse it later. Consider what happens if emit_queue is | |
1711 | called before the return value from protect_from_queue is used. | |
1712 | ||
1713 | Expansion of the CALL_EXPR below will call emit_queue before | |
1714 | we are finished emitting RTL for argument setup. So if we are | |
1715 | not careful we could get the wrong value for an argument. | |
1716 | ||
1717 | To avoid this problem we go ahead and emit code to copy X, Y & | |
1718 | SIZE into new pseudos. We can then place those new pseudos | |
1719 | into an RTL_EXPR and use them later, even after a call to | |
3a94c984 | 1720 | emit_queue. |
4bc973ae JL |
1721 | |
1722 | Note this is not strictly needed for library calls since they | |
1723 | do not call emit_queue before loading their arguments. However, | |
1724 | we may need to have library calls call emit_queue in the future | |
1725 | since failing to do so could cause problems for targets which | |
1726 | define SMALL_REGISTER_CLASSES and pass arguments in registers. */ | |
1727 | x = copy_to_mode_reg (Pmode, XEXP (x, 0)); | |
1728 | y = copy_to_mode_reg (Pmode, XEXP (y, 0)); | |
1729 | ||
1730 | #ifdef TARGET_MEM_FUNCTIONS | |
1731 | size = copy_to_mode_reg (TYPE_MODE (sizetype), size); | |
1732 | #else | |
1733 | size = convert_to_mode (TYPE_MODE (integer_type_node), size, | |
1734 | TREE_UNSIGNED (integer_type_node)); | |
f3dc586a | 1735 | size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size); |
4bc973ae JL |
1736 | #endif |
1737 | ||
bbf6f052 | 1738 | #ifdef TARGET_MEM_FUNCTIONS |
52cf7115 JL |
1739 | /* It is incorrect to use the libcall calling conventions to call |
1740 | memcpy in this context. | |
1741 | ||
1742 | This could be a user call to memcpy and the user may wish to | |
1743 | examine the return value from memcpy. | |
1744 | ||
1745 | For targets where libcalls and normal calls have different conventions | |
3a94c984 | 1746 | for returning pointers, we could end up generating incorrect code. |
52cf7115 JL |
1747 | |
1748 | So instead of using a libcall sequence we build up a suitable | |
1749 | CALL_EXPR and expand the call in the normal fashion. */ | |
1750 | if (fn == NULL_TREE) | |
1751 | { | |
1752 | tree fntype; | |
1753 | ||
1754 | /* This was copied from except.c; I don't know whether all of it |
1755 | is necessary in this context. */ |
1756 | fn = get_identifier ("memcpy"); | |
52cf7115 JL |
1757 | fntype = build_pointer_type (void_type_node); |
1758 | fntype = build_function_type (fntype, NULL_TREE); | |
1759 | fn = build_decl (FUNCTION_DECL, fn, fntype); | |
3a94c984 | 1760 | ggc_add_tree_root (&fn, 1); |
52cf7115 JL |
1761 | DECL_EXTERNAL (fn) = 1; |
1762 | TREE_PUBLIC (fn) = 1; | |
1763 | DECL_ARTIFICIAL (fn) = 1; | |
0d97bf4c | 1764 | TREE_NOTHROW (fn) = 1; |
6496a589 | 1765 | make_decl_rtl (fn, NULL); |
52cf7115 | 1766 | assemble_external (fn); |
52cf7115 JL |
1767 | } |
1768 | ||
3a94c984 | 1769 | /* We need to make an argument list for the function call. |
52cf7115 JL |
1770 | |
1771 | memcpy has three arguments, the first two are void * addresses and | |
1772 | the last is a size_t byte count for the copy. */ | |
1773 | arg_list | |
1774 | = build_tree_list (NULL_TREE, | |
4bc973ae | 1775 | make_tree (build_pointer_type (void_type_node), x)); |
52cf7115 JL |
1776 | TREE_CHAIN (arg_list) |
1777 | = build_tree_list (NULL_TREE, | |
4bc973ae | 1778 | make_tree (build_pointer_type (void_type_node), y)); |
52cf7115 JL |
1779 | TREE_CHAIN (TREE_CHAIN (arg_list)) |
1780 | = build_tree_list (NULL_TREE, make_tree (sizetype, size)); | |
1781 | TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE; | |
1782 | ||
1783 | /* Now we have to build up the CALL_EXPR itself. */ | |
1784 | call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); | |
1785 | call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), | |
1786 | call_expr, arg_list, NULL_TREE); | |
1787 | TREE_SIDE_EFFECTS (call_expr) = 1; | |
1788 | ||
1789 | retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); | |
bbf6f052 | 1790 | #else |
ebb1b59a | 1791 | emit_library_call (bcopy_libfunc, LCT_NORMAL, |
fe7bbd2a | 1792 | VOIDmode, 3, y, Pmode, x, Pmode, |
3b6f75e2 JW |
1793 | convert_to_mode (TYPE_MODE (integer_type_node), size, |
1794 | TREE_UNSIGNED (integer_type_node)), | |
1795 | TYPE_MODE (integer_type_node)); | |
bbf6f052 | 1796 | #endif |
66c60e67 RK |
1797 | |
1798 | /* If we are initializing a readonly value, show the above call | |
1799 | clobbered it. Otherwise, a load from it may erroneously be hoisted | |
1800 | from a loop. */ | |
1801 | if (RTX_UNCHANGING_P (x)) | |
1802 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); | |
bbf6f052 | 1803 | } |
e9a25f70 JL |
1804 | |
1805 | return retval; | |
bbf6f052 RK |
1806 | } |
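/* Editor's illustrative sketch (not part of expr.c): the strategy
   cascade of emit_block_move as a plain decision function.  The
   constant PIECES_MAX and the predicate target_movstr are hypothetical
   stand-ins for MOVE_BY_PIECES_P and for a movstrMM pattern whose
   count operand, per the GET_MODE_MASK test above, only covers sizes
   up to half the mode mask.  */

#include <stdio.h>

#define PIECES_MAX 16   /* hypothetical MOVE_BY_PIECES_P threshold */

static int
target_movstr (unsigned long nbytes)
{
  return nbytes <= 0x7fff;   /* hypothetical widest count operand */
}

static const char *
block_move_strategy (int size_is_constant, unsigned long nbytes)
{
  if (size_is_constant && nbytes <= PIECES_MAX)
    return "move_by_pieces";
  if (target_movstr (nbytes))
    return "movstr pattern";
  return "memcpy/bcopy call";
}

int
main (void)
{
  printf ("%s\n", block_move_strategy (1, 8));        /* move_by_pieces */
  printf ("%s\n", block_move_strategy (0, 8));        /* movstr pattern */
  printf ("%s\n", block_move_strategy (1, 1000000));  /* memcpy/bcopy call */
  return 0;
}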
1807 | \f | |
1808 | /* Copy all or part of a value X into registers starting at REGNO. | |
1809 | The number of registers to be filled is NREGS. */ | |
1810 | ||
1811 | void | |
1812 | move_block_to_reg (regno, x, nregs, mode) | |
1813 | int regno; | |
1814 | rtx x; | |
1815 | int nregs; | |
1816 | enum machine_mode mode; | |
1817 | { | |
1818 | int i; | |
381127e8 | 1819 | #ifdef HAVE_load_multiple |
3a94c984 | 1820 | rtx pat; |
381127e8 RL |
1821 | rtx last; |
1822 | #endif | |
bbf6f052 | 1823 | |
72bb9717 RK |
1824 | if (nregs == 0) |
1825 | return; | |
1826 | ||
bbf6f052 RK |
1827 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) |
1828 | x = validize_mem (force_const_mem (mode, x)); | |
1829 | ||
1830 | /* See if the machine can do this with a load multiple insn. */ | |
1831 | #ifdef HAVE_load_multiple | |
c3a02afe | 1832 | if (HAVE_load_multiple) |
bbf6f052 | 1833 | { |
c3a02afe | 1834 | last = get_last_insn (); |
38a448ca | 1835 | pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x, |
c3a02afe RK |
1836 | GEN_INT (nregs)); |
1837 | if (pat) | |
1838 | { | |
1839 | emit_insn (pat); | |
1840 | return; | |
1841 | } | |
1842 | else | |
1843 | delete_insns_since (last); | |
bbf6f052 | 1844 | } |
bbf6f052 RK |
1845 | #endif |
1846 | ||
1847 | for (i = 0; i < nregs; i++) | |
38a448ca | 1848 | emit_move_insn (gen_rtx_REG (word_mode, regno + i), |
bbf6f052 RK |
1849 | operand_subword_force (x, i, mode)); |
1850 | } | |
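/* Editor's illustrative sketch (not part of expr.c): the word-by-word
   fallback of move_block_to_reg, with uint32_t slots standing in for
   consecutive hard registers and memcpy for operand_subword_force.
   Which half lands in which slot is host byte-order dependent here;
   the real code resolves that through WORDS_BIG_ENDIAN.  */

#include <stdio.h>
#include <string.h>
#include <stdint.h>

int
main (void)
{
  uint64_t x = 0x1122334455667788ULL;   /* two-word value to spread */
  uint32_t regs[2];                     /* consecutive "hard registers" */
  unsigned int i;

  for (i = 0; i < 2; i++)               /* one move insn per word */
    memcpy (&regs[i], (unsigned char *) &x + i * 4, 4);

  printf ("%08x %08x\n", (unsigned) regs[0], (unsigned) regs[1]);
  return 0;
}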
1851 | ||
1852 | /* Copy all or part of a BLKmode value X out of registers starting at REGNO. | |
0040593d JW |
1853 | The number of registers to be filled is NREGS. SIZE indicates the number |
1854 | of bytes in the object X. */ | |
1855 | ||
bbf6f052 | 1856 | void |
0040593d | 1857 | move_block_from_reg (regno, x, nregs, size) |
bbf6f052 RK |
1858 | int regno; |
1859 | rtx x; | |
1860 | int nregs; | |
0040593d | 1861 | int size; |
bbf6f052 RK |
1862 | { |
1863 | int i; | |
381127e8 | 1864 | #ifdef HAVE_store_multiple |
3a94c984 | 1865 | rtx pat; |
381127e8 RL |
1866 | rtx last; |
1867 | #endif | |
58a32c5c | 1868 | enum machine_mode mode; |
bbf6f052 | 1869 | |
2954d7db RK |
1870 | if (nregs == 0) |
1871 | return; | |
1872 | ||
58a32c5c DE |
1873 | /* If SIZE is that of a mode no bigger than a word, just use that |
1874 | mode's store operation. */ | |
1875 | if (size <= UNITS_PER_WORD | |
0d7839da SE |
1876 | && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode |
1877 | && !FUNCTION_ARG_REG_LITTLE_ENDIAN) | |
58a32c5c | 1878 | { |
792760b9 | 1879 | emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno)); |
58a32c5c DE |
1880 | return; |
1881 | } | |
3a94c984 | 1882 | |
0040593d | 1883 | /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned |
58a32c5c DE |
1884 | to the left before storing to memory. Note that the previous test |
1885 | doesn't handle all cases (e.g. SIZE == 3). */ | |
0d7839da SE |
1886 | if (size < UNITS_PER_WORD |
1887 | && BYTES_BIG_ENDIAN | |
1888 | && !FUNCTION_ARG_REG_LITTLE_ENDIAN) | |
0040593d JW |
1889 | { |
1890 | rtx tem = operand_subword (x, 0, 1, BLKmode); | |
1891 | rtx shift; | |
1892 | ||
1893 | if (tem == 0) | |
1894 | abort (); | |
1895 | ||
1896 | shift = expand_shift (LSHIFT_EXPR, word_mode, | |
38a448ca | 1897 | gen_rtx_REG (word_mode, regno), |
0040593d JW |
1898 | build_int_2 ((UNITS_PER_WORD - size) |
1899 | * BITS_PER_UNIT, 0), NULL_RTX, 0); | |
1900 | emit_move_insn (tem, shift); | |
1901 | return; | |
1902 | } | |
1903 | ||
bbf6f052 RK |
1904 | /* See if the machine can do this with a store multiple insn. */ |
1905 | #ifdef HAVE_store_multiple | |
c3a02afe | 1906 | if (HAVE_store_multiple) |
bbf6f052 | 1907 | { |
c3a02afe | 1908 | last = get_last_insn (); |
38a448ca | 1909 | pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno), |
c3a02afe RK |
1910 | GEN_INT (nregs)); |
1911 | if (pat) | |
1912 | { | |
1913 | emit_insn (pat); | |
1914 | return; | |
1915 | } | |
1916 | else | |
1917 | delete_insns_since (last); | |
bbf6f052 | 1918 | } |
bbf6f052 RK |
1919 | #endif |
1920 | ||
1921 | for (i = 0; i < nregs; i++) | |
1922 | { | |
1923 | rtx tem = operand_subword (x, i, 1, BLKmode); | |
1924 | ||
1925 | if (tem == 0) | |
1926 | abort (); | |
1927 | ||
38a448ca | 1928 | emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i)); |
bbf6f052 RK |
1929 | } |
1930 | } | |
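/* Editor's illustrative sketch (not part of expr.c): the
   left-justification move_block_from_reg performs for a block smaller
   than a word on a big-endian target.  With a hypothetical
   UNITS_PER_WORD of 4, a 3-byte object is shifted left by
   (UNITS_PER_WORD - size) * BITS_PER_UNIT = 8 bits so its bytes land
   at the low memory addresses of the stored word.  */

#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  const unsigned int units_per_word = 4;   /* hypothetical */
  const unsigned int size = 3;             /* bytes in the object */
  uint32_t reg = 0x00112233;               /* value right-justified in reg */
  uint32_t shifted = reg << ((units_per_word - size) * 8);

  printf ("%08x\n", (unsigned) shifted);   /* 11223300 */
  return 0;
}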
1931 | ||
aac5cc16 RH |
1932 | /* Emit code to move a block SRC to a block DST, where DST is non-consecutive |
1933 | registers represented by a PARALLEL. SSIZE represents the total size of | |
04050c69 | 1934 | block SRC in bytes, or -1 if not known. */ |
d6a7951f | 1935 | /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that |
aac5cc16 RH |
1936 | the balance will be in what would be the low-order memory addresses, i.e. |
1937 | left justified for big endian, right justified for little endian. This | |
1938 | happens to be true for the targets currently using this support. If this | |
1939 | ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING | |
1940 | would be needed. */ | |
fffa9c1d JW |
1941 | |
1942 | void | |
04050c69 | 1943 | emit_group_load (dst, orig_src, ssize) |
aac5cc16 | 1944 | rtx dst, orig_src; |
729a2125 | 1945 | int ssize; |
fffa9c1d | 1946 | { |
aac5cc16 RH |
1947 | rtx *tmps, src; |
1948 | int start, i; | |
fffa9c1d | 1949 | |
aac5cc16 | 1950 | if (GET_CODE (dst) != PARALLEL) |
fffa9c1d JW |
1951 | abort (); |
1952 | ||
1953 | /* Check for a NULL entry, used to indicate that the parameter goes | |
1954 | both on the stack and in registers. */ | |
aac5cc16 RH |
1955 | if (XEXP (XVECEXP (dst, 0, 0), 0)) |
1956 | start = 0; | |
fffa9c1d | 1957 | else |
aac5cc16 RH |
1958 | start = 1; |
1959 | ||
3a94c984 | 1960 | tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0)); |
aac5cc16 | 1961 | |
aac5cc16 RH |
1962 | /* Process the pieces. */ |
1963 | for (i = start; i < XVECLEN (dst, 0); i++) | |
1964 | { | |
1965 | enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0)); | |
770ae6cc RK |
1966 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); |
1967 | unsigned int bytelen = GET_MODE_SIZE (mode); | |
aac5cc16 RH |
1968 | int shift = 0; |
1969 | ||
1970 | /* Handle trailing fragments that run over the size of the struct. */ | |
8752c357 | 1971 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
aac5cc16 RH |
1972 | { |
1973 | shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
1974 | bytelen = ssize - bytepos; | |
1975 | if (bytelen <= 0) | |
729a2125 | 1976 | abort (); |
aac5cc16 RH |
1977 | } |
1978 | ||
f3ce87a9 DE |
1979 | /* If we won't be loading directly from memory, protect the real source |
1980 | from strange tricks we might play; but make sure that the source can | |
1981 | be loaded directly into the destination. */ | |
1982 | src = orig_src; | |
1983 | if (GET_CODE (orig_src) != MEM | |
1984 | && (!CONSTANT_P (orig_src) | |
1985 | || (GET_MODE (orig_src) != mode | |
1986 | && GET_MODE (orig_src) != VOIDmode))) | |
1987 | { | |
1988 | if (GET_MODE (orig_src) == VOIDmode) | |
1989 | src = gen_reg_rtx (mode); | |
1990 | else | |
1991 | src = gen_reg_rtx (GET_MODE (orig_src)); | |
04050c69 | 1992 | |
f3ce87a9 DE |
1993 | emit_move_insn (src, orig_src); |
1994 | } | |
1995 | ||
aac5cc16 RH |
1996 | /* Optimize the access just a bit. */ |
1997 | if (GET_CODE (src) == MEM | |
04050c69 | 1998 | && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode) |
729a2125 | 1999 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
aac5cc16 RH |
2000 | && bytelen == GET_MODE_SIZE (mode)) |
2001 | { | |
2002 | tmps[i] = gen_reg_rtx (mode); | |
f4ef873c | 2003 | emit_move_insn (tmps[i], adjust_address (src, mode, bytepos)); |
fffa9c1d | 2004 | } |
7c4a6db0 JW |
2005 | else if (GET_CODE (src) == CONCAT) |
2006 | { | |
cbb92744 JJ |
2007 | if ((bytepos == 0 |
2008 | && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))) | |
2009 | || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0))) | |
2010 | && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))) | |
2011 | { | |
2012 | tmps[i] = XEXP (src, bytepos != 0); | |
2013 | if (! CONSTANT_P (tmps[i]) | |
2014 | && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode)) | |
2015 | tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT, | |
2016 | 0, 1, NULL_RTX, mode, mode, ssize); | |
2017 | } | |
58f69841 JH |
2018 | else if (bytepos == 0) |
2019 | { | |
04050c69 RK |
2020 | rtx mem = assign_stack_temp (GET_MODE (src), |
2021 | GET_MODE_SIZE (GET_MODE (src)), 0); | |
58f69841 | 2022 | emit_move_insn (mem, src); |
04050c69 | 2023 | tmps[i] = adjust_address (mem, mode, 0); |
58f69841 | 2024 | } |
7c4a6db0 JW |
2025 | else |
2026 | abort (); | |
2027 | } | |
f3ce87a9 | 2028 | else if (CONSTANT_P (src) |
2ee5437b RH |
2029 | || (GET_CODE (src) == REG && GET_MODE (src) == mode)) |
2030 | tmps[i] = src; | |
fffa9c1d | 2031 | else |
19caa751 RK |
2032 | tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT, |
2033 | bytepos * BITS_PER_UNIT, 1, NULL_RTX, | |
04050c69 | 2034 | mode, mode, ssize); |
fffa9c1d | 2035 | |
aac5cc16 | 2036 | if (BYTES_BIG_ENDIAN && shift) |
19caa751 RK |
2037 | expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift), |
2038 | tmps[i], 0, OPTAB_WIDEN); | |
fffa9c1d | 2039 | } |
19caa751 | 2040 | |
3a94c984 | 2041 | emit_queue (); |
aac5cc16 RH |
2042 | |
2043 | /* Copy the extracted pieces into the proper (probable) hard regs. */ | |
2044 | for (i = start; i < XVECLEN (dst, 0); i++) | |
2045 | emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]); | |
fffa9c1d JW |
2046 | } |
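/* Editor's illustrative sketch (not part of expr.c): a PARALLEL is in
   effect a list of (register, byte offset) pairs, and emit_group_load
   gathers the pieces of a block into those registers.  Plain structs
   stand in for the rtl here, memcpy for extract_bit_field, and the
   uint64_t fill is host-endian rather than target-justified.  The
   trailing-fragment truncation mirrors the ssize check above;
   emit_group_store runs the same loop in the store direction.  */

#include <stdio.h>
#include <string.h>
#include <stdint.h>

struct piece { uint64_t reg; int bytepos; int bytelen; };

static void
group_load_sketch (struct piece *p, int n, const unsigned char *src,
		   int ssize)
{
  int i;

  for (i = 0; i < n; i++)
    {
      int len = p[i].bytelen;

      /* Handle a trailing fragment running past the block.  */
      if (ssize >= 0 && p[i].bytepos + len > ssize)
	len = ssize - p[i].bytepos;

      p[i].reg = 0;
      memcpy (&p[i].reg, src + p[i].bytepos, len);
    }
}

int
main (void)
{
  unsigned char blk[12] = "hello world";            /* 12-byte block */
  struct piece p[2] = { { 0, 0, 8 }, { 0, 8, 8 } };

  group_load_sketch (p, 2, blk, 12);   /* 2nd piece truncated to 4 bytes */
  printf ("%llx %llx\n", (unsigned long long) p[0].reg,
	  (unsigned long long) p[1].reg);
  return 0;
}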
2047 | ||
aac5cc16 RH |
2048 | /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive |
2049 | registers represented by a PARALLEL. SSIZE represents the total size of | |
04050c69 | 2050 | block DST, or -1 if not known. */ |
fffa9c1d JW |
2051 | |
2052 | void | |
04050c69 | 2053 | emit_group_store (orig_dst, src, ssize) |
aac5cc16 | 2054 | rtx orig_dst, src; |
729a2125 | 2055 | int ssize; |
fffa9c1d | 2056 | { |
aac5cc16 RH |
2057 | rtx *tmps, dst; |
2058 | int start, i; | |
fffa9c1d | 2059 | |
aac5cc16 | 2060 | if (GET_CODE (src) != PARALLEL) |
fffa9c1d JW |
2061 | abort (); |
2062 | ||
2063 | /* Check for a NULL entry, used to indicate that the parameter goes | |
2064 | both on the stack and in registers. */ | |
aac5cc16 RH |
2065 | if (XEXP (XVECEXP (src, 0, 0), 0)) |
2066 | start = 0; | |
fffa9c1d | 2067 | else |
aac5cc16 RH |
2068 | start = 1; |
2069 | ||
3a94c984 | 2070 | tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0)); |
fffa9c1d | 2071 | |
aac5cc16 RH |
2072 | /* Copy the (probable) hard regs into pseudos. */ |
2073 | for (i = start; i < XVECLEN (src, 0); i++) | |
fffa9c1d | 2074 | { |
aac5cc16 RH |
2075 | rtx reg = XEXP (XVECEXP (src, 0, i), 0); |
2076 | tmps[i] = gen_reg_rtx (GET_MODE (reg)); | |
2077 | emit_move_insn (tmps[i], reg); | |
2078 | } | |
3a94c984 | 2079 | emit_queue (); |
fffa9c1d | 2080 | |
aac5cc16 RH |
2081 | /* If we won't be storing directly into memory, protect the real destination |
2082 | from strange tricks we might play. */ | |
2083 | dst = orig_dst; | |
10a9f2be JW |
2084 | if (GET_CODE (dst) == PARALLEL) |
2085 | { | |
2086 | rtx temp; | |
2087 | ||
2088 | /* We can get a PARALLEL dst if there is a conditional expression in | |
2089 | a return statement. In that case, the dst and src are the same, | |
2090 | so no action is necessary. */ | |
2091 | if (rtx_equal_p (dst, src)) | |
2092 | return; | |
2093 | ||
2094 | /* It is unclear if we can ever reach here, but we may as well handle | |
2095 | it. Allocate a temporary, and split this into a store/load to/from | |
2096 | the temporary. */ | |
2097 | ||
2098 | temp = assign_stack_temp (GET_MODE (dst), ssize, 0); | |
04050c69 RK |
2099 | emit_group_store (temp, src, ssize); |
2100 | emit_group_load (dst, temp, ssize); | |
10a9f2be JW |
2101 | return; |
2102 | } | |
2103 | else if (GET_CODE (dst) != MEM) | |
aac5cc16 RH |
2104 | { |
2105 | dst = gen_reg_rtx (GET_MODE (orig_dst)); | |
2106 | /* Make life a bit easier for combine. */ | |
2107 | emit_move_insn (dst, const0_rtx); | |
2108 | } | |
aac5cc16 RH |
2109 | |
2110 | /* Process the pieces. */ | |
2111 | for (i = start; i < XVECLEN (src, 0); i++) | |
2112 | { | |
770ae6cc | 2113 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); |
aac5cc16 | 2114 | enum machine_mode mode = GET_MODE (tmps[i]); |
770ae6cc | 2115 | unsigned int bytelen = GET_MODE_SIZE (mode); |
aac5cc16 RH |
2116 | |
2117 | /* Handle trailing fragments that run over the size of the struct. */ | |
8752c357 | 2118 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
71bc0330 | 2119 | { |
aac5cc16 RH |
2120 | if (BYTES_BIG_ENDIAN) |
2121 | { | |
2122 | int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
2123 | expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift), | |
2124 | tmps[i], 0, OPTAB_WIDEN); | |
2125 | } | |
2126 | bytelen = ssize - bytepos; | |
71bc0330 | 2127 | } |
fffa9c1d | 2128 | |
aac5cc16 RH |
2129 | /* Optimize the access just a bit. */ |
2130 | if (GET_CODE (dst) == MEM | |
04050c69 | 2131 | && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode) |
729a2125 | 2132 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
aac5cc16 | 2133 | && bytelen == GET_MODE_SIZE (mode)) |
f4ef873c | 2134 | emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]); |
aac5cc16 | 2135 | else |
729a2125 | 2136 | store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, |
04050c69 | 2137 | mode, tmps[i], ssize); |
fffa9c1d | 2138 | } |
729a2125 | 2139 | |
3a94c984 | 2140 | emit_queue (); |
aac5cc16 RH |
2141 | |
2142 | /* Copy from the pseudo into the (probable) hard reg. */ | |
2143 | if (GET_CODE (dst) == REG) | |
2144 | emit_move_insn (orig_dst, dst); | |
fffa9c1d JW |
2145 | } |
2146 | ||
c36fce9a GRK |
2147 | /* Generate code to copy a BLKmode object of TYPE out of a |
2148 | set of registers starting with SRCREG into TGTBLK. If TGTBLK | |
2149 | is null, a stack temporary is created. TGTBLK is returned. | |
2150 | ||
2151 | The primary purpose of this routine is to handle functions | |
2152 | that return BLKmode structures in registers. Some machines | |
2153 | (the PA for example) want to return all small structures | |
3a94c984 | 2154 | in registers regardless of the structure's alignment. */ |
c36fce9a GRK |
2155 | |
2156 | rtx | |
19caa751 | 2157 | copy_blkmode_from_reg (tgtblk, srcreg, type) |
c36fce9a GRK |
2158 | rtx tgtblk; |
2159 | rtx srcreg; | |
2160 | tree type; | |
2161 | { | |
19caa751 RK |
2162 | unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type); |
2163 | rtx src = NULL, dst = NULL; | |
2164 | unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD); | |
2165 | unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0; | |
2166 | ||
2167 | if (tgtblk == 0) | |
2168 | { | |
1da68f56 RK |
2169 | tgtblk = assign_temp (build_qualified_type (type, |
2170 | (TYPE_QUALS (type) | |
2171 | | TYPE_QUAL_CONST)), | |
2172 | 0, 1, 1); | |
19caa751 RK |
2173 | preserve_temp_slots (tgtblk); |
2174 | } | |
3a94c984 | 2175 | |
1ed1b4fb RK |
2176 | /* This code assumes srcreg is at least a full word. If it isn't, copy it |
2177 | into a new pseudo which is a full word. | |
0d7839da | 2178 | |
1ed1b4fb RK |
2179 | If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy, |
2180 | the wrong part of the register gets copied so we fake a type conversion | |
2181 | in place. */ | |
19caa751 RK |
2182 | if (GET_MODE (srcreg) != BLKmode |
2183 | && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD) | |
1ed1b4fb RK |
2184 | { |
2185 | if (FUNCTION_ARG_REG_LITTLE_ENDIAN) | |
2186 | srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0); | |
2187 | else | |
2188 | srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type)); | |
2189 | } | |
19caa751 RK |
2190 | |
2191 | /* Structures whose size is not a multiple of a word are aligned | |
2192 | to the least significant byte (to the right). On a BYTES_BIG_ENDIAN | |
2193 | machine, this means we must skip the empty high order bytes when | |
2194 | calculating the bit offset. */ | |
0d7839da SE |
2195 | if (BYTES_BIG_ENDIAN |
2196 | && !FUNCTION_ARG_REG_LITTLE_ENDIAN | |
2197 | && bytes % UNITS_PER_WORD) | |
19caa751 RK |
2198 | big_endian_correction |
2199 | = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT)); | |
2200 | ||
2201 | /* Copy the structure BITSIZE bits at a time. |
3a94c984 | 2202 | |
19caa751 RK |
2203 | We could probably emit more efficient code for machines which do not use |
2204 | strict alignment, but it doesn't seem worth the effort at the current | |
2205 | time. */ | |
2206 | for (bitpos = 0, xbitpos = big_endian_correction; | |
2207 | bitpos < bytes * BITS_PER_UNIT; | |
2208 | bitpos += bitsize, xbitpos += bitsize) | |
2209 | { | |
3a94c984 | 2210 | /* We need a new source operand each time xbitpos is on a |
19caa751 RK |
2211 | word boundary and when xbitpos == big_endian_correction |
2212 | (the first time through). */ | |
2213 | if (xbitpos % BITS_PER_WORD == 0 | |
2214 | || xbitpos == big_endian_correction) | |
b47f8cfc JH |
2215 | src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, |
2216 | GET_MODE (srcreg)); | |
19caa751 RK |
2217 | |
2218 | /* We need a new destination operand each time bitpos is on | |
2219 | a word boundary. */ | |
2220 | if (bitpos % BITS_PER_WORD == 0) | |
2221 | dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode); | |
3a94c984 | 2222 | |
19caa751 RK |
2223 | /* Use xbitpos for the source extraction (right justified) and |
2224 | bitpos for the destination store (left justified). */ |
2225 | store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode, | |
2226 | extract_bit_field (src, bitsize, | |
2227 | xbitpos % BITS_PER_WORD, 1, | |
2228 | NULL_RTX, word_mode, word_mode, | |
04050c69 RK |
2229 | BITS_PER_WORD), |
2230 | BITS_PER_WORD); | |
19caa751 RK |
2231 | } |
2232 | ||
2233 | return tgtblk; | |
c36fce9a GRK |
2234 | } |
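/* Editor's illustrative sketch (not part of expr.c): the
   big_endian_correction arithmetic above, computed standalone.  With a
   hypothetical 32-bit word, a 6-byte structure occupies one full word
   plus 2 bytes, so extraction from the partial word must skip
   32 - 2 * 8 = 16 empty high-order bits.  */

#include <stdio.h>

int
main (void)
{
  const unsigned int bits_per_word = 32, bits_per_unit = 8;
  const unsigned int units_per_word = bits_per_word / bits_per_unit;
  unsigned int bytes = 6;                /* size of the structure */
  unsigned int correction = 0;

  if (bytes % units_per_word)
    correction = bits_per_word - (bytes % units_per_word) * bits_per_unit;

  printf ("%u\n", correction);           /* 16 */
  return 0;
}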
2235 | ||
94b25f81 RK |
2236 | /* Add a USE expression for REG to the (possibly empty) list pointed |
2237 | to by CALL_FUSAGE. REG must denote a hard register. */ | |
bbf6f052 RK |
2238 | |
2239 | void | |
b3f8cf4a RK |
2240 | use_reg (call_fusage, reg) |
2241 | rtx *call_fusage, reg; | |
2242 | { | |
0304dfbb DE |
2243 | if (GET_CODE (reg) != REG |
2244 | || REGNO (reg) >= FIRST_PSEUDO_REGISTER) | |
3a94c984 | 2245 | abort (); |
b3f8cf4a RK |
2246 | |
2247 | *call_fusage | |
38a448ca RH |
2248 | = gen_rtx_EXPR_LIST (VOIDmode, |
2249 | gen_rtx_USE (VOIDmode, reg), *call_fusage); | |
b3f8cf4a RK |
2250 | } |
2251 | ||
94b25f81 RK |
2252 | /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs, |
2253 | starting at REGNO. All of these registers must be hard registers. */ | |
b3f8cf4a RK |
2254 | |
2255 | void | |
0304dfbb DE |
2256 | use_regs (call_fusage, regno, nregs) |
2257 | rtx *call_fusage; | |
bbf6f052 RK |
2258 | int regno; |
2259 | int nregs; | |
2260 | { | |
0304dfbb | 2261 | int i; |
bbf6f052 | 2262 | |
0304dfbb DE |
2263 | if (regno + nregs > FIRST_PSEUDO_REGISTER) |
2264 | abort (); | |
2265 | ||
2266 | for (i = 0; i < nregs; i++) | |
38a448ca | 2267 | use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i)); |
bbf6f052 | 2268 | } |
fffa9c1d JW |
2269 | |
2270 | /* Add USE expressions to *CALL_FUSAGE for each REG contained in the | |
2271 | PARALLEL REGS. This is for calls that pass values in multiple | |
2272 | non-contiguous locations. The Irix 6 ABI has examples of this. */ | |
2273 | ||
2274 | void | |
2275 | use_group_regs (call_fusage, regs) | |
2276 | rtx *call_fusage; | |
2277 | rtx regs; | |
2278 | { | |
2279 | int i; | |
2280 | ||
6bd35f86 DE |
2281 | for (i = 0; i < XVECLEN (regs, 0); i++) |
2282 | { | |
2283 | rtx reg = XEXP (XVECEXP (regs, 0, i), 0); | |
fffa9c1d | 2284 | |
6bd35f86 DE |
2285 | /* A NULL entry means the parameter goes both on the stack and in |
2286 | registers. This can also be a MEM for targets that pass values | |
2287 | partially on the stack and partially in registers. */ | |
e9a25f70 | 2288 | if (reg != 0 && GET_CODE (reg) == REG) |
6bd35f86 DE |
2289 | use_reg (call_fusage, reg); |
2290 | } | |
fffa9c1d | 2291 | } |
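/* Editor's illustrative sketch (not part of expr.c): CALL_FUSAGE is
   grown by prepending one USE node per register, i.e. it is a
   cons-style singly linked list, which is what use_reg's
   gen_rtx_EXPR_LIST call builds.  The fixed node pool and all names
   here are hypothetical.  */

#include <stdio.h>

struct use_node { int regno; struct use_node *next; };

static struct use_node pool[8];
static int pool_used;

static void
use_reg_sketch (struct use_node **fusage, int regno)
{
  struct use_node *n = &pool[pool_used++];

  n->regno = regno;
  n->next = *fusage;   /* prepend, like gen_rtx_EXPR_LIST above */
  *fusage = n;
}

int
main (void)
{
  struct use_node *fusage = 0;
  int r;

  for (r = 3; r < 6; r++)   /* like use_regs over consecutive regs */
    use_reg_sketch (&fusage, r);

  for (; fusage; fusage = fusage->next)
    printf ("use reg %d\n", fusage->regno);   /* 5, 4, 3 */
  return 0;
}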
bbf6f052 | 2292 | \f |
57814e5e JJ |
2293 | |
2294 | int | |
2295 | can_store_by_pieces (len, constfun, constfundata, align) | |
2296 | unsigned HOST_WIDE_INT len; | |
2297 | rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); | |
2298 | PTR constfundata; | |
2299 | unsigned int align; | |
2300 | { | |
98166639 | 2301 | unsigned HOST_WIDE_INT max_size, l; |
57814e5e JJ |
2302 | HOST_WIDE_INT offset = 0; |
2303 | enum machine_mode mode, tmode; | |
2304 | enum insn_code icode; | |
2305 | int reverse; | |
2306 | rtx cst; | |
2307 | ||
2308 | if (! MOVE_BY_PIECES_P (len, align)) | |
2309 | return 0; | |
2310 | ||
2311 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) | |
2312 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) | |
2313 | align = MOVE_MAX * BITS_PER_UNIT; | |
2314 | ||
2315 | /* We would first store what we can in the largest integer mode, then go to | |
2316 | successively smaller modes. */ | |
2317 | ||
2318 | for (reverse = 0; | |
2319 | reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); | |
2320 | reverse++) | |
2321 | { | |
2322 | l = len; | |
2323 | mode = VOIDmode; | |
98166639 | 2324 | max_size = MOVE_MAX_PIECES + 1; |
57814e5e JJ |
2325 | while (max_size > 1) |
2326 | { | |
2327 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
2328 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2329 | if (GET_MODE_SIZE (tmode) < max_size) | |
2330 | mode = tmode; | |
2331 | ||
2332 | if (mode == VOIDmode) | |
2333 | break; | |
2334 | ||
2335 | icode = mov_optab->handlers[(int) mode].insn_code; | |
2336 | if (icode != CODE_FOR_nothing | |
2337 | && align >= GET_MODE_ALIGNMENT (mode)) | |
2338 | { | |
2339 | unsigned int size = GET_MODE_SIZE (mode); | |
2340 | ||
2341 | while (l >= size) | |
2342 | { | |
2343 | if (reverse) | |
2344 | offset -= size; | |
2345 | ||
2346 | cst = (*constfun) (constfundata, offset, mode); | |
2347 | if (!LEGITIMATE_CONSTANT_P (cst)) | |
2348 | return 0; | |
2349 | ||
2350 | if (!reverse) | |
2351 | offset += size; | |
2352 | ||
2353 | l -= size; | |
2354 | } | |
2355 | } | |
2356 | ||
2357 | max_size = GET_MODE_SIZE (mode); | |
2358 | } | |
2359 | ||
2360 | /* The code above should have handled everything. */ | |
2361 | if (l != 0) | |
2362 | abort (); | |
2363 | } | |
2364 | ||
2365 | return 1; | |
2366 | } | |
2367 | ||
2368 | /* Generate several move instructions to store LEN bytes generated by | |
2369 | CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a | |
2370 | pointer which will be passed as argument in every CONSTFUN call. | |
2371 | ALIGN is maximum alignment we can assume. */ | |
2372 | ||
2373 | void | |
2374 | store_by_pieces (to, len, constfun, constfundata, align) | |
2375 | rtx to; | |
2376 | unsigned HOST_WIDE_INT len; | |
2377 | rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); | |
2378 | PTR constfundata; | |
2379 | unsigned int align; | |
2380 | { | |
2381 | struct store_by_pieces data; | |
2382 | ||
2383 | if (! MOVE_BY_PIECES_P (len, align)) | |
2384 | abort (); | |
2385 | to = protect_from_queue (to, 1); | |
2386 | data.constfun = constfun; | |
2387 | data.constfundata = constfundata; | |
2388 | data.len = len; | |
2389 | data.to = to; | |
2390 | store_by_pieces_1 (&data, align); | |
2391 | } | |
2392 | ||
19caa751 RK |
2393 | /* Generate several move instructions to clear LEN bytes of block TO. (A MEM |
2394 | rtx with BLKmode). The caller must pass TO through protect_from_queue | |
2395 | before calling. ALIGN is maximum alignment we can assume. */ | |
9de08200 RK |
2396 | |
2397 | static void | |
2398 | clear_by_pieces (to, len, align) | |
2399 | rtx to; | |
3bdf5ad1 | 2400 | unsigned HOST_WIDE_INT len; |
729a2125 | 2401 | unsigned int align; |
9de08200 | 2402 | { |
57814e5e JJ |
2403 | struct store_by_pieces data; |
2404 | ||
2405 | data.constfun = clear_by_pieces_1; | |
df4ae160 | 2406 | data.constfundata = NULL; |
57814e5e JJ |
2407 | data.len = len; |
2408 | data.to = to; | |
2409 | store_by_pieces_1 (&data, align); | |
2410 | } | |
2411 | ||
2412 | /* Callback routine for clear_by_pieces. | |
2413 | Return const0_rtx unconditionally. */ | |
2414 | ||
2415 | static rtx | |
2416 | clear_by_pieces_1 (data, offset, mode) | |
2417 | PTR data ATTRIBUTE_UNUSED; | |
2418 | HOST_WIDE_INT offset ATTRIBUTE_UNUSED; | |
2419 | enum machine_mode mode ATTRIBUTE_UNUSED; | |
2420 | { | |
2421 | return const0_rtx; | |
2422 | } | |
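/* Editor's illustrative sketch (not part of expr.c): the constfun
   callback protocol shared by store_by_pieces and clear_by_pieces,
   modelled in plain C.  The callback is asked for the constant
   covering one piece at OFFSET; zero_fun plays the role of
   clear_by_pieces_1 returning const0_rtx.  The typedef and function
   names are hypothetical, and the sketch stores byte pieces where the
   real code picks the widest usable machine mode.  */

#include <stdio.h>

typedef unsigned long (*constfun_sketch_t) (void *data, long offset,
					    unsigned int size);

static unsigned long
zero_fun (void *data, long offset, unsigned int size)
{
  (void) data; (void) offset; (void) size;
  return 0;   /* like clear_by_pieces_1's const0_rtx */
}

static void
store_pieces_sketch (unsigned char *to, unsigned int len,
		     constfun_sketch_t fun, void *data)
{
  unsigned int i;

  for (i = 0; i < len; i++)
    to[i] = (unsigned char) fun (data, i, 1);
}

int
main (void)
{
  unsigned char buf[4] = { 0xff, 0xff, 0xff, 0xff };

  store_pieces_sketch (buf, sizeof buf, zero_fun, 0);
  printf ("%02x %02x %02x %02x\n", buf[0], buf[1], buf[2], buf[3]);
  return 0;
}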
2423 | ||
2424 | /* Subroutine of clear_by_pieces and store_by_pieces. | |
2425 | Generate several move instructions to store LEN bytes of block TO. (A MEM | |
2426 | rtx with BLKmode). The caller must pass TO through protect_from_queue | |
2427 | before calling. ALIGN is maximum alignment we can assume. */ | |
2428 | ||
2429 | static void | |
2430 | store_by_pieces_1 (data, align) | |
2431 | struct store_by_pieces *data; | |
2432 | unsigned int align; | |
2433 | { | |
2434 | rtx to_addr = XEXP (data->to, 0); | |
3bdf5ad1 | 2435 | unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1; |
fbe1758d AM |
2436 | enum machine_mode mode = VOIDmode, tmode; |
2437 | enum insn_code icode; | |
9de08200 | 2438 | |
57814e5e JJ |
2439 | data->offset = 0; |
2440 | data->to_addr = to_addr; | |
2441 | data->autinc_to | |
9de08200 RK |
2442 | = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC |
2443 | || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); | |
2444 | ||
57814e5e JJ |
2445 | data->explicit_inc_to = 0; |
2446 | data->reverse | |
9de08200 | 2447 | = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); |
57814e5e JJ |
2448 | if (data->reverse) |
2449 | data->offset = data->len; | |
9de08200 | 2450 | |
57814e5e | 2451 | /* If storing requires more than two move insns, |
9de08200 RK |
2452 | copy addresses to registers (to make displacements shorter) |
2453 | and use post-increment if available. */ | |
57814e5e JJ |
2454 | if (!data->autinc_to |
2455 | && move_by_pieces_ninsns (data->len, align) > 2) | |
9de08200 | 2456 | { |
3a94c984 | 2457 | /* Determine the main mode we'll be using. */ |
fbe1758d AM |
2458 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2459 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2460 | if (GET_MODE_SIZE (tmode) < max_size) | |
2461 | mode = tmode; | |
2462 | ||
57814e5e | 2463 | if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) |
9de08200 | 2464 | { |
57814e5e JJ |
2465 | data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len)); |
2466 | data->autinc_to = 1; | |
2467 | data->explicit_inc_to = -1; | |
9de08200 | 2468 | } |
3bdf5ad1 | 2469 | |
57814e5e JJ |
2470 | if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse |
2471 | && ! data->autinc_to) | |
9de08200 | 2472 | { |
57814e5e JJ |
2473 | data->to_addr = copy_addr_to_reg (to_addr); |
2474 | data->autinc_to = 1; | |
2475 | data->explicit_inc_to = 1; | |
9de08200 | 2476 | } |
3bdf5ad1 | 2477 | |
57814e5e JJ |
2478 | if (!data->autinc_to && CONSTANT_P (to_addr)) |
2479 | data->to_addr = copy_addr_to_reg (to_addr); | |
9de08200 RK |
2480 | } |
2481 | ||
e1565e65 | 2482 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) |
19caa751 | 2483 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) |
bdb429a5 | 2484 | align = MOVE_MAX * BITS_PER_UNIT; |
9de08200 | 2485 | |
57814e5e | 2486 | /* First store what we can in the largest integer mode, then go to |
9de08200 RK |
2487 | successively smaller modes. */ |
2488 | ||
2489 | while (max_size > 1) | |
2490 | { | |
9de08200 RK |
2491 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2492 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2493 | if (GET_MODE_SIZE (tmode) < max_size) | |
2494 | mode = tmode; | |
2495 | ||
2496 | if (mode == VOIDmode) | |
2497 | break; | |
2498 | ||
2499 | icode = mov_optab->handlers[(int) mode].insn_code; | |
19caa751 | 2500 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
57814e5e | 2501 | store_by_pieces_2 (GEN_FCN (icode), mode, data); |
9de08200 RK |
2502 | |
2503 | max_size = GET_MODE_SIZE (mode); | |
2504 | } | |
2505 | ||
2506 | /* The code above should have handled everything. */ | |
57814e5e | 2507 | if (data->len != 0) |
9de08200 RK |
2508 | abort (); |
2509 | } | |
2510 | ||
57814e5e | 2511 | /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate |
9de08200 RK |
2512 | with move instructions for mode MODE. GENFUN is the gen_... function |
2513 | to make a move insn for that mode. DATA has all the other info. */ | |
2514 | ||
2515 | static void | |
57814e5e | 2516 | store_by_pieces_2 (genfun, mode, data) |
711d877c | 2517 | rtx (*genfun) PARAMS ((rtx, ...)); |
9de08200 | 2518 | enum machine_mode mode; |
57814e5e | 2519 | struct store_by_pieces *data; |
9de08200 | 2520 | { |
3bdf5ad1 | 2521 | unsigned int size = GET_MODE_SIZE (mode); |
57814e5e | 2522 | rtx to1, cst; |
9de08200 RK |
2523 | |
2524 | while (data->len >= size) | |
2525 | { | |
3bdf5ad1 RK |
2526 | if (data->reverse) |
2527 | data->offset -= size; | |
9de08200 | 2528 | |
3bdf5ad1 | 2529 | if (data->autinc_to) |
630036c6 JJ |
2530 | to1 = adjust_automodify_address (data->to, mode, data->to_addr, |
2531 | data->offset); | |
3a94c984 | 2532 | else |
f4ef873c | 2533 | to1 = adjust_address (data->to, mode, data->offset); |
9de08200 | 2534 | |
940da324 | 2535 | if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) |
57814e5e JJ |
2536 | emit_insn (gen_add2_insn (data->to_addr, |
2537 | GEN_INT (-(HOST_WIDE_INT) size))); | |
9de08200 | 2538 | |
57814e5e JJ |
2539 | cst = (*data->constfun) (data->constfundata, data->offset, mode); |
2540 | emit_insn ((*genfun) (to1, cst)); | |
3bdf5ad1 | 2541 | |
940da324 | 2542 | if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) |
9de08200 | 2543 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); |
9de08200 | 2544 | |
3bdf5ad1 RK |
2545 | if (! data->reverse) |
2546 | data->offset += size; | |
9de08200 RK |
2547 | |
2548 | data->len -= size; | |
2549 | } | |
2550 | } | |
2551 | \f | |
19caa751 | 2552 | /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is |
8ac61af7 | 2553 | its length in bytes. */ |
e9a25f70 JL |
2554 | |
2555 | rtx | |
8ac61af7 | 2556 | clear_storage (object, size) |
bbf6f052 | 2557 | rtx object; |
4c08eef0 | 2558 | rtx size; |
bbf6f052 | 2559 | { |
52cf7115 JL |
2560 | #ifdef TARGET_MEM_FUNCTIONS |
2561 | static tree fn; | |
2562 | tree call_expr, arg_list; | |
2563 | #endif | |
e9a25f70 | 2564 | rtx retval = 0; |
8ac61af7 RK |
2565 | unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object) |
2566 | : GET_MODE_ALIGNMENT (GET_MODE (object))); | |
e9a25f70 | 2567 | |
fcf1b822 RK |
2568 | /* If OBJECT is not BLKmode and SIZE is the same size as its mode, |
2569 | just move a zero. Otherwise, do this a piece at a time. */ | |
69ef87e2 | 2570 | if (GET_MODE (object) != BLKmode |
fcf1b822 | 2571 | && GET_CODE (size) == CONST_INT |
8752c357 | 2572 | && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size)) |
fcf1b822 RK |
2573 | emit_move_insn (object, CONST0_RTX (GET_MODE (object))); |
2574 | else | |
bbf6f052 | 2575 | { |
9de08200 RK |
2576 | object = protect_from_queue (object, 1); |
2577 | size = protect_from_queue (size, 0); | |
2578 | ||
2579 | if (GET_CODE (size) == CONST_INT | |
fbe1758d | 2580 | && MOVE_BY_PIECES_P (INTVAL (size), align)) |
9de08200 | 2581 | clear_by_pieces (object, INTVAL (size), align); |
9de08200 RK |
2582 | else |
2583 | { | |
2584 | /* Try the most limited insn first, because there's no point | |
2585 | including more than one in the machine description unless | |
2586 | the more limited one has some advantage. */ | |
2587 | ||
19caa751 | 2588 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); |
9de08200 RK |
2589 | enum machine_mode mode; |
2590 | ||
2591 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; | |
2592 | mode = GET_MODE_WIDER_MODE (mode)) | |
2593 | { | |
2594 | enum insn_code code = clrstr_optab[(int) mode]; | |
a995e389 | 2595 | insn_operand_predicate_fn pred; |
9de08200 RK |
2596 | |
2597 | if (code != CODE_FOR_nothing | |
2598 | /* We don't need MODE to be narrower than | |
2599 | BITS_PER_HOST_WIDE_INT here because if SIZE is less than | |
2600 | the mode mask, as it is returned by the macro, it will | |
2601 | definitely be less than the actual mode mask. */ | |
2602 | && ((GET_CODE (size) == CONST_INT | |
2603 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
e5e809f4 | 2604 | <= (GET_MODE_MASK (mode) >> 1))) |
9de08200 | 2605 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) |
a995e389 RH |
2606 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 |
2607 | || (*pred) (object, BLKmode)) | |
2608 | && ((pred = insn_data[(int) code].operand[2].predicate) == 0 | |
2609 | || (*pred) (opalign, VOIDmode))) | |
9de08200 RK |
2610 | { |
2611 | rtx op1; | |
2612 | rtx last = get_last_insn (); | |
2613 | rtx pat; | |
2614 | ||
2615 | op1 = convert_to_mode (mode, size, 1); | |
a995e389 RH |
2616 | pred = insn_data[(int) code].operand[1].predicate; |
2617 | if (pred != 0 && ! (*pred) (op1, mode)) | |
9de08200 RK |
2618 | op1 = copy_to_mode_reg (mode, op1); |
2619 | ||
2620 | pat = GEN_FCN ((int) code) (object, op1, opalign); | |
2621 | if (pat) | |
2622 | { | |
2623 | emit_insn (pat); | |
e9a25f70 | 2624 | return 0; |
9de08200 RK |
2625 | } |
2626 | else | |
2627 | delete_insns_since (last); | |
2628 | } | |
2629 | } | |
2630 | ||
4bc973ae | 2631 | /* OBJECT or SIZE may have been passed through protect_from_queue. |
9de08200 | 2632 | |
4bc973ae JL |
2633 | It is unsafe to save the value generated by protect_from_queue |
2634 | and reuse it later. Consider what happens if emit_queue is | |
2635 | called before the return value from protect_from_queue is used. | |
52cf7115 | 2636 | |
4bc973ae JL |
2637 | Expansion of the CALL_EXPR below will call emit_queue before |
2638 | we are finished emitting RTL for argument setup. So if we are | |
2639 | not careful we could get the wrong value for an argument. | |
52cf7115 | 2640 | |
4bc973ae JL |
2641 | To avoid this problem we go ahead and emit code to copy OBJECT |
2642 | and SIZE into new pseudos. We can then place those new pseudos | |
2643 | into an RTL_EXPR and use them later, even after a call to | |
2644 | emit_queue. | |
52cf7115 | 2645 | |
4bc973ae JL |
2646 | Note this is not strictly needed for library calls since they |
2647 | do not call emit_queue before loading their arguments. However, | |
2648 | we may need to have library calls call emit_queue in the future | |
2649 | since failing to do so could cause problems for targets which | |
2650 | define SMALL_REGISTER_CLASSES and pass arguments in registers. */ | |
2651 | object = copy_to_mode_reg (Pmode, XEXP (object, 0)); | |
52cf7115 | 2652 | |
4bc973ae JL |
2653 | #ifdef TARGET_MEM_FUNCTIONS |
2654 | size = copy_to_mode_reg (TYPE_MODE (sizetype), size); | |
2655 | #else | |
2656 | size = convert_to_mode (TYPE_MODE (integer_type_node), size, | |
2657 | TREE_UNSIGNED (integer_type_node)); | |
f3dc586a | 2658 | size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size); |
4bc973ae | 2659 | #endif |
52cf7115 | 2660 | |
4bc973ae JL |
2661 | #ifdef TARGET_MEM_FUNCTIONS |
2662 | /* It is incorrect to use the libcall calling conventions to call | |
2663 | memset in this context. | |
52cf7115 | 2664 | |
4bc973ae JL |
2665 | This could be a user call to memset and the user may wish to |
2666 | examine the return value from memset. | |
52cf7115 | 2667 | |
4bc973ae JL |
2668 | For targets where libcalls and normal calls have different |
2669 | conventions for returning pointers, we could end up generating | |
0d97bf4c | 2670 | incorrect code. |
4bc973ae JL |
2671 | |
2672 | So instead of using a libcall sequence we build up a suitable | |
2673 | CALL_EXPR and expand the call in the normal fashion. */ | |
2674 | if (fn == NULL_TREE) | |
2675 | { | |
2676 | tree fntype; | |
2677 | ||
2678 | /* This was copied from except.c; I don't know whether all of it |
2679 | is necessary in this context. */ |
2680 | fn = get_identifier ("memset"); | |
4bc973ae JL |
2681 | fntype = build_pointer_type (void_type_node); |
2682 | fntype = build_function_type (fntype, NULL_TREE); | |
2683 | fn = build_decl (FUNCTION_DECL, fn, fntype); | |
d7db6646 | 2684 | ggc_add_tree_root (&fn, 1); |
4bc973ae JL |
2685 | DECL_EXTERNAL (fn) = 1; |
2686 | TREE_PUBLIC (fn) = 1; | |
2687 | DECL_ARTIFICIAL (fn) = 1; | |
0d97bf4c | 2688 | TREE_NOTHROW (fn) = 1; |
6496a589 | 2689 | make_decl_rtl (fn, NULL); |
4bc973ae | 2690 | assemble_external (fn); |
4bc973ae JL |
2691 | } |
2692 | ||
3a94c984 | 2693 | /* We need to make an argument list for the function call. |
4bc973ae JL |
2694 | |
2695 | memset has three arguments: the first is a void * address, the |
4fe9b91c | 2696 | second an integer with the initialization value, the last is a |
4bc973ae JL |
2697 | size_t byte count for the copy. */ |
2698 | arg_list | |
2699 | = build_tree_list (NULL_TREE, | |
2700 | make_tree (build_pointer_type (void_type_node), | |
2701 | object)); | |
2702 | TREE_CHAIN (arg_list) | |
2703 | = build_tree_list (NULL_TREE, | |
3a94c984 | 2704 | make_tree (integer_type_node, const0_rtx)); |
4bc973ae JL |
2705 | TREE_CHAIN (TREE_CHAIN (arg_list)) |
2706 | = build_tree_list (NULL_TREE, make_tree (sizetype, size)); | |
2707 | TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE; | |
2708 | ||
2709 | /* Now we have to build up the CALL_EXPR itself. */ | |
2710 | call_expr = build1 (ADDR_EXPR, | |
2711 | build_pointer_type (TREE_TYPE (fn)), fn); | |
2712 | call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), | |
2713 | call_expr, arg_list, NULL_TREE); | |
2714 | TREE_SIDE_EFFECTS (call_expr) = 1; | |
2715 | ||
2716 | retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); | |
bbf6f052 | 2717 | #else |
ebb1b59a | 2718 | emit_library_call (bzero_libfunc, LCT_NORMAL, |
fe7bbd2a | 2719 | VOIDmode, 2, object, Pmode, size, |
9de08200 | 2720 | TYPE_MODE (integer_type_node)); |
bbf6f052 | 2721 | #endif |
66c60e67 RK |
2722 | |
2723 | /* If we are initializing a readonly value, show the above call | |
2724 | clobbered it. Otherwise, a load from it may erroneously be | |
2725 | hoisted from a loop. */ | |
2726 | if (RTX_UNCHANGING_P (object)) | |
2727 | emit_insn (gen_rtx_CLOBBER (VOIDmode, object)); | |
9de08200 | 2728 | } |
bbf6f052 | 2729 | } |
e9a25f70 JL |
2730 | |
2731 | return retval; | |
bbf6f052 RK |
2732 | } |
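/* Editor's illustrative sketch (not part of expr.c): clear_storage's
   fast path above emits a single move of zero when SIZE equals the
   object's own (non-BLK) mode size; only otherwise does it fall back
   to clear_by_pieces, a clrstr pattern, or the library call.  Here
   uint32_t stands in for such a mode and memset for the fallbacks.  */

#include <stdio.h>
#include <string.h>
#include <stdint.h>

static void
clear_storage_sketch (unsigned char *object, size_t mode_size, size_t size)
{
  if (mode_size == size && size == sizeof (uint32_t))
    {
      uint32_t zero = 0;               /* one move of CONST0_RTX */
      memcpy (object, &zero, sizeof zero);
    }
  else
    memset (object, 0, size);          /* piecewise or library fallback */
}

int
main (void)
{
  unsigned char buf[4] = { 1, 2, 3, 4 };

  clear_storage_sketch (buf, sizeof buf, sizeof buf);
  printf ("%d %d %d %d\n", buf[0], buf[1], buf[2], buf[3]);   /* 0 0 0 0 */
  return 0;
}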
2733 | ||
2734 | /* Generate code to copy Y into X. | |
2735 | Both Y and X must have the same mode, except that | |
2736 | Y can be a constant with VOIDmode. | |
2737 | This mode cannot be BLKmode; use emit_block_move for that. | |
2738 | ||
2739 | Return the last instruction emitted. */ | |
2740 | ||
2741 | rtx | |
2742 | emit_move_insn (x, y) | |
2743 | rtx x, y; | |
2744 | { | |
2745 | enum machine_mode mode = GET_MODE (x); | |
de1b33dd AO |
2746 | rtx y_cst = NULL_RTX; |
2747 | rtx last_insn; | |
bbf6f052 RK |
2748 | |
2749 | x = protect_from_queue (x, 1); | |
2750 | y = protect_from_queue (y, 0); | |
2751 | ||
2752 | if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode)) | |
2753 | abort (); | |
2754 | ||
ee5332b8 RH |
2755 | /* Never force constant_p_rtx to memory. */ |
2756 | if (GET_CODE (y) == CONSTANT_P_RTX) | |
2757 | ; | |
2758 | else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y)) | |
de1b33dd AO |
2759 | { |
2760 | y_cst = y; | |
2761 | y = force_const_mem (mode, y); | |
2762 | } | |
bbf6f052 RK |
2763 | |
2764 | /* If X or Y are memory references, verify that their addresses are valid | |
2765 | for the machine. */ | |
2766 | if (GET_CODE (x) == MEM | |
2767 | && ((! memory_address_p (GET_MODE (x), XEXP (x, 0)) | |
2768 | && ! push_operand (x, GET_MODE (x))) | |
2769 | || (flag_force_addr | |
2770 | && CONSTANT_ADDRESS_P (XEXP (x, 0))))) | |
792760b9 | 2771 | x = validize_mem (x); |
bbf6f052 RK |
2772 | |
2773 | if (GET_CODE (y) == MEM | |
2774 | && (! memory_address_p (GET_MODE (y), XEXP (y, 0)) | |
2775 | || (flag_force_addr | |
2776 | && CONSTANT_ADDRESS_P (XEXP (y, 0))))) | |
792760b9 | 2777 | y = validize_mem (y); |
bbf6f052 RK |
2778 | |
2779 | if (mode == BLKmode) | |
2780 | abort (); | |
2781 | ||
de1b33dd AO |
2782 | last_insn = emit_move_insn_1 (x, y); |
2783 | ||
2784 | if (y_cst && GET_CODE (x) == REG) | |
3d238248 | 2785 | set_unique_reg_note (last_insn, REG_EQUAL, y_cst); |
de1b33dd AO |
2786 | |
2787 | return last_insn; | |
261c4230 RS |
2788 | } |
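/* Editor's illustrative sketch (not part of expr.c): the constant-pool
   fallback above.  When Y is a constant the target cannot encode as an
   immediate (LEGITIMATE_CONSTANT_P fails), emit_move_insn spills it to
   pooled memory with force_const_mem, loads from there, and remembers
   the original constant via a REG_EQUAL note.  The 16-bit immediate
   range and all names below are hypothetical models, not rtl.  */

#include <stdio.h>

static long constant_pool[16];
static int pool_used;

static int
legitimate_constant_sketch (long c)
{
  return c >= -32768 && c < 32768;   /* pretend immediates are 16-bit */
}

static const long *
force_const_mem_sketch (long c)
{
  constant_pool[pool_used] = c;      /* place the constant in the pool */
  return &constant_pool[pool_used++];
}

int
main (void)
{
  long y = 0x12345678;
  long x;

  if (legitimate_constant_sketch (y))
    x = y;                              /* direct immediate move */
  else
    x = *force_const_mem_sketch (y);    /* load from the pool instead */

  printf ("%lx\n", x);                  /* 12345678 */
  return 0;
}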
2789 | ||
2790 | /* Low level part of emit_move_insn. | |
2791 | Called just like emit_move_insn, but assumes X and Y | |
2792 | are basically valid. */ | |
2793 | ||
2794 | rtx | |
2795 | emit_move_insn_1 (x, y) | |
2796 | rtx x, y; | |
2797 | { | |
2798 | enum machine_mode mode = GET_MODE (x); | |
2799 | enum machine_mode submode; | |
2800 | enum mode_class class = GET_MODE_CLASS (mode); | |
261c4230 | 2801 | |
dbbbbf3b | 2802 | if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE) |
3a94c984 | 2803 | abort (); |
76bbe028 | 2804 | |
bbf6f052 RK |
2805 | if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) |
2806 | return | |
2807 | emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y)); | |
2808 | ||
89742723 | 2809 | /* Expand complex moves by moving real part and imag part, if possible. */ |
7308a047 | 2810 | else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT) |
d0c76654 RK |
2811 | && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode) |
2812 | * BITS_PER_UNIT), | |
2813 | (class == MODE_COMPLEX_INT | |
2814 | ? MODE_INT : MODE_FLOAT), | |
2815 | 0)) | |
7308a047 RS |
2816 | && (mov_optab->handlers[(int) submode].insn_code |
2817 | != CODE_FOR_nothing)) | |
2818 | { | |
2819 | /* Don't split destination if it is a stack push. */ | |
2820 | int stack = push_operand (x, GET_MODE (x)); | |
7308a047 | 2821 | |
79ce92d7 | 2822 | #ifdef PUSH_ROUNDING |
1a06f5fe JH |
2823 | /* In case we output to the stack, but the size is smaller than the |
2824 | machine can push exactly, we need to use move instructions. */ |
2825 | if (stack | |
bb93b973 RK |
2826 | && (PUSH_ROUNDING (GET_MODE_SIZE (submode)) |
2827 | != GET_MODE_SIZE (submode))) | |
1a06f5fe JH |
2828 | { |
2829 | rtx temp; | |
bb93b973 | 2830 | HOST_WIDE_INT offset1, offset2; |
1a06f5fe JH |
2831 | |
2832 | /* Do not use anti_adjust_stack, since we don't want to update | |
2833 | stack_pointer_delta. */ | |
2834 | temp = expand_binop (Pmode, | |
2835 | #ifdef STACK_GROWS_DOWNWARD | |
2836 | sub_optab, | |
2837 | #else | |
2838 | add_optab, | |
2839 | #endif | |
2840 | stack_pointer_rtx, | |
2841 | GEN_INT | |
bb93b973 RK |
2842 | (PUSH_ROUNDING |
2843 | (GET_MODE_SIZE (GET_MODE (x)))), | |
2844 | stack_pointer_rtx, 0, OPTAB_LIB_WIDEN); | |
2845 | ||
1a06f5fe JH |
2846 | if (temp != stack_pointer_rtx) |
2847 | emit_move_insn (stack_pointer_rtx, temp); | |
bb93b973 | 2848 | |
1a06f5fe JH |
2849 | #ifdef STACK_GROWS_DOWNWARD |
2850 | offset1 = 0; | |
2851 | offset2 = GET_MODE_SIZE (submode); | |
2852 | #else | |
2853 | offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))); | |
2854 | offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))) | |
2855 | + GET_MODE_SIZE (submode)); | |
2856 | #endif | |
bb93b973 | 2857 | |
1a06f5fe JH |
2858 | emit_move_insn (change_address (x, submode, |
2859 | gen_rtx_PLUS (Pmode, | |
2860 | stack_pointer_rtx, | |
2861 | GEN_INT (offset1))), | |
2862 | gen_realpart (submode, y)); | |
2863 | emit_move_insn (change_address (x, submode, | |
2864 | gen_rtx_PLUS (Pmode, | |
2865 | stack_pointer_rtx, | |
2866 | GEN_INT (offset2))), | |
2867 | gen_imagpart (submode, y)); | |
2868 | } | |
e9c0bd54 | 2869 | else |
79ce92d7 | 2870 | #endif |
7308a047 RS |
2871 | /* If this is a stack push, push the highpart first, so it | 
2872 | will be in the argument order. | 
2873 | ||
2874 | In that case, change_address is used only to convert | |
2875 | the mode, not to change the address. */ | |
e9c0bd54 | 2876 | if (stack) |
c937357e | 2877 | { |
e33c0d66 RS |
2878 | /* Note that the real part always precedes the imag part in memory |
2879 | regardless of the machine's endianness. */ | 
c937357e RS |
2880 | #ifdef STACK_GROWS_DOWNWARD |
2881 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) | |
3bdf5ad1 | 2882 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
e33c0d66 | 2883 | gen_imagpart (submode, y))); |
c937357e | 2884 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
3bdf5ad1 | 2885 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
e33c0d66 | 2886 | gen_realpart (submode, y))); |
c937357e RS |
2887 | #else |
2888 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) | |
3bdf5ad1 | 2889 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
e33c0d66 | 2890 | gen_realpart (submode, y))); |
c937357e | 2891 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
3bdf5ad1 | 2892 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
e33c0d66 | 2893 | gen_imagpart (submode, y))); |
c937357e RS |
2894 | #endif |
2895 | } | |
2896 | else | |
2897 | { | |
235ae7be DM |
2898 | rtx realpart_x, realpart_y; |
2899 | rtx imagpart_x, imagpart_y; | |
2900 | ||
405f63da MM |
2901 | /* If this is a complex value with each part being smaller than a |
2902 | word, the usual calling sequence will likely pack the pieces into | |
2903 | a single register. Unfortunately, SUBREG of hard registers only | |
2904 | deals in terms of words, so we have a problem converting input | |
2905 | arguments to the CONCAT of two registers that is used elsewhere | |
2906 | for complex values. If this is before reload, we can copy it into | |
2907 | memory and reload. FIXME, we should see about using extract and | |
2908 | insert on integer registers, but complex short and complex char | |
2909 | variables should be rarely used. */ | |
3a94c984 | 2910 | if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD |
405f63da MM |
2911 | && (reload_in_progress | reload_completed) == 0) |
2912 | { | |
bb93b973 RK |
2913 | int packed_dest_p |
2914 | = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER); | |
2915 | int packed_src_p | |
2916 | = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER); | |
405f63da MM |
2917 | |
2918 | if (packed_dest_p || packed_src_p) | |
2919 | { | |
2920 | enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT) | |
2921 | ? MODE_FLOAT : MODE_INT); | |
2922 | ||
1da68f56 RK |
2923 | enum machine_mode reg_mode |
2924 | = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1); | |
405f63da MM |
2925 | |
2926 | if (reg_mode != BLKmode) | |
2927 | { | |
2928 | rtx mem = assign_stack_temp (reg_mode, | |
2929 | GET_MODE_SIZE (mode), 0); | |
f4ef873c | 2930 | rtx cmem = adjust_address (mem, mode, 0); |
405f63da | 2931 | |
1da68f56 RK |
2932 | cfun->cannot_inline |
2933 | = N_("function using short complex types cannot be inline"); | |
405f63da MM |
2934 | |
2935 | if (packed_dest_p) | |
2936 | { | |
2937 | rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0); | |
bb93b973 | 2938 | |
405f63da MM |
2939 | emit_move_insn_1 (cmem, y); |
2940 | return emit_move_insn_1 (sreg, mem); | |
2941 | } | |
2942 | else | |
2943 | { | |
2944 | rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0); | |
bb93b973 | 2945 | |
405f63da MM |
2946 | emit_move_insn_1 (mem, sreg); |
2947 | return emit_move_insn_1 (x, cmem); | |
2948 | } | |
2949 | } | |
2950 | } | |
2951 | } | |
2952 | ||
235ae7be DM |
2953 | realpart_x = gen_realpart (submode, x); |
2954 | realpart_y = gen_realpart (submode, y); | |
2955 | imagpart_x = gen_imagpart (submode, x); | |
2956 | imagpart_y = gen_imagpart (submode, y); | |
2957 | ||
2958 | /* Show the output dies here. This is necessary for SUBREGs | |
2959 | of pseudos since we cannot track their lifetimes correctly; | |
c14c6529 RH |
2960 | hard regs shouldn't appear here except as return values. |
2961 | We never want to emit such a clobber after reload. */ | |
2962 | if (x != y | |
235ae7be DM |
2963 | && ! (reload_in_progress || reload_completed) |
2964 | && (GET_CODE (realpart_x) == SUBREG | |
2965 | || GET_CODE (imagpart_x) == SUBREG)) | |
bb93b973 | 2966 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); |
2638126a | 2967 | |
c937357e | 2968 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
235ae7be | 2969 | (realpart_x, realpart_y)); |
c937357e | 2970 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
235ae7be | 2971 | (imagpart_x, imagpart_y)); |
c937357e | 2972 | } |
7308a047 | 2973 | |
7a1ab50a | 2974 | return get_last_insn (); |
7308a047 RS |
2975 | } |
2976 | ||
bbf6f052 RK |
2977 | /* This will handle any multi-word mode that lacks a move_insn pattern. |
2978 | However, you will get better code if you define such patterns, | |
2979 | even if they must turn into multiple assembler instructions. */ | |
a4320483 | 2980 | else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD) |
bbf6f052 RK |
2981 | { |
2982 | rtx last_insn = 0; | |
3ef1eef4 | 2983 | rtx seq, inner; |
235ae7be | 2984 | int need_clobber; |
bb93b973 | 2985 | int i; |
3a94c984 | 2986 | |
a98c9f1a RK |
2987 | #ifdef PUSH_ROUNDING |
2988 | ||
2989 | /* If X is a push on the stack, do the push now and replace | |
2990 | X with a reference to the stack pointer. */ | |
2991 | if (push_operand (x, GET_MODE (x))) | |
2992 | { | |
918a6124 GK |
2993 | rtx temp; |
2994 | enum rtx_code code; | |
2995 | ||
2996 | /* Do not use anti_adjust_stack, since we don't want to update | |
2997 | stack_pointer_delta. */ | |
2998 | temp = expand_binop (Pmode, | |
2999 | #ifdef STACK_GROWS_DOWNWARD | |
3000 | sub_optab, | |
3001 | #else | |
3002 | add_optab, | |
3003 | #endif | |
3004 | stack_pointer_rtx, | |
3005 | GEN_INT | |
bb93b973 RK |
3006 | (PUSH_ROUNDING |
3007 | (GET_MODE_SIZE (GET_MODE (x)))), | |
a426c92e | 3008 | stack_pointer_rtx, 0, OPTAB_LIB_WIDEN); |
bb93b973 | 3009 | |
918a6124 GK |
3010 | if (temp != stack_pointer_rtx) |
3011 | emit_move_insn (stack_pointer_rtx, temp); | |
3012 | ||
3013 | code = GET_CODE (XEXP (x, 0)); | |
bb93b973 | 3014 | |
918a6124 GK |
3015 | /* Just hope that small offsets off SP are OK. */ |
3016 | if (code == POST_INC) | |
3017 | temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | |
bb93b973 RK |
3018 | GEN_INT (-((HOST_WIDE_INT) |
3019 | GET_MODE_SIZE (GET_MODE (x))))); | |
918a6124 GK |
3020 | else if (code == POST_DEC) |
3021 | temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | |
3022 | GEN_INT (GET_MODE_SIZE (GET_MODE (x)))); | |
3023 | else | |
3024 | temp = stack_pointer_rtx; | |
3025 | ||
3026 | x = change_address (x, VOIDmode, temp); | |
a98c9f1a RK |
3027 | } |
3028 | #endif | |
3a94c984 | 3029 | |
3ef1eef4 RK |
3030 | /* If we are in reload, see if either operand is a MEM whose address |
3031 | is scheduled for replacement. */ | |
3032 | if (reload_in_progress && GET_CODE (x) == MEM | |
3033 | && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0)) | |
f1ec5147 | 3034 | x = replace_equiv_address_nv (x, inner); |
3ef1eef4 RK |
3035 | if (reload_in_progress && GET_CODE (y) == MEM |
3036 | && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0)) | |
f1ec5147 | 3037 | y = replace_equiv_address_nv (y, inner); |
3ef1eef4 | 3038 | |
235ae7be | 3039 | start_sequence (); |
15a7a8ec | 3040 | |
235ae7be | 3041 | need_clobber = 0; |
bbf6f052 | 3042 | for (i = 0; |
3a94c984 | 3043 | i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; |
bbf6f052 RK |
3044 | i++) |
3045 | { | |
3046 | rtx xpart = operand_subword (x, i, 1, mode); | |
3047 | rtx ypart = operand_subword (y, i, 1, mode); | |
3048 | ||
3049 | /* If we can't get a part of Y, put Y into memory if it is a | |
3050 | constant. Otherwise, force it into a register. If we still | |
3051 | can't get a part of Y, abort. */ | |
3052 | if (ypart == 0 && CONSTANT_P (y)) | |
3053 | { | |
3054 | y = force_const_mem (mode, y); | |
3055 | ypart = operand_subword (y, i, 1, mode); | |
3056 | } | |
3057 | else if (ypart == 0) | |
3058 | ypart = operand_subword_force (y, i, mode); | |
3059 | ||
3060 | if (xpart == 0 || ypart == 0) | |
3061 | abort (); | |
3062 | ||
235ae7be DM |
3063 | need_clobber |= (GET_CODE (xpart) == SUBREG); |
3064 | ||
bbf6f052 RK |
3065 | last_insn = emit_move_insn (xpart, ypart); |
3066 | } | |
6551fa4d | 3067 | |
235ae7be DM |
3068 | seq = gen_sequence (); |
3069 | end_sequence (); | |
3070 | ||
3071 | /* Show the output dies here. This is necessary for SUBREGs | |
3072 | of pseudos since we cannot track their lifetimes correctly; | |
3073 | hard regs shouldn't appear here except as return values. | |
3074 | We never want to emit such a clobber after reload. */ | |
3075 | if (x != y | |
3076 | && ! (reload_in_progress || reload_completed) | |
3077 | && need_clobber != 0) | |
bb93b973 | 3078 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); |
235ae7be DM |
3079 | |
3080 | emit_insn (seq); | |
3081 | ||
bbf6f052 RK |
3082 | return last_insn; |
3083 | } | |
3084 | else | |
3085 | abort (); | |
3086 | } | |
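/* Editor's sketch (not part of the original source): on a 32-bit
   target with SImode words and no DImode move pattern, the multi-word
   fallback above behaves roughly like this hand expansion; X and Y
   are assumed to be valid DImode operands.

       emit_move_insn (operand_subword (x, 0, 1, DImode),
                       operand_subword (y, 0, 1, DImode));
       emit_move_insn (operand_subword (x, 1, 1, DImode),
                       operand_subword (y, 1, 1, DImode));

   A (clobber (x)) is emitted first when a subword of X is a SUBREG of
   a pseudo, so that the pseudo's lifetime is tracked correctly.  */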
3087 | \f | |
3088 | /* Pushing data onto the stack. */ | |
3089 | ||
3090 | /* Push a block of length SIZE (perhaps variable) | |
3091 | and return an rtx to address the beginning of the block. | |
3092 | Note that it is not possible for the value returned to be a QUEUED. | |
3093 | The value may be virtual_outgoing_args_rtx. | |
3094 | ||
3095 | EXTRA is the number of bytes of padding to push in addition to SIZE. | |
3096 | BELOW nonzero means this padding comes at low addresses; | |
3097 | otherwise, the padding comes at high addresses. */ | |
3098 | ||
3099 | rtx | |
3100 | push_block (size, extra, below) | |
3101 | rtx size; | |
3102 | int extra, below; | |
3103 | { | |
b3694847 | 3104 | rtx temp; |
88f63c77 RK |
3105 | |
3106 | size = convert_modes (Pmode, ptr_mode, size, 1); | |
bbf6f052 RK |
3107 | if (CONSTANT_P (size)) |
3108 | anti_adjust_stack (plus_constant (size, extra)); | |
3109 | else if (GET_CODE (size) == REG && extra == 0) | |
3110 | anti_adjust_stack (size); | |
3111 | else | |
3112 | { | |
ce48579b | 3113 | temp = copy_to_mode_reg (Pmode, size); |
bbf6f052 | 3114 | if (extra != 0) |
906c4e36 | 3115 | temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra), |
bbf6f052 RK |
3116 | temp, 0, OPTAB_LIB_WIDEN); |
3117 | anti_adjust_stack (temp); | |
3118 | } | |
3119 | ||
f73ad30e | 3120 | #ifndef STACK_GROWS_DOWNWARD |
f73ad30e | 3121 | if (0) |
f73ad30e JH |
3122 | #else |
3123 | if (1) | |
bbf6f052 | 3124 | #endif |
f73ad30e | 3125 | { |
f73ad30e JH |
3126 | temp = virtual_outgoing_args_rtx; |
3127 | if (extra != 0 && below) | |
3128 | temp = plus_constant (temp, extra); | |
3129 | } | |
3130 | else | |
3131 | { | |
3132 | if (GET_CODE (size) == CONST_INT) | |
3133 | temp = plus_constant (virtual_outgoing_args_rtx, | |
3a94c984 | 3134 | -INTVAL (size) - (below ? 0 : extra)); |
f73ad30e JH |
3135 | else if (extra != 0 && !below) |
3136 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
3bdf5ad1 | 3137 | negate_rtx (Pmode, plus_constant (size, extra))); |
f73ad30e JH |
3138 | else |
3139 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
3140 | negate_rtx (Pmode, size)); | |
3141 | } | |
bbf6f052 RK |
3142 | |
3143 | return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp); | |
3144 | } | |
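/* Editor's sketch (hypothetical call, not in the original source):
   for the common constant case above,

       rtx addr = push_block (GEN_INT (16), 0, 0);

   simply anti-adjusts the stack by 16 bytes and returns an address,
   based on virtual_outgoing_args_rtx, for the start of the block.  */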
3145 | ||
21d93687 RK |
3146 | #ifdef PUSH_ROUNDING |
3147 | ||
566aa174 | 3148 | /* Emit single push insn. */ |
21d93687 | 3149 | |
566aa174 JH |
3150 | static void |
3151 | emit_single_push_insn (mode, x, type) | |
3152 | rtx x; | |
3153 | enum machine_mode mode; | |
3154 | tree type; | |
3155 | { | |
566aa174 | 3156 | rtx dest_addr; |
918a6124 | 3157 | unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); |
566aa174 | 3158 | rtx dest; |
371b8fc0 JH |
3159 | enum insn_code icode; |
3160 | insn_operand_predicate_fn pred; | |
566aa174 | 3161 | |
371b8fc0 JH |
3162 | stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); |
3163 | /* If there is a push pattern, use it.  Otherwise try the old way of | 
3164 | throwing a MEM representing the push operation to the move expander. */ | 
3165 | icode = push_optab->handlers[(int) mode].insn_code; | |
3166 | if (icode != CODE_FOR_nothing) | |
3167 | { | |
3168 | if (((pred = insn_data[(int) icode].operand[0].predicate) | |
505ddab6 | 3169 | && !((*pred) (x, mode)))) |
371b8fc0 JH |
3170 | x = force_reg (mode, x); |
3171 | emit_insn (GEN_FCN (icode) (x)); | |
3172 | return; | |
3173 | } | |
566aa174 JH |
3174 | if (GET_MODE_SIZE (mode) == rounded_size) |
3175 | dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); | |
3176 | else | |
3177 | { | |
3178 | #ifdef STACK_GROWS_DOWNWARD | |
3179 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | |
505ddab6 | 3180 | GEN_INT (-(HOST_WIDE_INT) rounded_size)); |
566aa174 JH |
3181 | #else |
3182 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | |
3183 | GEN_INT (rounded_size)); | |
3184 | #endif | |
3185 | dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); | |
3186 | } | |
3187 | ||
3188 | dest = gen_rtx_MEM (mode, dest_addr); | |
3189 | ||
566aa174 JH |
3190 | if (type != 0) |
3191 | { | |
3192 | set_mem_attributes (dest, type, 1); | |
c3d32120 RK |
3193 | |
3194 | if (flag_optimize_sibling_calls) | |
3195 | /* Function incoming arguments may overlap with sibling call | |
3196 | outgoing arguments and we cannot allow reordering of reads | |
3197 | from function arguments with stores to outgoing arguments | |
3198 | of sibling calls. */ | |
3199 | set_mem_alias_set (dest, 0); | |
566aa174 JH |
3200 | } |
3201 | emit_move_insn (dest, x); | |
566aa174 | 3202 | } |
21d93687 | 3203 | #endif |
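/* Editor's sketch (illustrative only): when no push pattern exists
   and the mode's size equals its PUSH_ROUNDING, emit_single_push_insn
   above reduces to a move into a pre-decrement (or pre-increment)
   stack address, i.e. RTL of the shape

       (set (mem:SI (pre_dec:P (reg sp))) (reg:SI x))

   whereas a padded size goes through the PRE_MODIFY form built
   above.  */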
566aa174 | 3204 | |
bbf6f052 RK |
3205 | /* Generate code to push X onto the stack, assuming it has mode MODE and |
3206 | type TYPE. | |
3207 | MODE is redundant except when X is a CONST_INT (since they don't | |
3208 | carry mode info). | |
3209 | SIZE is an rtx for the size of data to be copied (in bytes), | |
3210 | needed only if X is BLKmode. | |
3211 | ||
f1eaaf73 | 3212 | ALIGN (in bits) is maximum alignment we can assume. |
bbf6f052 | 3213 | |
cd048831 RK |
3214 | If PARTIAL and REG are both nonzero, then copy that many of the first |
3215 | words of X into registers starting with REG, and push the rest of X. | |
bbf6f052 RK |
3216 | The amount of space pushed is decreased by PARTIAL words, |
3217 | rounded *down* to a multiple of PARM_BOUNDARY. | |
3218 | REG must be a hard register in this case. | |
cd048831 RK |
3219 | If REG is zero but PARTIAL is not, take all other actions for an | 
3220 | argument partially in registers, but do not actually load any | |
3221 | registers. | |
bbf6f052 RK |
3222 | |
3223 | EXTRA is the amount in bytes of extra space to leave next to this arg. | |
6dc42e49 | 3224 | This is ignored if an argument block has already been allocated. |
bbf6f052 RK |
3225 | |
3226 | On a machine that lacks real push insns, ARGS_ADDR is the address of | |
3227 | the bottom of the argument block for this call. We use indexing off there | |
3228 | to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an | 
3229 | argument block has not been preallocated. | |
3230 | ||
e5e809f4 JL |
3231 | ARGS_SO_FAR is the size of args previously pushed for this call. |
3232 | ||
3233 | REG_PARM_STACK_SPACE is nonzero if functions require stack space | |
3234 | for arguments passed in registers. If nonzero, it will be the number | |
3235 | of bytes required. */ | |
bbf6f052 RK |
3236 | |
3237 | void | |
3238 | emit_push_insn (x, mode, type, size, align, partial, reg, extra, | |
4fc026cd CM |
3239 | args_addr, args_so_far, reg_parm_stack_space, |
3240 | alignment_pad) | |
b3694847 | 3241 | rtx x; |
bbf6f052 RK |
3242 | enum machine_mode mode; |
3243 | tree type; | |
3244 | rtx size; | |
729a2125 | 3245 | unsigned int align; |
bbf6f052 RK |
3246 | int partial; |
3247 | rtx reg; | |
3248 | int extra; | |
3249 | rtx args_addr; | |
3250 | rtx args_so_far; | |
e5e809f4 | 3251 | int reg_parm_stack_space; |
4fc026cd | 3252 | rtx alignment_pad; |
bbf6f052 RK |
3253 | { |
3254 | rtx xinner; | |
3255 | enum direction stack_direction | |
3256 | #ifdef STACK_GROWS_DOWNWARD | |
3257 | = downward; | |
3258 | #else | |
3259 | = upward; | |
3260 | #endif | |
3261 | ||
3262 | /* Decide where to pad the argument: `downward' for below, | |
3263 | `upward' for above, or `none' for don't pad it. | |
3264 | Default is below for small data on big-endian machines; else above. */ | |
3265 | enum direction where_pad = FUNCTION_ARG_PADDING (mode, type); | |
3266 | ||
9e0e11bf GK |
3267 | /* Invert direction if stack is post-decrement. |
3268 | FIXME: why? */ | |
3269 | if (STACK_PUSH_CODE == POST_DEC) | |
bbf6f052 RK |
3270 | if (where_pad != none) |
3271 | where_pad = (where_pad == downward ? upward : downward); | |
3272 | ||
3273 | xinner = x = protect_from_queue (x, 0); | |
3274 | ||
3275 | if (mode == BLKmode) | |
3276 | { | |
3277 | /* Copy a block into the stack, entirely or partially. */ | |
3278 | ||
b3694847 | 3279 | rtx temp; |
bbf6f052 RK |
3280 | int used = partial * UNITS_PER_WORD; |
3281 | int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT); | |
3282 | int skip; | |
3a94c984 | 3283 | |
bbf6f052 RK |
3284 | if (size == 0) |
3285 | abort (); | |
3286 | ||
3287 | used -= offset; | |
3288 | ||
3289 | /* USED is now the # of bytes we need not copy to the stack | |
3290 | because registers will take care of them. */ | |
3291 | ||
3292 | if (partial != 0) | |
f4ef873c | 3293 | xinner = adjust_address (xinner, BLKmode, used); |
bbf6f052 RK |
3294 | |
3295 | /* If the partial register-part of the arg counts in its stack size, | |
3296 | skip the part of stack space corresponding to the registers. | |
3297 | Otherwise, start copying to the beginning of the stack space, | |
3298 | by setting SKIP to 0. */ | |
e5e809f4 | 3299 | skip = (reg_parm_stack_space == 0) ? 0 : used; |
bbf6f052 RK |
3300 | |
3301 | #ifdef PUSH_ROUNDING | |
3302 | /* Do it with several push insns if that doesn't take lots of insns | |
3303 | and if there is no difficulty with push insns that skip bytes | |
3304 | on the stack for alignment purposes. */ | |
3305 | if (args_addr == 0 | |
f73ad30e | 3306 | && PUSH_ARGS |
bbf6f052 RK |
3307 | && GET_CODE (size) == CONST_INT |
3308 | && skip == 0 | |
15914757 | 3309 | && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align)) |
bbf6f052 RK |
3310 | /* Here we avoid the case of a structure whose weak alignment |
3311 | forces many pushes of a small amount of data, | |
3312 | and such small pushes do rounding that causes trouble. */ | |
e1565e65 | 3313 | && ((! SLOW_UNALIGNED_ACCESS (word_mode, align)) |
19caa751 | 3314 | || align >= BIGGEST_ALIGNMENT |
f1eaaf73 DE |
3315 | || (PUSH_ROUNDING (align / BITS_PER_UNIT) |
3316 | == (align / BITS_PER_UNIT))) | |
bbf6f052 RK |
3317 | && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size)) |
3318 | { | |
3319 | /* Push padding now if padding above and stack grows down, | |
3320 | or if padding below and stack grows up. | |
3321 | But if space already allocated, this has already been done. */ | |
3322 | if (extra && args_addr == 0 | |
3323 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3324 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 | 3325 | |
566aa174 | 3326 | move_by_pieces (NULL, xinner, INTVAL (size) - used, align); |
bbf6f052 RK |
3327 | } |
3328 | else | |
3a94c984 | 3329 | #endif /* PUSH_ROUNDING */ |
bbf6f052 | 3330 | { |
7ab923cc JJ |
3331 | rtx target; |
3332 | ||
bbf6f052 RK |
3333 | /* Otherwise make space on the stack and copy the data |
3334 | to the address of that space. */ | |
3335 | ||
3336 | /* Deduct words put into registers from the size we must copy. */ | |
3337 | if (partial != 0) | |
3338 | { | |
3339 | if (GET_CODE (size) == CONST_INT) | |
906c4e36 | 3340 | size = GEN_INT (INTVAL (size) - used); |
bbf6f052 RK |
3341 | else |
3342 | size = expand_binop (GET_MODE (size), sub_optab, size, | |
906c4e36 RK |
3343 | GEN_INT (used), NULL_RTX, 0, |
3344 | OPTAB_LIB_WIDEN); | |
bbf6f052 RK |
3345 | } |
3346 | ||
3347 | /* Get the address of the stack space. | |
3348 | In this case, we do not deal with EXTRA separately. | |
3349 | A single stack adjust will do. */ | |
3350 | if (! args_addr) | |
3351 | { | |
3352 | temp = push_block (size, extra, where_pad == downward); | |
3353 | extra = 0; | |
3354 | } | |
3355 | else if (GET_CODE (args_so_far) == CONST_INT) | |
3356 | temp = memory_address (BLKmode, | |
3357 | plus_constant (args_addr, | |
3358 | skip + INTVAL (args_so_far))); | |
3359 | else | |
3360 | temp = memory_address (BLKmode, | |
38a448ca RH |
3361 | plus_constant (gen_rtx_PLUS (Pmode, |
3362 | args_addr, | |
3363 | args_so_far), | |
bbf6f052 | 3364 | skip)); |
3a94c984 | 3365 | target = gen_rtx_MEM (BLKmode, temp); |
7ab923cc | 3366 | |
3a94c984 KH |
3367 | if (type != 0) |
3368 | { | |
3369 | set_mem_attributes (target, type, 1); | |
3370 | /* Function incoming arguments may overlap with sibling call | |
3371 | outgoing arguments and we cannot allow reordering of reads | |
3372 | from function arguments with stores to outgoing arguments | |
3373 | of sibling calls. */ | |
ba4828e0 | 3374 | set_mem_alias_set (target, 0); |
3a94c984 | 3375 | } |
8ac61af7 RK |
3376 | else |
3377 | set_mem_align (target, align); | |
7ab923cc | 3378 | |
bbf6f052 RK |
3379 | /* TEMP is the address of the block. Copy the data there. */ |
3380 | if (GET_CODE (size) == CONST_INT | |
729a2125 | 3381 | && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)) |
bbf6f052 | 3382 | { |
7ab923cc | 3383 | move_by_pieces (target, xinner, INTVAL (size), align); |
bbf6f052 RK |
3384 | goto ret; |
3385 | } | |
e5e809f4 | 3386 | else |
bbf6f052 | 3387 | { |
19caa751 | 3388 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); |
e5e809f4 | 3389 | enum machine_mode mode; |
3bdf5ad1 | 3390 | |
e5e809f4 JL |
3391 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
3392 | mode != VOIDmode; | |
3393 | mode = GET_MODE_WIDER_MODE (mode)) | |
c841050e | 3394 | { |
e5e809f4 | 3395 | enum insn_code code = movstr_optab[(int) mode]; |
a995e389 | 3396 | insn_operand_predicate_fn pred; |
e5e809f4 JL |
3397 | |
3398 | if (code != CODE_FOR_nothing | |
3399 | && ((GET_CODE (size) == CONST_INT | |
3400 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
3401 | <= (GET_MODE_MASK (mode) >> 1))) | |
3402 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) | |
a995e389 RH |
3403 | && (!(pred = insn_data[(int) code].operand[0].predicate) |
3404 | || ((*pred) (target, BLKmode))) | |
3405 | && (!(pred = insn_data[(int) code].operand[1].predicate) | |
3406 | || ((*pred) (xinner, BLKmode))) | |
3407 | && (!(pred = insn_data[(int) code].operand[3].predicate) | |
3408 | || ((*pred) (opalign, VOIDmode)))) | |
e5e809f4 JL |
3409 | { |
3410 | rtx op2 = convert_to_mode (mode, size, 1); | |
3411 | rtx last = get_last_insn (); | |
3412 | rtx pat; | |
3413 | ||
a995e389 RH |
3414 | pred = insn_data[(int) code].operand[2].predicate; |
3415 | if (pred != 0 && ! (*pred) (op2, mode)) | |
e5e809f4 JL |
3416 | op2 = copy_to_mode_reg (mode, op2); |
3417 | ||
3418 | pat = GEN_FCN ((int) code) (target, xinner, | |
3419 | op2, opalign); | |
3420 | if (pat) | |
3421 | { | |
3422 | emit_insn (pat); | |
3423 | goto ret; | |
3424 | } | |
3425 | else | |
3426 | delete_insns_since (last); | |
3427 | } | |
c841050e | 3428 | } |
bbf6f052 | 3429 | } |
bbf6f052 | 3430 | |
f73ad30e JH |
3431 | if (!ACCUMULATE_OUTGOING_ARGS) |
3432 | { | |
3433 | /* If the source is referenced relative to the stack pointer, | |
3434 | copy it to another register to stabilize it. We do not need | |
3435 | to do this if we know that we won't be changing sp. */ | |
bbf6f052 | 3436 | |
f73ad30e JH |
3437 | if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp) |
3438 | || reg_mentioned_p (virtual_outgoing_args_rtx, temp)) | |
3439 | temp = copy_to_reg (temp); | |
3440 | } | |
bbf6f052 RK |
3441 | |
3442 | /* Make inhibit_defer_pop nonzero around the library call | |
3443 | to force it to pop the bcopy-arguments right away. */ | |
3444 | NO_DEFER_POP; | |
3445 | #ifdef TARGET_MEM_FUNCTIONS | |
ebb1b59a | 3446 | emit_library_call (memcpy_libfunc, LCT_NORMAL, |
bbf6f052 | 3447 | VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode, |
0fa83258 RK |
3448 | convert_to_mode (TYPE_MODE (sizetype), |
3449 | size, TREE_UNSIGNED (sizetype)), | |
26ba80fc | 3450 | TYPE_MODE (sizetype)); |
bbf6f052 | 3451 | #else |
ebb1b59a | 3452 | emit_library_call (bcopy_libfunc, LCT_NORMAL, |
bbf6f052 | 3453 | VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode, |
3b6f75e2 JW |
3454 | convert_to_mode (TYPE_MODE (integer_type_node), |
3455 | size, | |
3456 | TREE_UNSIGNED (integer_type_node)), | |
3457 | TYPE_MODE (integer_type_node)); | |
bbf6f052 RK |
3458 | #endif |
3459 | OK_DEFER_POP; | |
3460 | } | |
3461 | } | |
3462 | else if (partial > 0) | |
3463 | { | |
3464 | /* Scalar partly in registers. */ | |
3465 | ||
3466 | int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD; | |
3467 | int i; | |
3468 | int not_stack; | |
3469 | /* # words of start of argument | |
3470 | that we must make space for but need not store. */ | |
3471 | int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD); | |
3472 | int args_offset = INTVAL (args_so_far); | |
3473 | int skip; | |
3474 | ||
3475 | /* Push padding now if padding above and stack grows down, | |
3476 | or if padding below and stack grows up. | |
3477 | But if space already allocated, this has already been done. */ | |
3478 | if (extra && args_addr == 0 | |
3479 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3480 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 RK |
3481 | |
3482 | /* If we make space by pushing it, we might as well push | |
3483 | the real data. Otherwise, we can leave OFFSET nonzero | |
3484 | and leave the space uninitialized. */ | |
3485 | if (args_addr == 0) | |
3486 | offset = 0; | |
3487 | ||
3488 | /* Now NOT_STACK gets the number of words that we don't need to | |
3489 | allocate on the stack. */ | |
3490 | not_stack = partial - offset; | |
3491 | ||
3492 | /* If the partial register-part of the arg counts in its stack size, | |
3493 | skip the part of stack space corresponding to the registers. | |
3494 | Otherwise, start copying to the beginning of the stack space, | |
3495 | by setting SKIP to 0. */ | |
e5e809f4 | 3496 | skip = (reg_parm_stack_space == 0) ? 0 : not_stack; |
bbf6f052 RK |
3497 | |
3498 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) | |
3499 | x = validize_mem (force_const_mem (mode, x)); | |
3500 | ||
3501 | /* If X is a hard register in a non-integer mode, copy it into a pseudo; | |
3502 | SUBREGs of such registers are not allowed. */ | |
3503 | if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER | |
3504 | && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)) | |
3505 | x = copy_to_reg (x); | |
3506 | ||
3507 | /* Loop over all the words allocated on the stack for this arg. */ | |
3508 | /* We can do it by words, because any scalar bigger than a word | |
3509 | has a size that is a multiple of a word. */ | 
3510 | #ifndef PUSH_ARGS_REVERSED | |
3511 | for (i = not_stack; i < size; i++) | |
3512 | #else | |
3513 | for (i = size - 1; i >= not_stack; i--) | |
3514 | #endif | |
3515 | if (i >= not_stack + offset) | |
3516 | emit_push_insn (operand_subword_force (x, i, mode), | |
906c4e36 RK |
3517 | word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX, |
3518 | 0, args_addr, | |
3519 | GEN_INT (args_offset + ((i - not_stack + skip) | |
e5e809f4 | 3520 | * UNITS_PER_WORD)), |
4fc026cd | 3521 | reg_parm_stack_space, alignment_pad); |
bbf6f052 RK |
3522 | } |
3523 | else | |
3524 | { | |
3525 | rtx addr; | |
921b3427 | 3526 | rtx target = NULL_RTX; |
3bdf5ad1 | 3527 | rtx dest; |
bbf6f052 RK |
3528 | |
3529 | /* Push padding now if padding above and stack grows down, | |
3530 | or if padding below and stack grows up. | |
3531 | But if space already allocated, this has already been done. */ | |
3532 | if (extra && args_addr == 0 | |
3533 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3534 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 RK |
3535 | |
3536 | #ifdef PUSH_ROUNDING | |
f73ad30e | 3537 | if (args_addr == 0 && PUSH_ARGS) |
566aa174 | 3538 | emit_single_push_insn (mode, x, type); |
bbf6f052 RK |
3539 | else |
3540 | #endif | |
921b3427 RK |
3541 | { |
3542 | if (GET_CODE (args_so_far) == CONST_INT) | |
3543 | addr | |
3544 | = memory_address (mode, | |
3a94c984 | 3545 | plus_constant (args_addr, |
921b3427 | 3546 | INTVAL (args_so_far))); |
3a94c984 | 3547 | else |
38a448ca RH |
3548 | addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr, |
3549 | args_so_far)); | |
921b3427 | 3550 | target = addr; |
566aa174 JH |
3551 | dest = gen_rtx_MEM (mode, addr); |
3552 | if (type != 0) | |
3553 | { | |
3554 | set_mem_attributes (dest, type, 1); | |
3555 | /* Function incoming arguments may overlap with sibling call | |
3556 | outgoing arguments and we cannot allow reordering of reads | |
3557 | from function arguments with stores to outgoing arguments | |
3558 | of sibling calls. */ | |
ba4828e0 | 3559 | set_mem_alias_set (dest, 0); |
566aa174 | 3560 | } |
bbf6f052 | 3561 | |
566aa174 | 3562 | emit_move_insn (dest, x); |
566aa174 | 3563 | } |
921b3427 | 3564 | |
bbf6f052 RK |
3565 | } |
3566 | ||
3567 | ret: | |
3568 | /* If part should go in registers, copy that part | |
3569 | into the appropriate registers. Do this now, at the end, | |
3570 | since mem-to-mem copies above may do function calls. */ | |
cd048831 | 3571 | if (partial > 0 && reg != 0) |
fffa9c1d JW |
3572 | { |
3573 | /* Handle calls that pass values in multiple non-contiguous locations. | |
3574 | The Irix 6 ABI has examples of this. */ | |
3575 | if (GET_CODE (reg) == PARALLEL) | |
04050c69 | 3576 | emit_group_load (reg, x, -1); /* ??? size? */ |
fffa9c1d JW |
3577 | else |
3578 | move_block_to_reg (REGNO (reg), x, partial, mode); | |
3579 | } | |
bbf6f052 RK |
3580 | |
3581 | if (extra && args_addr == 0 && where_pad == stack_direction) | |
906c4e36 | 3582 | anti_adjust_stack (GEN_INT (extra)); |
3a94c984 | 3583 | |
3ea2292a | 3584 | if (alignment_pad && args_addr == 0) |
4fc026cd | 3585 | anti_adjust_stack (alignment_pad); |
bbf6f052 RK |
3586 | } |
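/* Editor's sketch, mirroring the recursive call in the
   scalar-partly-in-registers case above: pushing a single word of an
   argument when no argument block is preallocated looks like

       emit_push_insn (word, word_mode, NULL_TREE, NULL_RTX, align,
                       0, NULL_RTX, 0, args_addr, args_so_far,
                       reg_parm_stack_space, alignment_pad);

   SIZE is NULL_RTX because word_mode is not BLKmode.  */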
3587 | \f | |
296b4ed9 RK |
3588 | /* Return X if X can be used as a subtarget in a sequence of arithmetic |
3589 | operations. */ | |
3590 | ||
3591 | static rtx | |
3592 | get_subtarget (x) | |
3593 | rtx x; | |
3594 | { | |
3595 | return ((x == 0 | |
3596 | /* Only registers can be subtargets. */ | |
3597 | || GET_CODE (x) != REG | |
3598 | /* If the register is readonly, it can't be set more than once. */ | |
3599 | || RTX_UNCHANGING_P (x) | |
3600 | /* Don't use hard regs to avoid extending their life. */ | |
3601 | || REGNO (x) < FIRST_PSEUDO_REGISTER | |
3602 | /* Avoid subtargets inside loops, | |
3603 | since they hide some invariant expressions. */ | |
3604 | || preserve_subexpressions_p ()) | |
3605 | ? 0 : x); | |
3606 | } | |
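/* Editor's note (illustrative): callers use this as

       subtarget = get_subtarget (target);

   so that a pseudo-register target can hold intermediate results,
   while hard registers, RTX_UNCHANGING_P registers, and expansions
   inside loops fall back to 0, meaning "pick a fresh temporary".  */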
3607 | ||
bbf6f052 RK |
3608 | /* Expand an assignment that stores the value of FROM into TO. |
3609 | If WANT_VALUE is nonzero, return an rtx for the value of TO. | |
709f5be1 RS |
3610 | (This may contain a QUEUED rtx; |
3611 | if the value is constant, this rtx is a constant.) | |
3612 | Otherwise, the returned value is NULL_RTX. | |
bbf6f052 RK |
3613 | |
3614 | SUGGEST_REG is no longer actually used. | |
3615 | It used to mean, copy the value through a register | |
3616 | and return that register, if that is possible. | |
709f5be1 | 3617 | We now use WANT_VALUE to decide whether to do this. */ |
bbf6f052 RK |
3618 | |
3619 | rtx | |
3620 | expand_assignment (to, from, want_value, suggest_reg) | |
3621 | tree to, from; | |
3622 | int want_value; | |
c5c76735 | 3623 | int suggest_reg ATTRIBUTE_UNUSED; |
bbf6f052 | 3624 | { |
b3694847 | 3625 | rtx to_rtx = 0; |
bbf6f052 RK |
3626 | rtx result; |
3627 | ||
3628 | /* Don't crash if the lhs of the assignment was erroneous. */ | |
3629 | ||
3630 | if (TREE_CODE (to) == ERROR_MARK) | |
709f5be1 RS |
3631 | { |
3632 | result = expand_expr (from, NULL_RTX, VOIDmode, 0); | |
3633 | return want_value ? result : NULL_RTX; | |
3634 | } | |
bbf6f052 RK |
3635 | |
3636 | /* Assignment of a structure component needs special treatment | |
3637 | if the structure component's rtx is not simply a MEM. | |
6be58303 JW |
3638 | Assignment of an array element at a constant index, and assignment of |
3639 | an array element in an unaligned packed structure field, has the same | |
3640 | problem. */ | |
bbf6f052 | 3641 | |
08293add | 3642 | if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF |
b4e3fabb | 3643 | || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF) |
bbf6f052 RK |
3644 | { |
3645 | enum machine_mode mode1; | |
770ae6cc | 3646 | HOST_WIDE_INT bitsize, bitpos; |
a06ef755 | 3647 | rtx orig_to_rtx; |
7bb0943f | 3648 | tree offset; |
bbf6f052 RK |
3649 | int unsignedp; |
3650 | int volatilep = 0; | |
0088fcb1 RK |
3651 | tree tem; |
3652 | ||
3653 | push_temp_slots (); | |
839c4796 | 3654 | tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, |
a06ef755 | 3655 | &unsignedp, &volatilep); |
bbf6f052 RK |
3656 | |
3657 | /* If we are going to use store_bit_field and extract_bit_field, | |
3658 | make sure to_rtx will be safe for multiple use. */ | |
3659 | ||
3660 | if (mode1 == VOIDmode && want_value) | |
3661 | tem = stabilize_reference (tem); | |
3662 | ||
1ed1b4fb RK |
3663 | orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0); |
3664 | ||
7bb0943f RS |
3665 | if (offset != 0) |
3666 | { | |
e3c8ea67 | 3667 | rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM); |
7bb0943f RS |
3668 | |
3669 | if (GET_CODE (to_rtx) != MEM) | |
3670 | abort (); | |
bd070e1a RH |
3671 | |
3672 | if (GET_MODE (offset_rtx) != ptr_mode) | |
4b6c1672 RK |
3673 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); |
3674 | ||
bd070e1a | 3675 | #ifdef POINTERS_EXTEND_UNSIGNED |
4b6c1672 RK |
3676 | if (GET_MODE (offset_rtx) != Pmode) |
3677 | offset_rtx = convert_memory_address (Pmode, offset_rtx); | |
bd070e1a | 3678 | #endif |
bd070e1a | 3679 | |
9a7b9f4f JL |
3680 | /* A constant address in TO_RTX can have VOIDmode, we must not try |
3681 | to call force_reg for that case. Avoid that case. */ | |
89752202 HB |
3682 | if (GET_CODE (to_rtx) == MEM |
3683 | && GET_MODE (to_rtx) == BLKmode | |
9a7b9f4f | 3684 | && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode |
a06ef755 | 3685 | && bitsize > 0 |
3a94c984 | 3686 | && (bitpos % bitsize) == 0 |
89752202 | 3687 | && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 |
a06ef755 | 3688 | && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1)) |
89752202 | 3689 | { |
e3c8ea67 | 3690 | to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT); |
89752202 HB |
3691 | bitpos = 0; |
3692 | } | |
3693 | ||
0d4903b8 RK |
3694 | to_rtx = offset_address (to_rtx, offset_rtx, |
3695 | highest_pow2_factor (offset)); | |
7bb0943f | 3696 | } |
c5c76735 | 3697 | |
998d7deb RH |
3698 | if (GET_CODE (to_rtx) == MEM) |
3699 | { | |
3700 | tree old_expr = MEM_EXPR (to_rtx); | |
3701 | ||
3702 | /* If the field is at offset zero, we could have been given the | |
3703 | DECL_RTX of the parent struct. Don't munge it. */ | |
3704 | to_rtx = shallow_copy_rtx (to_rtx); | |
3705 | ||
3706 | set_mem_attributes (to_rtx, to, 0); | |
3707 | ||
3708 | /* If we changed MEM_EXPR, that means we're now referencing | |
3709 | the COMPONENT_REF, which means that MEM_OFFSET must be | |
3710 | relative to that field. But we've not yet reflected BITPOS | |
3711 | in TO_RTX. This will be done in store_field. Adjust for | |
3712 | that by biasing MEM_OFFSET by -bitpos. */ | |
3713 | if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx) | |
3714 | && (bitpos / BITS_PER_UNIT) != 0) | |
3715 | set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx)) | |
3716 | - (bitpos / BITS_PER_UNIT))); | |
3717 | } | |
effbcc6a | 3718 | |
a06ef755 RK |
3719 | /* Deal with volatile and readonly fields. The former is only done |
3720 | for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */ | |
3721 | if (volatilep && GET_CODE (to_rtx) == MEM) | |
3722 | { | |
3723 | if (to_rtx == orig_to_rtx) | |
3724 | to_rtx = copy_rtx (to_rtx); | |
3725 | MEM_VOLATILE_P (to_rtx) = 1; | |
bbf6f052 RK |
3726 | } |
3727 | ||
956d6950 JL |
3728 | if (TREE_CODE (to) == COMPONENT_REF |
3729 | && TREE_READONLY (TREE_OPERAND (to, 1))) | |
3730 | { | |
a06ef755 | 3731 | if (to_rtx == orig_to_rtx) |
956d6950 | 3732 | to_rtx = copy_rtx (to_rtx); |
956d6950 JL |
3733 | RTX_UNCHANGING_P (to_rtx) = 1; |
3734 | } | |
3735 | ||
a84b4898 | 3736 | if (GET_CODE (to_rtx) == MEM && ! can_address_p (to)) |
a06ef755 RK |
3737 | { |
3738 | if (to_rtx == orig_to_rtx) | |
3739 | to_rtx = copy_rtx (to_rtx); | |
3740 | MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; | |
3741 | } | |
3742 | ||
a06ef755 RK |
3743 | result = store_field (to_rtx, bitsize, bitpos, mode1, from, |
3744 | (want_value | |
3745 | /* Spurious cast for HPUX compiler. */ | |
3746 | ? ((enum machine_mode) | |
3747 | TYPE_MODE (TREE_TYPE (to))) | |
3748 | : VOIDmode), | |
3749 | unsignedp, TREE_TYPE (tem), get_alias_set (to)); | |
a69beca1 | 3750 | |
a06ef755 RK |
3751 | preserve_temp_slots (result); |
3752 | free_temp_slots (); | |
3753 | pop_temp_slots (); | |
a69beca1 | 3754 | |
a06ef755 RK |
3755 | /* If the value is meaningful, convert RESULT to the proper mode. |
3756 | Otherwise, return nothing. */ | |
3757 | return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)), | |
3758 | TYPE_MODE (TREE_TYPE (from)), | |
3759 | result, | |
3760 | TREE_UNSIGNED (TREE_TYPE (to))) | |
3761 | : NULL_RTX); | |
bbf6f052 RK |
3762 | } |
3763 | ||
cd1db108 RS |
3764 | /* If the rhs is a function call and its value is not an aggregate, |
3765 | call the function before we start to compute the lhs. | |
3766 | This is needed for correct code for cases such as | |
3767 | val = setjmp (buf) on machines where reference to val | |
1ad87b63 RK |
3768 | requires loading up part of an address in a separate insn. |
3769 | ||
1858863b JW |
3770 | Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG |
3771 | since it might be a promoted variable where the zero- or sign- extension | |
3772 | needs to be done. Handling this in the normal way is safe because no | |
3773 | computation is done before the call. */ | |
1ad87b63 | 3774 | if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from) |
b35cd3c1 | 3775 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST |
1858863b JW |
3776 | && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL) |
3777 | && GET_CODE (DECL_RTL (to)) == REG)) | |
cd1db108 | 3778 | { |
0088fcb1 RK |
3779 | rtx value; |
3780 | ||
3781 | push_temp_slots (); | |
3782 | value = expand_expr (from, NULL_RTX, VOIDmode, 0); | |
cd1db108 | 3783 | if (to_rtx == 0) |
37a08a29 | 3784 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); |
aaf87c45 | 3785 | |
fffa9c1d JW |
3786 | /* Handle calls that return values in multiple non-contiguous locations. |
3787 | The Irix 6 ABI has examples of this. */ | |
3788 | if (GET_CODE (to_rtx) == PARALLEL) | |
04050c69 | 3789 | emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from))); |
fffa9c1d | 3790 | else if (GET_MODE (to_rtx) == BLKmode) |
8ac61af7 | 3791 | emit_block_move (to_rtx, value, expr_size (from)); |
aaf87c45 | 3792 | else |
6419e5b0 DT |
3793 | { |
3794 | #ifdef POINTERS_EXTEND_UNSIGNED | |
0d4903b8 RK |
3795 | if (POINTER_TYPE_P (TREE_TYPE (to)) |
3796 | && GET_MODE (to_rtx) != GET_MODE (value)) | |
6419e5b0 DT |
3797 | value = convert_memory_address (GET_MODE (to_rtx), value); |
3798 | #endif | |
3799 | emit_move_insn (to_rtx, value); | |
3800 | } | |
cd1db108 RS |
3801 | preserve_temp_slots (to_rtx); |
3802 | free_temp_slots (); | |
0088fcb1 | 3803 | pop_temp_slots (); |
709f5be1 | 3804 | return want_value ? to_rtx : NULL_RTX; |
cd1db108 RS |
3805 | } |
3806 | ||
bbf6f052 RK |
3807 | /* Ordinary treatment. Expand TO to get a REG or MEM rtx. |
3808 | Don't re-expand if it was expanded already (in COMPONENT_REF case). */ | |
3809 | ||
3810 | if (to_rtx == 0) | |
37a08a29 | 3811 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); |
bbf6f052 | 3812 | |
86d38d25 | 3813 | /* Don't move directly into a return register. */ |
14a774a9 RK |
3814 | if (TREE_CODE (to) == RESULT_DECL |
3815 | && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL)) | |
86d38d25 | 3816 | { |
0088fcb1 RK |
3817 | rtx temp; |
3818 | ||
3819 | push_temp_slots (); | |
3820 | temp = expand_expr (from, 0, GET_MODE (to_rtx), 0); | |
14a774a9 RK |
3821 | |
3822 | if (GET_CODE (to_rtx) == PARALLEL) | |
04050c69 | 3823 | emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from))); |
14a774a9 RK |
3824 | else |
3825 | emit_move_insn (to_rtx, temp); | |
3826 | ||
86d38d25 RS |
3827 | preserve_temp_slots (to_rtx); |
3828 | free_temp_slots (); | |
0088fcb1 | 3829 | pop_temp_slots (); |
709f5be1 | 3830 | return want_value ? to_rtx : NULL_RTX; |
86d38d25 RS |
3831 | } |
3832 | ||
bbf6f052 RK |
3833 | /* In case we are returning the contents of an object which overlaps |
3834 | the place the value is being stored, use a safe function when copying | |
3835 | a value through a pointer into a structure value return block. */ | |
3836 | if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF | |
3837 | && current_function_returns_struct | |
3838 | && !current_function_returns_pcc_struct) | |
3839 | { | |
0088fcb1 RK |
3840 | rtx from_rtx, size; |
3841 | ||
3842 | push_temp_slots (); | |
33a20d10 | 3843 | size = expr_size (from); |
37a08a29 | 3844 | from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
3845 | |
3846 | #ifdef TARGET_MEM_FUNCTIONS | |
b215b52e | 3847 | emit_library_call (memmove_libfunc, LCT_NORMAL, |
bbf6f052 RK |
3848 | VOIDmode, 3, XEXP (to_rtx, 0), Pmode, |
3849 | XEXP (from_rtx, 0), Pmode, | |
0fa83258 RK |
3850 | convert_to_mode (TYPE_MODE (sizetype), |
3851 | size, TREE_UNSIGNED (sizetype)), | |
26ba80fc | 3852 | TYPE_MODE (sizetype)); |
bbf6f052 | 3853 | #else |
ebb1b59a | 3854 | emit_library_call (bcopy_libfunc, LCT_NORMAL, |
bbf6f052 RK |
3855 | VOIDmode, 3, XEXP (from_rtx, 0), Pmode, |
3856 | XEXP (to_rtx, 0), Pmode, | |
3b6f75e2 JW |
3857 | convert_to_mode (TYPE_MODE (integer_type_node), |
3858 | size, TREE_UNSIGNED (integer_type_node)), | |
3859 | TYPE_MODE (integer_type_node)); | |
bbf6f052 RK |
3860 | #endif |
3861 | ||
3862 | preserve_temp_slots (to_rtx); | |
3863 | free_temp_slots (); | |
0088fcb1 | 3864 | pop_temp_slots (); |
709f5be1 | 3865 | return want_value ? to_rtx : NULL_RTX; |
bbf6f052 RK |
3866 | } |
3867 | ||
3868 | /* Compute FROM and store the value in the rtx we got. */ | |
3869 | ||
0088fcb1 | 3870 | push_temp_slots (); |
bbf6f052 RK |
3871 | result = store_expr (from, to_rtx, want_value); |
3872 | preserve_temp_slots (result); | |
3873 | free_temp_slots (); | |
0088fcb1 | 3874 | pop_temp_slots (); |
709f5be1 | 3875 | return want_value ? result : NULL_RTX; |
bbf6f052 RK |
3876 | } |
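/* Editor's example (illustrative, not from the original source): for
   a bit-field store such as

       struct { int f : 3; } s;
       s.f = v;

   the COMPONENT_REF path above uses get_inner_reference to recover
   bitsize == 3 and F's bit position within S, then lets store_field
   emit the masked store into the containing word.  */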
3877 | ||
3878 | /* Generate code for computing expression EXP, | |
3879 | and storing the value into TARGET. | |
bbf6f052 RK |
3880 | TARGET may contain a QUEUED rtx. |
3881 | ||
709f5be1 RS |
3882 | If WANT_VALUE is nonzero, return a copy of the value |
3883 | not in TARGET, so that we can be sure to use the proper | |
3884 | value in a containing expression even if TARGET has something | |
3885 | else stored in it. If possible, we copy the value through a pseudo | |
3886 | and return that pseudo. Or, if the value is constant, we try to | |
3887 | return the constant. In some cases, we return a pseudo | |
3888 | copied *from* TARGET. | |
3889 | ||
3890 | If the mode is BLKmode then we may return TARGET itself. | |
3891 | It turns out that in BLKmode it doesn't cause a problem, | 
3892 | because C has no operators that could combine two different | |
3893 | assignments into the same BLKmode object with different values | |
3894 | with no sequence point. Will other languages need this to | |
3895 | be more thorough? | |
3896 | ||
3897 | If WANT_VALUE is 0, we return NULL, to make sure | |
3898 | to catch quickly any cases where the caller uses the value | |
3899 | and fails to set WANT_VALUE. */ | |
bbf6f052 RK |
3900 | |
3901 | rtx | |
709f5be1 | 3902 | store_expr (exp, target, want_value) |
b3694847 SS |
3903 | tree exp; |
3904 | rtx target; | |
709f5be1 | 3905 | int want_value; |
bbf6f052 | 3906 | { |
b3694847 | 3907 | rtx temp; |
bbf6f052 | 3908 | int dont_return_target = 0; |
e5408e52 | 3909 | int dont_store_target = 0; |
bbf6f052 RK |
3910 | |
3911 | if (TREE_CODE (exp) == COMPOUND_EXPR) | |
3912 | { | |
3913 | /* Perform first part of compound expression, then assign from second | |
3914 | part. */ | |
3915 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); | |
3916 | emit_queue (); | |
709f5be1 | 3917 | return store_expr (TREE_OPERAND (exp, 1), target, want_value); |
bbf6f052 RK |
3918 | } |
3919 | else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode) | |
3920 | { | |
3921 | /* For conditional expression, get safe form of the target. Then | |
3922 | test the condition, doing the appropriate assignment on either | |
3923 | side. This avoids the creation of unnecessary temporaries. | |
3924 | For non-BLKmode, it is more efficient not to do this. */ | |
3925 | ||
3926 | rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx (); | |
3927 | ||
3928 | emit_queue (); | |
3929 | target = protect_from_queue (target, 1); | |
3930 | ||
dabf8373 | 3931 | do_pending_stack_adjust (); |
bbf6f052 RK |
3932 | NO_DEFER_POP; |
3933 | jumpifnot (TREE_OPERAND (exp, 0), lab1); | |
956d6950 | 3934 | start_cleanup_deferral (); |
709f5be1 | 3935 | store_expr (TREE_OPERAND (exp, 1), target, 0); |
956d6950 | 3936 | end_cleanup_deferral (); |
bbf6f052 RK |
3937 | emit_queue (); |
3938 | emit_jump_insn (gen_jump (lab2)); | |
3939 | emit_barrier (); | |
3940 | emit_label (lab1); | |
956d6950 | 3941 | start_cleanup_deferral (); |
709f5be1 | 3942 | store_expr (TREE_OPERAND (exp, 2), target, 0); |
956d6950 | 3943 | end_cleanup_deferral (); |
bbf6f052 RK |
3944 | emit_queue (); |
3945 | emit_label (lab2); | |
3946 | OK_DEFER_POP; | |
a3a58acc | 3947 | |
709f5be1 | 3948 | return want_value ? target : NULL_RTX; |
bbf6f052 | 3949 | } |
bbf6f052 | 3950 | else if (queued_subexp_p (target)) |
709f5be1 RS |
3951 | /* If target contains a postincrement, let's not risk |
3952 | using it as the place to generate the rhs. */ | |
bbf6f052 RK |
3953 | { |
3954 | if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode) | |
3955 | { | |
3956 | /* Expand EXP into a new pseudo. */ | |
3957 | temp = gen_reg_rtx (GET_MODE (target)); | |
3958 | temp = expand_expr (exp, temp, GET_MODE (target), 0); | |
3959 | } | |
3960 | else | |
906c4e36 | 3961 | temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0); |
709f5be1 RS |
3962 | |
3963 | /* If target is volatile, ANSI requires accessing the value | |
3964 | *from* the target, if it is accessed. So make that happen. | |
3965 | In no case return the target itself. */ | |
3966 | if (! MEM_VOLATILE_P (target) && want_value) | |
3967 | dont_return_target = 1; | |
bbf6f052 | 3968 | } |
12f06d17 CH |
3969 | else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target) |
3970 | && GET_MODE (target) != BLKmode) | |
3971 | /* If target is in memory and caller wants value in a register instead, | |
3972 | arrange that. Pass TARGET as target for expand_expr so that, | |
3973 | if EXP is another assignment, WANT_VALUE will be nonzero for it. | |
3974 | We know expand_expr will not use the target in that case. | |
3975 | Don't do this if TARGET is volatile because we are supposed | |
3976 | to write it and then read it. */ | |
3977 | { | |
1da93fe0 | 3978 | temp = expand_expr (exp, target, GET_MODE (target), 0); |
12f06d17 | 3979 | if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode) |
e5408e52 JJ |
3980 | { |
3981 | /* If TEMP is already in the desired TARGET, only copy it from | |
3982 | memory and don't store it there again. */ | |
3983 | if (temp == target | |
3984 | || (rtx_equal_p (temp, target) | |
3985 | && ! side_effects_p (temp) && ! side_effects_p (target))) | |
3986 | dont_store_target = 1; | |
3987 | temp = copy_to_reg (temp); | |
3988 | } | |
12f06d17 CH |
3989 | dont_return_target = 1; |
3990 | } | |
1499e0a8 RK |
3991 | else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) |
3992 | /* If this is a scalar in a register that is stored in a wider mode | 
3993 | than the declared mode, compute the result into its declared mode | |
3994 | and then convert to the wider mode. Our value is the computed | |
3995 | expression. */ | |
3996 | { | |
b76b08ef RK |
3997 | rtx inner_target = 0; |
3998 | ||
5a32d038 | 3999 | /* If we don't want a value, we can do the conversion inside EXP, |
f635a84d RK |
4000 | which will often result in some optimizations. Do the conversion |
4001 | in two steps: first change the signedness, if needed, then | |
ab6c58f1 RK |
4002 | the extend. But don't do this if the type of EXP is a subtype |
4003 | of something else since then the conversion might involve | |
4004 | more than just converting modes. */ | |
4005 | if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp)) | |
4006 | && TREE_TYPE (TREE_TYPE (exp)) == 0) | |
f635a84d RK |
4007 | { |
4008 | if (TREE_UNSIGNED (TREE_TYPE (exp)) | |
4009 | != SUBREG_PROMOTED_UNSIGNED_P (target)) | |
4010 | exp | |
4011 | = convert | |
4012 | (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target), | |
4013 | TREE_TYPE (exp)), | |
4014 | exp); | |
4015 | ||
4016 | exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)), | |
4017 | SUBREG_PROMOTED_UNSIGNED_P (target)), | |
4018 | exp); | |
b76b08ef RK |
4019 | |
4020 | inner_target = SUBREG_REG (target); | |
f635a84d | 4021 | } |
3a94c984 | 4022 | |
b76b08ef | 4023 | temp = expand_expr (exp, inner_target, VOIDmode, 0); |
b258707c | 4024 | |
766f36c7 | 4025 | /* If TEMP is a volatile MEM and we want a result value, make |
f29369b9 RK |
4026 | the access now so it gets done only once. Likewise if |
4027 | it contains TARGET. */ | |
4028 | if (GET_CODE (temp) == MEM && want_value | |
4029 | && (MEM_VOLATILE_P (temp) | |
4030 | || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0)))) | |
766f36c7 RK |
4031 | temp = copy_to_reg (temp); |
4032 | ||
b258707c RS |
4033 | /* If TEMP is a VOIDmode constant, use convert_modes to make |
4034 | sure that we properly convert it. */ | |
4035 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | |
1f1b0541 RH |
4036 | { |
4037 | temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), | |
4038 | temp, SUBREG_PROMOTED_UNSIGNED_P (target)); | |
4039 | temp = convert_modes (GET_MODE (SUBREG_REG (target)), | |
4040 | GET_MODE (target), temp, | |
4041 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
4042 | } | |
b258707c | 4043 | |
1499e0a8 RK |
4044 | convert_move (SUBREG_REG (target), temp, |
4045 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
3dbecef9 JW |
4046 | |
4047 | /* If we promoted a constant, change the mode back down to match | |
4048 | target. Otherwise, the caller might get confused by a result whose | |
4049 | mode is larger than expected. */ | |
4050 | ||
b3ca30df | 4051 | if (want_value && GET_MODE (temp) != GET_MODE (target)) |
3dbecef9 | 4052 | { |
b3ca30df JJ |
4053 | if (GET_MODE (temp) != VOIDmode) |
4054 | { | |
4055 | temp = gen_lowpart_SUBREG (GET_MODE (target), temp); | |
4056 | SUBREG_PROMOTED_VAR_P (temp) = 1; | |
7879b81e SE |
4057 | SUBREG_PROMOTED_UNSIGNED_SET (temp, |
4058 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
b3ca30df JJ |
4059 | } |
4060 | else | |
4061 | temp = convert_modes (GET_MODE (target), | |
4062 | GET_MODE (SUBREG_REG (target)), | |
4063 | temp, SUBREG_PROMOTED_UNSIGNED_P (target)); | |
3dbecef9 JW |
4064 | } |
4065 | ||
709f5be1 | 4066 | return want_value ? temp : NULL_RTX; |
1499e0a8 | 4067 | } |
bbf6f052 RK |
4068 | else |
4069 | { | |
4070 | temp = expand_expr (exp, target, GET_MODE (target), 0); | |
766f36c7 | 4071 | /* Return TARGET if it's a specified hardware register. |
709f5be1 RS |
4072 | If TARGET is a volatile mem ref, either return TARGET |
4073 | or return a reg copied *from* TARGET; ANSI requires this. | |
4074 | ||
4075 | Otherwise, if TEMP is not TARGET, return TEMP | |
4076 | if it is constant (for efficiency), | |
4077 | or if we really want the correct value. */ | |
bbf6f052 RK |
4078 | if (!(target && GET_CODE (target) == REG |
4079 | && REGNO (target) < FIRST_PSEUDO_REGISTER) | |
709f5be1 | 4080 | && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)) |
effbcc6a | 4081 | && ! rtx_equal_p (temp, target) |
709f5be1 | 4082 | && (CONSTANT_P (temp) || want_value)) |
bbf6f052 RK |
4083 | dont_return_target = 1; |
4084 | } | |
4085 | ||
b258707c RS |
4086 | /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not |
4087 | the same as that of TARGET, adjust the constant. This is needed, for | |
4088 | example, in case it is a CONST_DOUBLE and we want only a word-sized | |
4089 | value. */ | |
4090 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode | |
c1da1f33 | 4091 | && TREE_CODE (exp) != ERROR_MARK |
b258707c RS |
4092 | && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) |
4093 | temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), | |
4094 | temp, TREE_UNSIGNED (TREE_TYPE (exp))); | |
4095 | ||
bbf6f052 | 4096 | /* If value was not generated in the target, store it there. |
37a08a29 RK |
4097 | Convert the value to TARGET's type first if necessary. |
4098 | If TEMP and TARGET compare equal according to rtx_equal_p, but | |
f3f2255a R |
4099 | one or both of them are volatile memory refs, we have to distinguish |
4100 | two cases: | |
4101 | - expand_expr has used TARGET. In this case, we must not generate | |
4102 | another copy. This can be detected by TARGET being equal according | |
4103 | to == . | |
4104 | - expand_expr has not used TARGET - that means that the source just | |
4105 | happens to have the same RTX form. Since temp will have been created | |
4106 | by expand_expr, it will compare unequal according to == . | |
4107 | We must generate a copy in this case, to reach the correct number | |
4108 | of volatile memory references. */ | |
bbf6f052 | 4109 | |
6036acbb | 4110 | if ((! rtx_equal_p (temp, target) |
f3f2255a R |
4111 | || (temp != target && (side_effects_p (temp) |
4112 | || side_effects_p (target)))) | |
e5408e52 JJ |
4113 | && TREE_CODE (exp) != ERROR_MARK |
4114 | && ! dont_store_target) | |
bbf6f052 RK |
4115 | { |
4116 | target = protect_from_queue (target, 1); | |
4117 | if (GET_MODE (temp) != GET_MODE (target) | |
f0348c25 | 4118 | && GET_MODE (temp) != VOIDmode) |
bbf6f052 RK |
4119 | { |
4120 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); | |
4121 | if (dont_return_target) | |
4122 | { | |
4123 | /* In this case, we will return TEMP, | |
4124 | so make sure it has the proper mode. | |
4125 | But don't forget to store the value into TARGET. */ | |
4126 | temp = convert_to_mode (GET_MODE (target), temp, unsignedp); | |
4127 | emit_move_insn (target, temp); | |
4128 | } | |
4129 | else | |
4130 | convert_move (target, temp, unsignedp); | |
4131 | } | |
4132 | ||
4133 | else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST) | |
4134 | { | |
c24ae149 RK |
4135 | /* Handle copying a string constant into an array. The string |
4136 | constant may be shorter than the array. So copy just the string's | |
4137 | actual length, and clear the rest. First get the size of the data | |
4138 | type of the string, which is actually the size of the target. */ | |
4139 | rtx size = expr_size (exp); | |
bbf6f052 | 4140 | |
e87b4f3f RS |
4141 | if (GET_CODE (size) == CONST_INT |
4142 | && INTVAL (size) < TREE_STRING_LENGTH (exp)) | |
8ac61af7 | 4143 | emit_block_move (target, temp, size); |
e87b4f3f | 4144 | else |
bbf6f052 | 4145 | { |
e87b4f3f RS |
4146 | /* Compute the size of the data to copy from the string. */ |
4147 | tree copy_size | |
c03b7665 | 4148 | = size_binop (MIN_EXPR, |
b50d17a1 | 4149 | make_tree (sizetype, size), |
fed3cef0 | 4150 | size_int (TREE_STRING_LENGTH (exp))); |
906c4e36 RK |
4151 | rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX, |
4152 | VOIDmode, 0); | |
e87b4f3f RS |
4153 | rtx label = 0; |
4154 | ||
4155 | /* Copy that much. */ | |
c24ae149 | 4156 | copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0); |
8ac61af7 | 4157 | emit_block_move (target, temp, copy_size_rtx); |
e87b4f3f | 4158 | |
88f63c77 RK |
4159 | /* Figure out how much is left in TARGET that we have to clear. |
4160 | Do all calculations in ptr_mode. */ | |
e87b4f3f RS |
4161 | if (GET_CODE (copy_size_rtx) == CONST_INT) |
4162 | { | |
c24ae149 RK |
4163 | size = plus_constant (size, -INTVAL (copy_size_rtx)); |
4164 | target = adjust_address (target, BLKmode, | |
4165 | INTVAL (copy_size_rtx)); | |
e87b4f3f RS |
4166 | } |
4167 | else | |
4168 | { | |
88f63c77 | 4169 | size = expand_binop (ptr_mode, sub_optab, size, |
906c4e36 RK |
4170 | copy_size_rtx, NULL_RTX, 0, |
4171 | OPTAB_LIB_WIDEN); | |
e87b4f3f | 4172 | |
c24ae149 RK |
4173 | #ifdef POINTERS_EXTEND_UNSIGNED |
4174 | if (GET_MODE (copy_size_rtx) != Pmode) | |
4175 | copy_size_rtx = convert_memory_address (Pmode, | |
4176 | copy_size_rtx); | |
4177 | #endif | |
4178 | ||
4179 | target = offset_address (target, copy_size_rtx, | |
4180 | highest_pow2_factor (copy_size)); | |
e87b4f3f | 4181 | label = gen_label_rtx (); |
c5d5d461 | 4182 | emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX, |
a06ef755 | 4183 | GET_MODE (size), 0, label); |
e87b4f3f RS |
4184 | } |
4185 | ||
4186 | if (size != const0_rtx) | |
37a08a29 | 4187 | clear_storage (target, size); |
22619c3f | 4188 | |
e87b4f3f RS |
4189 | if (label) |
4190 | emit_label (label); | |
bbf6f052 RK |
4191 | } |
4192 | } | |
fffa9c1d JW |
4193 | /* Handle calls that return values in multiple non-contiguous locations. |
4194 | The Irix 6 ABI has examples of this. */ | |
4195 | else if (GET_CODE (target) == PARALLEL) | |
04050c69 | 4196 | emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp))); |
bbf6f052 | 4197 | else if (GET_MODE (temp) == BLKmode) |
8ac61af7 | 4198 | emit_block_move (target, temp, expr_size (exp)); |
bbf6f052 RK |
4199 | else |
4200 | emit_move_insn (target, temp); | |
4201 | } | |
709f5be1 | 4202 | |
766f36c7 RK |
4203 | /* If we don't want a value, return NULL_RTX. */ |
4204 | if (! want_value) | |
4205 | return NULL_RTX; | |
4206 | ||
4207 | /* If we are supposed to return TEMP, do so as long as it isn't a MEM. | |
4208 | ??? The latter test doesn't seem to make sense. */ | |
4209 | else if (dont_return_target && GET_CODE (temp) != MEM) | |
bbf6f052 | 4210 | return temp; |
766f36c7 RK |
4211 | |
4212 | /* Return TARGET itself if it is a hard register. */ | |
4213 | else if (want_value && GET_MODE (target) != BLKmode | |
4214 | && ! (GET_CODE (target) == REG | |
4215 | && REGNO (target) < FIRST_PSEUDO_REGISTER)) | |
709f5be1 | 4216 | return copy_to_reg (target); |
3a94c984 | 4217 | |
766f36c7 | 4218 | else |
709f5be1 | 4219 | return target; |
bbf6f052 RK |
4220 | } |
4221 | \f | |
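The string-constant branch above is easiest to see from the source level. A minimal sketch (hypothetical example, not from the GCC sources): initializing a 16-byte char array from a shorter literal makes store_expr copy just the literal's bytes and clear the tail, which is exactly the emit_block_move / clear_storage pair generated above.

#include <string.h>

void
f (char *out)
{
  char buf[16] = "hi";            /* string constant shorter than the array */
  memcpy (out, buf, sizeof buf);
}

/* The initialization of buf expands as if by:
     memcpy (buf, "hi", 3);      (emit_block_move of the string's 3 bytes)
     memset (buf + 3, 0, 13);    (clear_storage of the remaining 13 bytes)  */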
9de08200 RK |
4222 | /* Return 1 if EXP just contains zeros. */ |
4223 | ||
4224 | static int | |
4225 | is_zeros_p (exp) | |
4226 | tree exp; | |
4227 | { | |
4228 | tree elt; | |
4229 | ||
4230 | switch (TREE_CODE (exp)) | |
4231 | { | |
4232 | case CONVERT_EXPR: | |
4233 | case NOP_EXPR: | |
4234 | case NON_LVALUE_EXPR: | |
ed239f5a | 4235 | case VIEW_CONVERT_EXPR: |
9de08200 RK |
4236 | return is_zeros_p (TREE_OPERAND (exp, 0)); |
4237 | ||
4238 | case INTEGER_CST: | |
05bccae2 | 4239 | return integer_zerop (exp); |
9de08200 RK |
4240 | |
4241 | case COMPLEX_CST: | |
4242 | return | |
4243 | is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp)); | |
4244 | ||
4245 | case REAL_CST: | |
41c9120b | 4246 | return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0); |
9de08200 | 4247 | |
69ef87e2 AH |
4248 | case VECTOR_CST: |
4249 | for (elt = TREE_VECTOR_CST_ELTS (exp); elt; | |
4250 | elt = TREE_CHAIN (elt)) | |
4251 | if (!is_zeros_p (TREE_VALUE (elt))) | |
4252 | return 0; | |
4253 | ||
4254 | return 1; | |
4255 | ||
9de08200 | 4256 | case CONSTRUCTOR: |
e1a43f73 PB |
4257 | if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE) |
4258 | return CONSTRUCTOR_ELTS (exp) == NULL_TREE; | |
9de08200 RK |
4259 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) |
4260 | if (! is_zeros_p (TREE_VALUE (elt))) | |
4261 | return 0; | |
4262 | ||
4263 | return 1; | |
3a94c984 | 4264 | |
e9a25f70 JL |
4265 | default: |
4266 | return 0; | |
9de08200 | 4267 | } |
9de08200 RK |
4268 | } |
4269 | ||
4270 | /* Return 1 if EXP contains mostly (3/4) zeros. */ | |
4271 | ||
4272 | static int | |
4273 | mostly_zeros_p (exp) | |
4274 | tree exp; | |
4275 | { | |
9de08200 RK |
4276 | if (TREE_CODE (exp) == CONSTRUCTOR) |
4277 | { | |
e1a43f73 PB |
4278 | int elts = 0, zeros = 0; |
4279 | tree elt = CONSTRUCTOR_ELTS (exp); | |
4280 | if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE) | |
4281 | { | |
4282 | /* If there are no ranges of true bits, it is all zero. */ | |
4283 | return elt == NULL_TREE; | |
4284 | } | |
4285 | for (; elt; elt = TREE_CHAIN (elt)) | |
4286 | { | |
4287 | /* We do not handle the case where the index is a RANGE_EXPR, | |
4288 | so the statistic will be somewhat inaccurate. | |
4289 | We do make a more accurate count in store_constructor itself, | |
4290 | so since this function is only used for nested array elements, | |
0f41302f | 4291 | this should be close enough. */ |
e1a43f73 PB |
4292 | if (mostly_zeros_p (TREE_VALUE (elt))) |
4293 | zeros++; | |
4294 | elts++; | |
4295 | } | |
9de08200 RK |
4296 | |
4297 | return 4 * zeros >= 3 * elts; | |
4298 | } | |
4299 | ||
4300 | return is_zeros_p (exp); | |
4301 | } | |
4302 | \f | |
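A worked instance of the 3/4 test above (hypothetical initializer; use_array is a made-up consumer): in the aggregate below, zeros == 3 and elts == 4, so 4 * zeros >= 3 * elts becomes 12 >= 12 and holds. The constructor counts as mostly zero, so store_constructor clears the whole object first and then stores only the one non-zero element.

extern void use_array (int *);

void
g (void)
{
  int a[4] = { 0, 0, 0, 5 };   /* mostly_zeros_p returns 1: 4*3 >= 3*4 */
  use_array (a);
}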
e1a43f73 PB |
4303 | /* Helper function for store_constructor. |
4304 | TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field. | |
4305 | TYPE is the type of the CONSTRUCTOR, not the element type. | |
04050c69 | 4306 | CLEARED is as for store_constructor. |
23cb1766 | 4307 | ALIAS_SET is the alias set to use for any stores. |
23ccec44 JW |
4308 | |
4309 | This provides a recursive shortcut back to store_constructor when it isn't | |
4310 | necessary to go through store_field. This is so that we can pass through | |
4311 | the cleared field to let store_constructor know that we may not have to | |
4312 | clear a substructure if the outer structure has already been cleared. */ | |
e1a43f73 PB |
4313 | |
4314 | static void | |
04050c69 RK |
4315 | store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared, |
4316 | alias_set) | |
e1a43f73 | 4317 | rtx target; |
770ae6cc RK |
4318 | unsigned HOST_WIDE_INT bitsize; |
4319 | HOST_WIDE_INT bitpos; | |
e1a43f73 PB |
4320 | enum machine_mode mode; |
4321 | tree exp, type; | |
4322 | int cleared; | |
23cb1766 | 4323 | int alias_set; |
e1a43f73 PB |
4324 | { |
4325 | if (TREE_CODE (exp) == CONSTRUCTOR | |
23ccec44 JW |
4326 | && bitpos % BITS_PER_UNIT == 0 |
4327 | /* If we have a non-zero bitpos for a register target, then we just | |
4328 | let store_field do the bitfield handling. This is unlikely to | |
4329 | generate unnecessary clear instructions anyway. */ | |
4330 | && (bitpos == 0 || GET_CODE (target) == MEM)) | |
e1a43f73 | 4331 | { |
61cb205c RK |
4332 | if (GET_CODE (target) == MEM) |
4333 | target | |
4334 | = adjust_address (target, | |
4335 | GET_MODE (target) == BLKmode | |
4336 | || 0 != (bitpos | |
4337 | % GET_MODE_ALIGNMENT (GET_MODE (target))) | |
4338 | ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT); | |
23cb1766 | 4339 | |
e0339ef7 | 4340 | |
04050c69 | 4341 | /* Update the alias set, if required. */ |
10b76d73 RK |
4342 | if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target) |
4343 | && MEM_ALIAS_SET (target) != 0) | |
70072ed9 RK |
4344 | { |
4345 | target = copy_rtx (target); | |
4346 | set_mem_alias_set (target, alias_set); | |
4347 | } | |
e0339ef7 | 4348 | |
04050c69 | 4349 | store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT); |
e1a43f73 PB |
4350 | } |
4351 | else | |
a06ef755 RK |
4352 | store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type, |
4353 | alias_set); | |
e1a43f73 PB |
4354 | } |
4355 | ||
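The cleared pass-through matters for nested aggregates. A hedged sketch (hypothetical types; use_outer is a made-up consumer that keeps the object live): once the outer mostly-zero constructor has cleared the whole object, the nested CONSTRUCTOR re-enters store_constructor with CLEARED set, skips a second clear_storage, and emits only the store of the one non-zero member.

struct inner { int a, b, c, d; };
struct outer { struct inner in; int tail[12]; };

extern void use_outer (struct outer *);

void
h (void)
{
  /* One up-front clear for the whole object; the nested constructor
     then stores just the single word for .in.d.  */
  struct outer o = { { 0, 0, 0, 1 } };
  use_outer (&o);
}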
bbf6f052 | 4356 | /* Store the value of constructor EXP into the rtx TARGET. |
04050c69 RK |
4357 | TARGET is either a REG or a MEM; we know it cannot conflict, since |
4358 | safe_from_p has been called. | |
b7010412 RK |
4359 | CLEARED is true if TARGET is known to have been zero'd. |
4360 | SIZE is the number of bytes of TARGET we are allowed to modify: this | |
4361 | may not be the same as the size of EXP if we are assigning to a field | |
4362 | which has been packed to exclude padding bits. */ | |
bbf6f052 RK |
4363 | |
4364 | static void | |
04050c69 | 4365 | store_constructor (exp, target, cleared, size) |
bbf6f052 RK |
4366 | tree exp; |
4367 | rtx target; | |
e1a43f73 | 4368 | int cleared; |
13eb1f7f | 4369 | HOST_WIDE_INT size; |
bbf6f052 | 4370 | { |
4af3895e | 4371 | tree type = TREE_TYPE (exp); |
a5efcd63 | 4372 | #ifdef WORD_REGISTER_OPERATIONS |
13eb1f7f | 4373 | HOST_WIDE_INT exp_size = int_size_in_bytes (type); |
a5efcd63 | 4374 | #endif |
4af3895e | 4375 | |
e44842fe RK |
4376 | if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE |
4377 | || TREE_CODE (type) == QUAL_UNION_TYPE) | |
bbf6f052 | 4378 | { |
b3694847 | 4379 | tree elt; |
bbf6f052 | 4380 | |
04050c69 | 4381 | /* We either clear the aggregate or indicate the value is dead. */ |
dd1db5ec RK |
4382 | if ((TREE_CODE (type) == UNION_TYPE |
4383 | || TREE_CODE (type) == QUAL_UNION_TYPE) | |
04050c69 RK |
4384 | && ! cleared |
4385 | && ! CONSTRUCTOR_ELTS (exp)) | |
4386 | /* If the constructor is empty, clear the union. */ | |
a59f8640 | 4387 | { |
04050c69 RK |
4388 | clear_storage (target, expr_size (exp)); |
4389 | cleared = 1; | |
a59f8640 | 4390 | } |
4af3895e JVA |
4391 | |
4392 | /* If we are building a static constructor into a register, | |
4393 | set the initial value as zero so we can fold the value into | |
67225c15 RK |
4394 | a constant. But if more than one register is involved, |
4395 | this probably loses. */ | |
04050c69 | 4396 | else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp) |
67225c15 | 4397 | && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD) |
9de08200 | 4398 | { |
04050c69 | 4399 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); |
9de08200 RK |
4400 | cleared = 1; |
4401 | } | |
4402 | ||
4403 | /* If the constructor has fewer fields than the structure | |
4404 | or if we are initializing the structure to mostly zeros, | |
0d97bf4c | 4405 | clear the whole structure first. Don't do this if TARGET is a |
fcf1b822 RK |
4406 | register whose mode size isn't equal to SIZE since clear_storage |
4407 | can't handle this case. */ | |
04050c69 | 4408 | else if (! cleared && size > 0 |
9376fcd6 | 4409 | && ((list_length (CONSTRUCTOR_ELTS (exp)) |
c3b247b4 | 4410 | != fields_length (type)) |
fcf1b822 RK |
4411 | || mostly_zeros_p (exp)) |
4412 | && (GET_CODE (target) != REG | |
04050c69 RK |
4413 | || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) |
4414 | == size))) | |
9de08200 | 4415 | { |
04050c69 | 4416 | clear_storage (target, GEN_INT (size)); |
9de08200 RK |
4417 | cleared = 1; |
4418 | } | |
04050c69 RK |
4419 | |
4420 | if (! cleared) | |
38a448ca | 4421 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); |
bbf6f052 RK |
4422 | |
4423 | /* Store each element of the constructor into | |
4424 | the corresponding field of TARGET. */ | |
4425 | ||
4426 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) | |
4427 | { | |
b3694847 | 4428 | tree field = TREE_PURPOSE (elt); |
34c73909 | 4429 | tree value = TREE_VALUE (elt); |
b3694847 | 4430 | enum machine_mode mode; |
770ae6cc RK |
4431 | HOST_WIDE_INT bitsize; |
4432 | HOST_WIDE_INT bitpos = 0; | |
bbf6f052 | 4433 | int unsignedp; |
770ae6cc | 4434 | tree offset; |
b50d17a1 | 4435 | rtx to_rtx = target; |
bbf6f052 | 4436 | |
f32fd778 RS |
4437 | /* Just ignore missing fields. |
4438 | We cleared the whole structure, above, | |
4439 | if any fields are missing. */ | |
4440 | if (field == 0) | |
4441 | continue; | |
4442 | ||
8b6000fc | 4443 | if (cleared && is_zeros_p (value)) |
e1a43f73 | 4444 | continue; |
9de08200 | 4445 | |
770ae6cc RK |
4446 | if (host_integerp (DECL_SIZE (field), 1)) |
4447 | bitsize = tree_low_cst (DECL_SIZE (field), 1); | |
14a774a9 RK |
4448 | else |
4449 | bitsize = -1; | |
4450 | ||
bbf6f052 RK |
4451 | unsignedp = TREE_UNSIGNED (field); |
4452 | mode = DECL_MODE (field); | |
4453 | if (DECL_BIT_FIELD (field)) | |
4454 | mode = VOIDmode; | |
4455 | ||
770ae6cc RK |
4456 | offset = DECL_FIELD_OFFSET (field); |
4457 | if (host_integerp (offset, 0) | |
4458 | && host_integerp (bit_position (field), 0)) | |
4459 | { | |
4460 | bitpos = int_bit_position (field); | |
4461 | offset = 0; | |
4462 | } | |
b50d17a1 | 4463 | else |
770ae6cc | 4464 | bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0); |
3a94c984 | 4465 | |
b50d17a1 RK |
4466 | if (offset) |
4467 | { | |
4468 | rtx offset_rtx; | |
4469 | ||
4470 | if (contains_placeholder_p (offset)) | |
7fa96708 | 4471 | offset = build (WITH_RECORD_EXPR, sizetype, |
956d6950 | 4472 | offset, make_tree (TREE_TYPE (exp), target)); |
bbf6f052 | 4473 | |
b50d17a1 RK |
4474 | offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); |
4475 | if (GET_CODE (to_rtx) != MEM) | |
4476 | abort (); | |
4477 | ||
3a94c984 | 4478 | if (GET_MODE (offset_rtx) != ptr_mode) |
0d4903b8 RK |
4479 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); |
4480 | ||
bd070e1a | 4481 | #ifdef POINTERS_EXTEND_UNSIGNED |
4b6c1672 RK |
4482 | if (GET_MODE (offset_rtx) != Pmode) |
4483 | offset_rtx = convert_memory_address (Pmode, offset_rtx); | |
bd070e1a | 4484 | #endif |
bd070e1a | 4485 | |
0d4903b8 RK |
4486 | to_rtx = offset_address (to_rtx, offset_rtx, |
4487 | highest_pow2_factor (offset)); | |
b50d17a1 | 4488 | } |
c5c76735 | 4489 | |
cf04eb80 RK |
4490 | if (TREE_READONLY (field)) |
4491 | { | |
9151b3bf | 4492 | if (GET_CODE (to_rtx) == MEM) |
effbcc6a RK |
4493 | to_rtx = copy_rtx (to_rtx); |
4494 | ||
cf04eb80 RK |
4495 | RTX_UNCHANGING_P (to_rtx) = 1; |
4496 | } | |
4497 | ||
34c73909 R |
4498 | #ifdef WORD_REGISTER_OPERATIONS |
4499 | /* If this initializes a field that is smaller than a word, at the | |
4500 | start of a word, try to widen it to a full word. | |
4501 | This special case allows us to output C++ member function | |
4502 | initializations in a form that the optimizers can understand. */ | |
770ae6cc | 4503 | if (GET_CODE (target) == REG |
34c73909 R |
4504 | && bitsize < BITS_PER_WORD |
4505 | && bitpos % BITS_PER_WORD == 0 | |
4506 | && GET_MODE_CLASS (mode) == MODE_INT | |
4507 | && TREE_CODE (value) == INTEGER_CST | |
13eb1f7f RK |
4508 | && exp_size >= 0 |
4509 | && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT) | |
34c73909 R |
4510 | { |
4511 | tree type = TREE_TYPE (value); | |
04050c69 | 4512 | |
34c73909 R |
4513 | if (TYPE_PRECISION (type) < BITS_PER_WORD) |
4514 | { | |
4515 | type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type)); | |
4516 | value = convert (type, value); | |
4517 | } | |
04050c69 | 4518 | |
34c73909 R |
4519 | if (BYTES_BIG_ENDIAN) |
4520 | value | |
4521 | = fold (build (LSHIFT_EXPR, type, value, | |
4522 | build_int_2 (BITS_PER_WORD - bitsize, 0))); | |
4523 | bitsize = BITS_PER_WORD; | |
4524 | mode = word_mode; | |
4525 | } | |
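	  /* Illustration (hypothetical): for `struct S { char tag; } s = { 5 };'
	     with S small enough to live in a register, the byte-sized store of
	     5 at bitpos 0 is widened here to a word_mode store of 5, shifted
	     into the top byte when BYTES_BIG_ENDIAN.  */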
4526 | #endif | |
10b76d73 RK |
4527 | |
4528 | if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx) | |
4529 | && DECL_NONADDRESSABLE_P (field)) | |
4530 | { | |
4531 | to_rtx = copy_rtx (to_rtx); | |
4532 | MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; | |
4533 | } | |
4534 | ||
c5c76735 | 4535 | store_constructor_field (to_rtx, bitsize, bitpos, mode, |
8b6000fc | 4536 | value, type, cleared, |
10b76d73 | 4537 | get_alias_set (TREE_TYPE (field))); |
bbf6f052 RK |
4538 | } |
4539 | } | |
e6834654 SS |
4540 | else if (TREE_CODE (type) == ARRAY_TYPE |
4541 | || TREE_CODE (type) == VECTOR_TYPE) | |
bbf6f052 | 4542 | { |
b3694847 SS |
4543 | tree elt; |
4544 | int i; | |
e1a43f73 | 4545 | int need_to_clear; |
4af3895e | 4546 | tree domain = TYPE_DOMAIN (type); |
4af3895e | 4547 | tree elttype = TREE_TYPE (type); |
e6834654 | 4548 | int const_bounds_p; |
ae0ed63a JM |
4549 | HOST_WIDE_INT minelt = 0; |
4550 | HOST_WIDE_INT maxelt = 0; | |
85f3d674 | 4551 | |
e6834654 SS |
4552 | /* Vectors are like arrays, but the domain is stored via an array |
4553 | type indirectly. */ | |
4554 | if (TREE_CODE (type) == VECTOR_TYPE) | |
4555 | { | |
4556 | /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses | |
4557 | the same field as TYPE_DOMAIN, we are not guaranteed that | |
4558 | it always will. */ | |
4559 | domain = TYPE_DEBUG_REPRESENTATION_TYPE (type); | |
4560 | domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain))); | |
4561 | } | |
4562 | ||
4563 | const_bounds_p = (TYPE_MIN_VALUE (domain) | |
4564 | && TYPE_MAX_VALUE (domain) | |
4565 | && host_integerp (TYPE_MIN_VALUE (domain), 0) | |
4566 | && host_integerp (TYPE_MAX_VALUE (domain), 0)); | |
4567 | ||
85f3d674 RK |
4568 | /* If we have constant bounds for the range of the type, get them. */ |
4569 | if (const_bounds_p) | |
4570 | { | |
4571 | minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); | |
4572 | maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); | |
4573 | } | |
bbf6f052 | 4574 | |
e1a43f73 | 4575 | /* If the constructor has fewer elements than the array, |
38e01259 | 4576 | clear the whole array first. Similarly if this is |
e1a43f73 PB |
4577 | a static constructor of a non-BLKmode object. */ | |
4578 | if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp))) | |
4579 | need_to_clear = 1; | |
4580 | else | |
4581 | { | |
4582 | HOST_WIDE_INT count = 0, zero_count = 0; | |
85f3d674 RK |
4583 | need_to_clear = ! const_bounds_p; |
4584 | ||
e1a43f73 PB |
4585 | /* This loop is a more accurate version of the loop in |
4586 | mostly_zeros_p (it handles RANGE_EXPR in an index). | |
4587 | It is also needed to check for missing elements. */ | |
4588 | for (elt = CONSTRUCTOR_ELTS (exp); | |
85f3d674 | 4589 | elt != NULL_TREE && ! need_to_clear; |
df0faff1 | 4590 | elt = TREE_CHAIN (elt)) |
e1a43f73 PB |
4591 | { |
4592 | tree index = TREE_PURPOSE (elt); | |
4593 | HOST_WIDE_INT this_node_count; | |
19caa751 | 4594 | |
e1a43f73 PB |
4595 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) |
4596 | { | |
4597 | tree lo_index = TREE_OPERAND (index, 0); | |
4598 | tree hi_index = TREE_OPERAND (index, 1); | |
05bccae2 | 4599 | |
19caa751 RK |
4600 | if (! host_integerp (lo_index, 1) |
4601 | || ! host_integerp (hi_index, 1)) | |
e1a43f73 PB |
4602 | { |
4603 | need_to_clear = 1; | |
4604 | break; | |
4605 | } | |
19caa751 RK |
4606 | |
4607 | this_node_count = (tree_low_cst (hi_index, 1) | |
4608 | - tree_low_cst (lo_index, 1) + 1); | |
e1a43f73 PB |
4609 | } |
4610 | else | |
4611 | this_node_count = 1; | |
85f3d674 | 4612 | |
e1a43f73 PB |
4613 | count += this_node_count; |
4614 | if (mostly_zeros_p (TREE_VALUE (elt))) | |
4615 | zero_count += this_node_count; | |
4616 | } | |
85f3d674 | 4617 | |
8e958f70 | 4618 | /* Clear the entire array first if there are any missing elements, |
0f41302f | 4619 | or if the incidence of zero elements is >= 75%. */ |
85f3d674 RK |
4620 | if (! need_to_clear |
4621 | && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count)) | |
e1a43f73 PB |
4622 | need_to_clear = 1; |
4623 | } | |
85f3d674 | 4624 | |
9376fcd6 | 4625 | if (need_to_clear && size > 0) |
9de08200 RK |
4626 | { |
4627 | if (! cleared) | |
725e58b1 RK |
4628 | { |
4629 | if (REG_P (target)) | |
4630 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | |
4631 | else | |
4632 | clear_storage (target, GEN_INT (size)); | |
4633 | } | |
9de08200 RK |
4634 | cleared = 1; |
4635 | } | |
df4556a3 | 4636 | else if (REG_P (target)) |
bbf6f052 | 4637 | /* Inform later passes that the old value is dead. */ |
38a448ca | 4638 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); |
bbf6f052 RK |
4639 | |
4640 | /* Store each element of the constructor into | |
4641 | the corresponding element of TARGET, determined | |
4642 | by counting the elements. */ | |
4643 | for (elt = CONSTRUCTOR_ELTS (exp), i = 0; | |
4644 | elt; | |
4645 | elt = TREE_CHAIN (elt), i++) | |
4646 | { | |
b3694847 | 4647 | enum machine_mode mode; |
19caa751 RK |
4648 | HOST_WIDE_INT bitsize; |
4649 | HOST_WIDE_INT bitpos; | |
bbf6f052 | 4650 | int unsignedp; |
e1a43f73 | 4651 | tree value = TREE_VALUE (elt); |
03dc44a6 RS |
4652 | tree index = TREE_PURPOSE (elt); |
4653 | rtx xtarget = target; | |
bbf6f052 | 4654 | |
e1a43f73 PB |
4655 | if (cleared && is_zeros_p (value)) |
4656 | continue; | |
9de08200 | 4657 | |
bbf6f052 | 4658 | unsignedp = TREE_UNSIGNED (elttype); |
14a774a9 RK |
4659 | mode = TYPE_MODE (elttype); |
4660 | if (mode == BLKmode) | |
19caa751 RK |
4661 | bitsize = (host_integerp (TYPE_SIZE (elttype), 1) |
4662 | ? tree_low_cst (TYPE_SIZE (elttype), 1) | |
4663 | : -1); | |
14a774a9 RK |
4664 | else |
4665 | bitsize = GET_MODE_BITSIZE (mode); | |
bbf6f052 | 4666 | |
e1a43f73 PB |
4667 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) |
4668 | { | |
4669 | tree lo_index = TREE_OPERAND (index, 0); | |
4670 | tree hi_index = TREE_OPERAND (index, 1); | |
0d4903b8 | 4671 | rtx index_r, pos_rtx, hi_r, loop_top, loop_end; |
e1a43f73 | 4672 | struct nesting *loop; |
05c0b405 PB |
4673 | HOST_WIDE_INT lo, hi, count; |
4674 | tree position; | |
e1a43f73 | 4675 | |
0f41302f | 4676 | /* If the range is constant and "small", unroll the loop. */ |
85f3d674 RK |
4677 | if (const_bounds_p |
4678 | && host_integerp (lo_index, 0) | |
19caa751 RK |
4679 | && host_integerp (hi_index, 0) |
4680 | && (lo = tree_low_cst (lo_index, 0), | |
4681 | hi = tree_low_cst (hi_index, 0), | |
05c0b405 PB |
4682 | count = hi - lo + 1, |
4683 | (GET_CODE (target) != MEM | |
4684 | || count <= 2 | |
19caa751 RK |
4685 | || (host_integerp (TYPE_SIZE (elttype), 1) |
4686 | && (tree_low_cst (TYPE_SIZE (elttype), 1) * count | |
4687 | <= 40 * 8))))) | |
e1a43f73 | 4688 | { |
05c0b405 PB |
4689 | lo -= minelt; hi -= minelt; |
4690 | for (; lo <= hi; lo++) | |
e1a43f73 | 4691 | { |
19caa751 | 4692 | bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0); |
10b76d73 RK |
4693 | |
4694 | if (GET_CODE (target) == MEM | |
4695 | && !MEM_KEEP_ALIAS_SET_P (target) | |
e6834654 | 4696 | && TREE_CODE (type) == ARRAY_TYPE |
10b76d73 RK |
4697 | && TYPE_NONALIASED_COMPONENT (type)) |
4698 | { | |
4699 | target = copy_rtx (target); | |
4700 | MEM_KEEP_ALIAS_SET_P (target) = 1; | |
4701 | } | |
4702 | ||
23cb1766 | 4703 | store_constructor_field |
04050c69 RK |
4704 | (target, bitsize, bitpos, mode, value, type, cleared, |
4705 | get_alias_set (elttype)); | |
e1a43f73 PB |
4706 | } |
4707 | } | |
4708 | else | |
4709 | { | |
4710 | hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0); | |
4711 | loop_top = gen_label_rtx (); | |
4712 | loop_end = gen_label_rtx (); | |
4713 | ||
4714 | unsignedp = TREE_UNSIGNED (domain); | |
4715 | ||
4716 | index = build_decl (VAR_DECL, NULL_TREE, domain); | |
4717 | ||
19e7881c | 4718 | index_r |
e1a43f73 PB |
4719 | = gen_reg_rtx (promote_mode (domain, DECL_MODE (index), |
4720 | &unsignedp, 0)); | |
19e7881c | 4721 | SET_DECL_RTL (index, index_r); |
e1a43f73 PB |
4722 | if (TREE_CODE (value) == SAVE_EXPR |
4723 | && SAVE_EXPR_RTL (value) == 0) | |
4724 | { | |
0f41302f MS |
4725 | /* Make sure value gets expanded once before the |
4726 | loop. */ | |
e1a43f73 PB |
4727 | expand_expr (value, const0_rtx, VOIDmode, 0); |
4728 | emit_queue (); | |
4729 | } | |
4730 | store_expr (lo_index, index_r, 0); | |
4731 | loop = expand_start_loop (0); | |
4732 | ||
0f41302f | 4733 | /* Assign value to element index. */ |
fed3cef0 RK |
4734 | position |
4735 | = convert (ssizetype, | |
4736 | fold (build (MINUS_EXPR, TREE_TYPE (index), | |
4737 | index, TYPE_MIN_VALUE (domain)))); | |
4738 | position = size_binop (MULT_EXPR, position, | |
4739 | convert (ssizetype, | |
4740 | TYPE_SIZE_UNIT (elttype))); | |
4741 | ||
e1a43f73 | 4742 | pos_rtx = expand_expr (position, 0, VOIDmode, 0); |
0d4903b8 RK |
4743 | xtarget = offset_address (target, pos_rtx, |
4744 | highest_pow2_factor (position)); | |
4745 | xtarget = adjust_address (xtarget, mode, 0); | |
e1a43f73 | 4746 | if (TREE_CODE (value) == CONSTRUCTOR) |
04050c69 | 4747 | store_constructor (value, xtarget, cleared, |
b7010412 | 4748 | bitsize / BITS_PER_UNIT); |
e1a43f73 PB |
4749 | else |
4750 | store_expr (value, xtarget, 0); | |
4751 | ||
4752 | expand_exit_loop_if_false (loop, | |
4753 | build (LT_EXPR, integer_type_node, | |
4754 | index, hi_index)); | |
4755 | ||
4756 | expand_increment (build (PREINCREMENT_EXPR, | |
4757 | TREE_TYPE (index), | |
7b8b9722 | 4758 | index, integer_one_node), 0, 0); |
e1a43f73 PB |
4759 | expand_end_loop (); |
4760 | emit_label (loop_end); | |
e1a43f73 PB |
4761 | } |
4762 | } | |
19caa751 RK |
4763 | else if ((index != 0 && ! host_integerp (index, 0)) |
4764 | || ! host_integerp (TYPE_SIZE (elttype), 1)) | |
03dc44a6 | 4765 | { |
03dc44a6 RS |
4766 | tree position; |
4767 | ||
5b6c44ff | 4768 | if (index == 0) |
fed3cef0 | 4769 | index = ssize_int (1); |
5b6c44ff | 4770 | |
e1a43f73 | 4771 | if (minelt) |
fed3cef0 RK |
4772 | index = convert (ssizetype, |
4773 | fold (build (MINUS_EXPR, index, | |
4774 | TYPE_MIN_VALUE (domain)))); | |
19caa751 | 4775 | |
fed3cef0 RK |
4776 | position = size_binop (MULT_EXPR, index, |
4777 | convert (ssizetype, | |
4778 | TYPE_SIZE_UNIT (elttype))); | |
0d4903b8 RK |
4779 | xtarget = offset_address (target, |
4780 | expand_expr (position, 0, VOIDmode, 0), | |
4781 | highest_pow2_factor (position)); | |
4782 | xtarget = adjust_address (xtarget, mode, 0); | |
e1a43f73 | 4783 | store_expr (value, xtarget, 0); |
03dc44a6 RS |
4784 | } |
4785 | else | |
4786 | { | |
4787 | if (index != 0) | |
19caa751 RK |
4788 | bitpos = ((tree_low_cst (index, 0) - minelt) |
4789 | * tree_low_cst (TYPE_SIZE (elttype), 1)); | |
03dc44a6 | 4790 | else |
19caa751 RK |
4791 | bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1)); |
4792 | ||
10b76d73 | 4793 | if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target) |
e6834654 | 4794 | && TREE_CODE (type) == ARRAY_TYPE |
10b76d73 RK |
4795 | && TYPE_NONALIASED_COMPONENT (type)) |
4796 | { | |
4797 | target = copy_rtx (target); | |
4798 | MEM_KEEP_ALIAS_SET_P (target) = 1; | |
4799 | } | |
4800 | ||
c5c76735 | 4801 | store_constructor_field (target, bitsize, bitpos, mode, value, |
04050c69 | 4802 | type, cleared, get_alias_set (elttype)); |
23cb1766 | 4803 | |
03dc44a6 | 4804 | } |
bbf6f052 RK |
4805 | } |
4806 | } | |
19caa751 | 4807 | |
3a94c984 | 4808 | /* Set constructor assignments. */ |
071a6595 PB |
4809 | else if (TREE_CODE (type) == SET_TYPE) |
4810 | { | |
e1a43f73 | 4811 | tree elt = CONSTRUCTOR_ELTS (exp); |
19caa751 | 4812 | unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits; |
071a6595 PB |
4813 | tree domain = TYPE_DOMAIN (type); |
4814 | tree domain_min, domain_max, bitlength; | |
4815 | ||
9faa82d8 | 4816 | /* The default implementation strategy is to extract the constant |
071a6595 PB |
4817 | parts of the constructor, use that to initialize the target, |
4818 | and then "or" in whatever non-constant ranges we need in addition. | |
4819 | ||
4820 | If a large set is all zero or all ones, it is | |
4821 | probably better to set it using memset (if available) or bzero. | |
4822 | Also, if a large set has just a single range, it may also be | |
4823 | better to first clear the whole set (using | |
0f41302f | 4824 | bzero/memset), and then set the bits we want. */ | |
3a94c984 | 4825 | |
0f41302f | 4826 | /* Check for all zeros. */ |
9376fcd6 | 4827 | if (elt == NULL_TREE && size > 0) |
071a6595 | 4828 | { |
e1a43f73 | 4829 | if (!cleared) |
8ac61af7 | 4830 | clear_storage (target, GEN_INT (size)); |
071a6595 PB |
4831 | return; |
4832 | } | |
4833 | ||
071a6595 PB |
4834 | domain_min = convert (sizetype, TYPE_MIN_VALUE (domain)); |
4835 | domain_max = convert (sizetype, TYPE_MAX_VALUE (domain)); | |
4836 | bitlength = size_binop (PLUS_EXPR, | |
fed3cef0 RK |
4837 | size_diffop (domain_max, domain_min), |
4838 | ssize_int (1)); | |
071a6595 | 4839 | |
19caa751 | 4840 | nbits = tree_low_cst (bitlength, 1); |
e1a43f73 PB |
4841 | |
4842 | /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that | |
4843 | are "complicated" (more than one range), initialize (the | |
3a94c984 | 4844 | constant parts) by copying from a constant. */ |
e1a43f73 PB |
4845 | if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD |
4846 | || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE)) | |
071a6595 | 4847 | { |
19caa751 | 4848 | unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp)); |
b4ee5a72 | 4849 | enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1); |
0f41302f | 4850 | char *bit_buffer = (char *) alloca (nbits); |
b4ee5a72 | 4851 | HOST_WIDE_INT word = 0; |
19caa751 RK |
4852 | unsigned int bit_pos = 0; |
4853 | unsigned int ibit = 0; | |
4854 | unsigned int offset = 0; /* In bytes from beginning of set. */ | |
4855 | ||
e1a43f73 | 4856 | elt = get_set_constructor_bits (exp, bit_buffer, nbits); |
b4ee5a72 | 4857 | for (;;) |
071a6595 | 4858 | { |
b4ee5a72 PB |
4859 | if (bit_buffer[ibit]) |
4860 | { | |
b09f3348 | 4861 | if (BYTES_BIG_ENDIAN) |
b4ee5a72 PB |
4862 | word |= (1 << (set_word_size - 1 - bit_pos)); |
4863 | else | |
4864 | word |= 1 << bit_pos; | |
4865 | } | |
19caa751 | 4866 | |
b4ee5a72 PB |
4867 | bit_pos++; ibit++; |
4868 | if (bit_pos >= set_word_size || ibit == nbits) | |
071a6595 | 4869 | { |
e1a43f73 PB |
4870 | if (word != 0 || ! cleared) |
4871 | { | |
4872 | rtx datum = GEN_INT (word); | |
4873 | rtx to_rtx; | |
19caa751 | 4874 | |
0f41302f MS |
4875 | /* The assumption here is that it is safe to use |
4876 | XEXP if the set is multi-word, but not if | |
4877 | it's single-word. */ | |
e1a43f73 | 4878 | if (GET_CODE (target) == MEM) |
f4ef873c | 4879 | to_rtx = adjust_address (target, mode, offset); |
3a94c984 | 4880 | else if (offset == 0) |
e1a43f73 PB |
4881 | to_rtx = target; |
4882 | else | |
4883 | abort (); | |
4884 | emit_move_insn (to_rtx, datum); | |
4885 | } | |
19caa751 | 4886 | |
b4ee5a72 PB |
4887 | if (ibit == nbits) |
4888 | break; | |
4889 | word = 0; | |
4890 | bit_pos = 0; | |
4891 | offset += set_word_size / BITS_PER_UNIT; | |
071a6595 PB |
4892 | } |
4893 | } | |
071a6595 | 4894 | } |
e1a43f73 | 4895 | else if (!cleared) |
19caa751 RK |
4896 | /* Don't bother clearing storage if the set is all ones. */ |
4897 | if (TREE_CHAIN (elt) != NULL_TREE | |
4898 | || (TREE_PURPOSE (elt) == NULL_TREE | |
4899 | ? nbits != 1 | |
4900 | : ( ! host_integerp (TREE_VALUE (elt), 0) | |
4901 | || ! host_integerp (TREE_PURPOSE (elt), 0) | |
4902 | || (tree_low_cst (TREE_VALUE (elt), 0) | |
4903 | - tree_low_cst (TREE_PURPOSE (elt), 0) + 1 | |
4904 | != (HOST_WIDE_INT) nbits)))) | |
8ac61af7 | 4905 | clear_storage (target, expr_size (exp)); |
3a94c984 | 4906 | |
e1a43f73 | 4907 | for (; elt != NULL_TREE; elt = TREE_CHAIN (elt)) |
071a6595 | 4908 | { |
3a94c984 | 4909 | /* Start of range of element or NULL. */ |
071a6595 | 4910 | tree startbit = TREE_PURPOSE (elt); |
3a94c984 | 4911 | /* End of range of element, or element value. */ |
071a6595 | 4912 | tree endbit = TREE_VALUE (elt); |
381127e8 | 4913 | #ifdef TARGET_MEM_FUNCTIONS |
071a6595 | 4914 | HOST_WIDE_INT startb, endb; |
381127e8 | 4915 | #endif |
19caa751 | 4916 | rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx; |
071a6595 PB |
4917 | |
4918 | bitlength_rtx = expand_expr (bitlength, | |
19caa751 | 4919 | NULL_RTX, MEM, EXPAND_CONST_ADDRESS); |
071a6595 | 4920 | |
3a94c984 | 4921 | /* Handle non-range tuple element like [ expr ]. */ |
071a6595 PB |
4922 | if (startbit == NULL_TREE) |
4923 | { | |
4924 | startbit = save_expr (endbit); | |
4925 | endbit = startbit; | |
4926 | } | |
19caa751 | 4927 | |
071a6595 PB |
4928 | startbit = convert (sizetype, startbit); |
4929 | endbit = convert (sizetype, endbit); | |
4930 | if (! integer_zerop (domain_min)) | |
4931 | { | |
4932 | startbit = size_binop (MINUS_EXPR, startbit, domain_min); | |
4933 | endbit = size_binop (MINUS_EXPR, endbit, domain_min); | |
4934 | } | |
3a94c984 | 4935 | startbit_rtx = expand_expr (startbit, NULL_RTX, MEM, |
071a6595 | 4936 | EXPAND_CONST_ADDRESS); |
3a94c984 | 4937 | endbit_rtx = expand_expr (endbit, NULL_RTX, MEM, |
071a6595 PB |
4938 | EXPAND_CONST_ADDRESS); |
4939 | ||
4940 | if (REG_P (target)) | |
4941 | { | |
1da68f56 RK |
4942 | targetx |
4943 | = assign_temp | |
4944 | ((build_qualified_type (type_for_mode (GET_MODE (target), 0), | |
4945 | TYPE_QUAL_CONST)), | |
4946 | 0, 1, 1); | |
071a6595 PB |
4947 | emit_move_insn (targetx, target); |
4948 | } | |
19caa751 | 4949 | |
071a6595 PB |
4950 | else if (GET_CODE (target) == MEM) |
4951 | targetx = target; | |
4952 | else | |
4953 | abort (); | |
4954 | ||
4955 | #ifdef TARGET_MEM_FUNCTIONS | |
4956 | /* Optimization: If startbit and endbit are | |
9faa82d8 | 4957 | constants divisible by BITS_PER_UNIT, |
0f41302f | 4958 | call memset instead. */ |
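		  /* E.g. a set range covering bits [8 .. 23] starts and ends on
		     byte boundaries (assuming 8-bit units): startb == 8 and
		     endb == 24, so the two whole bytes are set with a single
		     memset of -1.  */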
071a6595 PB |
4959 | if (TREE_CODE (startbit) == INTEGER_CST |
4960 | && TREE_CODE (endbit) == INTEGER_CST | |
4961 | && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0 | |
e1a43f73 | 4962 | && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0) |
071a6595 | 4963 | { |
ebb1b59a | 4964 | emit_library_call (memset_libfunc, LCT_NORMAL, |
071a6595 | 4965 | VOIDmode, 3, |
e1a43f73 PB |
4966 | plus_constant (XEXP (targetx, 0), |
4967 | startb / BITS_PER_UNIT), | |
071a6595 | 4968 | Pmode, |
3b6f75e2 | 4969 | constm1_rtx, TYPE_MODE (integer_type_node), |
071a6595 | 4970 | GEN_INT ((endb - startb) / BITS_PER_UNIT), |
3b6f75e2 | 4971 | TYPE_MODE (sizetype)); |
071a6595 PB |
4972 | } |
4973 | else | |
4974 | #endif | |
19caa751 | 4975 | emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"), |
ebb1b59a BS |
4976 | LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0), |
4977 | Pmode, bitlength_rtx, TYPE_MODE (sizetype), | |
19caa751 RK |
4978 | startbit_rtx, TYPE_MODE (sizetype), |
4979 | endbit_rtx, TYPE_MODE (sizetype)); | |
4980 | ||
071a6595 PB |
4981 | if (REG_P (target)) |
4982 | emit_move_insn (target, targetx); | |
4983 | } | |
4984 | } | |
bbf6f052 RK |
4985 | |
4986 | else | |
4987 | abort (); | |
4988 | } | |
4989 | ||
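The RANGE_EXPR handling in the array branch above is reached from GNU C designated range initializers. A hedged illustration (hypothetical arrays, assuming 32-bit int; 40 * 8 is the bit threshold tested above, and use is a made-up consumer): a small constant range is unrolled into individual stores, while a large one gets the generated index loop after the up-front clear.

extern void use (int *, int *);

void
fill (void)
{
  int small_tbl[16] = { [2 ... 9]   = 7 };  /* 8 * 32 = 256 bits <= 40*8: unrolled */
  int big_tbl[400]  = { [0 ... 399] = 9 };  /* too large: the runtime loop is used */
  use (small_tbl, big_tbl);
}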
4990 | /* Store the value of EXP (an expression tree) | |
4991 | into a subfield of TARGET which has mode MODE and occupies | |
4992 | BITSIZE bits, starting BITPOS bits from the start of TARGET. | |
4993 | If MODE is VOIDmode, it means that we are storing into a bit-field. | |
4994 | ||
4995 | If VALUE_MODE is VOIDmode, return nothing in particular. | |
4996 | UNSIGNEDP is not used in this case. | |
4997 | ||
4998 | Otherwise, return an rtx for the value stored. This rtx | |
4999 | has mode VALUE_MODE if that is convenient to do. | |
5000 | In this case, UNSIGNEDP must be nonzero if the value is an unsigned type. | |
5001 | ||
a06ef755 | 5002 | TYPE is the type of the underlying object, |
ece32014 MM |
5003 | |
5004 | ALIAS_SET is the alias set for the destination. This value will | |
5005 | (in general) be different from that for TARGET, since TARGET is a | |
5006 | reference to the containing structure. */ | |
bbf6f052 RK |
5007 | |
5008 | static rtx | |
a06ef755 RK |
5009 | store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type, |
5010 | alias_set) | |
bbf6f052 | 5011 | rtx target; |
770ae6cc RK |
5012 | HOST_WIDE_INT bitsize; |
5013 | HOST_WIDE_INT bitpos; | |
bbf6f052 RK |
5014 | enum machine_mode mode; |
5015 | tree exp; | |
5016 | enum machine_mode value_mode; | |
5017 | int unsignedp; | |
a06ef755 | 5018 | tree type; |
ece32014 | 5019 | int alias_set; |
bbf6f052 | 5020 | { |
906c4e36 | 5021 | HOST_WIDE_INT width_mask = 0; |
bbf6f052 | 5022 | |
e9a25f70 JL |
5023 | if (TREE_CODE (exp) == ERROR_MARK) |
5024 | return const0_rtx; | |
5025 | ||
2be6a7e9 RK |
5026 | /* If we have nothing to store, do nothing unless the expression has |
5027 | side-effects. */ | |
5028 | if (bitsize == 0) | |
5029 | return expand_expr (exp, const0_rtx, VOIDmode, 0); | |
a06ef755 | 5030 | else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT) | |
906c4e36 | 5031 | width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1; |
bbf6f052 RK |
5032 | |
5033 | /* If we are storing into an unaligned field of an aligned union that is | |
5034 | in a register, we may have the mode of TARGET being an integer mode but | |
5035 | MODE == BLKmode. In that case, get an aligned object whose size and | |
5036 | alignment are the same as TARGET and store TARGET into it (we can avoid | |
5037 | the store if the field being stored is the entire width of TARGET). Then | |
5038 | call ourselves recursively to store the field into a BLKmode version of | |
5039 | that object. Finally, load from the object into TARGET. This is not | |
5040 | very efficient in general, but should only be slightly more expensive | |
5041 | than the otherwise-required unaligned accesses. Perhaps this can be | |
5042 | cleaned up later. */ | |
5043 | ||
5044 | if (mode == BLKmode | |
5045 | && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG)) | |
5046 | { | |
1da68f56 RK |
5047 | rtx object |
5048 | = assign_temp | |
a06ef755 | 5049 | (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST), |
1da68f56 | 5050 | 0, 1, 1); |
c4e59f51 | 5051 | rtx blk_object = adjust_address (object, BLKmode, 0); |
bbf6f052 | 5052 | |
8752c357 | 5053 | if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target))) |
bbf6f052 RK |
5054 | emit_move_insn (object, target); |
5055 | ||
a06ef755 RK |
5056 | store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type, |
5057 | alias_set); | |
bbf6f052 RK |
5058 | |
5059 | emit_move_insn (target, object); | |
5060 | ||
a06ef755 | 5061 | /* We want to return the BLKmode version of the data. */ |
46093b97 | 5062 | return blk_object; |
bbf6f052 | 5063 | } |
c3b247b4 JM |
5064 | |
5065 | if (GET_CODE (target) == CONCAT) | |
5066 | { | |
5067 | /* We're storing into a struct containing a single __complex. */ | |
5068 | ||
5069 | if (bitpos != 0) | |
5070 | abort (); | |
5071 | return store_expr (exp, target, 0); | |
5072 | } | |
bbf6f052 RK |
5073 | |
5074 | /* If the structure is in a register or if the component | |
5075 | is a bit field, we cannot use addressing to access it. | |
5076 | Use bit-field techniques or SUBREG to store in it. */ | |
5077 | ||
4fa52007 | 5078 | if (mode == VOIDmode |
6ab06cbb JW |
5079 | || (mode != BLKmode && ! direct_store[(int) mode] |
5080 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
5081 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) | |
4fa52007 | 5082 | || GET_CODE (target) == REG |
c980ac49 | 5083 | || GET_CODE (target) == SUBREG |
ccc98036 RS |
5084 | /* If the field isn't aligned enough to store as an ordinary memref, |
5085 | store it as a bit field. */ | |
04050c69 RK |
5086 | || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)) |
5087 | && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode) | |
14a774a9 | 5088 | || bitpos % GET_MODE_ALIGNMENT (mode))) |
14a774a9 RK |
5089 | /* If the RHS and field are a constant size and the size of the |
5090 | RHS isn't the same size as the bitfield, we must use bitfield | |
5091 | operations. */ | |
05bccae2 RK |
5092 | || (bitsize >= 0 |
5093 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | |
5094 | && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)) | |
bbf6f052 | 5095 | { |
906c4e36 | 5096 | rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); |
bbd6cf73 | 5097 | |
ef19912d RK |
5098 | /* If BITSIZE is narrower than the size of the type of EXP |
5099 | we will be narrowing TEMP. Normally, what's wanted are the | |
5100 | low-order bits. However, if EXP's type is a record and this is | |
5101 | a big-endian machine, we want the upper BITSIZE bits. */ | |
5102 | if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT | |
65a07688 | 5103 | && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp)) |
ef19912d RK |
5104 | && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE) |
5105 | temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp, | |
5106 | size_int (GET_MODE_BITSIZE (GET_MODE (temp)) | |
5107 | - bitsize), | |
5108 | temp, 1); | |
5109 | ||
bbd6cf73 RK |
5110 | /* Unless MODE is VOIDmode or BLKmode, convert TEMP to |
5111 | MODE. */ | |
5112 | if (mode != VOIDmode && mode != BLKmode | |
5113 | && mode != TYPE_MODE (TREE_TYPE (exp))) | |
5114 | temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1); | |
5115 | ||
a281e72d RK |
5116 | /* If the modes of TARGET and TEMP are both BLKmode, both |
5117 | must be in memory and BITPOS must be aligned on a byte | |
5118 | boundary. If so, we simply do a block copy. */ | |
5119 | if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode) | |
5120 | { | |
5121 | if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM | |
5122 | || bitpos % BITS_PER_UNIT != 0) | |
5123 | abort (); | |
5124 | ||
f4ef873c | 5125 | target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); |
a281e72d | 5126 | emit_block_move (target, temp, |
a06ef755 RK |
5127 | GEN_INT ((bitsize + BITS_PER_UNIT - 1) |
5128 | / BITS_PER_UNIT)); | |
a281e72d RK |
5129 | |
5130 | return value_mode == VOIDmode ? const0_rtx : target; | |
5131 | } | |
5132 | ||
bbf6f052 | 5133 | /* Store the value in the bitfield. */ |
a06ef755 RK |
5134 | store_bit_field (target, bitsize, bitpos, mode, temp, |
5135 | int_size_in_bytes (type)); | |
5136 | ||
bbf6f052 RK |
5137 | if (value_mode != VOIDmode) |
5138 | { | |
04050c69 RK |
5139 | /* The caller wants an rtx for the value. |
5140 | If possible, avoid refetching from the bitfield itself. */ | |
bbf6f052 RK |
5141 | if (width_mask != 0 |
5142 | && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))) | |
5c4d7cfb | 5143 | { |
9074de27 | 5144 | tree count; |
5c4d7cfb | 5145 | enum machine_mode tmode; |
86a2c12a | 5146 | |
5c4d7cfb | 5147 | tmode = GET_MODE (temp); |
86a2c12a RS |
5148 | if (tmode == VOIDmode) |
5149 | tmode = value_mode; | |
22273300 JJ |
5150 | |
5151 | if (unsignedp) | |
5152 | return expand_and (tmode, temp, | |
2496c7bd | 5153 | gen_int_mode (width_mask, tmode), |
22273300 JJ |
5154 | NULL_RTX); |
5155 | ||
5c4d7cfb RS |
5156 | count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0); |
5157 | temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0); | |
5158 | return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0); | |
5159 | } | |
04050c69 | 5160 | |
bbf6f052 | 5161 | return extract_bit_field (target, bitsize, bitpos, unsignedp, |
04050c69 | 5162 | NULL_RTX, value_mode, VOIDmode, |
a06ef755 | 5163 | int_size_in_bytes (type)); |
bbf6f052 RK |
5164 | } |
5165 | return const0_rtx; | |
5166 | } | |
5167 | else | |
5168 | { | |
5169 | rtx addr = XEXP (target, 0); | |
a06ef755 | 5170 | rtx to_rtx = target; |
bbf6f052 RK |
5171 | |
5172 | /* If a value is wanted, it must be the lhs; | |
5173 | so make the address stable for multiple use. */ | |
5174 | ||
5175 | if (value_mode != VOIDmode && GET_CODE (addr) != REG | |
5176 | && ! CONSTANT_ADDRESS_P (addr) | |
5177 | /* A frame-pointer reference is already stable. */ | |
5178 | && ! (GET_CODE (addr) == PLUS | |
5179 | && GET_CODE (XEXP (addr, 1)) == CONST_INT | |
5180 | && (XEXP (addr, 0) == virtual_incoming_args_rtx | |
5181 | || XEXP (addr, 0) == virtual_stack_vars_rtx))) | |
a06ef755 | 5182 | to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr)); |
bbf6f052 RK |
5183 | |
5184 | /* Now build a reference to just the desired component. */ | |
5185 | ||
a06ef755 RK |
5186 | to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT); |
5187 | ||
5188 | if (to_rtx == target) | |
5189 | to_rtx = copy_rtx (to_rtx); | |
792760b9 | 5190 | |
c6df88cb | 5191 | MEM_SET_IN_STRUCT_P (to_rtx, 1); |
10b76d73 | 5192 | if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0) |
a06ef755 | 5193 | set_mem_alias_set (to_rtx, alias_set); |
bbf6f052 RK |
5194 | |
5195 | return store_expr (exp, to_rtx, value_mode != VOIDmode); | |
5196 | } | |
5197 | } | |
5198 | \f | |
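A small illustration of store_field's bit-field path (hypothetical struct): MODE is VOIDmode for the member below, so the value is inserted with store_bit_field; and since the value of the assignment is wanted, it is produced by ANDing TEMP with width_mask rather than refetching the bits from memory.

struct bits { unsigned f : 5; unsigned g : 11; };

unsigned
set_f (struct bits *s, unsigned v)
{
  /* store_field with bitsize == 5, bitpos == 0, mode == VOIDmode.  */
  return s->f = v;   /* result comes from the width_mask AND, not a reload */
}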
5199 | /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF, | |
b4e3fabb RK |
5200 | an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these |
5201 | codes and find the ultimate containing object, which we return. | |
bbf6f052 RK |
5202 | |
5203 | We set *PBITSIZE to the size in bits that we want, *PBITPOS to the | |
5204 | bit position, and *PUNSIGNEDP to the signedness of the field. | |
7bb0943f RS |
5205 | If the position of the field is variable, we store a tree |
5206 | giving the variable offset (in units) in *POFFSET. | |
5207 | This offset is in addition to the bit position. | |
5208 | If the position is not variable, we store 0 in *POFFSET. | |
bbf6f052 RK |
5209 | |
5210 | If any of the extraction expressions is volatile, | |
5211 | we store 1 in *PVOLATILEP. Otherwise we don't change that. | |
5212 | ||
5213 | If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it | |
5214 | is a mode that can be used to access the field. In that case, *PBITSIZE | |
e7c33f54 RK |
5215 | is redundant. |
5216 | ||
5217 | If the field describes a variable-sized object, *PMODE is set to | |
5218 | VOIDmode and *PBITSIZE is set to -1. An access cannot be made in | |
6d2f8887 | 5219 | this case, but the address of the object can be found. */ |
bbf6f052 RK |
5220 | |
5221 | tree | |
4969d05d | 5222 | get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, |
a06ef755 | 5223 | punsignedp, pvolatilep) |
bbf6f052 | 5224 | tree exp; |
770ae6cc RK |
5225 | HOST_WIDE_INT *pbitsize; |
5226 | HOST_WIDE_INT *pbitpos; | |
7bb0943f | 5227 | tree *poffset; |
bbf6f052 RK |
5228 | enum machine_mode *pmode; |
5229 | int *punsignedp; | |
5230 | int *pvolatilep; | |
5231 | { | |
5232 | tree size_tree = 0; | |
5233 | enum machine_mode mode = VOIDmode; | |
fed3cef0 | 5234 | tree offset = size_zero_node; |
770ae6cc | 5235 | tree bit_offset = bitsize_zero_node; |
738cc472 | 5236 | tree placeholder_ptr = 0; |
770ae6cc | 5237 | tree tem; |
bbf6f052 | 5238 | |
770ae6cc RK |
5239 | /* First get the mode, signedness, and size. We do this from just the |
5240 | outermost expression. */ | |
bbf6f052 RK |
5241 | if (TREE_CODE (exp) == COMPONENT_REF) |
5242 | { | |
5243 | size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); | |
5244 | if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) | |
5245 | mode = DECL_MODE (TREE_OPERAND (exp, 1)); | |
770ae6cc | 5246 | |
bbf6f052 RK |
5247 | *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1)); |
5248 | } | |
5249 | else if (TREE_CODE (exp) == BIT_FIELD_REF) | |
5250 | { | |
5251 | size_tree = TREE_OPERAND (exp, 1); | |
5252 | *punsignedp = TREE_UNSIGNED (exp); | |
5253 | } | |
5254 | else | |
5255 | { | |
5256 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
770ae6cc RK |
5257 | *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); |
5258 | ||
ab87f8c8 JL |
5259 | if (mode == BLKmode) |
5260 | size_tree = TYPE_SIZE (TREE_TYPE (exp)); | |
770ae6cc RK |
5261 | else |
5262 | *pbitsize = GET_MODE_BITSIZE (mode); | |
bbf6f052 | 5263 | } |
3a94c984 | 5264 | |
770ae6cc | 5265 | if (size_tree != 0) |
bbf6f052 | 5266 | { |
770ae6cc | 5267 | if (! host_integerp (size_tree, 1)) |
e7c33f54 RK |
5268 | mode = BLKmode, *pbitsize = -1; |
5269 | else | |
770ae6cc | 5270 | *pbitsize = tree_low_cst (size_tree, 1); |
bbf6f052 RK |
5271 | } |
5272 | ||
5273 | /* Compute cumulative bit-offset for nested component-refs and array-refs, | |
5274 | and find the ultimate containing object. */ | |
bbf6f052 RK |
5275 | while (1) |
5276 | { | |
770ae6cc RK |
5277 | if (TREE_CODE (exp) == BIT_FIELD_REF) |
5278 | bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2)); | |
5279 | else if (TREE_CODE (exp) == COMPONENT_REF) | |
bbf6f052 | 5280 | { |
770ae6cc RK |
5281 | tree field = TREE_OPERAND (exp, 1); |
5282 | tree this_offset = DECL_FIELD_OFFSET (field); | |
bbf6f052 | 5283 | |
e7f3c83f RK |
5284 | /* If this field hasn't been filled in yet, don't go |
5285 | past it. This should only happen when folding expressions | |
5286 | made during type construction. */ | |
770ae6cc | 5287 | if (this_offset == 0) |
e7f3c83f | 5288 | break; |
770ae6cc RK |
5289 | else if (! TREE_CONSTANT (this_offset) |
5290 | && contains_placeholder_p (this_offset)) | |
5291 | this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp); | |
e7f3c83f | 5292 | |
7156dead | 5293 | offset = size_binop (PLUS_EXPR, offset, this_offset); |
770ae6cc RK |
5294 | bit_offset = size_binop (PLUS_EXPR, bit_offset, |
5295 | DECL_FIELD_BIT_OFFSET (field)); | |
e6d8c385 | 5296 | |
a06ef755 | 5297 | /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */ |
bbf6f052 | 5298 | } |
7156dead | 5299 | |
b4e3fabb RK |
5300 | else if (TREE_CODE (exp) == ARRAY_REF |
5301 | || TREE_CODE (exp) == ARRAY_RANGE_REF) | |
bbf6f052 | 5302 | { |
742920c7 | 5303 | tree index = TREE_OPERAND (exp, 1); |
b4e3fabb RK |
5304 | tree array = TREE_OPERAND (exp, 0); |
5305 | tree domain = TYPE_DOMAIN (TREE_TYPE (array)); | |
770ae6cc | 5306 | tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0); |
b4e3fabb | 5307 | tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array))); |
742920c7 | 5308 | |
770ae6cc RK |
5309 | /* We assume all arrays have sizes that are a multiple of a byte. |
5310 | First subtract the lower bound, if any, in the type of the | |
5311 | index, then convert to sizetype and multiply by the size of the | |
5312 | array element. */ | |
5313 | if (low_bound != 0 && ! integer_zerop (low_bound)) | |
5314 | index = fold (build (MINUS_EXPR, TREE_TYPE (index), | |
5315 | index, low_bound)); | |
f8dac6eb | 5316 | |
7156dead RK |
5317 | /* If the index has a self-referential type, pass it to a |
5318 | WITH_RECORD_EXPR; if the component size is, pass our | |
5319 | component to one. */ | |
770ae6cc RK |
5320 | if (! TREE_CONSTANT (index) |
5321 | && contains_placeholder_p (index)) | |
5322 | index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp); | |
7156dead RK |
5323 | if (! TREE_CONSTANT (unit_size) |
5324 | && contains_placeholder_p (unit_size)) | |
b4e3fabb | 5325 | unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array); |
742920c7 | 5326 | |
770ae6cc RK |
5327 | offset = size_binop (PLUS_EXPR, offset, |
5328 | size_binop (MULT_EXPR, | |
5329 | convert (sizetype, index), | |
7156dead | 5330 | unit_size)); |
bbf6f052 | 5331 | } |
7156dead | 5332 | |
738cc472 RK |
5333 | else if (TREE_CODE (exp) == PLACEHOLDER_EXPR) |
5334 | { | |
70072ed9 RK |
5335 | tree new = find_placeholder (exp, &placeholder_ptr); |
5336 | ||
5337 | /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR. | |
5338 | We might have been called from tree optimization where we | |
5339 | haven't set up an object yet. */ | |
5340 | if (new == 0) | |
5341 | break; | |
5342 | else | |
5343 | exp = new; | |
5344 | ||
738cc472 RK |
5345 | continue; |
5346 | } | |
bbf6f052 | 5347 | else if (TREE_CODE (exp) != NON_LVALUE_EXPR |
ed239f5a | 5348 | && TREE_CODE (exp) != VIEW_CONVERT_EXPR |
bbf6f052 RK |
5349 | && ! ((TREE_CODE (exp) == NOP_EXPR |
5350 | || TREE_CODE (exp) == CONVERT_EXPR) | |
5351 | && (TYPE_MODE (TREE_TYPE (exp)) | |
5352 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))) | |
5353 | break; | |
7bb0943f RS |
5354 | |
5355 | /* If any reference in the chain is volatile, the effect is volatile. */ | |
5356 | if (TREE_THIS_VOLATILE (exp)) | |
5357 | *pvolatilep = 1; | |
839c4796 | 5358 | |
bbf6f052 RK |
5359 | exp = TREE_OPERAND (exp, 0); |
5360 | } | |
5361 | ||
770ae6cc RK |
5362 | /* If OFFSET is constant, see if we can return the whole thing as a |
5363 | constant bit position. Otherwise, split it up. */ | |
5364 | if (host_integerp (offset, 0) | |
5365 | && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset), | |
5366 | bitsize_unit_node)) | |
5367 | && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset)) | |
5368 | && host_integerp (tem, 0)) | |
5369 | *pbitpos = tree_low_cst (tem, 0), *poffset = 0; | |
5370 | else | |
5371 | *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset; | |
b50d17a1 | 5372 | |
bbf6f052 | 5373 | *pmode = mode; |
bbf6f052 RK |
5374 | return exp; |
5375 | } | |
921b3427 | 5376 | |
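A hedged walk-through (hypothetical types, assuming 32-bit int): for the nested reference below, get_inner_reference peels the outer COMPONENT_REF and the ARRAY_REF and returns the VAR_DECL for s, setting *pbitsize to 32 and *pbitpos to 32 (the position of .b inside one array element), leaving the variable part i * sizeof (struct elt) as a tree in *poffset, with *pmode the mode of int on such a target.

struct elt  { int a; int b; };
struct wrap { struct elt v[10]; };
struct wrap s;

int get (int i) { return s.v[i].b; }   /* the reference decomposed above */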
ed239f5a RK |
5377 | /* Return 1 if T is an expression that get_inner_reference handles. */ |
5378 | ||
5379 | int | |
5380 | handled_component_p (t) | |
5381 | tree t; | |
5382 | { | |
5383 | switch (TREE_CODE (t)) | |
5384 | { | |
5385 | case BIT_FIELD_REF: | |
5386 | case COMPONENT_REF: | |
5387 | case ARRAY_REF: | |
5388 | case ARRAY_RANGE_REF: | |
5389 | case NON_LVALUE_EXPR: | |
5390 | case VIEW_CONVERT_EXPR: | |
5391 | return 1; | |
5392 | ||
5393 | case NOP_EXPR: | |
5394 | case CONVERT_EXPR: | |
5395 | return (TYPE_MODE (TREE_TYPE (t)) | |
5396 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0)))); | |
5397 | ||
5398 | default: | |
5399 | return 0; | |
5400 | } | |
5401 | } | |
bbf6f052 | 5402 | \f |
3fe44edd RK |
5403 | /* Given an rtx VALUE that may contain additions and multiplications, return |
5404 | an equivalent value that just refers to a register, memory, or constant. | |
5405 | This is done by generating instructions to perform the arithmetic and | |
5406 | returning a pseudo-register containing the value. | |
c45a13a6 RK |
5407 | |
5408 | The returned value may be a REG, SUBREG, MEM or constant. */ | |
bbf6f052 RK |
5409 | |
5410 | rtx | |
5411 | force_operand (value, target) | |
5412 | rtx value, target; | |
5413 | { | |
b3694847 | 5414 | optab binoptab = 0; |
bbf6f052 RK |
5415 | /* Use a temporary to force order of execution of calls to |
5416 | `force_operand'. */ | |
5417 | rtx tmp; | |
b3694847 | 5418 | rtx op2; |
bbf6f052 | 5419 | /* Use subtarget as the target for operand 0 of a binary operation. */ |
b3694847 | 5420 | rtx subtarget = get_subtarget (target); |
bbf6f052 | 5421 | |
8b015896 | 5422 | /* Check for a PIC address load. */ |
12beba6f | 5423 | if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS) |
8b015896 RH |
5424 | && XEXP (value, 0) == pic_offset_table_rtx |
5425 | && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF | |
5426 | || GET_CODE (XEXP (value, 1)) == LABEL_REF | |
5427 | || GET_CODE (XEXP (value, 1)) == CONST)) | |
5428 | { | |
5429 | if (!subtarget) | |
5430 | subtarget = gen_reg_rtx (GET_MODE (value)); | |
5431 | emit_move_insn (subtarget, value); | |
5432 | return subtarget; | |
5433 | } | |
5434 | ||
bbf6f052 RK |
5435 | if (GET_CODE (value) == PLUS) |
5436 | binoptab = add_optab; | |
5437 | else if (GET_CODE (value) == MINUS) | |
5438 | binoptab = sub_optab; | |
5439 | else if (GET_CODE (value) == MULT) | |
5440 | { | |
5441 | op2 = XEXP (value, 1); | |
5442 | if (!CONSTANT_P (op2) | |
5443 | && !(GET_CODE (op2) == REG && op2 != subtarget)) | |
5444 | subtarget = 0; | |
5445 | tmp = force_operand (XEXP (value, 0), subtarget); | |
5446 | return expand_mult (GET_MODE (value), tmp, | |
906c4e36 | 5447 | force_operand (op2, NULL_RTX), |
91ce572a | 5448 | target, 1); |
bbf6f052 RK |
5449 | } |
5450 | ||
5451 | if (binoptab) | |
5452 | { | |
5453 | op2 = XEXP (value, 1); | |
5454 | if (!CONSTANT_P (op2) | |
5455 | && !(GET_CODE (op2) == REG && op2 != subtarget)) | |
5456 | subtarget = 0; | |
5457 | if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT) | |
5458 | { | |
5459 | binoptab = add_optab; | |
5460 | op2 = negate_rtx (GET_MODE (value), op2); | |
5461 | } | |
5462 | ||
5463 | /* Check for an addition with OP2 a constant integer and our first | |
5464 | operand a PLUS of a virtual register and something else. In that | |
5465 | case, we want to emit the sum of the virtual register and the | |
5466 | constant first and then add the other value. This allows virtual | |
5467 | register instantiation to simply modify the constant rather than | |
5468 | creating another one around this addition. */ | |
5469 | if (binoptab == add_optab && GET_CODE (op2) == CONST_INT | |
5470 | && GET_CODE (XEXP (value, 0)) == PLUS | |
5471 | && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG | |
5472 | && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER | |
5473 | && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER) | |
5474 | { | |
5475 | rtx temp = expand_binop (GET_MODE (value), binoptab, | |
5476 | XEXP (XEXP (value, 0), 0), op2, | |
5477 | subtarget, 0, OPTAB_LIB_WIDEN); | |
5478 | return expand_binop (GET_MODE (value), binoptab, temp, | |
5479 | force_operand (XEXP (XEXP (value, 0), 1), 0), | |
5480 | target, 0, OPTAB_LIB_WIDEN); | |
5481 | } | |
3a94c984 | 5482 | |
bbf6f052 RK |
5483 | tmp = force_operand (XEXP (value, 0), subtarget); |
5484 | return expand_binop (GET_MODE (value), binoptab, tmp, | |
906c4e36 | 5485 | force_operand (op2, NULL_RTX), |
bbf6f052 | 5486 | target, 0, OPTAB_LIB_WIDEN); |
8008b228 | 5487 | /* We give UNSIGNEDP = 0 to expand_binop |
bbf6f052 RK |
5488 | because the only operations we are expanding here are signed ones. */ |
5489 | } | |
34e81b5a RK |
5490 | |
5491 | #ifdef INSN_SCHEDULING | |
5492 | /* On machines that have insn scheduling, we want all memory references to be | |
5493 | explicit, so we need to deal with such paradoxical SUBREGs. */ | |
5494 | if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM | |
5495 | && (GET_MODE_SIZE (GET_MODE (value)) | |
5496 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value))))) | |
5497 | value | |
5498 | = simplify_gen_subreg (GET_MODE (value), | |
5499 | force_reg (GET_MODE (SUBREG_REG (value)), | |
5500 | force_operand (SUBREG_REG (value), | |
5501 | NULL_RTX)), | |
5502 | GET_MODE (SUBREG_REG (value)), | |
5503 | SUBREG_BYTE (value)); | |
5504 | #endif | |
5505 | ||
bbf6f052 RK |
5506 | return value; |
5507 | } | |
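/* Editor's sketch, not part of the original source: force_operand is
   typically used to flatten a compound address into emitted insns plus
   a single register or simple sum.  Assuming COMPOUND_ADDR holds an rtx
   such as (plus (mult (reg i) (const_int 4)) (const_int 8)), a caller
   might write

       rtx addr = force_operand (compound_addr, NULL_RTX);
       rtx mem = gen_rtx_MEM (SImode, memory_address (SImode, addr));

   The MULT is expanded through expand_mult, PLUS and MINUS through
   add_optab and sub_optab, and a MINUS of a CONST_INT is rewritten
   above as an addition of the negated constant.  */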
5508 | \f | |
bbf6f052 | 5509 | /* Subroutine of expand_expr: return nonzero iff there is no way that |
e5e809f4 JL |
5510 | EXP can reference X, which is being modified. TOP_P is nonzero if this |
5511 | call is going to be used to determine whether we need a temporary | |
ff439b5f CB |
5512 | for EXP, as opposed to a recursive call to this function. |
5513 | ||
5514 | It is always safe for this routine to return zero since it merely | |
5515 | searches for optimization opportunities. */ | |
bbf6f052 | 5516 | |
8f17b5c5 | 5517 | int |
e5e809f4 | 5518 | safe_from_p (x, exp, top_p) |
bbf6f052 RK |
5519 | rtx x; |
5520 | tree exp; | |
e5e809f4 | 5521 | int top_p; |
bbf6f052 RK |
5522 | { |
5523 | rtx exp_rtl = 0; | |
5524 | int i, nops; | |
1da68f56 | 5525 | static tree save_expr_list; |
bbf6f052 | 5526 | |
6676e72f RK |
5527 | if (x == 0 |
5528 | /* If EXP has varying size, we MUST use a target since we currently | |
8f6562d0 PB |
5529 | have no way of allocating temporaries of variable size |
5530 | (except for arrays that have TYPE_ARRAY_MAX_SIZE set). | |
5531 | So we assume here that something at a higher level has prevented a | |
f4510f37 | 5532 | clash. This is somewhat bogus, but the best we can do. Only |
e5e809f4 | 5533 | do this when X is BLKmode and when we are at the top level. */ |
d0f062fb | 5534 | || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) |
f4510f37 | 5535 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST |
8f6562d0 PB |
5536 | && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE |
5537 | || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE | |
5538 | || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp))) | |
5539 | != INTEGER_CST) | |
1da68f56 RK |
5540 | && GET_MODE (x) == BLKmode) |
5541 | /* If X is in the outgoing argument area, it is always safe. */ | |
5542 | || (GET_CODE (x) == MEM | |
5543 | && (XEXP (x, 0) == virtual_outgoing_args_rtx | |
5544 | || (GET_CODE (XEXP (x, 0)) == PLUS | |
5545 | && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))) | |
bbf6f052 RK |
5546 | return 1; |
5547 | ||
5548 | /* If this is a subreg of a hard register, declare it unsafe, otherwise, | |
5549 | find the underlying pseudo. */ | |
5550 | if (GET_CODE (x) == SUBREG) | |
5551 | { | |
5552 | x = SUBREG_REG (x); | |
5553 | if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER) | |
5554 | return 0; | |
5555 | } | |
5556 | ||
1da68f56 RK |
5557 | /* A SAVE_EXPR might appear many times in the expression passed to the |
5558 | top-level safe_from_p call, and if it has a complex subexpression, | |
5559 | examining it multiple times could result in a combinatorial explosion. | |
5560 | E.g. on an Alpha running at least 200MHz, a Fortran test case compiled | |
5561 | with optimization took about 28 minutes to compile -- even though it was | |
5562 | only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE | |
5563 | and turn that off when we are done. We keep a list of the SAVE_EXPRs | |
5564 | we have processed. Note that the only test of top_p was above. */ | |
5565 | ||
5566 | if (top_p) | |
5567 | { | |
5568 | int rtn; | |
5569 | tree t; | |
5570 | ||
5571 | save_expr_list = 0; | |
5572 | ||
5573 | rtn = safe_from_p (x, exp, 0); | |
5574 | ||
5575 | for (t = save_expr_list; t != 0; t = TREE_CHAIN (t)) | |
5576 | TREE_PRIVATE (TREE_PURPOSE (t)) = 0; | |
5577 | ||
5578 | return rtn; | |
5579 | } | |
bbf6f052 | 5580 | |
1da68f56 | 5581 | /* Now look at our tree code and possibly recurse. */ |
bbf6f052 RK |
5582 | switch (TREE_CODE_CLASS (TREE_CODE (exp))) |
5583 | { | |
5584 | case 'd': | |
19e7881c | 5585 | exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX; |
bbf6f052 RK |
5586 | break; |
5587 | ||
5588 | case 'c': | |
5589 | return 1; | |
5590 | ||
5591 | case 'x': | |
5592 | if (TREE_CODE (exp) == TREE_LIST) | |
f32fd778 | 5593 | return ((TREE_VALUE (exp) == 0 |
e5e809f4 | 5594 | || safe_from_p (x, TREE_VALUE (exp), 0)) |
bbf6f052 | 5595 | && (TREE_CHAIN (exp) == 0 |
e5e809f4 | 5596 | || safe_from_p (x, TREE_CHAIN (exp), 0))); |
ff439b5f CB |
5597 | else if (TREE_CODE (exp) == ERROR_MARK) |
5598 | return 1; /* An already-visited SAVE_EXPR? */ | |
bbf6f052 RK |
5599 | else |
5600 | return 0; | |
5601 | ||
5602 | case '1': | |
e5e809f4 | 5603 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); |
bbf6f052 RK |
5604 | |
5605 | case '2': | |
5606 | case '<': | |
e5e809f4 JL |
5607 | return (safe_from_p (x, TREE_OPERAND (exp, 0), 0) |
5608 | && safe_from_p (x, TREE_OPERAND (exp, 1), 0)); | |
bbf6f052 RK |
5609 | |
5610 | case 'e': | |
5611 | case 'r': | |
5612 | /* Now do code-specific tests. EXP_RTL is set to any rtx we find in | |
5613 | the expression. If it is set, we conflict iff we are that rtx or | |
5614 | both are in memory. Otherwise, we check all operands of the | |
5615 | expression recursively. */ | |
5616 | ||
5617 | switch (TREE_CODE (exp)) | |
5618 | { | |
5619 | case ADDR_EXPR: | |
70072ed9 RK |
5620 | /* If the operand is static or we are static, we can't conflict. |
5621 | Likewise if we don't conflict with the operand at all. */ | |
5622 | if (staticp (TREE_OPERAND (exp, 0)) | |
5623 | || TREE_STATIC (exp) | |
5624 | || safe_from_p (x, TREE_OPERAND (exp, 0), 0)) | |
5625 | return 1; | |
5626 | ||
5627 | /* Otherwise, the only way this can conflict is if we are taking | |
5628 | the address of a DECL whose address is part of X, which is | |
5629 | very rare. */ | |
5630 | exp = TREE_OPERAND (exp, 0); | |
5631 | if (DECL_P (exp)) | |
5632 | { | |
5633 | if (!DECL_RTL_SET_P (exp) | |
5634 | || GET_CODE (DECL_RTL (exp)) != MEM) | |
5635 | return 0; | |
5636 | else | |
5637 | exp_rtl = XEXP (DECL_RTL (exp), 0); | |
5638 | } | |
5639 | break; | |
bbf6f052 RK |
5640 | |
5641 | case INDIRECT_REF: | |
1da68f56 RK |
5642 | if (GET_CODE (x) == MEM |
5643 | && alias_sets_conflict_p (MEM_ALIAS_SET (x), | |
5644 | get_alias_set (exp))) | |
bbf6f052 RK |
5645 | return 0; |
5646 | break; | |
5647 | ||
5648 | case CALL_EXPR: | |
f9808f81 MM |
5649 | /* Assume that the call will clobber all hard registers and |
5650 | all of memory. */ | |
5651 | if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER) | |
5652 | || GET_CODE (x) == MEM) | |
5653 | return 0; | |
bbf6f052 RK |
5654 | break; |
5655 | ||
5656 | case RTL_EXPR: | |
3bb5826a RK |
5657 | /* If a sequence exists, we would have to scan every instruction |
5658 | in the sequence to see if it was safe. This is probably not | |
5659 | worthwhile. */ | |
5660 | if (RTL_EXPR_SEQUENCE (exp)) | |
bbf6f052 RK |
5661 | return 0; |
5662 | ||
3bb5826a | 5663 | exp_rtl = RTL_EXPR_RTL (exp); |
bbf6f052 RK |
5664 | break; |
5665 | ||
5666 | case WITH_CLEANUP_EXPR: | |
6ad7895a | 5667 | exp_rtl = WITH_CLEANUP_EXPR_RTL (exp); |
bbf6f052 RK |
5668 | break; |
5669 | ||
5dab5552 | 5670 | case CLEANUP_POINT_EXPR: |
e5e809f4 | 5671 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); |
5dab5552 | 5672 | |
bbf6f052 RK |
5673 | case SAVE_EXPR: |
5674 | exp_rtl = SAVE_EXPR_RTL (exp); | |
ff439b5f CB |
5675 | if (exp_rtl) |
5676 | break; | |
5677 | ||
1da68f56 RK |
5678 | /* If we've already scanned this, don't do it again. Otherwise, |
5679 | show we've scanned it and record for clearing the flag if we're | |
5680 | going on. */ | |
5681 | if (TREE_PRIVATE (exp)) | |
5682 | return 1; | |
ff439b5f | 5683 | |
1da68f56 RK |
5684 | TREE_PRIVATE (exp) = 1; |
5685 | if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0)) | |
ff59bfe6 | 5686 | { |
1da68f56 RK |
5687 | TREE_PRIVATE (exp) = 0; |
5688 | return 0; | |
ff59bfe6 | 5689 | } |
1da68f56 RK |
5690 | |
5691 | save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list); | |
ff439b5f | 5692 | return 1; |
bbf6f052 | 5693 | |
8129842c RS |
5694 | case BIND_EXPR: |
5695 | /* The only operand we look at is operand 1. The rest aren't | |
5696 | part of the expression. */ | |
e5e809f4 | 5697 | return safe_from_p (x, TREE_OPERAND (exp, 1), 0); |
8129842c | 5698 | |
bbf6f052 | 5699 | case METHOD_CALL_EXPR: |
4fe9b91c | 5700 | /* This takes an rtx argument, but shouldn't appear here. */ |
bbf6f052 | 5701 | abort (); |
3a94c984 | 5702 | |
e9a25f70 JL |
5703 | default: |
5704 | break; | |
bbf6f052 RK |
5705 | } |
5706 | ||
5707 | /* If we have an rtx, we do not need to scan our operands. */ | |
5708 | if (exp_rtl) | |
5709 | break; | |
5710 | ||
8f17b5c5 | 5711 | nops = first_rtl_op (TREE_CODE (exp)); |
bbf6f052 RK |
5712 | for (i = 0; i < nops; i++) |
5713 | if (TREE_OPERAND (exp, i) != 0 | |
e5e809f4 | 5714 | && ! safe_from_p (x, TREE_OPERAND (exp, i), 0)) |
bbf6f052 | 5715 | return 0; |
8f17b5c5 MM |
5716 | |
5717 | /* If this is a language-specific tree code, it may require | |
5718 | special handling. */ | |
dbbbbf3b JDA |
5719 | if ((unsigned int) TREE_CODE (exp) |
5720 | >= (unsigned int) LAST_AND_UNUSED_TREE_CODE | |
ac79cd5a | 5721 | && !(*lang_hooks.safe_from_p) (x, exp)) |
8f17b5c5 | 5722 | return 0; |
bbf6f052 RK |
5723 | } |
5724 | ||
5725 | /* If we have an rtl, find any enclosed object. Then see if we conflict | |
5726 | with it. */ | |
5727 | if (exp_rtl) | |
5728 | { | |
5729 | if (GET_CODE (exp_rtl) == SUBREG) | |
5730 | { | |
5731 | exp_rtl = SUBREG_REG (exp_rtl); | |
5732 | if (GET_CODE (exp_rtl) == REG | |
5733 | && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER) | |
5734 | return 0; | |
5735 | } | |
5736 | ||
5737 | /* If the rtl is X, then it is not safe. Otherwise, it is unless both | |
1da68f56 | 5738 | are memory and they conflict. */ |
bbf6f052 RK |
5739 | return ! (rtx_equal_p (x, exp_rtl) |
5740 | || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM | |
21117a17 | 5741 | && true_dependence (exp_rtl, VOIDmode, x, |
1da68f56 | 5742 | rtx_addr_varies_p))); |
bbf6f052 RK |
5743 | } |
5744 | ||
5745 | /* If we reach here, it is safe. */ | |
5746 | return 1; | |
5747 | } | |
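/* Editor's sketch, not part of the original source: the usual calling
   pattern asks whether TARGET may be reused while EXP is evaluated and
   falls back to a fresh temporary when it may not, e.g.

       if (target == 0 || ! safe_from_p (target, exp, 1))
         target = assign_temp (type, 0, 1, 1);

   much as the CONSTRUCTOR case of expand_expr does below.  Since this
   routine only looks for optimization opportunities, a conservative
   zero answer merely costs an extra temporary.  */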
5748 | ||
01c8a7c8 RK |
5749 | /* Subroutine of expand_expr: return rtx if EXP is a |
5750 | variable or parameter; else return 0. */ | |
5751 | ||
5752 | static rtx | |
5753 | var_rtx (exp) | |
5754 | tree exp; | |
5755 | { | |
5756 | STRIP_NOPS (exp); | |
5757 | switch (TREE_CODE (exp)) | |
5758 | { | |
5759 | case PARM_DECL: | |
5760 | case VAR_DECL: | |
5761 | return DECL_RTL (exp); | |
5762 | default: | |
5763 | return 0; | |
5764 | } | |
5765 | } | |
dbecbbe4 JL |
5766 | |
5767 | #ifdef MAX_INTEGER_COMPUTATION_MODE | |
400500c4 | 5768 | |
dbecbbe4 JL |
5769 | void |
5770 | check_max_integer_computation_mode (exp) | |
3a94c984 | 5771 | tree exp; |
dbecbbe4 | 5772 | { |
5f652c07 | 5773 | enum tree_code code; |
dbecbbe4 JL |
5774 | enum machine_mode mode; |
5775 | ||
5f652c07 JM |
5776 | /* Strip any NOPs that don't change the mode. */ |
5777 | STRIP_NOPS (exp); | |
5778 | code = TREE_CODE (exp); | |
5779 | ||
71bca506 JL |
5780 | /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */ |
5781 | if (code == NOP_EXPR | |
5782 | && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST) | |
5783 | return; | |
5784 | ||
dbecbbe4 JL |
5785 | /* First check the type of the overall operation. We need only look at |
5786 | unary, binary and relational operations. */ | |
5787 | if (TREE_CODE_CLASS (code) == '1' | |
5788 | || TREE_CODE_CLASS (code) == '2' | |
5789 | || TREE_CODE_CLASS (code) == '<') | |
5790 | { | |
5791 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
5792 | if (GET_MODE_CLASS (mode) == MODE_INT | |
5793 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
400500c4 | 5794 | internal_error ("unsupported wide integer operation"); |
dbecbbe4 JL |
5795 | } |
5796 | ||
5797 | /* Check operand of a unary op. */ | |
5798 | if (TREE_CODE_CLASS (code) == '1') | |
5799 | { | |
5800 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
5801 | if (GET_MODE_CLASS (mode) == MODE_INT | |
5802 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
400500c4 | 5803 | internal_error ("unsupported wide integer operation"); |
dbecbbe4 | 5804 | } |
3a94c984 | 5805 | |
dbecbbe4 JL |
5806 | /* Check operands of a binary/comparison op. */ |
5807 | if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<') | |
5808 | { | |
5809 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
5810 | if (GET_MODE_CLASS (mode) == MODE_INT | |
5811 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
400500c4 | 5812 | internal_error ("unsupported wide integer operation"); |
dbecbbe4 JL |
5813 | |
5814 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))); | |
5815 | if (GET_MODE_CLASS (mode) == MODE_INT | |
5816 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
400500c4 | 5817 | internal_error ("unsupported wide integer operation"); |
dbecbbe4 JL |
5818 | } |
5819 | } | |
5820 | #endif | |
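/* Editor's note, not part of the original source: on a hypothetical
   target defining MAX_INTEGER_COMPUTATION_MODE as SImode, any unary,
   binary, or comparison node computed in DImode trips one of the
   internal_error calls above, since DImode compares greater than SImode
   in the machine mode enumeration; NOP_EXPR conversions of INTEGER_CST
   operands are exempted so constants can still be widened.  */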
14a774a9 | 5821 | \f |
0d4903b8 RK |
5822 | /* Return the highest power of two that EXP is known to be a multiple of. |
5823 | This is used in updating alignment of MEMs in array references. */ | |
5824 | ||
5825 | static HOST_WIDE_INT | |
5826 | highest_pow2_factor (exp) | |
5827 | tree exp; | |
5828 | { | |
5829 | HOST_WIDE_INT c0, c1; | |
5830 | ||
5831 | switch (TREE_CODE (exp)) | |
5832 | { | |
5833 | case INTEGER_CST: | |
e0f1be5c JJ |
5834 | /* We can find the lowest bit that's a one. If the low |
5835 | HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT. | |
5836 | We need to handle this case since we can find it in a COND_EXPR, | |
5837 | a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an | |
5838 | erroneous program, so return BIGGEST_ALIGNMENT to avoid any | |
3a531a8b | 5839 | later ICE. */ |
e0f1be5c | 5840 | if (TREE_CONSTANT_OVERFLOW (exp)) |
1ed1b4fb | 5841 | return BIGGEST_ALIGNMENT; |
e0f1be5c | 5842 | else |
0d4903b8 | 5843 | { |
e0f1be5c JJ |
5844 | /* Note: tree_low_cst is intentionally not used here, |
5845 | we don't care about the upper bits. */ | |
5846 | c0 = TREE_INT_CST_LOW (exp); | |
5847 | c0 &= -c0; | |
5848 | return c0 ? c0 : BIGGEST_ALIGNMENT; | |
0d4903b8 RK |
5849 | } |
5850 | break; | |
5851 | ||
65a07688 | 5852 | case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR: |
0d4903b8 RK |
5853 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); |
5854 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
5855 | return MIN (c0, c1); | |
5856 | ||
5857 | case MULT_EXPR: | |
5858 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
5859 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
5860 | return c0 * c1; | |
5861 | ||
5862 | case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR: | |
5863 | case CEIL_DIV_EXPR: | |
65a07688 RK |
5864 | if (integer_pow2p (TREE_OPERAND (exp, 1)) |
5865 | && host_integerp (TREE_OPERAND (exp, 1), 1)) | |
5866 | { | |
5867 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
5868 | c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1); | |
5869 | return MAX (1, c0 / c1); | |
5870 | } | |
5871 | break; | |
0d4903b8 RK |
5872 | |
5873 | case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR: | |
65a07688 | 5874 | case SAVE_EXPR: case WITH_RECORD_EXPR: |
0d4903b8 RK |
5875 | return highest_pow2_factor (TREE_OPERAND (exp, 0)); |
5876 | ||
65a07688 RK |
5877 | case COMPOUND_EXPR: |
5878 | return highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
5879 | ||
0d4903b8 RK |
5880 | case COND_EXPR: |
5881 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
5882 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 2)); | |
5883 | return MIN (c0, c1); | |
5884 | ||
5885 | default: | |
5886 | break; | |
5887 | } | |
5888 | ||
5889 | return 1; | |
5890 | } | |
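/* Worked example (editor's note, not part of the original source): for
   EXP = i * 12 + 8 with i a variable, the MULT_EXPR case returns
   highest_pow2_factor (i) * highest_pow2_factor (12) = 1 * 4 = 4, since
   12 & -12 == 4, and the PLUS_EXPR case then returns MIN (4, 8) = 4; a
   MEM addressed by this expression can thus be marked 4-byte aligned.  */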
5891 | \f | |
f47e9b4e RK |
5892 | /* Return an object on the placeholder list that matches EXP, a |
5893 | PLACEHOLDER_EXPR. An object "matches" if it is of the type of the | |
738cc472 | 5894 | PLACEHOLDER_EXPR or a pointer type to it. For further information, see |
70072ed9 RK |
5895 | tree.def. If no such object is found, return 0. If PLIST is nonzero, it |
5896 | is a location which initially points to a starting location in the | |
738cc472 RK |
5897 | placeholder list (zero means start of the list); on return it is set |
5898 | to point to the placeholder list entry at which the object was found. */ |
f47e9b4e RK |
5899 | |
5900 | tree | |
5901 | find_placeholder (exp, plist) | |
5902 | tree exp; | |
5903 | tree *plist; | |
5904 | { | |
5905 | tree type = TREE_TYPE (exp); | |
5906 | tree placeholder_expr; | |
5907 | ||
738cc472 RK |
5908 | for (placeholder_expr |
5909 | = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list; | |
5910 | placeholder_expr != 0; | |
f47e9b4e RK |
5911 | placeholder_expr = TREE_CHAIN (placeholder_expr)) |
5912 | { | |
5913 | tree need_type = TYPE_MAIN_VARIANT (type); | |
5914 | tree elt; | |
5915 | ||
5916 | /* Find the outermost reference that is of the type we want. If none, | |
5917 | see if any object has a type that is a pointer to the type we | |
5918 | want. */ | |
5919 | for (elt = TREE_PURPOSE (placeholder_expr); elt != 0; | |
5920 | elt = ((TREE_CODE (elt) == COMPOUND_EXPR | |
5921 | || TREE_CODE (elt) == COND_EXPR) | |
5922 | ? TREE_OPERAND (elt, 1) | |
5923 | : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r' | |
5924 | || TREE_CODE_CLASS (TREE_CODE (elt)) == '1' | |
5925 | || TREE_CODE_CLASS (TREE_CODE (elt)) == '2' | |
5926 | || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e') | |
5927 | ? TREE_OPERAND (elt, 0) : 0)) | |
5928 | if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type) | |
5929 | { | |
5930 | if (plist) | |
5931 | *plist = placeholder_expr; | |
5932 | return elt; | |
5933 | } | |
5934 | ||
5935 | for (elt = TREE_PURPOSE (placeholder_expr); elt != 0; | |
5936 | elt | |
5937 | = ((TREE_CODE (elt) == COMPOUND_EXPR | |
5938 | || TREE_CODE (elt) == COND_EXPR) | |
5939 | ? TREE_OPERAND (elt, 1) | |
5940 | : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r' | |
5941 | || TREE_CODE_CLASS (TREE_CODE (elt)) == '1' | |
5942 | || TREE_CODE_CLASS (TREE_CODE (elt)) == '2' | |
5943 | || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e') | |
5944 | ? TREE_OPERAND (elt, 0) : 0)) | |
5945 | if (POINTER_TYPE_P (TREE_TYPE (elt)) | |
5946 | && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt))) | |
5947 | == need_type)) | |
5948 | { | |
5949 | if (plist) | |
5950 | *plist = placeholder_expr; | |
5951 | return build1 (INDIRECT_REF, need_type, elt); | |
5952 | } | |
5953 | } | |
5954 | ||
70072ed9 | 5955 | return 0; |
f47e9b4e RK |
5956 | } |
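/* Editor's sketch, not part of the original source: the PLACEHOLDER_EXPR
   case of expand_expr below uses this routine roughly as

       tree placeholder_expr = 0;
       exp = find_placeholder (exp, &placeholder_expr);
       if (exp == 0)
         abort ();

   scanning the outer placeholder_list for an object whose type matches,
   or whose type is a pointer to, the type of the PLACEHOLDER_EXPR.  */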
5957 | \f | |
bbf6f052 RK |
5958 | /* expand_expr: generate code for computing expression EXP. |
5959 | An rtx for the computed value is returned. The value is never null. | |
5960 | In the case of a void EXP, const0_rtx is returned. | |
5961 | ||
5962 | The value may be stored in TARGET if TARGET is nonzero. | |
5963 | TARGET is just a suggestion; callers must assume that | |
5964 | the rtx returned may not be the same as TARGET. | |
5965 | ||
5966 | If TARGET is CONST0_RTX, it means that the value will be ignored. | |
5967 | ||
5968 | If TMODE is not VOIDmode, it suggests generating the | |
5969 | result in mode TMODE. But this is done only when convenient. | |
5970 | Otherwise, TMODE is ignored and the value is generated in its natural mode. |
5971 | TMODE is just a suggestion; callers must assume that | |
5972 | the rtx returned may not have mode TMODE. | |
5973 | ||
d6a5ac33 RK |
5974 | Note that TARGET may have neither TMODE nor MODE. In that case, it |
5975 | probably will not be used. | |
bbf6f052 RK |
5976 | |
5977 | If MODIFIER is EXPAND_SUM then when EXP is an addition | |
5978 | we can return an rtx of the form (MULT (REG ...) (CONST_INT ...)) | |
5979 | or a nest of (PLUS ...) and (MINUS ...) where the terms are | |
5980 | products as above, or REG or MEM, or constant. | |
5981 | Ordinarily in such cases we would output mul or add instructions | |
5982 | and then return a pseudo reg containing the sum. | |
5983 | ||
5984 | EXPAND_INITIALIZER is much like EXPAND_SUM except that | |
5985 | it also marks a label as absolutely required (it can't be dead). | |
26fcb35a | 5986 | It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns. |
d6a5ac33 RK |
5987 | This is used for outputting expressions used in initializers. |
5988 | ||
5989 | EXPAND_CONST_ADDRESS says that it is okay to return a MEM | |
5990 | with a constant address even if that address is not normally legitimate. | |
5991 | EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */ | |
bbf6f052 RK |
5992 | |
5993 | rtx | |
5994 | expand_expr (exp, target, tmode, modifier) | |
b3694847 | 5995 | tree exp; |
bbf6f052 RK |
5996 | rtx target; |
5997 | enum machine_mode tmode; | |
5998 | enum expand_modifier modifier; | |
5999 | { | |
b3694847 | 6000 | rtx op0, op1, temp; |
bbf6f052 RK |
6001 | tree type = TREE_TYPE (exp); |
6002 | int unsignedp = TREE_UNSIGNED (type); | |
b3694847 SS |
6003 | enum machine_mode mode; |
6004 | enum tree_code code = TREE_CODE (exp); | |
bbf6f052 | 6005 | optab this_optab; |
68557e14 ML |
6006 | rtx subtarget, original_target; |
6007 | int ignore; | |
bbf6f052 RK |
6008 | tree context; |
6009 | ||
3a94c984 | 6010 | /* Handle ERROR_MARK before anybody tries to access its type. */ |
85f3d674 | 6011 | if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK) |
68557e14 ML |
6012 | { |
6013 | op0 = CONST0_RTX (tmode); | |
6014 | if (op0 != 0) | |
6015 | return op0; | |
6016 | return const0_rtx; | |
6017 | } | |
6018 | ||
6019 | mode = TYPE_MODE (type); | |
6020 | /* Use subtarget as the target for operand 0 of a binary operation. */ | |
296b4ed9 | 6021 | subtarget = get_subtarget (target); |
68557e14 ML |
6022 | original_target = target; |
6023 | ignore = (target == const0_rtx | |
6024 | || ((code == NON_LVALUE_EXPR || code == NOP_EXPR | |
6025 | || code == CONVERT_EXPR || code == REFERENCE_EXPR | |
ac79cd5a | 6026 | || code == COND_EXPR || code == VIEW_CONVERT_EXPR) |
68557e14 ML |
6027 | && TREE_CODE (type) == VOID_TYPE)); |
6028 | ||
dd27116b RK |
6029 | /* If we are going to ignore this result, we need only do something |
6030 | if there is a side-effect somewhere in the expression. If there | |
b50d17a1 RK |
6031 | is, short-circuit the most common cases here. Note that we must |
6032 | not call expand_expr with anything but const0_rtx in case this | |
6033 | is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */ | |
bbf6f052 | 6034 | |
dd27116b RK |
6035 | if (ignore) |
6036 | { | |
6037 | if (! TREE_SIDE_EFFECTS (exp)) | |
6038 | return const0_rtx; | |
6039 | ||
14a774a9 RK |
6040 | /* Ensure we reference a volatile object even if value is ignored, but |
6041 | don't do this if all we are doing is taking its address. */ | |
dd27116b RK |
6042 | if (TREE_THIS_VOLATILE (exp) |
6043 | && TREE_CODE (exp) != FUNCTION_DECL | |
14a774a9 RK |
6044 | && mode != VOIDmode && mode != BLKmode |
6045 | && modifier != EXPAND_CONST_ADDRESS) | |
dd27116b | 6046 | { |
37a08a29 | 6047 | temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier); |
dd27116b RK |
6048 | if (GET_CODE (temp) == MEM) |
6049 | temp = copy_to_reg (temp); | |
6050 | return const0_rtx; | |
6051 | } | |
6052 | ||
14a774a9 RK |
6053 | if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF |
6054 | || code == INDIRECT_REF || code == BUFFER_REF) | |
37a08a29 RK |
6055 | return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
6056 | modifier); | |
6057 | ||
14a774a9 | 6058 | else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<' |
b4e3fabb | 6059 | || code == ARRAY_REF || code == ARRAY_RANGE_REF) |
dd27116b | 6060 | { |
37a08a29 RK |
6061 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
6062 | expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); | |
dd27116b RK |
6063 | return const0_rtx; |
6064 | } | |
6065 | else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR) | |
6066 | && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1))) | |
6067 | /* If the second operand has no side effects, just evaluate | |
0f41302f | 6068 | the first. */ |
37a08a29 RK |
6069 | return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
6070 | modifier); | |
14a774a9 RK |
6071 | else if (code == BIT_FIELD_REF) |
6072 | { | |
37a08a29 RK |
6073 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
6074 | expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); | |
6075 | expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier); | |
14a774a9 RK |
6076 | return const0_rtx; |
6077 | } | |
37a08a29 | 6078 | |
90764a87 | 6079 | target = 0; |
dd27116b | 6080 | } |
bbf6f052 | 6081 | |
dbecbbe4 | 6082 | #ifdef MAX_INTEGER_COMPUTATION_MODE |
5f652c07 JM |
6083 | /* Only check stuff here if the mode we want is different from the mode |
6084 | of the expression; if it's the same, check_max_integer_computation_mode | |
6085 | will handle it. Do we really need to check this stuff at all? */ | |
6086 | ||
ce3c0b53 | 6087 | if (target |
5f652c07 | 6088 | && GET_MODE (target) != mode |
ce3c0b53 JL |
6089 | && TREE_CODE (exp) != INTEGER_CST |
6090 | && TREE_CODE (exp) != PARM_DECL | |
ee06cc21 | 6091 | && TREE_CODE (exp) != ARRAY_REF |
b4e3fabb | 6092 | && TREE_CODE (exp) != ARRAY_RANGE_REF |
ee06cc21 JL |
6093 | && TREE_CODE (exp) != COMPONENT_REF |
6094 | && TREE_CODE (exp) != BIT_FIELD_REF | |
6095 | && TREE_CODE (exp) != INDIRECT_REF | |
6bcd94ae | 6096 | && TREE_CODE (exp) != CALL_EXPR |
6ab46dff GRK |
6097 | && TREE_CODE (exp) != VAR_DECL |
6098 | && TREE_CODE (exp) != RTL_EXPR) | |
dbecbbe4 JL |
6099 | { |
6100 | enum machine_mode mode = GET_MODE (target); | |
6101 | ||
6102 | if (GET_MODE_CLASS (mode) == MODE_INT | |
6103 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
400500c4 | 6104 | internal_error ("unsupported wide integer operation"); |
dbecbbe4 JL |
6105 | } |
6106 | ||
5f652c07 JM |
6107 | if (tmode != mode |
6108 | && TREE_CODE (exp) != INTEGER_CST | |
ce3c0b53 | 6109 | && TREE_CODE (exp) != PARM_DECL |
ee06cc21 | 6110 | && TREE_CODE (exp) != ARRAY_REF |
b4e3fabb | 6111 | && TREE_CODE (exp) != ARRAY_RANGE_REF |
ee06cc21 JL |
6112 | && TREE_CODE (exp) != COMPONENT_REF |
6113 | && TREE_CODE (exp) != BIT_FIELD_REF | |
6114 | && TREE_CODE (exp) != INDIRECT_REF | |
ce3c0b53 | 6115 | && TREE_CODE (exp) != VAR_DECL |
6bcd94ae | 6116 | && TREE_CODE (exp) != CALL_EXPR |
6ab46dff | 6117 | && TREE_CODE (exp) != RTL_EXPR |
71bca506 | 6118 | && GET_MODE_CLASS (tmode) == MODE_INT |
dbecbbe4 | 6119 | && tmode > MAX_INTEGER_COMPUTATION_MODE) |
400500c4 | 6120 | internal_error ("unsupported wide integer operation"); |
dbecbbe4 JL |
6121 | |
6122 | check_max_integer_computation_mode (exp); | |
6123 | #endif | |
6124 | ||
e44842fe RK |
6125 | /* If will do cse, generate all results into pseudo registers |
6126 | since 1) that allows cse to find more things | |
6127 | and 2) otherwise cse could produce an insn the machine | |
c24ae149 RK |
6128 | cannot support. And exception is a CONSTRUCTOR into a multi-word |
6129 | MEM: that's much more likely to be most efficient into the MEM. */ | |
e44842fe | 6130 | |
bbf6f052 | 6131 | if (! cse_not_expected && mode != BLKmode && target |
c24ae149 RK |
6132 | && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER) |
6133 | && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)) | |
bbf6f052 RK |
6134 | target = subtarget; |
6135 | ||
bbf6f052 RK |
6136 | switch (code) |
6137 | { | |
6138 | case LABEL_DECL: | |
b552441b RS |
6139 | { |
6140 | tree function = decl_function_context (exp); | |
6141 | /* Handle using a label in a containing function. */ | |
d0977240 RK |
6142 | if (function != current_function_decl |
6143 | && function != inline_function_decl && function != 0) | |
b552441b RS |
6144 | { |
6145 | struct function *p = find_function_data (function); | |
49ad7cfa BS |
6146 | p->expr->x_forced_labels |
6147 | = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp), | |
6148 | p->expr->x_forced_labels); | |
b552441b | 6149 | } |
ab87f8c8 JL |
6150 | else |
6151 | { | |
ab87f8c8 JL |
6152 | if (modifier == EXPAND_INITIALIZER) |
6153 | forced_labels = gen_rtx_EXPR_LIST (VOIDmode, | |
6154 | label_rtx (exp), | |
6155 | forced_labels); | |
6156 | } | |
c5c76735 | 6157 | |
38a448ca RH |
6158 | temp = gen_rtx_MEM (FUNCTION_MODE, |
6159 | gen_rtx_LABEL_REF (Pmode, label_rtx (exp))); | |
d0977240 RK |
6160 | if (function != current_function_decl |
6161 | && function != inline_function_decl && function != 0) | |
26fcb35a RS |
6162 | LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1; |
6163 | return temp; | |
b552441b | 6164 | } |
bbf6f052 RK |
6165 | |
6166 | case PARM_DECL: | |
6167 | if (DECL_RTL (exp) == 0) | |
6168 | { | |
6169 | error_with_decl (exp, "prior parameter's size depends on `%s'"); | |
4af3895e | 6170 | return CONST0_RTX (mode); |
bbf6f052 RK |
6171 | } |
6172 | ||
0f41302f | 6173 | /* ... fall through ... */ |
d6a5ac33 | 6174 | |
bbf6f052 | 6175 | case VAR_DECL: |
2dca20cd RS |
6176 | /* If a static var's type was incomplete when the decl was written, |
6177 | but the type is complete now, lay out the decl now. */ | |
d0f062fb | 6178 | if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) |
2dca20cd RS |
6179 | && (TREE_STATIC (exp) || DECL_EXTERNAL (exp))) |
6180 | { | |
ed239f5a RK |
6181 | rtx value = DECL_RTL_IF_SET (exp); |
6182 | ||
2dca20cd | 6183 | layout_decl (exp, 0); |
ed239f5a RK |
6184 | |
6185 | /* If the RTL was already set, update its mode and memory | |
6186 | attributes. */ | |
6187 | if (value != 0) | |
6188 | { | |
6189 | PUT_MODE (value, DECL_MODE (exp)); | |
6190 | SET_DECL_RTL (exp, 0); | |
6191 | set_mem_attributes (value, exp, 1); | |
6192 | SET_DECL_RTL (exp, value); | |
6193 | } | |
505ddab6 | 6194 | } |
921b3427 | 6195 | |
0f41302f | 6196 | /* ... fall through ... */ |
d6a5ac33 | 6197 | |
2dca20cd | 6198 | case FUNCTION_DECL: |
bbf6f052 RK |
6199 | case RESULT_DECL: |
6200 | if (DECL_RTL (exp) == 0) | |
6201 | abort (); | |
d6a5ac33 | 6202 | |
e44842fe RK |
6203 | /* Ensure variable marked as used even if it doesn't go through |
6204 | a parser. If it hasn't been used yet, write out an external | |
6205 | definition. */ | |
6206 | if (! TREE_USED (exp)) | |
6207 | { | |
6208 | assemble_external (exp); | |
6209 | TREE_USED (exp) = 1; | |
6210 | } | |
6211 | ||
dc6d66b3 RK |
6212 | /* Show we haven't gotten RTL for this yet. */ |
6213 | temp = 0; | |
6214 | ||
bbf6f052 RK |
6215 | /* Handle variables inherited from containing functions. */ |
6216 | context = decl_function_context (exp); | |
6217 | ||
6218 | /* We treat inline_function_decl as an alias for the current function | |
6219 | because that is the inline function whose vars, types, etc. | |
6220 | are being merged into the current function. | |
6221 | See expand_inline_function. */ | |
d6a5ac33 | 6222 | |
bbf6f052 RK |
6223 | if (context != 0 && context != current_function_decl |
6224 | && context != inline_function_decl | |
6225 | /* If var is static, we don't need a static chain to access it. */ | |
6226 | && ! (GET_CODE (DECL_RTL (exp)) == MEM | |
6227 | && CONSTANT_P (XEXP (DECL_RTL (exp), 0)))) | |
6228 | { | |
6229 | rtx addr; | |
6230 | ||
6231 | /* Mark as non-local and addressable. */ | |
81feeecb | 6232 | DECL_NONLOCAL (exp) = 1; |
38ee6ed9 JM |
6233 | if (DECL_NO_STATIC_CHAIN (current_function_decl)) |
6234 | abort (); | |
bbf6f052 RK |
6235 | mark_addressable (exp); |
6236 | if (GET_CODE (DECL_RTL (exp)) != MEM) | |
6237 | abort (); | |
6238 | addr = XEXP (DECL_RTL (exp), 0); | |
6239 | if (GET_CODE (addr) == MEM) | |
792760b9 RK |
6240 | addr |
6241 | = replace_equiv_address (addr, | |
6242 | fix_lexical_addr (XEXP (addr, 0), exp)); | |
bbf6f052 RK |
6243 | else |
6244 | addr = fix_lexical_addr (addr, exp); | |
3bdf5ad1 | 6245 | |
792760b9 | 6246 | temp = replace_equiv_address (DECL_RTL (exp), addr); |
bbf6f052 | 6247 | } |
4af3895e | 6248 | |
bbf6f052 RK |
6249 | /* This is the case of an array whose size is to be determined |
6250 | from its initializer, while the initializer is still being parsed. | |
6251 | See expand_decl. */ | |
d6a5ac33 | 6252 | |
dc6d66b3 RK |
6253 | else if (GET_CODE (DECL_RTL (exp)) == MEM |
6254 | && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG) | |
792760b9 | 6255 | temp = validize_mem (DECL_RTL (exp)); |
d6a5ac33 RK |
6256 | |
6257 | /* If DECL_RTL is memory, we are in the normal case and either | |
6258 | the address is not valid or it is not a register and -fforce-addr | |
6259 | is specified, get the address into a register. */ | |
6260 | ||
dc6d66b3 RK |
6261 | else if (GET_CODE (DECL_RTL (exp)) == MEM |
6262 | && modifier != EXPAND_CONST_ADDRESS | |
6263 | && modifier != EXPAND_SUM | |
6264 | && modifier != EXPAND_INITIALIZER | |
6265 | && (! memory_address_p (DECL_MODE (exp), | |
6266 | XEXP (DECL_RTL (exp), 0)) | |
6267 | || (flag_force_addr | |
6268 | && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG))) | |
792760b9 RK |
6269 | temp = replace_equiv_address (DECL_RTL (exp), |
6270 | copy_rtx (XEXP (DECL_RTL (exp), 0))); | |
1499e0a8 | 6271 | |
dc6d66b3 | 6272 | /* If we got something, return it. But first, set the alignment |
04956a1a | 6273 | if the address is a register. */ |
dc6d66b3 RK |
6274 | if (temp != 0) |
6275 | { | |
6276 | if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG) | |
bdb429a5 | 6277 | mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp)); |
dc6d66b3 RK |
6278 | |
6279 | return temp; | |
6280 | } | |
6281 | ||
1499e0a8 RK |
6282 | /* If the mode of DECL_RTL does not match that of the decl, it |
6283 | must be a promoted value. We return a SUBREG of the wanted mode, | |
6284 | but mark it so that we know that it was already extended. */ | |
6285 | ||
6286 | if (GET_CODE (DECL_RTL (exp)) == REG | |
7254c5fa | 6287 | && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp)) |
1499e0a8 | 6288 | { |
1499e0a8 RK |
6289 | /* Get the signedness used for this variable. Ensure we get the |
6290 | same mode we got when the variable was declared. */ | |
78911e8b RK |
6291 | if (GET_MODE (DECL_RTL (exp)) |
6292 | != promote_mode (type, DECL_MODE (exp), &unsignedp, 0)) | |
1499e0a8 RK |
6293 | abort (); |
6294 | ||
ddef6bc7 | 6295 | temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp)); |
1499e0a8 | 6296 | SUBREG_PROMOTED_VAR_P (temp) = 1; |
7879b81e | 6297 | SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); |
1499e0a8 RK |
6298 | return temp; |
6299 | } | |
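/* Editor's example, not part of the original source: on a target that
   promotes QImode variables into SImode registers, DECL_RTL here is an
   SImode REG while DECL_MODE is QImode, so the code above returns
   roughly (subreg:QI (reg:SI n) 0) (byte 0 on a little-endian target)
   with SUBREG_PROMOTED_VAR_P set, recording that the upper bits are
   already extended with the signedness noted in the SUBREG.  */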
6300 | ||
bbf6f052 RK |
6301 | return DECL_RTL (exp); |
6302 | ||
6303 | case INTEGER_CST: | |
6304 | return immed_double_const (TREE_INT_CST_LOW (exp), | |
05bccae2 | 6305 | TREE_INT_CST_HIGH (exp), mode); |
bbf6f052 RK |
6306 | |
6307 | case CONST_DECL: | |
37a08a29 | 6308 | return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0); |
bbf6f052 RK |
6309 | |
6310 | case REAL_CST: | |
6311 | /* If optimized, generate immediate CONST_DOUBLE | |
3a94c984 KH |
6312 | which will be turned into memory by reload if necessary. |
6313 | ||
bbf6f052 RK |
6314 | We used to force a register so that loop.c could see it. But |
6315 | this does not allow gen_* patterns to perform optimizations with | |
6316 | the constants. It also produces two insns in cases like "x = 1.0;". | |
6317 | On most machines, floating-point constants are not permitted in | |
6318 | many insns, so we'd end up copying it to a register in any case. | |
6319 | ||
6320 | Now, we do the copying in expand_binop, if appropriate. */ | |
6321 | return immed_real_const (exp); | |
6322 | ||
6323 | case COMPLEX_CST: | |
6324 | case STRING_CST: | |
6325 | if (! TREE_CST_RTL (exp)) | |
bd7cf17e | 6326 | output_constant_def (exp, 1); |
bbf6f052 RK |
6327 | |
6328 | /* TREE_CST_RTL probably contains a constant address. | |
6329 | On RISC machines where a constant address isn't valid, | |
6330 | make some insns to get that address into a register. */ | |
6331 | if (GET_CODE (TREE_CST_RTL (exp)) == MEM | |
6332 | && modifier != EXPAND_CONST_ADDRESS | |
6333 | && modifier != EXPAND_INITIALIZER | |
6334 | && modifier != EXPAND_SUM | |
d6a5ac33 RK |
6335 | && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)) |
6336 | || (flag_force_addr | |
6337 | && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG))) | |
792760b9 RK |
6338 | return replace_equiv_address (TREE_CST_RTL (exp), |
6339 | copy_rtx (XEXP (TREE_CST_RTL (exp), 0))); | |
bbf6f052 RK |
6340 | return TREE_CST_RTL (exp); |
6341 | ||
bf1e5319 | 6342 | case EXPR_WITH_FILE_LOCATION: |
b24f65cd APB |
6343 | { |
6344 | rtx to_return; | |
3b304f5b | 6345 | const char *saved_input_filename = input_filename; |
b24f65cd APB |
6346 | int saved_lineno = lineno; |
6347 | input_filename = EXPR_WFL_FILENAME (exp); | |
6348 | lineno = EXPR_WFL_LINENO (exp); | |
6349 | if (EXPR_WFL_EMIT_LINE_NOTE (exp)) | |
6350 | emit_line_note (input_filename, lineno); | |
6ad7895a | 6351 | /* Possibly avoid switching back and forth here. */ |
b0ca54af | 6352 | to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier); |
b24f65cd APB |
6353 | input_filename = saved_input_filename; |
6354 | lineno = saved_lineno; | |
6355 | return to_return; | |
6356 | } | |
bf1e5319 | 6357 | |
bbf6f052 RK |
6358 | case SAVE_EXPR: |
6359 | context = decl_function_context (exp); | |
d6a5ac33 | 6360 | |
d0977240 RK |
6361 | /* If this SAVE_EXPR was at global context, assume we are an |
6362 | initialization function and move it into our context. */ | |
6363 | if (context == 0) | |
6364 | SAVE_EXPR_CONTEXT (exp) = current_function_decl; | |
6365 | ||
bbf6f052 RK |
6366 | /* We treat inline_function_decl as an alias for the current function |
6367 | because that is the inline function whose vars, types, etc. | |
6368 | are being merged into the current function. | |
6369 | See expand_inline_function. */ | |
6370 | if (context == current_function_decl || context == inline_function_decl) | |
6371 | context = 0; | |
6372 | ||
6373 | /* If this is non-local, handle it. */ | |
6374 | if (context) | |
6375 | { | |
d0977240 RK |
6376 | /* The following call just exists to abort if the context is |
6377 | not of a containing function. */ | |
6378 | find_function_data (context); | |
6379 | ||
bbf6f052 RK |
6380 | temp = SAVE_EXPR_RTL (exp); |
6381 | if (temp && GET_CODE (temp) == REG) | |
6382 | { | |
6383 | put_var_into_stack (exp); | |
6384 | temp = SAVE_EXPR_RTL (exp); | |
6385 | } | |
6386 | if (temp == 0 || GET_CODE (temp) != MEM) | |
6387 | abort (); | |
792760b9 RK |
6388 | return |
6389 | replace_equiv_address (temp, | |
6390 | fix_lexical_addr (XEXP (temp, 0), exp)); | |
bbf6f052 RK |
6391 | } |
6392 | if (SAVE_EXPR_RTL (exp) == 0) | |
6393 | { | |
06089a8b RK |
6394 | if (mode == VOIDmode) |
6395 | temp = const0_rtx; | |
6396 | else | |
1da68f56 RK |
6397 | temp = assign_temp (build_qualified_type (type, |
6398 | (TYPE_QUALS (type) | |
6399 | | TYPE_QUAL_CONST)), | |
6400 | 3, 0, 0); | |
1499e0a8 | 6401 | |
bbf6f052 | 6402 | SAVE_EXPR_RTL (exp) = temp; |
bbf6f052 | 6403 | if (!optimize && GET_CODE (temp) == REG) |
38a448ca RH |
6404 | save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp, |
6405 | save_expr_regs); | |
ff78f773 RK |
6406 | |
6407 | /* If the mode of TEMP does not match that of the expression, it | |
6408 | must be a promoted value. We pass store_expr a SUBREG of the | |
6409 | wanted mode but mark it so that we know that it was already | |
6410 | extended. Note that `unsignedp' was modified above in | |
6411 | this case. */ | |
6412 | ||
6413 | if (GET_CODE (temp) == REG && GET_MODE (temp) != mode) | |
6414 | { | |
ddef6bc7 | 6415 | temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp)); |
ff78f773 | 6416 | SUBREG_PROMOTED_VAR_P (temp) = 1; |
7879b81e | 6417 | SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); |
ff78f773 RK |
6418 | } |
6419 | ||
4c7a0be9 | 6420 | if (temp == const0_rtx) |
37a08a29 | 6421 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); |
4c7a0be9 JW |
6422 | else |
6423 | store_expr (TREE_OPERAND (exp, 0), temp, 0); | |
e5e809f4 JL |
6424 | |
6425 | TREE_USED (exp) = 1; | |
bbf6f052 | 6426 | } |
1499e0a8 RK |
6427 | |
6428 | /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it | |
6429 | must be a promoted value. We return a SUBREG of the wanted mode, | |
0f41302f | 6430 | but mark it so that we know that it was already extended. */ |
1499e0a8 RK |
6431 | |
6432 | if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG | |
6433 | && GET_MODE (SAVE_EXPR_RTL (exp)) != mode) | |
6434 | { | |
e70d22c8 RK |
6435 | /* Compute the signedness and make the proper SUBREG. */ |
6436 | promote_mode (type, mode, &unsignedp, 0); | |
ddef6bc7 | 6437 | temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp)); |
1499e0a8 | 6438 | SUBREG_PROMOTED_VAR_P (temp) = 1; |
7879b81e | 6439 | SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); |
1499e0a8 RK |
6440 | return temp; |
6441 | } | |
6442 | ||
bbf6f052 RK |
6443 | return SAVE_EXPR_RTL (exp); |
6444 | ||
679163cf MS |
6445 | case UNSAVE_EXPR: |
6446 | { | |
6447 | rtx temp; | |
6448 | temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier); | |
24965e7a NB |
6449 | TREE_OPERAND (exp, 0) |
6450 | = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0)); | |
679163cf MS |
6451 | return temp; |
6452 | } | |
6453 | ||
b50d17a1 | 6454 | case PLACEHOLDER_EXPR: |
e9a25f70 | 6455 | { |
f47e9b4e | 6456 | tree old_list = placeholder_list; |
738cc472 | 6457 | tree placeholder_expr = 0; |
e9a25f70 | 6458 | |
f47e9b4e | 6459 | exp = find_placeholder (exp, &placeholder_expr); |
70072ed9 RK |
6460 | if (exp == 0) |
6461 | abort (); | |
6462 | ||
f47e9b4e | 6463 | placeholder_list = TREE_CHAIN (placeholder_expr); |
37a08a29 | 6464 | temp = expand_expr (exp, original_target, tmode, modifier); |
f47e9b4e RK |
6465 | placeholder_list = old_list; |
6466 | return temp; | |
e9a25f70 | 6467 | } |
b50d17a1 RK |
6468 | |
6469 | /* We can't find the object or there was a missing WITH_RECORD_EXPR. */ | |
6470 | abort (); | |
6471 | ||
6472 | case WITH_RECORD_EXPR: | |
6473 | /* Put the object on the placeholder list, expand our first operand, | |
6474 | and pop the list. */ | |
6475 | placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE, | |
6476 | placeholder_list); | |
37a08a29 RK |
6477 | target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode, |
6478 | modifier); | |
b50d17a1 RK |
6479 | placeholder_list = TREE_CHAIN (placeholder_list); |
6480 | return target; | |
6481 | ||
70e6ca43 APB |
6482 | case GOTO_EXPR: |
6483 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL) | |
6484 | expand_goto (TREE_OPERAND (exp, 0)); | |
6485 | else | |
6486 | expand_computed_goto (TREE_OPERAND (exp, 0)); | |
6487 | return const0_rtx; | |
6488 | ||
bbf6f052 | 6489 | case EXIT_EXPR: |
df4ae160 | 6490 | expand_exit_loop_if_false (NULL, |
e44842fe | 6491 | invert_truthvalue (TREE_OPERAND (exp, 0))); |
bbf6f052 RK |
6492 | return const0_rtx; |
6493 | ||
f42e28dd APB |
6494 | case LABELED_BLOCK_EXPR: |
6495 | if (LABELED_BLOCK_BODY (exp)) | |
b0832fe1 | 6496 | expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1); |
30f7a378 | 6497 | /* Should perhaps use expand_label, but this is simpler and safer. */ |
0a5fee32 | 6498 | do_pending_stack_adjust (); |
f42e28dd APB |
6499 | emit_label (label_rtx (LABELED_BLOCK_LABEL (exp))); |
6500 | return const0_rtx; | |
6501 | ||
6502 | case EXIT_BLOCK_EXPR: | |
6503 | if (EXIT_BLOCK_RETURN (exp)) | |
ab87f8c8 | 6504 | sorry ("returned value in block_exit_expr"); |
f42e28dd APB |
6505 | expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp))); |
6506 | return const0_rtx; | |
6507 | ||
bbf6f052 | 6508 | case LOOP_EXPR: |
0088fcb1 | 6509 | push_temp_slots (); |
bbf6f052 | 6510 | expand_start_loop (1); |
b0832fe1 | 6511 | expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1); |
bbf6f052 | 6512 | expand_end_loop (); |
0088fcb1 | 6513 | pop_temp_slots (); |
bbf6f052 RK |
6514 | |
6515 | return const0_rtx; | |
6516 | ||
6517 | case BIND_EXPR: | |
6518 | { | |
6519 | tree vars = TREE_OPERAND (exp, 0); | |
6520 | int vars_need_expansion = 0; | |
6521 | ||
6522 | /* Need to open a binding contour here because | |
e976b8b2 | 6523 | if there are any cleanups they must be contained here. */ |
8e91754e | 6524 | expand_start_bindings (2); |
bbf6f052 | 6525 | |
2df53c0b RS |
6526 | /* Mark the corresponding BLOCK for output in its proper place. */ |
6527 | if (TREE_OPERAND (exp, 2) != 0 | |
6528 | && ! TREE_USED (TREE_OPERAND (exp, 2))) | |
6529 | insert_block (TREE_OPERAND (exp, 2)); | |
bbf6f052 RK |
6530 | |
6531 | /* If VARS have not yet been expanded, expand them now. */ | |
6532 | while (vars) | |
6533 | { | |
19e7881c | 6534 | if (!DECL_RTL_SET_P (vars)) |
bbf6f052 RK |
6535 | { |
6536 | vars_need_expansion = 1; | |
6537 | expand_decl (vars); | |
6538 | } | |
6539 | expand_decl_init (vars); | |
6540 | vars = TREE_CHAIN (vars); | |
6541 | } | |
6542 | ||
37a08a29 | 6543 | temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier); |
bbf6f052 RK |
6544 | |
6545 | expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0); | |
6546 | ||
6547 | return temp; | |
6548 | } | |
6549 | ||
6550 | case RTL_EXPR: | |
83b853c9 JM |
6551 | if (RTL_EXPR_SEQUENCE (exp)) |
6552 | { | |
6553 | if (RTL_EXPR_SEQUENCE (exp) == const0_rtx) | |
6554 | abort (); | |
6555 | emit_insns (RTL_EXPR_SEQUENCE (exp)); | |
6556 | RTL_EXPR_SEQUENCE (exp) = const0_rtx; | |
6557 | } | |
64dc53f3 MM |
6558 | preserve_rtl_expr_result (RTL_EXPR_RTL (exp)); |
6559 | free_temps_for_rtl_expr (exp); | |
bbf6f052 RK |
6560 | return RTL_EXPR_RTL (exp); |
6561 | ||
6562 | case CONSTRUCTOR: | |
dd27116b RK |
6563 | /* If we don't need the result, just ensure we evaluate any |
6564 | subexpressions. */ | |
6565 | if (ignore) | |
6566 | { | |
6567 | tree elt; | |
37a08a29 | 6568 | |
dd27116b | 6569 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) |
37a08a29 RK |
6570 | expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0); |
6571 | ||
dd27116b RK |
6572 | return const0_rtx; |
6573 | } | |
3207b172 | 6574 | |
4af3895e JVA |
6575 | /* All elts simple constants => refer to a constant in memory. But |
6576 | if this is a non-BLKmode mode, let it store a field at a time | |
6577 | since that should make a CONST_INT or CONST_DOUBLE when we | |
3207b172 | 6578 | fold. Likewise, if we have a target we can use, it is best to |
d720b9d1 RK |
6579 | store directly into the target unless the type is large enough |
6580 | that memcpy will be used. If we are making an initializer and | |
3207b172 | 6581 | all operands are constant, put it in memory as well. */ |
dd27116b | 6582 | else if ((TREE_STATIC (exp) |
3207b172 | 6583 | && ((mode == BLKmode |
e5e809f4 | 6584 | && ! (target != 0 && safe_from_p (target, exp, 1))) |
d720b9d1 | 6585 | || TREE_ADDRESSABLE (exp) |
19caa751 | 6586 | || (host_integerp (TYPE_SIZE_UNIT (type), 1) |
3a94c984 | 6587 | && (! MOVE_BY_PIECES_P |
19caa751 RK |
6588 | (tree_low_cst (TYPE_SIZE_UNIT (type), 1), |
6589 | TYPE_ALIGN (type))) | |
9de08200 | 6590 | && ! mostly_zeros_p (exp)))) |
dd27116b | 6591 | || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp))) |
bbf6f052 | 6592 | { |
bd7cf17e | 6593 | rtx constructor = output_constant_def (exp, 1); |
19caa751 | 6594 | |
b552441b RS |
6595 | if (modifier != EXPAND_CONST_ADDRESS |
6596 | && modifier != EXPAND_INITIALIZER | |
792760b9 RK |
6597 | && modifier != EXPAND_SUM) |
6598 | constructor = validize_mem (constructor); | |
6599 | ||
bbf6f052 RK |
6600 | return constructor; |
6601 | } | |
bbf6f052 RK |
6602 | else |
6603 | { | |
e9ac02a6 JW |
6604 | /* Handle calls that pass values in multiple non-contiguous |
6605 | locations. The Irix 6 ABI has examples of this. */ | |
e5e809f4 | 6606 | if (target == 0 || ! safe_from_p (target, exp, 1) |
e9ac02a6 | 6607 | || GET_CODE (target) == PARALLEL) |
1da68f56 RK |
6608 | target |
6609 | = assign_temp (build_qualified_type (type, | |
6610 | (TYPE_QUALS (type) | |
6611 | | (TREE_READONLY (exp) | |
6612 | * TYPE_QUAL_CONST))), | |
c24ae149 | 6613 | 0, TREE_ADDRESSABLE (exp), 1); |
07604beb | 6614 | |
04050c69 | 6615 | store_constructor (exp, target, 0, |
b7010412 | 6616 | int_size_in_bytes (TREE_TYPE (exp))); |
bbf6f052 RK |
6617 | return target; |
6618 | } | |
6619 | ||
6620 | case INDIRECT_REF: | |
6621 | { | |
6622 | tree exp1 = TREE_OPERAND (exp, 0); | |
7581a30f | 6623 | tree index; |
3a94c984 KH |
6624 | tree string = string_constant (exp1, &index); |
6625 | ||
06eaa86f | 6626 | /* Try to optimize reads from const strings. */ |
7581a30f JW |
6627 | if (string |
6628 | && TREE_CODE (string) == STRING_CST | |
6629 | && TREE_CODE (index) == INTEGER_CST | |
05bccae2 | 6630 | && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0 |
7581a30f | 6631 | && GET_MODE_CLASS (mode) == MODE_INT |
06eaa86f | 6632 | && GET_MODE_SIZE (mode) == 1 |
37a08a29 | 6633 | && modifier != EXPAND_WRITE) |
21ef78aa DE |
6634 | return gen_int_mode (TREE_STRING_POINTER (string) |
6635 | [TREE_INT_CST_LOW (index)], mode); | |
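/* Editor's example, not part of the original source: a read such as
   *("abc" + 1) arrives here with STRING = "abc" and INDEX = 1, and the
   optimization above folds it straight to the QImode constant 'b'.  */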
bbf6f052 | 6636 | |
405f0da6 JW |
6637 | op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM); |
6638 | op0 = memory_address (mode, op0); | |
38a448ca | 6639 | temp = gen_rtx_MEM (mode, op0); |
3bdf5ad1 | 6640 | set_mem_attributes (temp, exp, 0); |
1125706f | 6641 | |
14a774a9 RK |
6642 | /* If we are writing to this object and its type is a record with |
6643 | readonly fields, we must mark it as readonly so it will | |
6644 | conflict with readonly references to those fields. */ | |
37a08a29 | 6645 | if (modifier == EXPAND_WRITE && readonly_fields_p (type)) |
14a774a9 RK |
6646 | RTX_UNCHANGING_P (temp) = 1; |
6647 | ||
8c8a8e34 JW |
6648 | return temp; |
6649 | } | |
bbf6f052 RK |
6650 | |
6651 | case ARRAY_REF: | |
742920c7 RK |
6652 | if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE) |
6653 | abort (); | |
bbf6f052 | 6654 | |
bbf6f052 | 6655 | { |
742920c7 RK |
6656 | tree array = TREE_OPERAND (exp, 0); |
6657 | tree domain = TYPE_DOMAIN (TREE_TYPE (array)); | |
6658 | tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node; | |
fed3cef0 | 6659 | tree index = convert (sizetype, TREE_OPERAND (exp, 1)); |
08293add | 6660 | HOST_WIDE_INT i; |
b50d17a1 | 6661 | |
d4c89139 PB |
6662 | /* Optimize the special-case of a zero lower bound. |
6663 | ||
6664 | We convert the low_bound to sizetype to avoid some problems | |
6665 | with constant folding. (E.g. suppose the lower bound is 1, | |
6666 | and its mode is QI. Without the conversion, (ARRAY | |
6667 | +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) | |
fed3cef0 | 6668 | +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ |
d4c89139 | 6669 | |
742920c7 | 6670 | if (! integer_zerop (low_bound)) |
fed3cef0 | 6671 | index = size_diffop (index, convert (sizetype, low_bound)); |
742920c7 | 6672 | |
742920c7 | 6673 | /* Fold an expression like: "foo"[2]. |
ad2e7dd0 RK |
6674 | This is not done in fold so it won't happen inside &. |
6675 | Don't fold if this is for wide characters since it's too | |
6676 | difficult to do correctly and this is a very rare case. */ | |
742920c7 | 6677 | |
cb5fa0f8 RK |
6678 | if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER |
6679 | && TREE_CODE (array) == STRING_CST | |
742920c7 | 6680 | && TREE_CODE (index) == INTEGER_CST |
05bccae2 | 6681 | && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0 |
ad2e7dd0 RK |
6682 | && GET_MODE_CLASS (mode) == MODE_INT |
6683 | && GET_MODE_SIZE (mode) == 1) | |
21ef78aa DE |
6684 | return gen_int_mode (TREE_STRING_POINTER (array) |
6685 | [TREE_INT_CST_LOW (index)], mode); | |
bbf6f052 | 6686 | |
742920c7 RK |
6687 | /* If this is a constant index into a constant array, |
6688 | just get the value from the array. Handle both the cases when | |
6689 | we have an explicit constructor and when our operand is a variable | |
6690 | that was declared const. */ | |
4af3895e | 6691 | |
cb5fa0f8 RK |
6692 | if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER |
6693 | && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array) | |
05bccae2 | 6694 | && TREE_CODE (index) == INTEGER_CST |
3a94c984 | 6695 | && 0 > compare_tree_int (index, |
05bccae2 RK |
6696 | list_length (CONSTRUCTOR_ELTS |
6697 | (TREE_OPERAND (exp, 0))))) | |
742920c7 | 6698 | { |
05bccae2 RK |
6699 | tree elem; |
6700 | ||
6701 | for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)), | |
6702 | i = TREE_INT_CST_LOW (index); | |
6703 | elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem)) | |
6704 | ; | |
6705 | ||
6706 | if (elem) | |
37a08a29 RK |
6707 | return expand_expr (fold (TREE_VALUE (elem)), target, tmode, |
6708 | modifier); | |
742920c7 | 6709 | } |
3a94c984 | 6710 | |
742920c7 | 6711 | else if (optimize >= 1 |
cb5fa0f8 RK |
6712 | && modifier != EXPAND_CONST_ADDRESS |
6713 | && modifier != EXPAND_INITIALIZER | |
742920c7 RK |
               && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
               && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
               && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
        {
          if (TREE_CODE (index) == INTEGER_CST)
            {
              tree init = DECL_INITIAL (array);

              if (TREE_CODE (init) == CONSTRUCTOR)
                {
                  tree elem;

                  for (elem = CONSTRUCTOR_ELTS (init);
                       (elem
                        && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
                       elem = TREE_CHAIN (elem))
                    ;

                  if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
                    return expand_expr (fold (TREE_VALUE (elem)), target,
                                        tmode, modifier);
                }
              else if (TREE_CODE (init) == STRING_CST
                       && 0 > compare_tree_int (index,
                                                TREE_STRING_LENGTH (init)))
                {
                  tree type = TREE_TYPE (TREE_TYPE (init));
                  enum machine_mode mode = TYPE_MODE (type);

                  if (GET_MODE_CLASS (mode) == MODE_INT
                      && GET_MODE_SIZE (mode) == 1)
                    return gen_int_mode (TREE_STRING_POINTER (init)
                                         [TREE_INT_CST_LOW (index)], mode);
                }
            }
        }
    }
      /* Fall through.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
      if (code == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
          && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    HOST_WIDE_INT bitsize
                      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
                    enum machine_mode imode
                      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (imode, op0, op1, target);
                      }
                    else
                      {
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }
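                /* Illustration (not in the original source): for a 3-bit
                   field in QImode, the unsigned branch masks with
                   ((HOST_WIDE_INT) 1 << 3) - 1 = 7, while the signed branch
                   shifts left and then arithmetic-right by 8 - 3 = 5 bits,
                   so the raw bits 110 come back sign-extended as -2.  */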

                return op0;
              }
        }

      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep);
        rtx orig_op0;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to suffice.  This occurs in unchecked conversion in Ada.  */

        orig_op0 = op0
          = expand_expr (tem,
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                              != INTEGER_CST)
                          ? target : NULL_RTX),
                         VOIDmode,
                         (modifier == EXPAND_INITIALIZER
                          || modifier == EXPAND_CONST_ADDRESS)
                         ? modifier : EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and OFFSET is 0, and into memory if it
           isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0)
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

            /* If this object is in a register, put it into memory.
               This case can't occur in C, but can in Ada if we have
               unchecked conversion of an expression from a scalar type to
               an array or record type.  */
            if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
              {
                /* If the operand is a SAVE_EXPR, we can deal with this by
                   forcing the SAVE_EXPR into memory.  */
                if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
                  {
                    put_var_into_stack (TREE_OPERAND (exp, 0));
                    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
                  }
                else
                  {
                    tree nt
                      = build_qualified_type (TREE_TYPE (tem),
                                              (TYPE_QUALS (TREE_TYPE (tem))
                                               | TYPE_QUAL_CONST));
                    rtx memloc = assign_temp (nt, 1, 1, 1);

                    emit_move_insn (memloc, op0);
                    op0 = memloc;
                  }
              }

            if (GET_CODE (op0) != MEM)
              abort ();

            if (GET_MODE (offset_rtx) != ptr_mode)
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
            if (GET_MODE (offset_rtx) != Pmode)
              offset_rtx = convert_memory_address (Pmode, offset_rtx);
#endif

            /* A constant address in OP0 can have VOIDmode; we must not
               call force_reg in that case, so avoid it.  */
            if (GET_CODE (op0) == MEM
                && GET_MODE (op0) == BLKmode
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
                && bitsize != 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
              {
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
                && ((TYPE_ALIGN (TREE_TYPE (tem))
                     < GET_MODE_ALIGNMENT (mode))
                    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
                    == INTEGER_CST)
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && GET_CODE (op0) == MEM
                      && GET_CODE (target) == MEM
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                if (GET_CODE (op0) != MEM
                    || (target != 0 && GET_CODE (target) != MEM)
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                emit_block_move (target, op0,
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT));
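                /* The rounding above converts bits to whole bytes; e.g., a
                   17-bit field copies (17 + 7) / 8 = 3 bytes (illustrative
                   numbers, not from the original source).  */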

                return target;
              }

            op0 = validize_mem (op0);

            if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     int_size_in_bytes (TREE_TYPE (tem)));

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);
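            /* For example (illustrative only): a 9-bit record extracted
               into SImode on a 32-bit big-endian machine is shifted left
               by 32 - 9 = 23 so the field lands in the high-order bits.  */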

            if (mode == BLKmode)
              {
                rtx new = assign_temp (build_qualified_type
                                       (type_for_mode (ext_mode, 0),
                                        TYPE_QUAL_CONST), 0, 1, 1);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                set_mem_attributes (op0, exp, 1);
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        set_mem_attributes (op0, exp, 0);
        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }

    case VTABLE_REF:
      {
        rtx insn, before = get_last_insn (), vtbl_ref;

        /* Evaluate the interior expression.  */
        subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
                                 tmode, modifier);

        /* Get or create an instruction off which to hang a note.  */
        if (REG_P (subtarget))
          {
            target = subtarget;
            insn = get_last_insn ();
            if (insn == before)
              abort ();
            if (! INSN_P (insn))
              insn = prev_nonnote_insn (insn);
          }
        else
          {
            target = gen_reg_rtx (GET_MODE (subtarget));
            insn = emit_move_insn (target, subtarget);
          }

        /* Collect the data for the note.  */
        vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
        vtbl_ref = plus_constant (vtbl_ref,
                                  tree_low_cst (TREE_OPERAND (exp, 2), 0));
        /* Discard the initial CONST that was added.  */
        vtbl_ref = XEXP (vtbl_ref, 0);

        REG_NOTES (insn)
          = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

        return target;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low%bits_per_word);
               the_word  = set [ (index - rlo)/bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */
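        /* Worked example (illustrative, not from the original source):
           with bits_per_word = 8, set_low = 10 and index = 21, rlo is
           10 - (10 % 8) = 8, the word holding the bit is
           set[(21 - 8) / 8] = set[1], and the bit tested within it is
           1 << (21 % 8) = 1 << 5.  */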

        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
              && TREE_CODE (set_low_bound) == INTEGER_CST
              && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }

    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
        {
          WITH_CLEANUP_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
          expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 1) = 0;
        }
      return WITH_CLEANUP_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (2);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return (*lang_expand_expr) (exp, original_target, tmode, modifier);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attributes.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, exp, 0);
              return result;
            }

          if (target == 0)
            target = assign_temp (type, 0, 1, 1);

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        adjust_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target,
                         MIN ((int_size_in_bytes (TREE_TYPE
                                                  (TREE_OPERAND (exp, 0)))
                               * BITS_PER_UNIT),
                              (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                         0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, type, 0);
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
          enum machine_mode inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            return simplify_gen_subreg (mode, op0, inner_mode,
                                        subreg_lowpart_offset (mode,
                                                               inner_mode));
          else
            return convert_modes (mode, inner_mode, op0,
                                  TREE_UNSIGNED (inner_type));
        }

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
         Otherwise, if neither mode is BLKmode and both are within a word, we
         can use gen_lowpart.  If neither is true, make sure the operand is
         in memory and convert the MEM to the new mode.  */
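      /* For instance (illustrative): viewing a 4-byte float as a 4-byte
         integer on a 32-bit target takes the gen_lowpart path below, since
         SFmode and SImode both fit in a word; a BLKmode aggregate instead
         goes through memory.  */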
      if (TYPE_MODE (type) == GET_MODE (op0))
        ;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
               && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
               && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
        op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

          if (TREE_ADDRESSABLE (exp))
            abort ();

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* At this point, OP0 is in the correct mode.  If the output type is
         such that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
      if (GET_CODE (op0) == MEM)
        {
          op0 = copy_rtx (op0);

          if (TYPE_ALIGN_OK (type))
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              HOST_WIDE_INT temp_size
                = MAX (int_size_in_bytes (inner_type),
                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
                                                    temp_size, 0, type);
              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

              if (TREE_ADDRESSABLE (exp))
                abort ();

              if (GET_MODE (op0) == BLKmode)
                emit_block_move (new_with_op0_mode, op0,
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
              else
                emit_move_insn (new_with_op0_mode, op0);

              op0 = new;
            }

          op0 = adjust_address (op0, TYPE_MODE (type), 0);
        }

      return op0;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
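      /* E.g. (illustrative): for "int *p = &arr[3];" with 4-byte ints,
         plus_constant can fold the address to
         (const (plus (symbol_ref "arr") (const_int 12)))
         instead of emitting a run-time addition.  */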
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
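              /* Illustration (not in the original source): on a 64-bit
                 host targeting a 32-bit machine, the SImode constant
                 0xffffffff must become the sign-extended HOST_WIDE_INT -1;
                 using the raw low bits would produce a CONST_INT that is
                 not canonical for SImode.  */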
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
        }

7551 | ||
7552 | /* Put a constant term last and put a multiplication first. */ | |
7553 | if (CONSTANT_P (op0) || GET_CODE (op1) == MULT) | |
7554 | temp = op1, op1 = op0, op0 = temp; | |
7555 | ||
7556 | temp = simplify_binary_operation (PLUS, mode, op0, op1); | |
38a448ca | 7557 | return temp ? temp : gen_rtx_PLUS (mode, op0, op1); |
bbf6f052 RK |
7558 | |
7559 | case MINUS_EXPR: | |
ea87523e RK |
7560 | /* For initializers, we are allowed to return a MINUS of two |
7561 | symbolic constants. Here we handle all cases when both operands | |
7562 | are constant. */ | |
bbf6f052 RK |
7563 | /* Handle difference of two symbolic constants, |
7564 | for the sake of an initializer. */ | |
7565 | if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) | |
7566 | && really_constant_p (TREE_OPERAND (exp, 0)) | |
7567 | && really_constant_p (TREE_OPERAND (exp, 1))) | |
7568 | { | |
37a08a29 RK |
7569 | rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, |
7570 | modifier); | |
7571 | rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, | |
7572 | modifier); | |
ea87523e | 7573 | |
ea87523e RK |
7574 | /* If the last operand is a CONST_INT, use plus_constant of |
7575 | the negated constant. Else make the MINUS. */ | |
7576 | if (GET_CODE (op1) == CONST_INT) | |
7577 | return plus_constant (op0, - INTVAL (op1)); | |
7578 | else | |
38a448ca | 7579 | return gen_rtx_MINUS (mode, op0, op1); |
bbf6f052 RK |
7580 | } |
7581 | /* Convert A - const to A + (-const). */ | |
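      /* E.g. (illustrative): A - 5 becomes A + (-5), so the PLUS_EXPR
         handling above, including plus_constant, applies unchanged.  */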
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          tree negated = fold (build1 (NEGATE_EXPR, type,
                                       TREE_OPERAND (exp, 1)));

          if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
            /* If we can't negate the constant in TYPE, leave it alone and
               expand_binop will negate it for us.  We used to try to do it
               here in the signed version of TYPE, but that doesn't work
               on POINTER_TYPEs.  */;
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? subv_optab : sub_optab;
      goto binop;

    case MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && host_integerp (TREE_OPERAND (exp, 1), 0))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          /* If we knew for certain that this is arithmetic for an array
             reference, and we knew the bounds of the array, then we could
             apply the distributive law across (PLUS X C) for constant C.
             Without such knowledge, we risk overflowing the computation
             when both X and C are large, but X+C isn't.  */
          /* ??? Could perhaps special-case EXP being unsigned and C being
             positive.  In that case we are certain that X+C is no smaller
             than X and so the transformed expression will overflow iff the
             original would have.  */
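          /* Illustrative restatement: rewriting (X + C) * 4 as
             X*4 + C*4 lets each product overflow on its own even when
             X + C is in range, so the distribution is only safe with
             knowledge of the array bounds.  */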

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return
            gen_rtx_MULT (mode, op0,
                          GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
        }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
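      /* E.g. (illustrative): "(int) (short) a * (int) (short) b" on a
         machine with a mulhisi3 pattern becomes one HImode x HImode ->
         SImode multiply instead of two sign extensions plus a full
         SImode multiply.  */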
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                               ? smul_widen_optab : umul_widen_optab);
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  goto binop2;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem;
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_expr (TREE_OPERAND (exp, 1),
                                                      NULL_RTX, VOIDmode, 0),
                                         unsignedp);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  htem = expand_mult_highpart_adjust (innermode,
                                                      gen_highpart (innermode, temp),
                                                      op0, op1,
                                                      gen_highpart (innermode, temp),
                                                      unsignedp);
                  emit_move_insn (gen_highpart (innermode, temp), htem);
                  return temp;
                }
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later, CSE may manage to eliminate a repeated
         reciprocal, saving an expensive divide.  If not, combine will
         rebuild the original computation.  */
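      /* E.g. (illustrative): with x/y and z/y in the same function, both
         become multiplies by a shared 1/y.  The rewrite changes rounding,
         which is why it is gated on flag_unsafe_math_optimizations.  */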
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
          && !real_onep (TREE_OPERAND (exp, 0)))
        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
                                   build (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          TREE_OPERAND (exp, 1))),
                            target, tmode, unsignedp);
      this_optab = sdiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
                          ! unsignedp && flag_trapv
                          && (GET_MODE_CLASS (mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
        }
      else
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
          do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                   unsignedp, mode, NULL_RTX, NULL_RTX,
                                   op0);
        }
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */
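      /* E.g. (illustrative): "a && b" with no side effects could skip the
         evaluation of b entirely (TRUTH_ANDIF_EXPR), whereas TRUTH_AND_EXPR
         evaluates both operands to 0 or 1 and ANDs them, trading a branch
         for straight-line code.  */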

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0 || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
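      /* That restriction is what makes the XOR below valid: 0^1 = 1 and
         1^1 = 0, but an arbitrary nonzero value such as 2 would give
         2^1 = 3 rather than 0.  (Illustrative note, not in the
         original.)  */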
906c4e36 | 8008 | temp = expand_binop (mode, xor_optab, op0, const1_rtx, |
bbf6f052 RK |
8009 | target, 1, OPTAB_LIB_WIDEN); |
8010 | if (temp == 0) | |
8011 | abort (); | |
8012 | return temp; | |

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, 0);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }
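
	/* Illustrative note (added commentary, not from the original
	   source): for `(x < y) ? 1 : 0' the path above reduces the whole
	   COND_EXPR to expanding the comparison itself, since a store-flag
	   comparison already yields 0 or 1.  */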

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */
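	/* Illustrative note (added commentary, not from the original
	   source): given `(x > y) ? a + b : a', A is `a' (the "singleton")
	   and B is `b'; the expansion below copies `a' to the target and
	   performs the add only on the branch where the condition holds.  */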

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && (GET_CODE (original_target) != MEM
		     || TREE_ADDRESSABLE (type)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
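	/* Illustrative note (added commentary, not from the original
	   source): for `(x > 0) ? a + 8 : a' with 8 == 1 << 3, the
	   branchless form is `a + ((x > 0) << 3)'; do_store_flag computes
	   the 0/1 value of X and expand_shift supplies the `<< log C'.  */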
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? addv_optab : add_optab)
			    : TREE_CODE (binary_op) == MINUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.
	       For example: A ? throw : E.  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }

    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which is laid out in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		SET_DECL_RTL (slot, target);
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not the target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		SET_DECL_RTL (slot, target);
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = 0;

	/* Check for |= or &= of a bitfield of size 1 into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
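	/* Illustrative note (added commentary, not from the original
	   source): with 1-bit fields and the result ignored, `s.f |= t.g;'
	   becomes roughly `if (t.g) s.f = 1;' and `s.f &= t.g;' becomes
	   `if (!t.g) s.f = 0;' -- a jump around a store of a constant
	   instead of a read-modify-write of the field.  */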
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);

	return temp;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
      else if (cfun == 0
	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		       == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and returns a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		   || GET_CODE (op0) == PARALLEL)
	    {
	      /* If the operand is a SAVE_EXPR, we can deal with this by
		 forcing the SAVE_EXPR into memory.  */
	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		{
		  put_var_into_stack (TREE_OPERAND (exp, 0));
		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		}
	      else
		{
		  /* If this object is in a register, it can't be BLKmode.  */
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx memloc = assign_temp (inner_type, 1, 1, 1);

		  if (GET_CODE (op0) == PARALLEL)
		    /* Handle calls that pass values in multiple
		       non-contiguous locations.  The Irix 6 ABI has examples
		       of this.  */
		    emit_group_store (memloc, op0,
				      int_size_in_bytes (inner_type));
		  else
		    emit_move_insn (memloc, op0);

		  op0 = memloc;
		}
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
		  && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
#endif
	      return op0;
	    }

	  /* If OP0 is not aligned at least as much as the type requires, we
	     need to make a temporary, copy OP0 to it, and take the address of
	     the temporary.  We want to use the alignment of the type, not of
	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	     the test for BLKmode means that can't happen.  The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode.

	     We don't need to do this at all if the machine doesn't have
	     strict alignment.  */
	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new
		= assign_stack_temp_for_type
		  (TYPE_MODE (inner_type),
		   MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		   : int_size_in_bytes (inner_type),
		   1, build_qualified_type (inner_type,
					    (TYPE_QUALS (inner_type)
					     | TYPE_QUAL_CONST)));

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
	      op0 = new;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && GET_CODE (op0) != REG
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS (partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
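
      /* Illustrative note (added commentary, not from the original source):
	 for z = a + b*i the sequence above emits target.real = a and
	 target.imag = -b, i.e. the complex conjugate, selecting the
	 trapping negate (negv_optab) only for signed integer parts
	 under -ftrapv.  */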

    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end_cleanup (handler);

	return op0;
      }

    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);
	rtx finally_label = gen_label_rtx ();
	rtx done_label = gen_label_rtx ();
	rtx return_link = gen_reg_rtx (Pmode);
	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
			      (tree) finally_label, (tree) return_link);
	TREE_SIDE_EFFECTS (cleanup) = 1;

	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	expand_decl_cleanup (NULL_TREE, cleanup);
	op0 = expand_expr (try_block, target, tmode, modifier);

	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
	emit_jump (done_label);
	emit_label (finally_label);
	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	emit_indirect_jump (return_link);
	emit_label (done_label);
	return op0;
      }
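
      /* Illustrative note (added commentary, not from the original source):
	 the emitted shape is roughly

	     <try_block>
	     goto done;
	   finally:
	     <finally_block>
	     goto *return_link;
	   done:

	 and the GOTO_SUBROUTINE_EXPR cleanup (handled just below) reaches
	 the finally code by first storing a return address in
	 RETURN_LINK.  */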

    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();
	emit_move_insn (return_link,
			gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);
	return const0_rtx;
      }

    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      abort ();

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
\f
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
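
/* Illustrative note (added commentary, not from the original source):
   for an ARG representing `"hello" + 2' -- a PLUS_EXPR of an ADDR_EXPR
   of a STRING_CST and the constant 2 -- this returns the STRING_CST for
   "hello" and sets *PTR_OFFSET to a sizetype 2, letting callers fold
   accesses into the literal.  */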
\f
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }
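
  /* Illustrative note (added commentary, not from the original source):
     e.g. a pointer decrement `p--' on a target with 4-byte elements
     (an assumed layout) arrives here as sub_optab with CONST_INT 4 and
     leaves as add_optab with -4, so a constant decrement follows the
     same path as an increment.  */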

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
	{
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
	{
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = replace_equiv_address (op0, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
\f
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak.  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
	 operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
				  TREE_OPERAND (exp, 0),
				  TREE_OPERAND (exp, 1)),
			   NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;
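
      /* Illustrative note (added commentary, not from the original source):
	 for `if (x & 0x80)' with int X, tree_floor_log2 gives i == 7, so
	 mode_for_size (8, ...) yields an 8-bit mode (QImode on typical
	 targets) and the test narrows to a byte comparison; narrowing is
	 safe because the masked value fits in i + 1 bits.  */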

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
	HOST_WIDE_INT bitsize, bitpos;
	int unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &volatilep);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();

	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

	  start_cleanup_deferral ();
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);

	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;

    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (fold
	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		       fold (build (EQ_EXPR, TREE_TYPE (exp),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))),
		       fold (build (EQ_EXPR, TREE_TYPE (exp),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
	break;
      }
9382 | } | |
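/* Illustration (not from the original source): the complex equality
   above decomposes over the parts,

     a == b  ~~>  (__real__ a == __real__ b) && (__imag__ a == __imag__ b)

   with both operands wrapped in save_expr so that each is expanded
   only once even though it appears in two comparisons.  */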
bbf6f052 | 9383 | |
b93a436e JL |
9384 | case NE_EXPR: |
9385 | { | |
9386 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
bbf6f052 | 9387 | |
9ec36da5 JL |
9388 | if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT |
9389 | || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT) | |
8d62b411 AS |
9390 | { |
9391 | tree exp0 = save_expr (TREE_OPERAND (exp, 0)); | |
9392 | tree exp1 = save_expr (TREE_OPERAND (exp, 1)); | |
9393 | do_jump | |
9394 | (fold | |
9395 | (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), | |
9396 | fold (build (NE_EXPR, TREE_TYPE (exp), | |
9397 | fold (build1 (REALPART_EXPR, | |
9398 | TREE_TYPE (inner_type), | |
9399 | exp0)), | |
9400 | fold (build1 (REALPART_EXPR, | |
9401 | TREE_TYPE (inner_type), | |
9402 | exp1)))), | |
9403 | fold (build (NE_EXPR, TREE_TYPE (exp), | |
9404 | fold (build1 (IMAGPART_EXPR, | |
9405 | TREE_TYPE (inner_type), | |
9406 | exp0)), | |
9407 | fold (build1 (IMAGPART_EXPR, | |
9408 | TREE_TYPE (inner_type), | |
9409 | exp1)))))), | |
9410 | if_false_label, if_true_label); | |
9411 | } | |
9ec36da5 JL |
9412 | |
9413 | else if (integer_zerop (TREE_OPERAND (exp, 1))) | |
9414 | do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); | |
9415 | ||
b93a436e | 9416 | else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT |
1eb8759b | 9417 | && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump)) |
b93a436e JL |
9418 | do_jump_by_parts_equality (exp, if_true_label, if_false_label); |
9419 | else | |
b30f05db | 9420 | do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label); |
b93a436e JL |
9421 | break; |
9422 | } | |
bbf6f052 | 9423 | |
b93a436e | 9424 | case LT_EXPR: |
1c0290ea BS |
9425 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9426 | if (GET_MODE_CLASS (mode) == MODE_INT | |
1eb8759b | 9427 | && ! can_compare_p (LT, mode, ccp_jump)) |
b93a436e JL |
9428 | do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label); |
9429 | else | |
b30f05db | 9430 | do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label); |
b93a436e | 9431 | break; |
bbf6f052 | 9432 | |
b93a436e | 9433 | case LE_EXPR: |
1c0290ea BS |
9434 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9435 | if (GET_MODE_CLASS (mode) == MODE_INT | |
1eb8759b | 9436 | && ! can_compare_p (LE, mode, ccp_jump)) |
b93a436e JL |
9437 | do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label); |
9438 | else | |
b30f05db | 9439 | do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label); |
b93a436e | 9440 | break; |
bbf6f052 | 9441 | |
b93a436e | 9442 | case GT_EXPR: |
1c0290ea BS |
9443 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9444 | if (GET_MODE_CLASS (mode) == MODE_INT | |
1eb8759b | 9445 | && ! can_compare_p (GT, mode, ccp_jump)) |
b93a436e JL |
9446 | do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label); |
9447 | else | |
b30f05db | 9448 | do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label); |
b93a436e | 9449 | break; |
bbf6f052 | 9450 | |
b93a436e | 9451 | case GE_EXPR: |
1c0290ea BS |
9452 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9453 | if (GET_MODE_CLASS (mode) == MODE_INT | |
1eb8759b | 9454 | && ! can_compare_p (GE, mode, ccp_jump)) |
b93a436e JL |
9455 | do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label); |
9456 | else | |
b30f05db | 9457 | do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label); |
b93a436e | 9458 | break; |
bbf6f052 | 9459 | |
1eb8759b RH |
9460 | case UNORDERED_EXPR: |
9461 | case ORDERED_EXPR: | |
9462 | { | |
9463 | enum rtx_code cmp, rcmp; | |
9464 | int do_rev; | |
9465 | ||
9466 | if (code == UNORDERED_EXPR) | |
9467 | cmp = UNORDERED, rcmp = ORDERED; | |
9468 | else | |
9469 | cmp = ORDERED, rcmp = UNORDERED; | |
3a94c984 | 9470 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
1eb8759b RH |
9471 | |
9472 | do_rev = 0; | |
9473 | if (! can_compare_p (cmp, mode, ccp_jump) | |
9474 | && (can_compare_p (rcmp, mode, ccp_jump) | |
9475 | /* If the target doesn't provide either UNORDERED or ORDERED | |
9476 | comparisons, canonicalize on UNORDERED for the library. */ | |
9477 | || rcmp == UNORDERED)) | |
9478 | do_rev = 1; | |
9479 | ||
9480 | if (! do_rev) | |
9481 | do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label); | |
9482 | else | |
9483 | do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label); | |
9484 | } | |
9485 | break; | |
9486 | ||
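/* Illustration (not from the original source): on a target that can
   branch on ORDERED but not UNORDERED, "jump to L when UNORDERED (a, b)"
   is emitted through the reversed call above as a branch on
   ORDERED (a, b) with the true and false labels traded, which
   realizes the same control flow.  */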
9487 | { | |
9488 | enum rtx_code rcode1; | |
9489 | enum tree_code tcode2; | |
9490 | ||
9491 | case UNLT_EXPR: | |
9492 | rcode1 = UNLT; | |
9493 | tcode2 = LT_EXPR; | |
9494 | goto unordered_bcc; | |
9495 | case UNLE_EXPR: | |
9496 | rcode1 = UNLE; | |
9497 | tcode2 = LE_EXPR; | |
9498 | goto unordered_bcc; | |
9499 | case UNGT_EXPR: | |
9500 | rcode1 = UNGT; | |
9501 | tcode2 = GT_EXPR; | |
9502 | goto unordered_bcc; | |
9503 | case UNGE_EXPR: | |
9504 | rcode1 = UNGE; | |
9505 | tcode2 = GE_EXPR; | |
9506 | goto unordered_bcc; | |
9507 | case UNEQ_EXPR: | |
9508 | rcode1 = UNEQ; | |
9509 | tcode2 = EQ_EXPR; | |
9510 | goto unordered_bcc; | |
7913f3d0 | 9511 | |
1eb8759b RH |
9512 | unordered_bcc: |
9513 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
9514 | if (can_compare_p (rcode1, mode, ccp_jump)) | |
9515 | do_compare_and_jump (exp, rcode1, rcode1, if_false_label, | |
9516 | if_true_label); | |
9517 | else | |
9518 | { | |
9519 | tree op0 = save_expr (TREE_OPERAND (exp, 0)); | |
9520 | tree op1 = save_expr (TREE_OPERAND (exp, 1)); | |
9521 | tree cmp0, cmp1; | |
9522 | ||
3a94c984 | 9523 | /* If the target doesn't support combined unordered |
1eb8759b RH |
9524 | compares, decompose into UNORDERED + comparison. */ |
9525 | cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1)); | |
9526 | cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1)); | |
9527 | exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1); | |
9528 | do_jump (exp, if_false_label, if_true_label); | |
9529 | } | |
9530 | } | |
9531 | break; | |
9532 | ||
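/* Illustration (not from the original source): when no combined
   unordered-compare insn exists, UNLT, for example, is rewritten as

     a <unlt> b  ~~>  UNORDERED (a, b) || a < b

   with save_expr again guaranteeing that a and b are evaluated only
   once.  */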
5f2d6cfa MM |
9533 | /* Special case: |
9534 | __builtin_expect (<test>, 0) and | |
9535 | __builtin_expect (<test>, 1) | |
9536 | ||
9537 | We need to do this here so that <test> is not converted to an SCC |
9538 | operation on machines that use condition code registers and COMPARE, |
9539 | like the PowerPC, with the jump then done based on whether the SCC |
9540 | operation produced a 1 or a 0. */ |
9541 | case CALL_EXPR: | |
9542 | /* Check for a built-in function. */ | |
9543 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR) | |
9544 | { | |
9545 | tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
9546 | tree arglist = TREE_OPERAND (exp, 1); | |
9547 | ||
9548 | if (TREE_CODE (fndecl) == FUNCTION_DECL | |
9549 | && DECL_BUILT_IN (fndecl) | |
9550 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT | |
9551 | && arglist != NULL_TREE | |
9552 | && TREE_CHAIN (arglist) != NULL_TREE) | |
9553 | { | |
9554 | rtx seq = expand_builtin_expect_jump (exp, if_false_label, | |
9555 | if_true_label); | |
9556 | ||
9557 | if (seq != NULL_RTX) | |
9558 | { | |
9559 | emit_insn (seq); | |
9560 | return; | |
9561 | } | |
9562 | } | |
9563 | } | |
9564 | /* Fall through and generate the normal code. */ |
9565 | ||
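/* Illustration (not from the original source): a use that reaches the
   CALL_EXPR path above,

     if (__builtin_expect (x == 0, 0)) ...

   lets expand_builtin_expect_jump emit the conditional jump on
   "x == 0" directly, recording the expected outcome, instead of
   branching on the 1-or-0 result of an SCC operation.  */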
b93a436e JL |
9566 | default: |
9567 | normal: | |
9568 | temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); | |
9569 | #if 0 | |
9570 | /* This is no longer needed and causes poor code, since it makes |
9571 | comparisons and tests from non-SI objects have different code |
9572 | sequences. */ |
9573 | /* Copy to register to avoid generating bad insns by cse | |
9574 | from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */ | |
9575 | if (!cse_not_expected && GET_CODE (temp) == MEM) | |
9576 | temp = copy_to_reg (temp); | |
ca695ac9 | 9577 | #endif |
b93a436e | 9578 | do_pending_stack_adjust (); |
b30f05db BS |
9579 | /* Do any postincrements in the expression that was tested. */ |
9580 | emit_queue (); | |
9581 | ||
998a298e GK |
9582 | if (GET_CODE (temp) == CONST_INT |
9583 | || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode) | |
9584 | || GET_CODE (temp) == LABEL_REF) | |
b30f05db BS |
9585 | { |
9586 | rtx target = temp == const0_rtx ? if_false_label : if_true_label; | |
9587 | if (target) | |
9588 | emit_jump (target); | |
9589 | } | |
b93a436e | 9590 | else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT |
1eb8759b | 9591 | && ! can_compare_p (NE, GET_MODE (temp), ccp_jump)) |
b93a436e JL |
9592 | /* Note swapping the labels gives us not-equal. */ |
9593 | do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label); | |
9594 | else if (GET_MODE (temp) != VOIDmode) | |
b30f05db BS |
9595 | do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)), |
9596 | NE, TREE_UNSIGNED (TREE_TYPE (exp)), | |
a06ef755 | 9597 | GET_MODE (temp), NULL_RTX, |
b30f05db | 9598 | if_false_label, if_true_label); |
b93a436e JL |
9599 | else |
9600 | abort (); | |
9601 | } | |
bbf6f052 | 9602 | |
b93a436e JL |
9603 | if (drop_through_label) |
9604 | { | |
9605 | /* If do_jump produces code that might be jumped around, | |
9606 | do any stack adjusts from that code, before the place | |
9607 | where control merges in. */ | |
9608 | do_pending_stack_adjust (); | |
9609 | emit_label (drop_through_label); | |
9610 | } | |
bbf6f052 | 9611 | } |
b93a436e JL |
9612 | \f |
9613 | /* Given a comparison expression EXP for values too wide to be compared | |
9614 | with one insn, test the comparison and jump to the appropriate label. | |
9615 | The code of EXP is ignored; we always test GT if SWAP is 0, | |
9616 | and LT if SWAP is 1. */ | |
bbf6f052 | 9617 | |
b93a436e JL |
9618 | static void |
9619 | do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label) | |
9620 | tree exp; | |
9621 | int swap; | |
9622 | rtx if_false_label, if_true_label; | |
9623 | { | |
9624 | rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0); | |
9625 | rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0); | |
9626 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
b93a436e | 9627 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))); |
bbf6f052 | 9628 | |
b30f05db | 9629 | do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label); |
f81497d9 RS |
9630 | } |
9631 | ||
b93a436e JL |
9632 | /* Compare OP0 with OP1, word at a time, in mode MODE. |
9633 | UNSIGNEDP says to do unsigned comparison. | |
9634 | Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */ | |
f81497d9 | 9635 | |
b93a436e JL |
9636 | void |
9637 | do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label) | |
9638 | enum machine_mode mode; | |
9639 | int unsignedp; | |
9640 | rtx op0, op1; | |
9641 | rtx if_false_label, if_true_label; | |
f81497d9 | 9642 | { |
b93a436e JL |
9643 | int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); |
9644 | rtx drop_through_label = 0; | |
9645 | int i; | |
f81497d9 | 9646 | |
b93a436e JL |
9647 | if (! if_true_label || ! if_false_label) |
9648 | drop_through_label = gen_label_rtx (); | |
9649 | if (! if_true_label) | |
9650 | if_true_label = drop_through_label; | |
9651 | if (! if_false_label) | |
9652 | if_false_label = drop_through_label; | |
f81497d9 | 9653 | |
b93a436e JL |
9654 | /* Compare a word at a time, high order first. */ |
9655 | for (i = 0; i < nwords; i++) | |
9656 | { | |
b93a436e | 9657 | rtx op0_word, op1_word; |
bbf6f052 | 9658 | |
b93a436e JL |
9659 | if (WORDS_BIG_ENDIAN) |
9660 | { | |
9661 | op0_word = operand_subword_force (op0, i, mode); | |
9662 | op1_word = operand_subword_force (op1, i, mode); | |
9663 | } | |
9664 | else | |
9665 | { | |
9666 | op0_word = operand_subword_force (op0, nwords - 1 - i, mode); | |
9667 | op1_word = operand_subword_force (op1, nwords - 1 - i, mode); | |
9668 | } | |
bbf6f052 | 9669 | |
b93a436e | 9670 | /* All but high-order word must be compared as unsigned. */ |
b30f05db | 9671 | do_compare_rtx_and_jump (op0_word, op1_word, GT, |
a06ef755 | 9672 | (unsignedp || i > 0), word_mode, NULL_RTX, |
b30f05db | 9673 | NULL_RTX, if_true_label); |
bbf6f052 | 9674 | |
b93a436e | 9675 | /* Consider lower words only if these are equal. */ |
b30f05db | 9676 | do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode, |
a06ef755 | 9677 | NULL_RTX, NULL_RTX, if_false_label); |
b93a436e | 9678 | } |
bbf6f052 | 9679 | |
b93a436e JL |
9680 | if (if_false_label) |
9681 | emit_jump (if_false_label); | |
9682 | if (drop_through_label) | |
9683 | emit_label (drop_through_label); | |
bbf6f052 RK |
9684 | } |
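/* Illustration (not from the original source): what the word loop
   above amounts to for a signed 64-bit "a > b" on a 32-bit target,
   written as portable C.  The _hi/_lo parameters stand in for the
   subwords fetched with operand_subword_force.  */
#if 0
static int
example_wide_gt (long a_hi, unsigned long a_lo,
                 long b_hi, unsigned long b_lo)
{
  if (a_hi > b_hi)              /* High-order word, compared signed.  */
    return 1;                   /* Branch to if_true_label.  */
  if (a_hi != b_hi)
    return 0;                   /* Branch to if_false_label.  */
  return a_lo > b_lo;           /* Lower word, always unsigned.  */
}
#endif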
9685 | ||
b93a436e JL |
9686 | /* Given an EQ_EXPR expression EXP for values too wide to be compared |
9687 | with one insn, test the comparison and jump to the appropriate label. */ | |
bbf6f052 | 9688 | |
b93a436e JL |
9689 | static void |
9690 | do_jump_by_parts_equality (exp, if_false_label, if_true_label) | |
9691 | tree exp; | |
9692 | rtx if_false_label, if_true_label; | |
bbf6f052 | 9693 | { |
b93a436e JL |
9694 | rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
9695 | rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); | |
9696 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
9697 | int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); | |
9698 | int i; | |
9699 | rtx drop_through_label = 0; | |
bbf6f052 | 9700 | |
b93a436e JL |
9701 | if (! if_false_label) |
9702 | drop_through_label = if_false_label = gen_label_rtx (); | |
bbf6f052 | 9703 | |
b93a436e | 9704 | for (i = 0; i < nwords; i++) |
b30f05db BS |
9705 | do_compare_rtx_and_jump (operand_subword_force (op0, i, mode), |
9706 | operand_subword_force (op1, i, mode), | |
9707 | EQ, TREE_UNSIGNED (TREE_TYPE (exp)), | |
a06ef755 | 9708 | word_mode, NULL_RTX, if_false_label, NULL_RTX); |
bbf6f052 | 9709 | |
b93a436e JL |
9710 | if (if_true_label) |
9711 | emit_jump (if_true_label); | |
9712 | if (drop_through_label) | |
9713 | emit_label (drop_through_label); | |
bbf6f052 | 9714 | } |
b93a436e JL |
9715 | \f |
9716 | /* Jump according to whether OP0 is 0. | |
9717 | We assume that OP0 has an integer mode that is too wide | |
9718 | for the available compare insns. */ | |
bbf6f052 | 9719 | |
f5963e61 | 9720 | void |
b93a436e JL |
9721 | do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label) |
9722 | rtx op0; | |
9723 | rtx if_false_label, if_true_label; | |
ca695ac9 | 9724 | { |
b93a436e JL |
9725 | int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD; |
9726 | rtx part; | |
9727 | int i; | |
9728 | rtx drop_through_label = 0; | |
bbf6f052 | 9729 | |
b93a436e JL |
9730 | /* The fastest way of doing this comparison on almost any machine is to |
9731 | "or" all the words and compare the result. If all have to be loaded | |
9732 | from memory and this is a very wide item, it's possible this may | |
9733 | be slower, but that's highly unlikely. */ | |
bbf6f052 | 9734 | |
b93a436e JL |
9735 | part = gen_reg_rtx (word_mode); |
9736 | emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0))); | |
9737 | for (i = 1; i < nwords && part != 0; i++) | |
9738 | part = expand_binop (word_mode, ior_optab, part, | |
9739 | operand_subword_force (op0, i, GET_MODE (op0)), | |
9740 | part, 1, OPTAB_WIDEN); | |
bbf6f052 | 9741 | |
b93a436e JL |
9742 | if (part != 0) |
9743 | { | |
b30f05db | 9744 | do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode, |
a06ef755 | 9745 | NULL_RTX, if_false_label, if_true_label); |
bbf6f052 | 9746 | |
b93a436e JL |
9747 | return; |
9748 | } | |
bbf6f052 | 9749 | |
b93a436e JL |
9750 | /* If we couldn't do the "or" simply, do this with a series of compares. */ |
9751 | if (! if_false_label) | |
9752 | drop_through_label = if_false_label = gen_label_rtx (); | |
bbf6f052 | 9753 | |
b93a436e | 9754 | for (i = 0; i < nwords; i++) |
b30f05db | 9755 | do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)), |
a06ef755 | 9756 | const0_rtx, EQ, 1, word_mode, NULL_RTX, |
b30f05db | 9757 | if_false_label, NULL_RTX); |
bbf6f052 | 9758 | |
b93a436e JL |
9759 | if (if_true_label) |
9760 | emit_jump (if_true_label); | |
0f41302f | 9761 | |
b93a436e JL |
9762 | if (drop_through_label) |
9763 | emit_label (drop_through_label); | |
bbf6f052 | 9764 | } |
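/* Illustration (not from the original source): the IOR reduction
   above, as portable C for a 64-bit value held in two 32-bit words;
   one OR plus a single word-mode compare replaces a chain of
   per-word comparisons.  */
#if 0
static int
example_wide_is_zero (unsigned long hi, unsigned long lo)
{
  return (hi | lo) == 0;
}
#endif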
b93a436e | 9765 | \f |
b30f05db | 9766 | /* Generate code for a comparison of OP0 and OP1 with rtx code CODE |
b93a436e JL |
9767 | (OP0 and OP1 are rtx values that have already been computed) |
9768 | and set (CC0) according to the result. |
b30f05db | 9769 | The decision as to signed or unsigned comparison must be made by the caller. |
bbf6f052 | 9770 | |
b93a436e | 9771 | We force a stack adjustment unless there are currently |
b30f05db | 9772 | things pushed on the stack that aren't yet used. |
ca695ac9 | 9773 | |
b30f05db | 9774 | If MODE is BLKmode, SIZE is an RTX giving the size of the objects being |
a06ef755 | 9775 | compared. */ |
b30f05db BS |
9776 | |
9777 | rtx | |
a06ef755 | 9778 | compare_from_rtx (op0, op1, code, unsignedp, mode, size) |
b3694847 | 9779 | rtx op0, op1; |
b30f05db BS |
9780 | enum rtx_code code; |
9781 | int unsignedp; | |
9782 | enum machine_mode mode; | |
9783 | rtx size; | |
b93a436e | 9784 | { |
b30f05db | 9785 | rtx tem; |
76bbe028 | 9786 | |
b30f05db BS |
9787 | /* If one operand is constant, make it the second one. Only do this |
9788 | if the other operand is not constant as well. */ | |
ca695ac9 | 9789 | |
8c9864f3 | 9790 | if (swap_commutative_operands_p (op0, op1)) |
bbf6f052 | 9791 | { |
b30f05db BS |
9792 | tem = op0; |
9793 | op0 = op1; | |
9794 | op1 = tem; | |
9795 | code = swap_condition (code); | |
ca695ac9 | 9796 | } |
bbf6f052 | 9797 | |
b30f05db | 9798 | if (flag_force_mem) |
b93a436e | 9799 | { |
b30f05db BS |
9800 | op0 = force_not_mem (op0); |
9801 | op1 = force_not_mem (op1); | |
9802 | } | |
bbf6f052 | 9803 | |
b30f05db BS |
9804 | do_pending_stack_adjust (); |
9805 | ||
9806 | if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT | |
9807 | && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0) | |
9808 | return tem; | |
9809 | ||
9810 | #if 0 | |
9811 | /* There's no need to do this now that combine.c can eliminate lots of | |
9812 | sign extensions. This can be less efficient in certain cases on other | |
9813 | machines. */ | |
9814 | ||
9815 | /* If this is a signed equality comparison, we can do it as an | |
9816 | unsigned comparison since zero-extension is cheaper than sign | |
9817 | extension and comparisons with zero are done as unsigned. This is | |
9818 | the case even on machines that can do fast sign extension, since | |
9819 | zero-extension is easier to combine with other operations than | |
9820 | sign-extension is. If we are comparing against a constant, we must | |
9821 | convert it to what it would look like unsigned. */ | |
9822 | if ((code == EQ || code == NE) && ! unsignedp | |
9823 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) | |
9824 | { | |
9825 | if (GET_CODE (op1) == CONST_INT | |
9826 | && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) | |
9827 | op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); | |
9828 | unsignedp = 1; | |
b93a436e JL |
9829 | } |
9830 | #endif | |
3a94c984 | 9831 | |
a06ef755 | 9832 | emit_cmp_insn (op0, op1, code, size, mode, unsignedp); |
0f41302f | 9833 | |
b30f05db | 9834 | return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx); |
ca695ac9 | 9835 | } |
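/* Illustration (not from the original source): the canonicalization
   above moves a lone constant operand into second position, so a
   source test such as "3 < x" is compared as "x > 3";
   swap_condition turns LT into GT so the meaning is unchanged.  */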
bbf6f052 | 9836 | |
b30f05db | 9837 | /* Like do_compare_and_jump but expects the values to compare as two rtx's. |
b93a436e | 9838 | The decision as to signed or unsigned comparison must be made by the caller. |
bbf6f052 | 9839 | |
b93a436e | 9840 | If MODE is BLKmode, SIZE is an RTX giving the size of the objects being |
a06ef755 | 9841 | compared. */ |
ca695ac9 | 9842 | |
b30f05db | 9843 | void |
a06ef755 | 9844 | do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, |
b30f05db | 9845 | if_false_label, if_true_label) |
b3694847 | 9846 | rtx op0, op1; |
b93a436e JL |
9847 | enum rtx_code code; |
9848 | int unsignedp; | |
9849 | enum machine_mode mode; | |
9850 | rtx size; | |
b30f05db | 9851 | rtx if_false_label, if_true_label; |
bbf6f052 | 9852 | { |
b93a436e | 9853 | rtx tem; |
b30f05db BS |
9854 | int dummy_true_label = 0; |
9855 | ||
9856 | /* Reverse the comparison if that is safe and we want to jump if it is | |
9857 | false. */ | |
9858 | if (! if_true_label && ! FLOAT_MODE_P (mode)) | |
9859 | { | |
9860 | if_true_label = if_false_label; | |
9861 | if_false_label = 0; | |
9862 | code = reverse_condition (code); | |
9863 | } | |
bbf6f052 | 9864 | |
b93a436e JL |
9865 | /* If one operand is constant, make it the second one. Only do this |
9866 | if the other operand is not constant as well. */ | |
e7c33f54 | 9867 | |
8c9864f3 | 9868 | if (swap_commutative_operands_p (op0, op1)) |
ca695ac9 | 9869 | { |
b93a436e JL |
9870 | tem = op0; |
9871 | op0 = op1; | |
9872 | op1 = tem; | |
9873 | code = swap_condition (code); | |
9874 | } | |
bbf6f052 | 9875 | |
b93a436e JL |
9876 | if (flag_force_mem) |
9877 | { | |
9878 | op0 = force_not_mem (op0); | |
9879 | op1 = force_not_mem (op1); | |
9880 | } | |
bbf6f052 | 9881 | |
b93a436e | 9882 | do_pending_stack_adjust (); |
ca695ac9 | 9883 | |
b93a436e JL |
9884 | if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT |
9885 | && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0) | |
b30f05db BS |
9886 | { |
9887 | if (tem == const_true_rtx) | |
9888 | { | |
9889 | if (if_true_label) | |
9890 | emit_jump (if_true_label); | |
9891 | } | |
9892 | else | |
9893 | { | |
9894 | if (if_false_label) | |
9895 | emit_jump (if_false_label); | |
9896 | } | |
9897 | return; | |
9898 | } | |
ca695ac9 | 9899 | |
b93a436e JL |
9900 | #if 0 |
9901 | /* There's no need to do this now that combine.c can eliminate lots of | |
9902 | sign extensions. This can be less efficient in certain cases on other | |
9903 | machines. */ | |
ca695ac9 | 9904 | |
b93a436e JL |
9905 | /* If this is a signed equality comparison, we can do it as an |
9906 | unsigned comparison since zero-extension is cheaper than sign | |
9907 | extension and comparisons with zero are done as unsigned. This is | |
9908 | the case even on machines that can do fast sign extension, since | |
9909 | zero-extension is easier to combine with other operations than | |
9910 | sign-extension is. If we are comparing against a constant, we must | |
9911 | convert it to what it would look like unsigned. */ | |
9912 | if ((code == EQ || code == NE) && ! unsignedp | |
9913 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) | |
9914 | { | |
9915 | if (GET_CODE (op1) == CONST_INT | |
9916 | && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) | |
9917 | op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); | |
9918 | unsignedp = 1; | |
9919 | } | |
9920 | #endif | |
ca695ac9 | 9921 | |
b30f05db BS |
9922 | if (! if_true_label) |
9923 | { | |
9924 | dummy_true_label = 1; | |
9925 | if_true_label = gen_label_rtx (); | |
9926 | } | |
9927 | ||
a06ef755 | 9928 | emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, |
b30f05db BS |
9929 | if_true_label); |
9930 | ||
9931 | if (if_false_label) | |
9932 | emit_jump (if_false_label); | |
9933 | if (dummy_true_label) | |
9934 | emit_label (if_true_label); | |
9935 | } | |
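/* Illustration (not from the original source): the reversal at the
   top of do_compare_rtx_and_jump turns "branch around label L unless
   a < b" into the single test "branch to L if a >= b".  It is
   suppressed for floating modes because, in the presence of NaNs,
   "not (a < b)" is not equivalent to "a >= b".  */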
9936 | ||
9937 | /* Generate code for a comparison expression EXP (including code to compute | |
9938 | the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or | |
9939 | IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the | |
9940 | generated code will drop through. | |
9941 | SIGNED_CODE should be the rtx operation for this comparison for | |
9942 | signed data; UNSIGNED_CODE, likewise for use if data is unsigned. | |
9943 | ||
9944 | We force a stack adjustment unless there are currently | |
9945 | things pushed on the stack that aren't yet used. */ | |
9946 | ||
9947 | static void | |
9948 | do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label, | |
9949 | if_true_label) | |
b3694847 | 9950 | tree exp; |
b30f05db BS |
9951 | enum rtx_code signed_code, unsigned_code; |
9952 | rtx if_false_label, if_true_label; | |
9953 | { | |
b3694847 SS |
9954 | rtx op0, op1; |
9955 | tree type; | |
9956 | enum machine_mode mode; | |
b30f05db BS |
9957 | int unsignedp; |
9958 | enum rtx_code code; | |
9959 | ||
9960 | /* Don't crash if the comparison was erroneous. */ | |
a06ef755 | 9961 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
b30f05db BS |
9962 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK) |
9963 | return; | |
9964 | ||
a06ef755 | 9965 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
6b16805e JJ |
9966 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK) |
9967 | return; | |
9968 | ||
b30f05db BS |
9969 | type = TREE_TYPE (TREE_OPERAND (exp, 0)); |
9970 | mode = TYPE_MODE (type); | |
6b16805e JJ |
9971 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST |
9972 | && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST | |
9973 | || (GET_MODE_BITSIZE (mode) | |
31a7659b JDA |
9974 | > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, |
9975 | 1))))))) | |
6b16805e JJ |
9976 | { |
9977 | /* op0 might have been replaced by a promoted constant, in which |
9978 | case the type of the second argument should be used. */ |
9979 | type = TREE_TYPE (TREE_OPERAND (exp, 1)); | |
9980 | mode = TYPE_MODE (type); | |
9981 | } | |
b30f05db BS |
9982 | unsignedp = TREE_UNSIGNED (type); |
9983 | code = unsignedp ? unsigned_code : signed_code; | |
9984 | ||
9985 | #ifdef HAVE_canonicalize_funcptr_for_compare | |
9986 | /* If function pointers need to be "canonicalized" before they can | |
9987 | be reliably compared, then canonicalize them. */ | |
9988 | if (HAVE_canonicalize_funcptr_for_compare | |
9989 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE | |
9990 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
9991 | == FUNCTION_TYPE)) | |
9992 | { | |
9993 | rtx new_op0 = gen_reg_rtx (mode); | |
9994 | ||
9995 | emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0)); | |
9996 | op0 = new_op0; | |
9997 | } | |
9998 | ||
9999 | if (HAVE_canonicalize_funcptr_for_compare | |
10000 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE | |
10001 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
10002 | == FUNCTION_TYPE)) | |
10003 | { | |
10004 | rtx new_op1 = gen_reg_rtx (mode); | |
10005 | ||
10006 | emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1)); | |
10007 | op1 = new_op1; | |
10008 | } | |
10009 | #endif | |
10010 | ||
10011 | /* Do any postincrements in the expression that was tested. */ | |
10012 | emit_queue (); | |
10013 | ||
10014 | do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, | |
10015 | ((mode == BLKmode) | |
10016 | ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX), | |
b30f05db | 10017 | if_false_label, if_true_label); |
b93a436e JL |
10018 | } |
10019 | \f | |
10020 | /* Generate code to calculate EXP using a store-flag instruction | |
10021 | and return an rtx for the result. EXP is either a comparison | |
10022 | or a TRUTH_NOT_EXPR whose operand is a comparison. | |
ca695ac9 | 10023 | |
b93a436e | 10024 | If TARGET is nonzero, store the result there if convenient. |
ca695ac9 | 10025 | |
b93a436e JL |
10026 | If ONLY_CHEAP is nonzero, only do this if it is likely to be very |
10027 | cheap. | |
ca695ac9 | 10028 | |
b93a436e JL |
10029 | Return zero if there is no suitable set-flag instruction |
10030 | available on this machine. | |
ca695ac9 | 10031 | |
b93a436e JL |
10032 | Once expand_expr has been called on the arguments of the comparison, |
10033 | we are committed to doing the store flag, since it is not safe to | |
10034 | re-evaluate the expression. We emit the store-flag insn by calling | |
10035 | emit_store_flag, but only expand the arguments if we have a reason | |
10036 | to believe that emit_store_flag will be successful. If we think that | |
10037 | it will, but it isn't, we have to simulate the store-flag with a | |
10038 | set/jump/set sequence. */ | |
ca695ac9 | 10039 | |
b93a436e JL |
10040 | static rtx |
10041 | do_store_flag (exp, target, mode, only_cheap) | |
10042 | tree exp; | |
10043 | rtx target; | |
10044 | enum machine_mode mode; | |
10045 | int only_cheap; | |
10046 | { | |
10047 | enum rtx_code code; | |
10048 | tree arg0, arg1, type; | |
10049 | tree tem; | |
10050 | enum machine_mode operand_mode; | |
10051 | int invert = 0; | |
10052 | int unsignedp; | |
10053 | rtx op0, op1; | |
10054 | enum insn_code icode; | |
10055 | rtx subtarget = target; | |
381127e8 | 10056 | rtx result, label; |
ca695ac9 | 10057 | |
b93a436e JL |
10058 | /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the |
10059 | result at the end. We can't simply invert the test since it would | |
10060 | have already been inverted if it were valid. This case occurs for | |
10061 | some floating-point comparisons. */ | |
ca695ac9 | 10062 | |
b93a436e JL |
10063 | if (TREE_CODE (exp) == TRUTH_NOT_EXPR) |
10064 | invert = 1, exp = TREE_OPERAND (exp, 0); | |
ca695ac9 | 10065 | |
b93a436e JL |
10066 | arg0 = TREE_OPERAND (exp, 0); |
10067 | arg1 = TREE_OPERAND (exp, 1); | |
5129d2ce AH |
10068 | |
10069 | /* Don't crash if the comparison was erroneous. */ | |
10070 | if (arg0 == error_mark_node || arg1 == error_mark_node) | |
10071 | return const0_rtx; | |
10072 | ||
b93a436e JL |
10073 | type = TREE_TYPE (arg0); |
10074 | operand_mode = TYPE_MODE (type); | |
10075 | unsignedp = TREE_UNSIGNED (type); | |
ca695ac9 | 10076 | |
b93a436e JL |
10077 | /* We won't bother with BLKmode store-flag operations because it would mean |
10078 | passing a lot of information to emit_store_flag. */ | |
10079 | if (operand_mode == BLKmode) | |
10080 | return 0; | |
ca695ac9 | 10081 | |
b93a436e JL |
10082 | /* We won't bother with store-flag operations involving function pointers |
10083 | when function pointers must be canonicalized before comparisons. */ | |
10084 | #ifdef HAVE_canonicalize_funcptr_for_compare | |
10085 | if (HAVE_canonicalize_funcptr_for_compare | |
10086 | && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE | |
10087 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
10088 | == FUNCTION_TYPE)) | |
10089 | || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE | |
10090 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
10091 | == FUNCTION_TYPE)))) | |
10092 | return 0; | |
ca695ac9 JB |
10093 | #endif |
10094 | ||
b93a436e JL |
10095 | STRIP_NOPS (arg0); |
10096 | STRIP_NOPS (arg1); | |
ca695ac9 | 10097 | |
b93a436e JL |
10098 | /* Get the rtx comparison code to use. We know that EXP is a comparison |
10099 | operation of some type. Some comparisons against 1 and -1 can be | |
10100 | converted to comparisons with zero. Do so here so that the tests | |
10101 | below will be aware that we have a comparison with zero. These | |
10102 | tests will not catch constants in the first operand, but constants | |
10103 | are rarely passed as the first operand. */ | |
ca695ac9 | 10104 | |
b93a436e JL |
10105 | switch (TREE_CODE (exp)) |
10106 | { | |
10107 | case EQ_EXPR: | |
10108 | code = EQ; | |
bbf6f052 | 10109 | break; |
b93a436e JL |
10110 | case NE_EXPR: |
10111 | code = NE; | |
bbf6f052 | 10112 | break; |
b93a436e JL |
10113 | case LT_EXPR: |
10114 | if (integer_onep (arg1)) | |
10115 | arg1 = integer_zero_node, code = unsignedp ? LEU : LE; | |
10116 | else | |
10117 | code = unsignedp ? LTU : LT; | |
ca695ac9 | 10118 | break; |
b93a436e JL |
10119 | case LE_EXPR: |
10120 | if (! unsignedp && integer_all_onesp (arg1)) | |
10121 | arg1 = integer_zero_node, code = LT; | |
10122 | else | |
10123 | code = unsignedp ? LEU : LE; | |
ca695ac9 | 10124 | break; |
b93a436e JL |
10125 | case GT_EXPR: |
10126 | if (! unsignedp && integer_all_onesp (arg1)) | |
10127 | arg1 = integer_zero_node, code = GE; | |
10128 | else | |
10129 | code = unsignedp ? GTU : GT; | |
10130 | break; | |
10131 | case GE_EXPR: | |
10132 | if (integer_onep (arg1)) | |
10133 | arg1 = integer_zero_node, code = unsignedp ? GTU : GT; | |
10134 | else | |
10135 | code = unsignedp ? GEU : GE; | |
ca695ac9 | 10136 | break; |
1eb8759b RH |
10137 | |
10138 | case UNORDERED_EXPR: | |
10139 | code = UNORDERED; | |
10140 | break; | |
10141 | case ORDERED_EXPR: | |
10142 | code = ORDERED; | |
10143 | break; | |
10144 | case UNLT_EXPR: | |
10145 | code = UNLT; | |
10146 | break; | |
10147 | case UNLE_EXPR: | |
10148 | code = UNLE; | |
10149 | break; | |
10150 | case UNGT_EXPR: | |
10151 | code = UNGT; | |
10152 | break; | |
10153 | case UNGE_EXPR: | |
10154 | code = UNGE; | |
10155 | break; | |
10156 | case UNEQ_EXPR: | |
10157 | code = UNEQ; | |
10158 | break; | |
1eb8759b | 10159 | |
ca695ac9 | 10160 | default: |
b93a436e | 10161 | abort (); |
bbf6f052 | 10162 | } |
bbf6f052 | 10163 | |
b93a436e JL |
10164 | /* Put a constant second. */ |
10165 | if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST) | |
10166 | { | |
10167 | tem = arg0; arg0 = arg1; arg1 = tem; | |
10168 | code = swap_condition (code); | |
ca695ac9 | 10169 | } |
bbf6f052 | 10170 | |
b93a436e JL |
10171 | /* If this is an equality or inequality test of a single bit, we can |
10172 | do this by shifting the bit being tested to the low-order bit and | |
10173 | masking the result with the constant 1. If the condition was EQ, | |
10174 | we xor it with 1. This does not require an scc insn and is faster | |
10175 | than an scc insn even if we have it. */ | |
d39985fa | 10176 | |
b93a436e JL |
10177 | if ((code == NE || code == EQ) |
10178 | && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) | |
10179 | && integer_pow2p (TREE_OPERAND (arg0, 1))) | |
10180 | { | |
10181 | tree inner = TREE_OPERAND (arg0, 0); | |
10182 | int bitnum = tree_log2 (TREE_OPERAND (arg0, 1)); | |
10183 | int ops_unsignedp; | |
bbf6f052 | 10184 | |
b93a436e JL |
10185 | /* If INNER is a right shift of a constant and it plus BITNUM does |
10186 | not overflow, adjust BITNUM and INNER. */ | |
ca695ac9 | 10187 | |
b93a436e JL |
10188 | if (TREE_CODE (inner) == RSHIFT_EXPR |
10189 | && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST | |
10190 | && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0 | |
05bccae2 RK |
10191 | && bitnum < TYPE_PRECISION (type) |
10192 | && 0 > compare_tree_int (TREE_OPERAND (inner, 1), | |
10193 | bitnum - TYPE_PRECISION (type))) | |
ca695ac9 | 10194 | { |
b93a436e JL |
10195 | bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)); |
10196 | inner = TREE_OPERAND (inner, 0); | |
ca695ac9 | 10197 | } |
ca695ac9 | 10198 | |
b93a436e JL |
10199 | /* If we are going to be able to omit the AND below, we must do our |
10200 | operations as unsigned. If we must use the AND, we have a choice. | |
10201 | Normally unsigned is faster, but for some machines signed is. */ | |
10202 | ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1 | |
10203 | #ifdef LOAD_EXTEND_OP | |
10204 | : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1) | |
10205 | #else | |
10206 | : 1 | |
10207 | #endif | |
10208 | ); | |
bbf6f052 | 10209 | |
296b4ed9 | 10210 | if (! get_subtarget (subtarget) |
a47fed55 | 10211 | || GET_MODE (subtarget) != operand_mode |
e5e809f4 | 10212 | || ! safe_from_p (subtarget, inner, 1)) |
b93a436e | 10213 | subtarget = 0; |
bbf6f052 | 10214 | |
b93a436e | 10215 | op0 = expand_expr (inner, subtarget, VOIDmode, 0); |
bbf6f052 | 10216 | |
b93a436e | 10217 | if (bitnum != 0) |
681cb233 | 10218 | op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0, |
b93a436e | 10219 | size_int (bitnum), subtarget, ops_unsignedp); |
bbf6f052 | 10220 | |
b93a436e JL |
10221 | if (GET_MODE (op0) != mode) |
10222 | op0 = convert_to_mode (mode, op0, ops_unsignedp); | |
bbf6f052 | 10223 | |
b93a436e JL |
10224 | if ((code == EQ && ! invert) || (code == NE && invert)) |
10225 | op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget, | |
10226 | ops_unsignedp, OPTAB_LIB_WIDEN); | |
bbf6f052 | 10227 | |
b93a436e JL |
10228 | /* Put the AND last so it can combine with more things. */ |
10229 | if (bitnum != TYPE_PRECISION (type) - 1) | |
22273300 | 10230 | op0 = expand_and (mode, op0, const1_rtx, subtarget); |
bbf6f052 | 10231 | |
b93a436e JL |
10232 | return op0; |
10233 | } | |
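  /* Illustration (not from the original source): with x = 0x2c and a
     test of bit 3, i.e. "(x & 8) != 0", the transformation above
     computes (0x2c >> 3) & 1 == 1; the EQ form "(x & 8) == 0" then
     XORs that result with 1, yielding 0.  */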
bbf6f052 | 10234 | |
b93a436e | 10235 | /* Now see if we are likely to be able to do this. Return if not. */ |
1eb8759b | 10236 | if (! can_compare_p (code, operand_mode, ccp_store_flag)) |
b93a436e | 10237 | return 0; |
1eb8759b | 10238 | |
b93a436e JL |
10239 | icode = setcc_gen_code[(int) code]; |
10240 | if (icode == CODE_FOR_nothing | |
a995e389 | 10241 | || (only_cheap && insn_data[(int) icode].operand[0].mode != mode)) |
ca695ac9 | 10242 | { |
b93a436e JL |
10243 | /* We can only do this if it is one of the special cases that |
10244 | can be handled without an scc insn. */ | |
10245 | if ((code == LT && integer_zerop (arg1)) | |
10246 | || (! only_cheap && code == GE && integer_zerop (arg1))) | |
10247 | ; | |
10248 | else if (BRANCH_COST >= 0 | |
10249 | && ! only_cheap && (code == NE || code == EQ) | |
10250 | && TREE_CODE (type) != REAL_TYPE | |
10251 | && ((abs_optab->handlers[(int) operand_mode].insn_code | |
10252 | != CODE_FOR_nothing) | |
10253 | || (ffs_optab->handlers[(int) operand_mode].insn_code | |
10254 | != CODE_FOR_nothing))) | |
10255 | ; | |
10256 | else | |
10257 | return 0; | |
ca695ac9 | 10258 | } |
3a94c984 | 10259 | |
296b4ed9 | 10260 | if (! get_subtarget (target) |
a47fed55 | 10261 | || GET_MODE (subtarget) != operand_mode |
e5e809f4 | 10262 | || ! safe_from_p (subtarget, arg1, 1)) |
b93a436e JL |
10263 | subtarget = 0; |
10264 | ||
10265 | op0 = expand_expr (arg0, subtarget, VOIDmode, 0); | |
10266 | op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); | |
10267 | ||
10268 | if (target == 0) | |
10269 | target = gen_reg_rtx (mode); | |
10270 | ||
10271 | /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe | |
10272 | because, if emit_store_flag does anything, it will succeed and |
10273 | OP0 and OP1 will not be used subsequently. */ | |
ca695ac9 | 10274 | |
b93a436e JL |
10275 | result = emit_store_flag (target, code, |
10276 | queued_subexp_p (op0) ? copy_rtx (op0) : op0, | |
10277 | queued_subexp_p (op1) ? copy_rtx (op1) : op1, | |
10278 | operand_mode, unsignedp, 1); | |
ca695ac9 | 10279 | |
b93a436e JL |
10280 | if (result) |
10281 | { | |
10282 | if (invert) | |
10283 | result = expand_binop (mode, xor_optab, result, const1_rtx, | |
10284 | result, 0, OPTAB_LIB_WIDEN); | |
10285 | return result; | |
ca695ac9 | 10286 | } |
bbf6f052 | 10287 | |
b93a436e JL |
10288 | /* If this failed, we have to do this with set/compare/jump/set code. */ |
10289 | if (GET_CODE (target) != REG | |
10290 | || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1)) | |
10291 | target = gen_reg_rtx (GET_MODE (target)); | |
10292 | ||
10293 | emit_move_insn (target, invert ? const0_rtx : const1_rtx); | |
10294 | result = compare_from_rtx (op0, op1, code, unsignedp, | |
a06ef755 | 10295 | operand_mode, NULL_RTX); |
b93a436e JL |
10296 | if (GET_CODE (result) == CONST_INT) |
10297 | return (((result == const0_rtx && ! invert) | |
10298 | || (result != const0_rtx && invert)) | |
10299 | ? const0_rtx : const1_rtx); | |
ca695ac9 | 10300 | |
8f08e8c0 JL |
10301 | /* The code of RESULT may not match CODE if compare_from_rtx |
10302 | decided to swap its operands and reverse the original code. | |
10303 | ||
10304 | We know that compare_from_rtx returns either a CONST_INT or | |
10305 | a new comparison code, so it is safe to just extract the | |
10306 | code from RESULT. */ | |
10307 | code = GET_CODE (result); | |
10308 | ||
b93a436e JL |
10309 | label = gen_label_rtx (); |
10310 | if (bcc_gen_fctn[(int) code] == 0) | |
10311 | abort (); | |
0f41302f | 10312 | |
b93a436e JL |
10313 | emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label)); |
10314 | emit_move_insn (target, invert ? const1_rtx : const0_rtx); | |
10315 | emit_label (label); | |
bbf6f052 | 10316 | |
b93a436e | 10317 | return target; |
ca695ac9 | 10318 | } |
b93a436e | 10319 | \f |
b93a436e | 10320 | |
ad82abb8 ZW |
10321 | /* Stubs in case we haven't got a casesi insn. */ |
10322 | #ifndef HAVE_casesi | |
10323 | # define HAVE_casesi 0 | |
10324 | # define gen_casesi(a, b, c, d, e) (0) | |
10325 | # define CODE_FOR_casesi CODE_FOR_nothing | |
10326 | #endif | |
10327 | ||
10328 | /* If the machine does not have a case insn that compares the bounds, | |
10329 | this means extra overhead for dispatch tables, which raises the | |
10330 | threshold for using them. */ | |
10331 | #ifndef CASE_VALUES_THRESHOLD | |
10332 | #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5) | |
10333 | #endif /* CASE_VALUES_THRESHOLD */ | |
10334 | ||
10335 | unsigned int | |
10336 | case_values_threshold () | |
10337 | { | |
10338 | return CASE_VALUES_THRESHOLD; | |
10339 | } | |
10340 | ||
10341 | /* Attempt to generate a casesi instruction. Returns 1 if successful, | |
10342 | 0 otherwise (i.e. if there is no casesi instruction). */ | |
10343 | int | |
10344 | try_casesi (index_type, index_expr, minval, range, | |
10345 | table_label, default_label) | |
10346 | tree index_type, index_expr, minval, range; | |
10347 | rtx table_label ATTRIBUTE_UNUSED; | |
10348 | rtx default_label; | |
10349 | { | |
10350 | enum machine_mode index_mode = SImode; | |
10351 | int index_bits = GET_MODE_BITSIZE (index_mode); | |
10352 | rtx op1, op2, index; | |
10353 | enum machine_mode op_mode; | |
10354 | ||
10355 | if (! HAVE_casesi) | |
10356 | return 0; | |
10357 | ||
10358 | /* Convert the index to SImode. */ | |
10359 | if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode)) | |
10360 | { | |
10361 | enum machine_mode omode = TYPE_MODE (index_type); | |
10362 | rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0); | |
10363 | ||
10364 | /* We must handle the endpoints in the original mode. */ | |
10365 | index_expr = build (MINUS_EXPR, index_type, | |
10366 | index_expr, minval); | |
10367 | minval = integer_zero_node; | |
10368 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
10369 | emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX, | |
a06ef755 | 10370 | omode, 1, default_label); |
ad82abb8 ZW |
10371 | /* Now we can safely truncate. */ |
10372 | index = convert_to_mode (index_mode, index, 0); | |
10373 | } | |
10374 | else | |
10375 | { | |
10376 | if (TYPE_MODE (index_type) != index_mode) | |
10377 | { | |
10378 | index_expr = convert (type_for_size (index_bits, 0), | |
10379 | index_expr); | |
10380 | index_type = TREE_TYPE (index_expr); | |
10381 | } | |
10382 | ||
10383 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
10384 | } | |
10385 | emit_queue (); | |
10386 | index = protect_from_queue (index, 0); | |
10387 | do_pending_stack_adjust (); | |
10388 | ||
10389 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode; | |
10390 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate) | |
10391 | (index, op_mode)) | |
10392 | index = copy_to_mode_reg (op_mode, index); | |
e87b4f3f | 10393 | |
ad82abb8 ZW |
10394 | op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0); |
10395 | ||
10396 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode; | |
10397 | op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)), | |
10398 | op1, TREE_UNSIGNED (TREE_TYPE (minval))); | |
10399 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate) | |
10400 | (op1, op_mode)) | |
10401 | op1 = copy_to_mode_reg (op_mode, op1); | |
10402 | ||
10403 | op2 = expand_expr (range, NULL_RTX, VOIDmode, 0); | |
10404 | ||
10405 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode; | |
10406 | op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)), | |
10407 | op2, TREE_UNSIGNED (TREE_TYPE (range))); | |
10408 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate) | |
10409 | (op2, op_mode)) | |
10410 | op2 = copy_to_mode_reg (op_mode, op2); | |
10411 | ||
10412 | emit_jump_insn (gen_casesi (index, op1, op2, | |
10413 | table_label, default_label)); | |
10414 | return 1; | |
10415 | } | |
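/* Illustration (not from the original source): the wide-index path of
   try_casesi, as portable C for a 64-bit index dispatched through a
   32-bit casesi.  dispatch_via_casesi is a hypothetical stand-in for
   the emitted casesi insn.  */
#if 0
extern void dispatch_via_casesi (unsigned int);

static void
example_casesi_narrow (unsigned long long index,
                       unsigned long long minval,
                       unsigned long long range)
{
  /* Handle the endpoints in the original wide mode.  */
  unsigned long long rel = index - minval;
  if (range < rel)
    return;                                /* goto default_label */
  /* REL is known to fit, so truncating is now safe.  */
  dispatch_via_casesi ((unsigned int) rel);
}
#endif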
10416 | ||
10417 | /* Attempt to generate a tablejump instruction; same concept. */ | |
10418 | #ifndef HAVE_tablejump | |
10419 | #define HAVE_tablejump 0 | |
10420 | #define gen_tablejump(x, y) (0) | |
10421 | #endif | |
10422 | ||
10423 | /* Subroutine of the next function. | |
10424 | ||
10425 | INDEX is the value being switched on, with the lowest value | |
b93a436e JL |
10426 | in the table already subtracted. |
10427 | MODE is its expected mode (needed if INDEX is constant). | |
10428 | RANGE is the length of the jump table. | |
10429 | TABLE_LABEL is a CODE_LABEL rtx for the table itself. | |
88d3b7f0 | 10430 | |
b93a436e JL |
10431 | DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the |
10432 | index value is out of range. */ | |
0f41302f | 10433 | |
ad82abb8 | 10434 | static void |
b93a436e JL |
10435 | do_tablejump (index, mode, range, table_label, default_label) |
10436 | rtx index, range, table_label, default_label; | |
10437 | enum machine_mode mode; | |
ca695ac9 | 10438 | { |
b3694847 | 10439 | rtx temp, vector; |
88d3b7f0 | 10440 | |
b93a436e JL |
10441 | /* Do an unsigned comparison (in the proper mode) between the index |
10442 | expression and the value which represents the length of the range. | |
10443 | Since we just finished subtracting the lower bound of the range | |
10444 | from the index expression, this comparison allows us to simultaneously | |
10445 | check that the original index expression value is both greater than | |
10446 | or equal to the minimum value of the range and less than or equal to | |
10447 | the maximum value of the range. */ | |
709f5be1 | 10448 | |
c5d5d461 | 10449 | emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1, |
a06ef755 | 10450 | default_label); |
bbf6f052 | 10451 | |
b93a436e JL |
10452 | /* If index is in range, it must fit in Pmode. |
10453 | Convert to Pmode so we can index with it. */ | |
10454 | if (mode != Pmode) | |
10455 | index = convert_to_mode (Pmode, index, 1); | |
bbf6f052 | 10456 | |
b93a436e JL |
10457 | /* Don't let a MEM slip through, because then INDEX that comes |
10458 | out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, | |
10459 | and break_out_memory_refs will go to work on it and mess it up. */ | |
10460 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
10461 | if (flag_pic && GET_CODE (index) != REG) | |
10462 | index = copy_to_mode_reg (Pmode, index); | |
10463 | #endif | |
ca695ac9 | 10464 | |
b93a436e JL |
10465 | /* If flag_force_addr were to affect this address |
10466 | it could interfere with the tricky assumptions made | |
10467 | about addresses that contain label-refs, | |
10468 | which may be valid only very near the tablejump itself. */ | |
10469 | /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the | |
10470 | GET_MODE_SIZE, because this indicates how large insns are. The other | |
10471 | uses should all be Pmode, because they are addresses. This code | |
10472 | could fail if addresses and insns are not the same size. */ | |
10473 | index = gen_rtx_PLUS (Pmode, | |
10474 | gen_rtx_MULT (Pmode, index, | |
10475 | GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))), | |
10476 | gen_rtx_LABEL_REF (Pmode, table_label)); | |
10477 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
10478 | if (flag_pic) | |
10479 | index = PIC_CASE_VECTOR_ADDRESS (index); | |
10480 | else | |
bbf6f052 | 10481 | #endif |
b93a436e JL |
10482 | index = memory_address_noforce (CASE_VECTOR_MODE, index); |
10483 | temp = gen_reg_rtx (CASE_VECTOR_MODE); | |
10484 | vector = gen_rtx_MEM (CASE_VECTOR_MODE, index); | |
10485 | RTX_UNCHANGING_P (vector) = 1; | |
10486 | convert_move (temp, vector, 0); | |
10487 | ||
10488 | emit_jump_insn (gen_tablejump (temp, table_label)); | |
10489 | ||
10490 | /* If we are generating PIC code or if the table is PC-relative, the | |
10491 | table and JUMP_INSN must be adjacent, so don't output a BARRIER. */ | |
10492 | if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic) | |
10493 | emit_barrier (); | |
bbf6f052 | 10494 | } |
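/* Illustration (not from the original source): do_tablejump as
   portable GNU C, using a computed goto for the indirect branch.
   The single unsigned compare against RANGE also rejects indices
   that were below MINVAL, because the earlier subtraction wrapped
   them around to large values.  */
#if 0
static void
example_tablejump (unsigned int index, unsigned int range)
{
  static void *table[] = { &&case0, &&case1, &&case2 };

  if (index > range)
    goto deflt;
  /* Equivalent of *(table_label + index * GET_MODE_SIZE (...)).  */
  goto *table[index];

 case0: return;
 case1: return;
 case2: return;
 deflt: return;
}
#endif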
b93a436e | 10495 | |
ad82abb8 ZW |
10496 | int |
10497 | try_tablejump (index_type, index_expr, minval, range, | |
10498 | table_label, default_label) | |
10499 | tree index_type, index_expr, minval, range; | |
10500 | rtx table_label, default_label; | |
10501 | { | |
10502 | rtx index; | |
10503 | ||
10504 | if (! HAVE_tablejump) | |
10505 | return 0; | |
10506 | ||
10507 | index_expr = fold (build (MINUS_EXPR, index_type, | |
10508 | convert (index_type, index_expr), | |
10509 | convert (index_type, minval))); | |
10510 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
10511 | emit_queue (); | |
10512 | index = protect_from_queue (index, 0); | |
10513 | do_pending_stack_adjust (); | |
10514 | ||
10515 | do_tablejump (index, TYPE_MODE (index_type), | |
10516 | convert_modes (TYPE_MODE (index_type), | |
10517 | TYPE_MODE (TREE_TYPE (range)), | |
10518 | expand_expr (range, NULL_RTX, | |
10519 | VOIDmode, 0), | |
10520 | TREE_UNSIGNED (TREE_TYPE (range))), | |
10521 | table_label, default_label); | |
10522 | return 1; | |
10523 | } |