/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store (or
   clear) to be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
	get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
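
/* Illustrative sketch only, kept out of the build with #if 0: how a
   caller might use MOVE_BY_PIECES_P to choose between open-coded moves
   and a movstr/libcall block move.  The function name emit_block_copy
   is hypothetical; move_by_pieces and emit_block_move are the real
   routines provided by this file.  */
#if 0
static void
emit_block_copy (to, from, size, align)
     rtx to, from;
     unsigned HOST_WIDE_INT size;
     unsigned int align;
{
  if (MOVE_BY_PIECES_P (size, align))
    /* Few enough insns: emit individual move instructions.  */
    move_by_pieces (to, from, size, align);
  else
    /* Otherwise fall back to a movstr pattern or a library call.  */
    emit_block_move (to, from, GEN_INT (size), align);
}
#endif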

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
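
/* Illustrative sketch only: the tables set up above are consulted like
   this (the condition is adapted from convert_move, further down in
   this file).  FROM, from_mode and to_mode stand for a caller's
   locals.  */
#if 0
  /* Only reference FROM in a narrower mode in place if a direct load
     of that mode is known to be supported.  */
  if (! ((GET_CODE (from) == MEM
	  && ! MEM_VOLATILE_P (from)
	  && direct_load[(int) to_mode]
	  && ! mode_dependent_address_p (XEXP (from, 0)))
	 || GET_CODE (from) == REG
	 || GET_CODE (from) == SUBREG))
    from = force_reg (from_mode, from);
#endif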

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_COPY_ATTRIBUTES (new, x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
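
/* Illustrative sketch only: the expected calling protocol for the
   queue machinery above, as a hypothetical expansion of `v++' where
   the pre-increment value is still needed.  VAR is assumed to be an
   SImode rtx set up by the caller; the real user of this protocol is
   expand_increment, later in this file.  */
#if 0
  rtx queued = enqueue_insn (var, gen_add2_insn (var, GEN_INT (1)));
  /* Any use of VAR's old value must go through protect_from_queue
     before being put in an insn.  */
  rtx old_value = protect_from_queue (queued, 0);
  /* ... emit insns that use OLD_VALUE ... */
  /* Flush the queue; the queued increment is emitted here.  */
  emit_queue ();
#endif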
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = gen_rtx_SUBREG (to_mode, from, 0);
      else
	to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode,
					     copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
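
/* Illustrative sketch only: a typical call to convert_move, widening a
   QImode value into an SImode register with sign extension.  The two
   pseudos are hypothetical stand-ins for a caller's operands.  */
#if 0
  rtx narrow = gen_reg_rtx (QImode);
  rtx wide = gen_reg_rtx (SImode);
  /* ... set NARROW ... */
  convert_move (wide, narrow, /*unsignedp=*/0);
#endif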

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
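
/* Illustrative sketch only: the usual way to get a value in a
   particular mode without caring whether a conversion is needed.
   X is assumed to be an existing SImode rtx.  */
#if 0
  rtx qi_val = convert_to_mode (QImode, x, /*unsignedp=*/1);
  /* QI_VAL is X itself referenced as QImode when that is safe,
     otherwise a fresh pseudo initialized by convert_move.  */
#endif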
1378 | \f | |
fbe1758d | 1379 | /* This macro is used to determine what the largest unit size that |
3a94c984 | 1380 | move_by_pieces can use is. */ |
fbe1758d AM |
1381 | |
1382 | /* MOVE_MAX_PIECES is the number of bytes at a time which we can | |
1383 | move efficiently, as opposed to MOVE_MAX which is the maximum | |
3a94c984 | 1384 | number of bytes we can move with a single instruction. */ |
fbe1758d AM |
1385 | |
1386 | #ifndef MOVE_MAX_PIECES | |
1387 | #define MOVE_MAX_PIECES MOVE_MAX | |
1388 | #endif | |
1389 | ||
bbf6f052 RK |
1390 | /* Generate several move instructions to copy LEN bytes |
1391 | from block FROM to block TO. (These are MEM rtx's with BLKmode). | |
1392 | The caller must pass FROM and TO | |
1393 | through protect_from_queue before calling. | |
566aa174 JH |
1394 | |
1395 | When TO is NULL, the emit_single_push_insn is used to push the | |
1396 | FROM to stack. | |
1397 | ||
19caa751 | 1398 | ALIGN is maximum alignment we can assume. */ |
bbf6f052 | 1399 | |
2e245dac | 1400 | void |
bbf6f052 RK |
1401 | move_by_pieces (to, from, len, align) |
1402 | rtx to, from; | |
3bdf5ad1 | 1403 | unsigned HOST_WIDE_INT len; |
729a2125 | 1404 | unsigned int align; |
bbf6f052 RK |
1405 | { |
1406 | struct move_by_pieces data; | |
566aa174 | 1407 | rtx to_addr, from_addr = XEXP (from, 0); |
770ae6cc | 1408 | unsigned int max_size = MOVE_MAX_PIECES + 1; |
fbe1758d AM |
1409 | enum machine_mode mode = VOIDmode, tmode; |
1410 | enum insn_code icode; | |
bbf6f052 RK |
1411 | |
1412 | data.offset = 0; | |
bbf6f052 | 1413 | data.from_addr = from_addr; |
566aa174 JH |
1414 | if (to) |
1415 | { | |
1416 | to_addr = XEXP (to, 0); | |
1417 | data.to = to; | |
1418 | data.autinc_to | |
1419 | = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC | |
1420 | || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); | |
1421 | data.reverse | |
1422 | = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); | |
1423 | } | |
1424 | else | |
1425 | { | |
1426 | to_addr = NULL_RTX; | |
1427 | data.to = NULL_RTX; | |
1428 | data.autinc_to = 1; | |
1429 | #ifdef STACK_GROWS_DOWNWARD | |
1430 | data.reverse = 1; | |
1431 | #else | |
1432 | data.reverse = 0; | |
1433 | #endif | |
1434 | } | |
1435 | data.to_addr = to_addr; | |
bbf6f052 | 1436 | data.from = from; |
bbf6f052 RK |
1437 | data.autinc_from |
1438 | = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC | |
1439 | || GET_CODE (from_addr) == POST_INC | |
1440 | || GET_CODE (from_addr) == POST_DEC); | |
1441 | ||
1442 | data.explicit_inc_from = 0; | |
1443 | data.explicit_inc_to = 0; | |
bbf6f052 RK |
1444 | if (data.reverse) data.offset = len; |
1445 | data.len = len; | |
1446 | ||
1447 | /* If copying requires more than two move insns, | |
1448 | copy addresses to registers (to make displacements shorter) | |
1449 | and use post-increment if available. */ | |
1450 | if (!(data.autinc_from && data.autinc_to) | |
1451 | && move_by_pieces_ninsns (len, align) > 2) | |
1452 | { | |
3a94c984 | 1453 | /* Find the mode of the largest move... */ |
fbe1758d AM |
1454 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
1455 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
1456 | if (GET_MODE_SIZE (tmode) < max_size) | |
1457 | mode = tmode; | |
1458 | ||
1459 | if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from) | |
bbf6f052 RK |
1460 | { |
1461 | data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len)); | |
1462 | data.autinc_from = 1; | |
1463 | data.explicit_inc_from = -1; | |
1464 | } | |
fbe1758d | 1465 | if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from) |
bbf6f052 RK |
1466 | { |
1467 | data.from_addr = copy_addr_to_reg (from_addr); | |
1468 | data.autinc_from = 1; | |
1469 | data.explicit_inc_from = 1; | |
1470 | } | |
bbf6f052 RK |
1471 | if (!data.autinc_from && CONSTANT_P (from_addr)) |
1472 | data.from_addr = copy_addr_to_reg (from_addr); | |
fbe1758d | 1473 | if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to) |
bbf6f052 RK |
1474 | { |
1475 | data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len)); | |
1476 | data.autinc_to = 1; | |
1477 | data.explicit_inc_to = -1; | |
1478 | } | |
fbe1758d | 1479 | if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to) |
bbf6f052 RK |
1480 | { |
1481 | data.to_addr = copy_addr_to_reg (to_addr); | |
1482 | data.autinc_to = 1; | |
1483 | data.explicit_inc_to = 1; | |
1484 | } | |
bbf6f052 RK |
1485 | if (!data.autinc_to && CONSTANT_P (to_addr)) |
1486 | data.to_addr = copy_addr_to_reg (to_addr); | |
1487 | } | |
1488 | ||
e1565e65 | 1489 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) |
19caa751 RK |
1490 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) |
1491 | align = MOVE_MAX * BITS_PER_UNIT; | |
bbf6f052 RK |
1492 | |
1493 | /* First move what we can in the largest integer mode, then go to | |
1494 | successively smaller modes. */ | |
1495 | ||
1496 | while (max_size > 1) | |
1497 | { | |
e7c33f54 RK |
1498 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
1499 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
1500 | if (GET_MODE_SIZE (tmode) < max_size) | |
bbf6f052 RK |
1501 | mode = tmode; |
1502 | ||
1503 | if (mode == VOIDmode) | |
1504 | break; | |
1505 | ||
1506 | icode = mov_optab->handlers[(int) mode].insn_code; | |
19caa751 | 1507 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
bbf6f052 RK |
1508 | move_by_pieces_1 (GEN_FCN (icode), mode, &data); |
1509 | ||
1510 | max_size = GET_MODE_SIZE (mode); | |
1511 | } | |
1512 | ||
1513 | /* The code above should have handled everything. */ | |
2a8e278c | 1514 | if (data.len > 0) |
bbf6f052 RK |
1515 | abort (); |
1516 | } | |
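/* Editorial sketch (not from expr.c): the same widest-chunk-first
   strategy in plain C.  The fixed size table stands in for the walk
   over the integer modes; a real target would also honor alignment
   and the auto-increment addressing handled above.  */

#include <stddef.h>
#include <string.h>

static void
copy_by_pieces_sketch (char *to, const char *from, size_t len)
{
  static const size_t sizes[] = { 8, 4, 2, 1 };
  size_t i, offset = 0;

  for (i = 0; i < sizeof sizes / sizeof sizes[0]; i++)
    while (len >= sizes[i])
      {
        memcpy (to + offset, from + offset, sizes[i]);
        offset += sizes[i];
        len -= sizes[i];
      }
  /* As with the abort () above, nothing may remain here.  */
}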
1517 | ||
1518 | /* Return number of insns required to move L bytes by pieces. | |
f1eaaf73 | 1519 | ALIGN (in bits) is maximum alignment we can assume. */ |
bbf6f052 | 1520 | |
3bdf5ad1 | 1521 | static unsigned HOST_WIDE_INT |
bbf6f052 | 1522 | move_by_pieces_ninsns (l, align) |
3bdf5ad1 | 1523 | unsigned HOST_WIDE_INT l; |
729a2125 | 1524 | unsigned int align; |
bbf6f052 | 1525 | { |
3bdf5ad1 RK |
1526 | unsigned HOST_WIDE_INT n_insns = 0; |
1527 | unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1; | |
bbf6f052 | 1528 | |
e1565e65 | 1529 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) |
19caa751 | 1530 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) |
14c78e9b | 1531 | align = MOVE_MAX * BITS_PER_UNIT; |
bbf6f052 RK |
1532 | |
1533 | while (max_size > 1) | |
1534 | { | |
1535 | enum machine_mode mode = VOIDmode, tmode; | |
1536 | enum insn_code icode; | |
1537 | ||
e7c33f54 RK |
1538 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
1539 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
1540 | if (GET_MODE_SIZE (tmode) < max_size) | |
bbf6f052 RK |
1541 | mode = tmode; |
1542 | ||
1543 | if (mode == VOIDmode) | |
1544 | break; | |
1545 | ||
1546 | icode = mov_optab->handlers[(int) mode].insn_code; | |
19caa751 | 1547 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
bbf6f052 RK |
1548 | n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode); |
1549 | ||
1550 | max_size = GET_MODE_SIZE (mode); | |
1551 | } | |
1552 | ||
13c6f0d5 NS |
1553 | if (l) |
1554 | abort (); | |
bbf6f052 RK |
1555 | return n_insns; |
1556 | } | |
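/* Editorial sketch (not from expr.c) of the count just computed:
   each mode contributes l / size whole moves and leaves l % size
   bytes for the narrower modes.  For l == 11 with an 8-byte word
   the result is 1 + 0 + 1 + 1 = 3 insns.  */

static unsigned long
ninsns_sketch (unsigned long l)
{
  static const unsigned long sizes[] = { 8, 4, 2, 1 };
  unsigned long n_insns = 0;
  int i;

  for (i = 0; i < 4; i++)
    n_insns += l / sizes[i], l %= sizes[i];

  return n_insns;	/* l is 0 here, mirroring the abort () check.  */
}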
1557 | ||
1558 | /* Subroutine of move_by_pieces. Move as many bytes as appropriate | |
1559 | with move instructions for mode MODE. GENFUN is the gen_... function | |
1560 | to make a move insn for that mode. DATA has all the other info. */ | |
1561 | ||
1562 | static void | |
1563 | move_by_pieces_1 (genfun, mode, data) | |
711d877c | 1564 | rtx (*genfun) PARAMS ((rtx, ...)); |
bbf6f052 RK |
1565 | enum machine_mode mode; |
1566 | struct move_by_pieces *data; | |
1567 | { | |
3bdf5ad1 RK |
1568 | unsigned int size = GET_MODE_SIZE (mode); |
1569 | rtx to1, from1; | |
bbf6f052 RK |
1570 | |
1571 | while (data->len >= size) | |
1572 | { | |
3bdf5ad1 RK |
1573 | if (data->reverse) |
1574 | data->offset -= size; | |
1575 | ||
566aa174 | 1576 | if (data->to) |
3bdf5ad1 | 1577 | { |
566aa174 JH |
1578 | if (data->autinc_to) |
1579 | { | |
1580 | to1 = gen_rtx_MEM (mode, data->to_addr); | |
1581 | MEM_COPY_ATTRIBUTES (to1, data->to); | |
1582 | } | |
1583 | else | |
1584 | to1 = change_address (data->to, mode, | |
1585 | plus_constant (data->to_addr, data->offset)); | |
3bdf5ad1 | 1586 | } |
3bdf5ad1 RK |
1587 | |
1588 | if (data->autinc_from) | |
1589 | { | |
1590 | from1 = gen_rtx_MEM (mode, data->from_addr); | |
1591 | MEM_COPY_ATTRIBUTES (from1, data->from); | |
1592 | } | |
1593 | else | |
1594 | from1 = change_address (data->from, mode, | |
1595 | plus_constant (data->from_addr, data->offset)); | |
bbf6f052 | 1596 | |
940da324 | 1597 | if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) |
906c4e36 | 1598 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size))); |
940da324 | 1599 | if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0) |
906c4e36 | 1600 | emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size))); |
bbf6f052 | 1601 | |
566aa174 JH |
1602 | if (data->to) |
1603 | emit_insn ((*genfun) (to1, from1)); | |
1604 | else | |
1605 | emit_single_push_insn (mode, from1, NULL); | |
3bdf5ad1 | 1606 | |
940da324 | 1607 | if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) |
906c4e36 | 1608 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); |
940da324 | 1609 | if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0) |
906c4e36 | 1610 | emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size))); |
bbf6f052 | 1611 | |
3bdf5ad1 RK |
1612 | if (! data->reverse) |
1613 | data->offset += size; | |
bbf6f052 RK |
1614 | |
1615 | data->len -= size; | |
1616 | } | |
1617 | } | |
1618 | \f | |
1619 | /* Emit code to move a block Y to a block X. | |
1620 | This may be done with string-move instructions, | |
1621 | with multiple scalar move instructions, or with a library call. | |
1622 | ||
1623 | Both X and Y must be MEM rtx's (perhaps inside VOLATILE) | |
1624 | with mode BLKmode. | |
1625 | SIZE is an rtx that says how long they are. | |
19caa751 | 1626 | ALIGN is the maximum alignment we can assume they have. |
bbf6f052 | 1627 | |
e9a25f70 JL |
1628 | Return the address of the new block, if memcpy is called and returns it, |
1629 | 0 otherwise. */ | |
1630 | ||
1631 | rtx | |
bbf6f052 RK |
1632 | emit_block_move (x, y, size, align) |
1633 | rtx x, y; | |
1634 | rtx size; | |
729a2125 | 1635 | unsigned int align; |
bbf6f052 | 1636 | { |
e9a25f70 | 1637 | rtx retval = 0; |
52cf7115 JL |
1638 | #ifdef TARGET_MEM_FUNCTIONS |
1639 | static tree fn; | |
1640 | tree call_expr, arg_list; | |
1641 | #endif | |
e9a25f70 | 1642 | |
bbf6f052 RK |
1643 | if (GET_MODE (x) != BLKmode) |
1644 | abort (); | |
1645 | ||
1646 | if (GET_MODE (y) != BLKmode) | |
1647 | abort (); | |
1648 | ||
1649 | x = protect_from_queue (x, 1); | |
1650 | y = protect_from_queue (y, 0); | |
5d901c31 | 1651 | size = protect_from_queue (size, 0); |
bbf6f052 RK |
1652 | |
1653 | if (GET_CODE (x) != MEM) | |
1654 | abort (); | |
1655 | if (GET_CODE (y) != MEM) | |
1656 | abort (); | |
1657 | if (size == 0) | |
1658 | abort (); | |
1659 | ||
fbe1758d | 1660 | if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align)) |
bbf6f052 RK |
1661 | move_by_pieces (x, y, INTVAL (size), align); |
1662 | else | |
1663 | { | |
1664 | /* Try the most limited insn first, because there's no point | |
1665 | including more than one in the machine description unless | |
1666 | the more limited one has some advantage. */ | |
266007a7 | 1667 | |
19caa751 | 1668 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); |
266007a7 RK |
1669 | enum machine_mode mode; |
1670 | ||
3ef1eef4 RK |
1671 | /* Since this is a move insn, we don't care about volatility. */ |
1672 | volatile_ok = 1; | |
1673 | ||
266007a7 RK |
1674 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; |
1675 | mode = GET_MODE_WIDER_MODE (mode)) | |
bbf6f052 | 1676 | { |
266007a7 | 1677 | enum insn_code code = movstr_optab[(int) mode]; |
a995e389 | 1678 | insn_operand_predicate_fn pred; |
266007a7 RK |
1679 | |
1680 | if (code != CODE_FOR_nothing | |
803090c4 RK |
1681 | /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT |
1682 | here because if SIZE is less than the mode mask, as it is | |
8008b228 | 1683 | returned by the macro, it will definitely be less than the |
803090c4 | 1684 | actual mode mask. */ |
8ca00751 RK |
1685 | && ((GET_CODE (size) == CONST_INT |
1686 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
e5e809f4 | 1687 | <= (GET_MODE_MASK (mode) >> 1))) |
8ca00751 | 1688 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) |
a995e389 RH |
1689 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 |
1690 | || (*pred) (x, BLKmode)) | |
1691 | && ((pred = insn_data[(int) code].operand[1].predicate) == 0 | |
1692 | || (*pred) (y, BLKmode)) | |
1693 | && ((pred = insn_data[(int) code].operand[3].predicate) == 0 | |
1694 | || (*pred) (opalign, VOIDmode))) | |
bbf6f052 | 1695 | { |
1ba1e2a8 | 1696 | rtx op2; |
266007a7 RK |
1697 | rtx last = get_last_insn (); |
1698 | rtx pat; | |
1699 | ||
1ba1e2a8 | 1700 | op2 = convert_to_mode (mode, size, 1); |
a995e389 RH |
1701 | pred = insn_data[(int) code].operand[2].predicate; |
1702 | if (pred != 0 && ! (*pred) (op2, mode)) | |
266007a7 RK |
1703 | op2 = copy_to_mode_reg (mode, op2); |
1704 | ||
1705 | pat = GEN_FCN ((int) code) (x, y, op2, opalign); | |
1706 | if (pat) | |
1707 | { | |
1708 | emit_insn (pat); | |
3ef1eef4 | 1709 | volatile_ok = 0; |
e9a25f70 | 1710 | return 0; |
266007a7 RK |
1711 | } |
1712 | else | |
1713 | delete_insns_since (last); | |
bbf6f052 RK |
1714 | } |
1715 | } | |
bbf6f052 | 1716 | |
3ef1eef4 RK |
1717 | volatile_ok = 0; |
1718 | ||
4bc973ae JL |
1719 | /* X, Y, or SIZE may have been passed through protect_from_queue. |
1720 | ||
1721 | It is unsafe to save the value generated by protect_from_queue | |
1722 | and reuse it later. Consider what happens if emit_queue is | |
1723 | called before the return value from protect_from_queue is used. | |
1724 | ||
1725 | Expansion of the CALL_EXPR below will call emit_queue before | |
1726 | we are finished emitting RTL for argument setup. So if we are | |
1727 | not careful we could get the wrong value for an argument. | |
1728 | ||
1729 | To avoid this problem we go ahead and emit code to copy X, Y & | |
1730 | SIZE into new pseudos. We can then place those new pseudos | |
1731 | into an RTL_EXPR and use them later, even after a call to | |
3a94c984 | 1732 | emit_queue. |
4bc973ae JL |
1733 | |
1734 | Note this is not strictly needed for library calls since they | |
1735 | do not call emit_queue before loading their arguments. However, | |
1736 | we may need to have library calls call emit_queue in the future | |
1737 | since failing to do so could cause problems for targets which | |
1738 | define SMALL_REGISTER_CLASSES and pass arguments in registers. */ | |
1739 | x = copy_to_mode_reg (Pmode, XEXP (x, 0)); | |
1740 | y = copy_to_mode_reg (Pmode, XEXP (y, 0)); | |
1741 | ||
1742 | #ifdef TARGET_MEM_FUNCTIONS | |
1743 | size = copy_to_mode_reg (TYPE_MODE (sizetype), size); | |
1744 | #else | |
1745 | size = convert_to_mode (TYPE_MODE (integer_type_node), size, | |
1746 | TREE_UNSIGNED (integer_type_node)); | |
f3dc586a | 1747 | size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size); |
4bc973ae JL |
1748 | #endif |
1749 | ||
bbf6f052 | 1750 | #ifdef TARGET_MEM_FUNCTIONS |
52cf7115 JL |
1751 | /* It is incorrect to use the libcall calling conventions to call |
1752 | memcpy in this context. | |
1753 | ||
1754 | This could be a user call to memcpy and the user may wish to | |
1755 | examine the return value from memcpy. | |
1756 | ||
1757 | For targets where libcalls and normal calls have different conventions | |
3a94c984 | 1758 | for returning pointers, we could end up generating incorrect code. |
52cf7115 JL |
1759 | |
1760 | So instead of using a libcall sequence we build up a suitable | |
1761 | CALL_EXPR and expand the call in the normal fashion. */ | |
1762 | if (fn == NULL_TREE) | |
1763 | { | |
1764 | tree fntype; | |
1765 | ||
1766 | /* This was copied from except.c; I don't know whether all of it is | |
1767 | necessary in this context. */ | |
1768 | fn = get_identifier ("memcpy"); | |
52cf7115 JL |
1769 | fntype = build_pointer_type (void_type_node); |
1770 | fntype = build_function_type (fntype, NULL_TREE); | |
1771 | fn = build_decl (FUNCTION_DECL, fn, fntype); | |
3a94c984 | 1772 | ggc_add_tree_root (&fn, 1); |
52cf7115 JL |
1773 | DECL_EXTERNAL (fn) = 1; |
1774 | TREE_PUBLIC (fn) = 1; | |
1775 | DECL_ARTIFICIAL (fn) = 1; | |
6496a589 | 1776 | make_decl_rtl (fn, NULL); |
52cf7115 | 1777 | assemble_external (fn); |
52cf7115 JL |
1778 | } |
1779 | ||
3a94c984 | 1780 | /* We need to make an argument list for the function call. |
52cf7115 JL |
1781 | |
1782 | memcpy has three arguments, the first two are void * addresses and | |
1783 | the last is a size_t byte count for the copy. */ | |
1784 | arg_list | |
1785 | = build_tree_list (NULL_TREE, | |
4bc973ae | 1786 | make_tree (build_pointer_type (void_type_node), x)); |
52cf7115 JL |
1787 | TREE_CHAIN (arg_list) |
1788 | = build_tree_list (NULL_TREE, | |
4bc973ae | 1789 | make_tree (build_pointer_type (void_type_node), y)); |
52cf7115 JL |
1790 | TREE_CHAIN (TREE_CHAIN (arg_list)) |
1791 | = build_tree_list (NULL_TREE, make_tree (sizetype, size)); | |
1792 | TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE; | |
1793 | ||
1794 | /* Now we have to build up the CALL_EXPR itself. */ | |
1795 | call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); | |
1796 | call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), | |
1797 | call_expr, arg_list, NULL_TREE); | |
1798 | TREE_SIDE_EFFECTS (call_expr) = 1; | |
1799 | ||
1800 | retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); | |
bbf6f052 | 1801 | #else |
ebb1b59a | 1802 | emit_library_call (bcopy_libfunc, LCT_NORMAL, |
fe7bbd2a | 1803 | VOIDmode, 3, y, Pmode, x, Pmode, |
3b6f75e2 JW |
1804 | convert_to_mode (TYPE_MODE (integer_type_node), size, |
1805 | TREE_UNSIGNED (integer_type_node)), | |
1806 | TYPE_MODE (integer_type_node)); | |
bbf6f052 RK |
1807 | #endif |
1808 | } | |
e9a25f70 JL |
1809 | |
1810 | return retval; | |
bbf6f052 RK |
1811 | } |
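/* Editorial sketch (not from expr.c): the three-way choice above in
   miniature -- expand small constant-size copies inline, otherwise
   fall back to the library routine.  The movstr patterns have no
   portable C analogue, so they are folded into the memcpy arm, and
   the threshold 16 is a stand-in for MOVE_BY_PIECES_P.  */

#include <stddef.h>
#include <string.h>

static void
block_move_sketch (void *x, const void *y, size_t size)
{
  if (size <= 16)
    {
      unsigned char *d = (unsigned char *) x;
      const unsigned char *s = (const unsigned char *) y;

      while (size-- > 0)
        *d++ = *s++;		/* "by pieces", one unit at a time */
    }
  else
    memcpy (x, y, size);	/* library call, as with TARGET_MEM_FUNCTIONS */
}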
1812 | \f | |
1813 | /* Copy all or part of a value X into registers starting at REGNO. | |
1814 | The number of registers to be filled is NREGS. */ | |
1815 | ||
1816 | void | |
1817 | move_block_to_reg (regno, x, nregs, mode) | |
1818 | int regno; | |
1819 | rtx x; | |
1820 | int nregs; | |
1821 | enum machine_mode mode; | |
1822 | { | |
1823 | int i; | |
381127e8 | 1824 | #ifdef HAVE_load_multiple |
3a94c984 | 1825 | rtx pat; |
381127e8 RL |
1826 | rtx last; |
1827 | #endif | |
bbf6f052 | 1828 | |
72bb9717 RK |
1829 | if (nregs == 0) |
1830 | return; | |
1831 | ||
bbf6f052 RK |
1832 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) |
1833 | x = validize_mem (force_const_mem (mode, x)); | |
1834 | ||
1835 | /* See if the machine can do this with a load multiple insn. */ | |
1836 | #ifdef HAVE_load_multiple | |
c3a02afe | 1837 | if (HAVE_load_multiple) |
bbf6f052 | 1838 | { |
c3a02afe | 1839 | last = get_last_insn (); |
38a448ca | 1840 | pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x, |
c3a02afe RK |
1841 | GEN_INT (nregs)); |
1842 | if (pat) | |
1843 | { | |
1844 | emit_insn (pat); | |
1845 | return; | |
1846 | } | |
1847 | else | |
1848 | delete_insns_since (last); | |
bbf6f052 | 1849 | } |
bbf6f052 RK |
1850 | #endif |
1851 | ||
1852 | for (i = 0; i < nregs; i++) | |
38a448ca | 1853 | emit_move_insn (gen_rtx_REG (word_mode, regno + i), |
bbf6f052 RK |
1854 | operand_subword_force (x, i, mode)); |
1855 | } | |
1856 | ||
1857 | /* Copy all or part of a BLKmode value X out of registers starting at REGNO. | |
0040593d JW |
1858 | The number of registers to be filled is NREGS. SIZE indicates the number |
1859 | of bytes in the object X. */ | |
1860 | ||
bbf6f052 | 1861 | void |
0040593d | 1862 | move_block_from_reg (regno, x, nregs, size) |
bbf6f052 RK |
1863 | int regno; |
1864 | rtx x; | |
1865 | int nregs; | |
0040593d | 1866 | int size; |
bbf6f052 RK |
1867 | { |
1868 | int i; | |
381127e8 | 1869 | #ifdef HAVE_store_multiple |
3a94c984 | 1870 | rtx pat; |
381127e8 RL |
1871 | rtx last; |
1872 | #endif | |
58a32c5c | 1873 | enum machine_mode mode; |
bbf6f052 | 1874 | |
2954d7db RK |
1875 | if (nregs == 0) |
1876 | return; | |
1877 | ||
58a32c5c DE |
1878 | /* If SIZE is that of a mode no bigger than a word, just use that |
1879 | mode's store operation. */ | |
1880 | if (size <= UNITS_PER_WORD | |
1881 | && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode) | |
1882 | { | |
1883 | emit_move_insn (change_address (x, mode, NULL), | |
38a448ca | 1884 | gen_rtx_REG (mode, regno)); |
58a32c5c DE |
1885 | return; |
1886 | } | |
3a94c984 | 1887 | |
0040593d | 1888 | /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned |
58a32c5c DE |
1889 | to the left before storing to memory. Note that the previous test |
1890 | doesn't handle all cases (e.g. SIZE == 3). */ | |
0040593d JW |
1891 | if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN) |
1892 | { | |
1893 | rtx tem = operand_subword (x, 0, 1, BLKmode); | |
1894 | rtx shift; | |
1895 | ||
1896 | if (tem == 0) | |
1897 | abort (); | |
1898 | ||
1899 | shift = expand_shift (LSHIFT_EXPR, word_mode, | |
38a448ca | 1900 | gen_rtx_REG (word_mode, regno), |
0040593d JW |
1901 | build_int_2 ((UNITS_PER_WORD - size) |
1902 | * BITS_PER_UNIT, 0), NULL_RTX, 0); | |
1903 | emit_move_insn (tem, shift); | |
1904 | return; | |
1905 | } | |
1906 | ||
bbf6f052 RK |
1907 | /* See if the machine can do this with a store multiple insn. */ |
1908 | #ifdef HAVE_store_multiple | |
c3a02afe | 1909 | if (HAVE_store_multiple) |
bbf6f052 | 1910 | { |
c3a02afe | 1911 | last = get_last_insn (); |
38a448ca | 1912 | pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno), |
c3a02afe RK |
1913 | GEN_INT (nregs)); |
1914 | if (pat) | |
1915 | { | |
1916 | emit_insn (pat); | |
1917 | return; | |
1918 | } | |
1919 | else | |
1920 | delete_insns_since (last); | |
bbf6f052 | 1921 | } |
bbf6f052 RK |
1922 | #endif |
1923 | ||
1924 | for (i = 0; i < nregs; i++) | |
1925 | { | |
1926 | rtx tem = operand_subword (x, i, 1, BLKmode); | |
1927 | ||
1928 | if (tem == 0) | |
1929 | abort (); | |
1930 | ||
38a448ca | 1931 | emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i)); |
bbf6f052 RK |
1932 | } |
1933 | } | |
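/* Editorial sketch (not from expr.c): the left-justification above,
   with BITS_PER_UNIT assumed to be 8.  A 3-byte value returned in a
   4-byte register on a big-endian machine is shifted left by
   (4 - 3) * 8 = 8 bits so its bytes land at the low addresses.  */

static unsigned int
left_justify_sketch (unsigned int reg, int size, int units_per_word)
{
  int shift = (units_per_word - size) * 8;

  return reg << shift;
}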
1934 | ||
aac5cc16 RH |
1935 | /* Emit code to move a block SRC to a block DST, where DST is non-consecutive |
1936 | registers represented by a PARALLEL. SSIZE represents the total size of | |
1937 | block SRC in bytes, or -1 if not known. ALIGN is the known alignment of | |
1938 | SRC in bits. */ | |
1939 | /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that | |
1940 | the balance will be in what would be the low-order memory addresses, i.e. | |
1941 | left justified for big endian, right justified for little endian. This | |
1942 | happens to be true for the targets currently using this support. If this | |
1943 | ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING | |
1944 | would be needed. */ | |
fffa9c1d JW |
1945 | |
1946 | void | |
aac5cc16 RH |
1947 | emit_group_load (dst, orig_src, ssize, align) |
1948 | rtx dst, orig_src; | |
729a2125 RK |
1949 | unsigned int align; |
1950 | int ssize; | |
fffa9c1d | 1951 | { |
aac5cc16 RH |
1952 | rtx *tmps, src; |
1953 | int start, i; | |
fffa9c1d | 1954 | |
aac5cc16 | 1955 | if (GET_CODE (dst) != PARALLEL) |
fffa9c1d JW |
1956 | abort (); |
1957 | ||
1958 | /* Check for a NULL entry, used to indicate that the parameter goes | |
1959 | both on the stack and in registers. */ | |
aac5cc16 RH |
1960 | if (XEXP (XVECEXP (dst, 0, 0), 0)) |
1961 | start = 0; | |
fffa9c1d | 1962 | else |
aac5cc16 RH |
1963 | start = 1; |
1964 | ||
3a94c984 | 1965 | tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0)); |
aac5cc16 | 1966 | |
aac5cc16 RH |
1967 | /* Process the pieces. */ |
1968 | for (i = start; i < XVECLEN (dst, 0); i++) | |
1969 | { | |
1970 | enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0)); | |
770ae6cc RK |
1971 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); |
1972 | unsigned int bytelen = GET_MODE_SIZE (mode); | |
aac5cc16 RH |
1973 | int shift = 0; |
1974 | ||
1975 | /* Handle trailing fragments that run over the size of the struct. */ | |
8752c357 | 1976 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
aac5cc16 RH |
1977 | { |
1978 | shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
1979 | bytelen = ssize - bytepos; | |
1980 | if (bytelen <= 0) | |
729a2125 | 1981 | abort (); |
aac5cc16 RH |
1982 | } |
1983 | ||
f3ce87a9 DE |
1984 | /* If we won't be loading directly from memory, protect the real source |
1985 | from strange tricks we might play; but make sure that the source can | |
1986 | be loaded directly into the destination. */ | |
1987 | src = orig_src; | |
1988 | if (GET_CODE (orig_src) != MEM | |
1989 | && (!CONSTANT_P (orig_src) | |
1990 | || (GET_MODE (orig_src) != mode | |
1991 | && GET_MODE (orig_src) != VOIDmode))) | |
1992 | { | |
1993 | if (GET_MODE (orig_src) == VOIDmode) | |
1994 | src = gen_reg_rtx (mode); | |
1995 | else | |
1996 | src = gen_reg_rtx (GET_MODE (orig_src)); | |
1997 | emit_move_insn (src, orig_src); | |
1998 | } | |
1999 | ||
aac5cc16 RH |
2000 | /* Optimize the access just a bit. */ |
2001 | if (GET_CODE (src) == MEM | |
19caa751 | 2002 | && align >= GET_MODE_ALIGNMENT (mode) |
729a2125 | 2003 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
aac5cc16 RH |
2004 | && bytelen == GET_MODE_SIZE (mode)) |
2005 | { | |
2006 | tmps[i] = gen_reg_rtx (mode); | |
2007 | emit_move_insn (tmps[i], | |
2008 | change_address (src, mode, | |
2009 | plus_constant (XEXP (src, 0), | |
2010 | bytepos))); | |
fffa9c1d | 2011 | } |
7c4a6db0 JW |
2012 | else if (GET_CODE (src) == CONCAT) |
2013 | { | |
2014 | if (bytepos == 0 | |
2015 | && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))) | |
2016 | tmps[i] = XEXP (src, 0); | |
8752c357 | 2017 | else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0))) |
7c4a6db0 JW |
2018 | && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))) |
2019 | tmps[i] = XEXP (src, 1); | |
2020 | else | |
2021 | abort (); | |
2022 | } | |
f3ce87a9 | 2023 | else if (CONSTANT_P (src) |
2ee5437b RH |
2024 | || (GET_CODE (src) == REG && GET_MODE (src) == mode)) |
2025 | tmps[i] = src; | |
fffa9c1d | 2026 | else |
19caa751 RK |
2027 | tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT, |
2028 | bytepos * BITS_PER_UNIT, 1, NULL_RTX, | |
2029 | mode, mode, align, ssize); | |
fffa9c1d | 2030 | |
aac5cc16 | 2031 | if (BYTES_BIG_ENDIAN && shift) |
19caa751 RK |
2032 | tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift), |
2033 | tmps[i], 0, OPTAB_WIDEN); | |
fffa9c1d | 2034 | } |
19caa751 | 2035 | |
3a94c984 | 2036 | emit_queue (); |
aac5cc16 RH |
2037 | |
2038 | /* Copy the extracted pieces into the proper (probable) hard regs. */ | |
2039 | for (i = start; i < XVECLEN (dst, 0); i++) | |
2040 | emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]); | |
fffa9c1d JW |
2041 | } |
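/* Editorial sketch (not from expr.c): the PARALLEL walk above, with
   each piece described by a (bytepos, len) pair instead of an rtx.
   The struct and names are illustrative only; LEN is assumed not to
   exceed sizeof (p[i].reg).  Trailing fragments are clipped to the
   known block size exactly as in the loop above.  */

#include <string.h>

struct piece_sketch
{
  long bytepos;			/* byte offset within the block */
  unsigned int len;		/* stands in for GET_MODE_SIZE */
  unsigned long reg;		/* stands in for the target register */
};

static void
group_load_sketch (struct piece_sketch *p, int n,
		   const unsigned char *src, long ssize)
{
  int i;

  for (i = 0; i < n; i++)
    {
      unsigned int len = p[i].len;

      if (ssize >= 0 && p[i].bytepos + (long) len > ssize)
	len = ssize - p[i].bytepos;	/* trailing fragment */
      p[i].reg = 0;
      memcpy (&p[i].reg, src + p[i].bytepos, len);
    }
}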
2042 | ||
aac5cc16 RH |
2043 | /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive |
2044 | registers represented by a PARALLEL. SSIZE represents the total size of | |
2045 | block DST, or -1 if not known. ALIGN is the known alignment of DST. */ | |
fffa9c1d JW |
2046 | |
2047 | void | |
aac5cc16 RH |
2048 | emit_group_store (orig_dst, src, ssize, align) |
2049 | rtx orig_dst, src; | |
729a2125 RK |
2050 | int ssize; |
2051 | unsigned int align; | |
fffa9c1d | 2052 | { |
aac5cc16 RH |
2053 | rtx *tmps, dst; |
2054 | int start, i; | |
fffa9c1d | 2055 | |
aac5cc16 | 2056 | if (GET_CODE (src) != PARALLEL) |
fffa9c1d JW |
2057 | abort (); |
2058 | ||
2059 | /* Check for a NULL entry, used to indicate that the parameter goes | |
2060 | both on the stack and in registers. */ | |
aac5cc16 RH |
2061 | if (XEXP (XVECEXP (src, 0, 0), 0)) |
2062 | start = 0; | |
fffa9c1d | 2063 | else |
aac5cc16 RH |
2064 | start = 1; |
2065 | ||
3a94c984 | 2066 | tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0)); |
fffa9c1d | 2067 | |
aac5cc16 RH |
2068 | /* Copy the (probable) hard regs into pseudos. */ |
2069 | for (i = start; i < XVECLEN (src, 0); i++) | |
fffa9c1d | 2070 | { |
aac5cc16 RH |
2071 | rtx reg = XEXP (XVECEXP (src, 0, i), 0); |
2072 | tmps[i] = gen_reg_rtx (GET_MODE (reg)); | |
2073 | emit_move_insn (tmps[i], reg); | |
2074 | } | |
3a94c984 | 2075 | emit_queue (); |
fffa9c1d | 2076 | |
aac5cc16 RH |
2077 | /* If we won't be storing directly into memory, protect the real destination |
2078 | from strange tricks we might play. */ | |
2079 | dst = orig_dst; | |
10a9f2be JW |
2080 | if (GET_CODE (dst) == PARALLEL) |
2081 | { | |
2082 | rtx temp; | |
2083 | ||
2084 | /* We can get a PARALLEL dst if there is a conditional expression in | |
2085 | a return statement. In that case, the dst and src are the same, | |
2086 | so no action is necessary. */ | |
2087 | if (rtx_equal_p (dst, src)) | |
2088 | return; | |
2089 | ||
2090 | /* It is unclear if we can ever reach here, but we may as well handle | |
2091 | it. Allocate a temporary, and split this into a store/load to/from | |
2092 | the temporary. */ | |
2093 | ||
2094 | temp = assign_stack_temp (GET_MODE (dst), ssize, 0); | |
2095 | emit_group_store (temp, src, ssize, align); | |
2096 | emit_group_load (dst, temp, ssize, align); | |
2097 | return; | |
2098 | } | |
2099 | else if (GET_CODE (dst) != MEM) | |
aac5cc16 RH |
2100 | { |
2101 | dst = gen_reg_rtx (GET_MODE (orig_dst)); | |
2102 | /* Make life a bit easier for combine. */ | |
2103 | emit_move_insn (dst, const0_rtx); | |
2104 | } | |
aac5cc16 RH |
2105 | |
2106 | /* Process the pieces. */ | |
2107 | for (i = start; i < XVECLEN (src, 0); i++) | |
2108 | { | |
770ae6cc | 2109 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); |
aac5cc16 | 2110 | enum machine_mode mode = GET_MODE (tmps[i]); |
770ae6cc | 2111 | unsigned int bytelen = GET_MODE_SIZE (mode); |
aac5cc16 RH |
2112 | |
2113 | /* Handle trailing fragments that run over the size of the struct. */ | |
8752c357 | 2114 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
71bc0330 | 2115 | { |
aac5cc16 RH |
2116 | if (BYTES_BIG_ENDIAN) |
2117 | { | |
2118 | int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
2119 | tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift), |
2120 | tmps[i], 0, OPTAB_WIDEN); | |
2121 | } | |
2122 | bytelen = ssize - bytepos; | |
71bc0330 | 2123 | } |
fffa9c1d | 2124 | |
aac5cc16 RH |
2125 | /* Optimize the access just a bit. */ |
2126 | if (GET_CODE (dst) == MEM | |
19caa751 | 2127 | && align >= GET_MODE_ALIGNMENT (mode) |
729a2125 | 2128 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
aac5cc16 | 2129 | && bytelen == GET_MODE_SIZE (mode)) |
729a2125 RK |
2130 | emit_move_insn (change_address (dst, mode, |
2131 | plus_constant (XEXP (dst, 0), | |
2132 | bytepos)), | |
2133 | tmps[i]); | |
aac5cc16 | 2134 | else |
729a2125 | 2135 | store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, |
aac5cc16 | 2136 | mode, tmps[i], align, ssize); |
fffa9c1d | 2137 | } |
729a2125 | 2138 | |
3a94c984 | 2139 | emit_queue (); |
aac5cc16 RH |
2140 | |
2141 | /* Copy from the pseudo into the (probable) hard reg. */ | |
2142 | if (GET_CODE (dst) == REG) | |
2143 | emit_move_insn (orig_dst, dst); | |
fffa9c1d JW |
2144 | } |
2145 | ||
c36fce9a GRK |
2146 | /* Generate code to copy a BLKmode object of TYPE out of a |
2147 | set of registers starting with SRCREG into TGTBLK. If TGTBLK | |
2148 | is null, a stack temporary is created. TGTBLK is returned. | |
2149 | ||
2150 | The primary purpose of this routine is to handle functions | |
2151 | that return BLKmode structures in registers. Some machines | |
2152 | (the PA for example) want to return all small structures | |
3a94c984 | 2153 | in registers regardless of the structure's alignment. */ |
c36fce9a GRK |
2154 | |
2155 | rtx | |
19caa751 | 2156 | copy_blkmode_from_reg (tgtblk, srcreg, type) |
c36fce9a GRK |
2157 | rtx tgtblk; |
2158 | rtx srcreg; | |
2159 | tree type; | |
2160 | { | |
19caa751 RK |
2161 | unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type); |
2162 | rtx src = NULL, dst = NULL; | |
2163 | unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD); | |
2164 | unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0; | |
2165 | ||
2166 | if (tgtblk == 0) | |
2167 | { | |
1da68f56 RK |
2168 | tgtblk = assign_temp (build_qualified_type (type, |
2169 | (TYPE_QUALS (type) | |
2170 | | TYPE_QUAL_CONST)), | |
2171 | 0, 1, 1); | |
19caa751 RK |
2172 | preserve_temp_slots (tgtblk); |
2173 | } | |
3a94c984 | 2174 | |
19caa751 RK |
2175 | /* This code assumes srcreg is at least a full word. If it isn't, |
2176 | copy it into a new pseudo which is a full word. */ | |
2177 | if (GET_MODE (srcreg) != BLKmode | |
2178 | && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD) | |
2179 | srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type)); | |
2180 | ||
2181 | /* Structures whose size is not a multiple of a word are aligned | |
2182 | to the least significant byte (to the right). On a BYTES_BIG_ENDIAN | |
2183 | machine, this means we must skip the empty high order bytes when | |
2184 | calculating the bit offset. */ | |
2185 | if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD) | |
2186 | big_endian_correction | |
2187 | = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT)); | |
2188 | ||
2189 | /* Copy the structure BITSIZE bits at a time. | |
3a94c984 | 2190 | |
19caa751 RK |
2191 | We could probably emit more efficient code for machines which do not use |
2192 | strict alignment, but it doesn't seem worth the effort at the current | |
2193 | time. */ | |
2194 | for (bitpos = 0, xbitpos = big_endian_correction; | |
2195 | bitpos < bytes * BITS_PER_UNIT; | |
2196 | bitpos += bitsize, xbitpos += bitsize) | |
2197 | { | |
3a94c984 | 2198 | /* We need a new source operand each time xbitpos is on a |
19caa751 RK |
2199 | word boundary and when xbitpos == big_endian_correction |
2200 | (the first time through). */ | |
2201 | if (xbitpos % BITS_PER_WORD == 0 | |
2202 | || xbitpos == big_endian_correction) | |
b47f8cfc JH |
2203 | src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, |
2204 | GET_MODE (srcreg)); | |
19caa751 RK |
2205 | |
2206 | /* We need a new destination operand each time bitpos is on | |
2207 | a word boundary. */ | |
2208 | if (bitpos % BITS_PER_WORD == 0) | |
2209 | dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode); | |
3a94c984 | 2210 | |
19caa751 RK |
2211 | /* Use xbitpos for the source extraction (right justified) and |
2212 | xbitpos for the destination store (left justified). */ | |
2213 | store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode, | |
2214 | extract_bit_field (src, bitsize, | |
2215 | xbitpos % BITS_PER_WORD, 1, | |
2216 | NULL_RTX, word_mode, word_mode, | |
2217 | bitsize, BITS_PER_WORD), | |
2218 | bitsize, BITS_PER_WORD); | |
2219 | } | |
2220 | ||
2221 | return tgtblk; | |
c36fce9a GRK |
2222 | } |
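/* Editorial sketch (not from expr.c): the correction above worked
   through with assumed sizes.  For a 6-byte struct, 4-byte words and
   8-bit units, 6 % 4 = 2 bytes occupy the last word, so extraction
   skips the 32 - 2 * 8 = 16 empty high-order bits.  */

#include <stdio.h>

int
main (void)
{
  unsigned int bytes = 6, units_per_word = 4, bits_per_word = 32;
  unsigned int correction = 0;

  if (bytes % units_per_word != 0)
    correction = bits_per_word - (bytes % units_per_word) * 8;
  printf ("big_endian_correction = %u\n", correction);	/* prints 16 */
  return 0;
}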
2223 | ||
94b25f81 RK |
2224 | /* Add a USE expression for REG to the (possibly empty) list pointed |
2225 | to by CALL_FUSAGE. REG must denote a hard register. */ | |
bbf6f052 RK |
2226 | |
2227 | void | |
b3f8cf4a RK |
2228 | use_reg (call_fusage, reg) |
2229 | rtx *call_fusage, reg; | |
2230 | { | |
0304dfbb DE |
2231 | if (GET_CODE (reg) != REG |
2232 | || REGNO (reg) >= FIRST_PSEUDO_REGISTER) | |
3a94c984 | 2233 | abort (); |
b3f8cf4a RK |
2234 | |
2235 | *call_fusage | |
38a448ca RH |
2236 | = gen_rtx_EXPR_LIST (VOIDmode, |
2237 | gen_rtx_USE (VOIDmode, reg), *call_fusage); | |
b3f8cf4a RK |
2238 | } |
2239 | ||
94b25f81 RK |
2240 | /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs, |
2241 | starting at REGNO. All of these registers must be hard registers. */ | |
b3f8cf4a RK |
2242 | |
2243 | void | |
0304dfbb DE |
2244 | use_regs (call_fusage, regno, nregs) |
2245 | rtx *call_fusage; | |
bbf6f052 RK |
2246 | int regno; |
2247 | int nregs; | |
2248 | { | |
0304dfbb | 2249 | int i; |
bbf6f052 | 2250 | |
0304dfbb DE |
2251 | if (regno + nregs > FIRST_PSEUDO_REGISTER) |
2252 | abort (); | |
2253 | ||
2254 | for (i = 0; i < nregs; i++) | |
38a448ca | 2255 | use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i)); |
bbf6f052 | 2256 | } |
fffa9c1d JW |
2257 | |
2258 | /* Add USE expressions to *CALL_FUSAGE for each REG contained in the | |
2259 | PARALLEL REGS. This is for calls that pass values in multiple | |
2260 | non-contiguous locations. The Irix 6 ABI has examples of this. */ | |
2261 | ||
2262 | void | |
2263 | use_group_regs (call_fusage, regs) | |
2264 | rtx *call_fusage; | |
2265 | rtx regs; | |
2266 | { | |
2267 | int i; | |
2268 | ||
6bd35f86 DE |
2269 | for (i = 0; i < XVECLEN (regs, 0); i++) |
2270 | { | |
2271 | rtx reg = XEXP (XVECEXP (regs, 0, i), 0); | |
fffa9c1d | 2272 | |
6bd35f86 DE |
2273 | /* A NULL entry means the parameter goes both on the stack and in |
2274 | registers. This can also be a MEM for targets that pass values | |
2275 | partially on the stack and partially in registers. */ | |
e9a25f70 | 2276 | if (reg != 0 && GET_CODE (reg) == REG) |
6bd35f86 DE |
2277 | use_reg (call_fusage, reg); |
2278 | } | |
fffa9c1d | 2279 | } |
bbf6f052 | 2280 | \f |
57814e5e JJ |
2281 | |
2282 | int | |
2283 | can_store_by_pieces (len, constfun, constfundata, align) | |
2284 | unsigned HOST_WIDE_INT len; | |
2285 | rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); | |
2286 | PTR constfundata; | |
2287 | unsigned int align; | |
2288 | { | |
98166639 | 2289 | unsigned HOST_WIDE_INT max_size, l; |
57814e5e JJ |
2290 | HOST_WIDE_INT offset = 0; |
2291 | enum machine_mode mode, tmode; | |
2292 | enum insn_code icode; | |
2293 | int reverse; | |
2294 | rtx cst; | |
2295 | ||
2296 | if (! MOVE_BY_PIECES_P (len, align)) | |
2297 | return 0; | |
2298 | ||
2299 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) | |
2300 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) | |
2301 | align = MOVE_MAX * BITS_PER_UNIT; | |
2302 | ||
2303 | /* We would first store what we can in the largest integer mode, then go to | |
2304 | successively smaller modes. */ | |
2305 | ||
2306 | for (reverse = 0; | |
2307 | reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); | |
2308 | reverse++) | |
2309 | { | |
2310 | l = len; | |
2311 | mode = VOIDmode; | |
98166639 | 2312 | max_size = MOVE_MAX_PIECES + 1; |
57814e5e JJ |
2313 | while (max_size > 1) |
2314 | { | |
2315 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
2316 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2317 | if (GET_MODE_SIZE (tmode) < max_size) | |
2318 | mode = tmode; | |
2319 | ||
2320 | if (mode == VOIDmode) | |
2321 | break; | |
2322 | ||
2323 | icode = mov_optab->handlers[(int) mode].insn_code; | |
2324 | if (icode != CODE_FOR_nothing | |
2325 | && align >= GET_MODE_ALIGNMENT (mode)) | |
2326 | { | |
2327 | unsigned int size = GET_MODE_SIZE (mode); | |
2328 | ||
2329 | while (l >= size) | |
2330 | { | |
2331 | if (reverse) | |
2332 | offset -= size; | |
2333 | ||
2334 | cst = (*constfun) (constfundata, offset, mode); | |
2335 | if (!LEGITIMATE_CONSTANT_P (cst)) | |
2336 | return 0; | |
2337 | ||
2338 | if (!reverse) | |
2339 | offset += size; | |
2340 | ||
2341 | l -= size; | |
2342 | } | |
2343 | } | |
2344 | ||
2345 | max_size = GET_MODE_SIZE (mode); | |
2346 | } | |
2347 | ||
2348 | /* The code above should have handled everything. */ | |
2349 | if (l != 0) | |
2350 | abort (); | |
2351 | } | |
2352 | ||
2353 | return 1; | |
2354 | } | |
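/* Editorial sketch (not from expr.c): the dry run above reduced to
   plain C.  Walk the same chunks a real store would use and ask a
   callback whether each chunk's constant is usable; reject at the
   first failure.  The callback type and all names are illustrative.  */

#include <stddef.h>

typedef int (*const_ok_fn) (const void *data, long offset, size_t size);

static int
can_store_sketch (size_t len, const_ok_fn const_ok, const void *data)
{
  static const size_t sizes[] = { 8, 4, 2, 1 };
  long offset = 0;
  size_t i;

  for (i = 0; i < sizeof sizes / sizeof sizes[0]; i++)
    while (len >= sizes[i])
      {
	if (! const_ok (data, offset, sizes[i]))
	  return 0;		/* cf. ! LEGITIMATE_CONSTANT_P (cst) */
	offset += sizes[i];
	len -= sizes[i];
      }
  return 1;
}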
2355 | ||
2356 | /* Generate several move instructions to store LEN bytes generated by | |
2357 | CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a | |
2358 | pointer which will be passed as argument in every CONSTFUN call. | |
2359 | ALIGN is maximum alignment we can assume. */ | |
2360 | ||
2361 | void | |
2362 | store_by_pieces (to, len, constfun, constfundata, align) | |
2363 | rtx to; | |
2364 | unsigned HOST_WIDE_INT len; | |
2365 | rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); | |
2366 | PTR constfundata; | |
2367 | unsigned int align; | |
2368 | { | |
2369 | struct store_by_pieces data; | |
2370 | ||
2371 | if (! MOVE_BY_PIECES_P (len, align)) | |
2372 | abort (); | |
2373 | to = protect_from_queue (to, 1); | |
2374 | data.constfun = constfun; | |
2375 | data.constfundata = constfundata; | |
2376 | data.len = len; | |
2377 | data.to = to; | |
2378 | store_by_pieces_1 (&data, align); | |
2379 | } | |
2380 | ||
19caa751 RK |
2381 | /* Generate several move instructions to clear LEN bytes of block TO. (A MEM |
2382 | rtx with BLKmode). The caller must pass TO through protect_from_queue | |
2383 | before calling. ALIGN is maximum alignment we can assume. */ | |
9de08200 RK |
2384 | |
2385 | static void | |
2386 | clear_by_pieces (to, len, align) | |
2387 | rtx to; | |
3bdf5ad1 | 2388 | unsigned HOST_WIDE_INT len; |
729a2125 | 2389 | unsigned int align; |
9de08200 | 2390 | { |
57814e5e JJ |
2391 | struct store_by_pieces data; |
2392 | ||
2393 | data.constfun = clear_by_pieces_1; | |
df4ae160 | 2394 | data.constfundata = NULL; |
57814e5e JJ |
2395 | data.len = len; |
2396 | data.to = to; | |
2397 | store_by_pieces_1 (&data, align); | |
2398 | } | |
2399 | ||
2400 | /* Callback routine for clear_by_pieces. | |
2401 | Return const0_rtx unconditionally. */ | |
2402 | ||
2403 | static rtx | |
2404 | clear_by_pieces_1 (data, offset, mode) | |
2405 | PTR data ATTRIBUTE_UNUSED; | |
2406 | HOST_WIDE_INT offset ATTRIBUTE_UNUSED; | |
2407 | enum machine_mode mode ATTRIBUTE_UNUSED; | |
2408 | { | |
2409 | return const0_rtx; | |
2410 | } | |
2411 | ||
2412 | /* Subroutine of clear_by_pieces and store_by_pieces. | |
2413 | Generate several move instructions to store LEN bytes of block TO. (A MEM | |
2414 | rtx with BLKmode). The caller must pass TO through protect_from_queue | |
2415 | before calling. ALIGN is maximum alignment we can assume. */ | |
2416 | ||
2417 | static void | |
2418 | store_by_pieces_1 (data, align) | |
2419 | struct store_by_pieces *data; | |
2420 | unsigned int align; | |
2421 | { | |
2422 | rtx to_addr = XEXP (data->to, 0); | |
3bdf5ad1 | 2423 | unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1; |
fbe1758d AM |
2424 | enum machine_mode mode = VOIDmode, tmode; |
2425 | enum insn_code icode; | |
9de08200 | 2426 | |
57814e5e JJ |
2427 | data->offset = 0; |
2428 | data->to_addr = to_addr; | |
2429 | data->autinc_to | |
9de08200 RK |
2430 | = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC |
2431 | || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); | |
2432 | ||
57814e5e JJ |
2433 | data->explicit_inc_to = 0; |
2434 | data->reverse | |
9de08200 | 2435 | = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); |
57814e5e JJ |
2436 | if (data->reverse) |
2437 | data->offset = data->len; | |
9de08200 | 2438 | |
57814e5e | 2439 | /* If storing requires more than two move insns, |
9de08200 RK |
2440 | copy addresses to registers (to make displacements shorter) |
2441 | and use post-increment if available. */ | |
57814e5e JJ |
2442 | if (!data->autinc_to |
2443 | && move_by_pieces_ninsns (data->len, align) > 2) | |
9de08200 | 2444 | { |
3a94c984 | 2445 | /* Determine the main mode we'll be using. */ |
fbe1758d AM |
2446 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2447 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2448 | if (GET_MODE_SIZE (tmode) < max_size) | |
2449 | mode = tmode; | |
2450 | ||
57814e5e | 2451 | if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) |
9de08200 | 2452 | { |
57814e5e JJ |
2453 | data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len)); |
2454 | data->autinc_to = 1; | |
2455 | data->explicit_inc_to = -1; | |
9de08200 | 2456 | } |
3bdf5ad1 | 2457 | |
57814e5e JJ |
2458 | if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse |
2459 | && ! data->autinc_to) | |
9de08200 | 2460 | { |
57814e5e JJ |
2461 | data->to_addr = copy_addr_to_reg (to_addr); |
2462 | data->autinc_to = 1; | |
2463 | data->explicit_inc_to = 1; | |
9de08200 | 2464 | } |
3bdf5ad1 | 2465 | |
57814e5e JJ |
2466 | if ( !data->autinc_to && CONSTANT_P (to_addr)) |
2467 | data->to_addr = copy_addr_to_reg (to_addr); | |
9de08200 RK |
2468 | } |
2469 | ||
e1565e65 | 2470 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) |
19caa751 | 2471 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) |
bdb429a5 | 2472 | align = MOVE_MAX * BITS_PER_UNIT; |
9de08200 | 2473 | |
57814e5e | 2474 | /* First store what we can in the largest integer mode, then go to |
9de08200 RK |
2475 | successively smaller modes. */ |
2476 | ||
2477 | while (max_size > 1) | |
2478 | { | |
9de08200 RK |
2479 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2480 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2481 | if (GET_MODE_SIZE (tmode) < max_size) | |
2482 | mode = tmode; | |
2483 | ||
2484 | if (mode == VOIDmode) | |
2485 | break; | |
2486 | ||
2487 | icode = mov_optab->handlers[(int) mode].insn_code; | |
19caa751 | 2488 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
57814e5e | 2489 | store_by_pieces_2 (GEN_FCN (icode), mode, data); |
9de08200 RK |
2490 | |
2491 | max_size = GET_MODE_SIZE (mode); | |
2492 | } | |
2493 | ||
2494 | /* The code above should have handled everything. */ | |
57814e5e | 2495 | if (data->len != 0) |
9de08200 RK |
2496 | abort (); |
2497 | } | |
2498 | ||
57814e5e | 2499 | /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate |
9de08200 RK |
2500 | with move instructions for mode MODE. GENFUN is the gen_... function |
2501 | to make a move insn for that mode. DATA has all the other info. */ | |
2502 | ||
2503 | static void | |
57814e5e | 2504 | store_by_pieces_2 (genfun, mode, data) |
711d877c | 2505 | rtx (*genfun) PARAMS ((rtx, ...)); |
9de08200 | 2506 | enum machine_mode mode; |
57814e5e | 2507 | struct store_by_pieces *data; |
9de08200 | 2508 | { |
3bdf5ad1 | 2509 | unsigned int size = GET_MODE_SIZE (mode); |
57814e5e | 2510 | rtx to1, cst; |
9de08200 RK |
2511 | |
2512 | while (data->len >= size) | |
2513 | { | |
3bdf5ad1 RK |
2514 | if (data->reverse) |
2515 | data->offset -= size; | |
9de08200 | 2516 | |
3bdf5ad1 RK |
2517 | if (data->autinc_to) |
2518 | { | |
2519 | to1 = gen_rtx_MEM (mode, data->to_addr); | |
2520 | MEM_COPY_ATTRIBUTES (to1, data->to); | |
2521 | } | |
3a94c984 | 2522 | else |
3bdf5ad1 RK |
2523 | to1 = change_address (data->to, mode, |
2524 | plus_constant (data->to_addr, data->offset)); | |
9de08200 | 2525 | |
940da324 | 2526 | if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) |
57814e5e JJ |
2527 | emit_insn (gen_add2_insn (data->to_addr, |
2528 | GEN_INT (-(HOST_WIDE_INT) size))); | |
9de08200 | 2529 | |
57814e5e JJ |
2530 | cst = (*data->constfun) (data->constfundata, data->offset, mode); |
2531 | emit_insn ((*genfun) (to1, cst)); | |
3bdf5ad1 | 2532 | |
940da324 | 2533 | if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) |
9de08200 | 2534 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); |
9de08200 | 2535 | |
3bdf5ad1 RK |
2536 | if (! data->reverse) |
2537 | data->offset += size; | |
9de08200 RK |
2538 | |
2539 | data->len -= size; | |
2540 | } | |
2541 | } | |
2542 | \f | |
19caa751 RK |
2543 | /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is |
2544 | its length in bytes and ALIGN is the maximum alignment we can assume it has. | |
bbf6f052 | 2545 | |
e9a25f70 JL |
2546 | If we call a function that returns the length of the block, return it. */ |
2547 | ||
2548 | rtx | |
9de08200 | 2549 | clear_storage (object, size, align) |
bbf6f052 | 2550 | rtx object; |
4c08eef0 | 2551 | rtx size; |
729a2125 | 2552 | unsigned int align; |
bbf6f052 | 2553 | { |
52cf7115 JL |
2554 | #ifdef TARGET_MEM_FUNCTIONS |
2555 | static tree fn; | |
2556 | tree call_expr, arg_list; | |
2557 | #endif | |
e9a25f70 JL |
2558 | rtx retval = 0; |
2559 | ||
fcf1b822 RK |
2560 | /* If OBJECT is not BLKmode and SIZE is the same size as its mode, |
2561 | just move a zero. Otherwise, do this a piece at a time. */ | |
2562 | if (GET_MODE (object) != BLKmode | |
2563 | && GET_CODE (size) == CONST_INT | |
8752c357 | 2564 | && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size)) |
fcf1b822 RK |
2565 | emit_move_insn (object, CONST0_RTX (GET_MODE (object))); |
2566 | else | |
bbf6f052 | 2567 | { |
9de08200 RK |
2568 | object = protect_from_queue (object, 1); |
2569 | size = protect_from_queue (size, 0); | |
2570 | ||
2571 | if (GET_CODE (size) == CONST_INT | |
fbe1758d | 2572 | && MOVE_BY_PIECES_P (INTVAL (size), align)) |
9de08200 | 2573 | clear_by_pieces (object, INTVAL (size), align); |
9de08200 RK |
2574 | else |
2575 | { | |
2576 | /* Try the most limited insn first, because there's no point | |
2577 | including more than one in the machine description unless | |
2578 | the more limited one has some advantage. */ | |
2579 | ||
19caa751 | 2580 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); |
9de08200 RK |
2581 | enum machine_mode mode; |
2582 | ||
2583 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; | |
2584 | mode = GET_MODE_WIDER_MODE (mode)) | |
2585 | { | |
2586 | enum insn_code code = clrstr_optab[(int) mode]; | |
a995e389 | 2587 | insn_operand_predicate_fn pred; |
9de08200 RK |
2588 | |
2589 | if (code != CODE_FOR_nothing | |
2590 | /* We don't need MODE to be narrower than | |
2591 | BITS_PER_HOST_WIDE_INT here because if SIZE is less than | |
2592 | the mode mask, as it is returned by the macro, it will | |
2593 | definitely be less than the actual mode mask. */ | |
2594 | && ((GET_CODE (size) == CONST_INT | |
2595 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
e5e809f4 | 2596 | <= (GET_MODE_MASK (mode) >> 1))) |
9de08200 | 2597 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) |
a995e389 RH |
2598 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 |
2599 | || (*pred) (object, BLKmode)) | |
2600 | && ((pred = insn_data[(int) code].operand[2].predicate) == 0 | |
2601 | || (*pred) (opalign, VOIDmode))) | |
9de08200 RK |
2602 | { |
2603 | rtx op1; | |
2604 | rtx last = get_last_insn (); | |
2605 | rtx pat; | |
2606 | ||
2607 | op1 = convert_to_mode (mode, size, 1); | |
a995e389 RH |
2608 | pred = insn_data[(int) code].operand[1].predicate; |
2609 | if (pred != 0 && ! (*pred) (op1, mode)) | |
9de08200 RK |
2610 | op1 = copy_to_mode_reg (mode, op1); |
2611 | ||
2612 | pat = GEN_FCN ((int) code) (object, op1, opalign); | |
2613 | if (pat) | |
2614 | { | |
2615 | emit_insn (pat); | |
e9a25f70 | 2616 | return 0; |
9de08200 RK |
2617 | } |
2618 | else | |
2619 | delete_insns_since (last); | |
2620 | } | |
2621 | } | |
2622 | ||
4bc973ae | 2623 | /* OBJECT or SIZE may have been passed through protect_from_queue. |
9de08200 | 2624 | |
4bc973ae JL |
2625 | It is unsafe to save the value generated by protect_from_queue |
2626 | and reuse it later. Consider what happens if emit_queue is | |
2627 | called before the return value from protect_from_queue is used. | |
52cf7115 | 2628 | |
4bc973ae JL |
2629 | Expansion of the CALL_EXPR below will call emit_queue before |
2630 | we are finished emitting RTL for argument setup. So if we are | |
2631 | not careful we could get the wrong value for an argument. | |
52cf7115 | 2632 | |
4bc973ae JL |
2633 | To avoid this problem we go ahead and emit code to copy OBJECT |
2634 | and SIZE into new pseudos. We can then place those new pseudos | |
2635 | into an RTL_EXPR and use them later, even after a call to | |
2636 | emit_queue. | |
52cf7115 | 2637 | |
4bc973ae JL |
2638 | Note this is not strictly needed for library calls since they |
2639 | do not call emit_queue before loading their arguments. However, | |
2640 | we may need to have library calls call emit_queue in the future | |
2641 | since failing to do so could cause problems for targets which | |
2642 | define SMALL_REGISTER_CLASSES and pass arguments in registers. */ | |
2643 | object = copy_to_mode_reg (Pmode, XEXP (object, 0)); | |
52cf7115 | 2644 | |
4bc973ae JL |
2645 | #ifdef TARGET_MEM_FUNCTIONS |
2646 | size = copy_to_mode_reg (TYPE_MODE (sizetype), size); | |
2647 | #else | |
2648 | size = convert_to_mode (TYPE_MODE (integer_type_node), size, | |
2649 | TREE_UNSIGNED (integer_type_node)); | |
f3dc586a | 2650 | size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size); |
4bc973ae | 2651 | #endif |
52cf7115 | 2652 | |
4bc973ae JL |
2653 | #ifdef TARGET_MEM_FUNCTIONS |
2654 | /* It is incorrect to use the libcall calling conventions to call | |
2655 | memset in this context. | |
52cf7115 | 2656 | |
4bc973ae JL |
2657 | This could be a user call to memset and the user may wish to |
2658 | examine the return value from memset. | |
52cf7115 | 2659 | |
4bc973ae JL |
2660 | For targets where libcalls and normal calls have different |
2661 | conventions for returning pointers, we could end up generating | |
3a94c984 | 2662 | incorrect code. |
4bc973ae JL |
2663 | |
2664 | So instead of using a libcall sequence we build up a suitable | |
2665 | CALL_EXPR and expand the call in the normal fashion. */ | |
2666 | if (fn == NULL_TREE) | |
2667 | { | |
2668 | tree fntype; | |
2669 | ||
2670 | /* This was copied from except.c; I don't know whether all of it is | |
2671 | necessary in this context. */ | |
2672 | fn = get_identifier ("memset"); | |
4bc973ae JL |
2673 | fntype = build_pointer_type (void_type_node); |
2674 | fntype = build_function_type (fntype, NULL_TREE); | |
2675 | fn = build_decl (FUNCTION_DECL, fn, fntype); | |
d7db6646 | 2676 | ggc_add_tree_root (&fn, 1); |
4bc973ae JL |
2677 | DECL_EXTERNAL (fn) = 1; |
2678 | TREE_PUBLIC (fn) = 1; | |
2679 | DECL_ARTIFICIAL (fn) = 1; | |
6496a589 | 2680 | make_decl_rtl (fn, NULL); |
4bc973ae | 2681 | assemble_external (fn); |
4bc973ae JL |
2682 | } |
2683 | ||
3a94c984 | 2684 | /* We need to make an argument list for the function call. |
4bc973ae JL |
2685 | |
2686 | memset has three arguments: the first is a void * address, the |
2687 | second an integer with the initialization value, and the last is a |
2688 | size_t byte count for the fill. */ |
2689 | arg_list | |
2690 | = build_tree_list (NULL_TREE, | |
2691 | make_tree (build_pointer_type (void_type_node), | |
2692 | object)); | |
2693 | TREE_CHAIN (arg_list) | |
2694 | = build_tree_list (NULL_TREE, | |
3a94c984 | 2695 | make_tree (integer_type_node, const0_rtx)); |
4bc973ae JL |
2696 | TREE_CHAIN (TREE_CHAIN (arg_list)) |
2697 | = build_tree_list (NULL_TREE, make_tree (sizetype, size)); | |
2698 | TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE; | |
2699 | ||
2700 | /* Now we have to build up the CALL_EXPR itself. */ | |
2701 | call_expr = build1 (ADDR_EXPR, | |
2702 | build_pointer_type (TREE_TYPE (fn)), fn); | |
2703 | call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), | |
2704 | call_expr, arg_list, NULL_TREE); | |
2705 | TREE_SIDE_EFFECTS (call_expr) = 1; | |
2706 | ||
2707 | retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); | |
bbf6f052 | 2708 | #else |
ebb1b59a | 2709 | emit_library_call (bzero_libfunc, LCT_NORMAL, |
fe7bbd2a | 2710 | VOIDmode, 2, object, Pmode, size, |
9de08200 | 2711 | TYPE_MODE (integer_type_node)); |
bbf6f052 | 2712 | #endif |
9de08200 | 2713 | } |
bbf6f052 | 2714 | } |
e9a25f70 JL |
2715 | |
2716 | return retval; | |
bbf6f052 RK |
2717 | } |
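/* Editorial sketch (not from expr.c): the dispatch above in
   miniature.  A scalar-sized object becomes a single zero store;
   anything else falls back to the library routine, standing in for
   clear_by_pieces, the clrstr patterns and the memset/bzero call.  */

#include <stddef.h>
#include <string.h>

static void
clear_storage_sketch (void *object, size_t size)
{
  static const long zero = 0;

  if (size == sizeof (long))		/* one move of a zero constant */
    memcpy (object, &zero, sizeof (long));
  else
    memset (object, 0, size);		/* library fallback */
}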
2718 | ||
2719 | /* Generate code to copy Y into X. | |
2720 | Both Y and X must have the same mode, except that | |
2721 | Y can be a constant with VOIDmode. | |
2722 | This mode cannot be BLKmode; use emit_block_move for that. | |
2723 | ||
2724 | Return the last instruction emitted. */ | |
2725 | ||
2726 | rtx | |
2727 | emit_move_insn (x, y) | |
2728 | rtx x, y; | |
2729 | { | |
2730 | enum machine_mode mode = GET_MODE (x); | |
de1b33dd AO |
2731 | rtx y_cst = NULL_RTX; |
2732 | rtx last_insn; | |
bbf6f052 RK |
2733 | |
2734 | x = protect_from_queue (x, 1); | |
2735 | y = protect_from_queue (y, 0); | |
2736 | ||
2737 | if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode)) | |
2738 | abort (); | |
2739 | ||
ee5332b8 RH |
2740 | /* Never force constant_p_rtx to memory. */ |
2741 | if (GET_CODE (y) == CONSTANT_P_RTX) | |
2742 | ; | |
2743 | else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y)) | |
de1b33dd AO |
2744 | { |
2745 | y_cst = y; | |
2746 | y = force_const_mem (mode, y); | |
2747 | } | |
bbf6f052 RK |
2748 | |
2749 | /* If X or Y are memory references, verify that their addresses are valid | |
2750 | for the machine. */ | |
2751 | if (GET_CODE (x) == MEM | |
2752 | && ((! memory_address_p (GET_MODE (x), XEXP (x, 0)) | |
2753 | && ! push_operand (x, GET_MODE (x))) | |
2754 | || (flag_force_addr | |
2755 | && CONSTANT_ADDRESS_P (XEXP (x, 0))))) | |
2756 | x = change_address (x, VOIDmode, XEXP (x, 0)); | |
2757 | ||
2758 | if (GET_CODE (y) == MEM | |
2759 | && (! memory_address_p (GET_MODE (y), XEXP (y, 0)) | |
2760 | || (flag_force_addr | |
2761 | && CONSTANT_ADDRESS_P (XEXP (y, 0))))) | |
2762 | y = change_address (y, VOIDmode, XEXP (y, 0)); | |
2763 | ||
2764 | if (mode == BLKmode) | |
2765 | abort (); | |
2766 | ||
de1b33dd AO |
2767 | last_insn = emit_move_insn_1 (x, y); |
2768 | ||
2769 | if (y_cst && GET_CODE (x) == REG) | |
2770 | REG_NOTES (last_insn) | |
2771 | = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn)); | |
2772 | ||
2773 | return last_insn; | |
261c4230 RS |
2774 | } |
2775 | ||
2776 | /* Low level part of emit_move_insn. | |
2777 | Called just like emit_move_insn, but assumes X and Y | |
2778 | are basically valid. */ | |
2779 | ||
2780 | rtx | |
2781 | emit_move_insn_1 (x, y) | |
2782 | rtx x, y; | |
2783 | { | |
2784 | enum machine_mode mode = GET_MODE (x); | |
2785 | enum machine_mode submode; | |
2786 | enum mode_class class = GET_MODE_CLASS (mode); | |
770ae6cc | 2787 | unsigned int i; |
261c4230 | 2788 | |
dbbbbf3b | 2789 | if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE) |
3a94c984 | 2790 | abort (); |
76bbe028 | 2791 | |
bbf6f052 RK |
2792 | if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) |
2793 | return | |
2794 | emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y)); | |
2795 | ||
89742723 | 2796 | /* Expand complex moves by moving real part and imag part, if possible. */ |
7308a047 | 2797 | else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT) |
d0c76654 RK |
2798 | && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode) |
2799 | * BITS_PER_UNIT), | |
2800 | (class == MODE_COMPLEX_INT | |
2801 | ? MODE_INT : MODE_FLOAT), | |
2802 | 0)) | |
7308a047 RS |
2803 | && (mov_optab->handlers[(int) submode].insn_code |
2804 | != CODE_FOR_nothing)) | |
2805 | { | |
2806 | /* Don't split destination if it is a stack push. */ | |
2807 | int stack = push_operand (x, GET_MODE (x)); | |
7308a047 | 2808 | |
79ce92d7 | 2809 | #ifdef PUSH_ROUNDING |
1a06f5fe JH |
2810 | /* In case we output to the stack, but the size is smaller machine can |
2811 | push exactly, we need to use move instructions. */ | |
2812 | if (stack | |
2813 | && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode)) | |
2814 | { | |
2815 | rtx temp; | |
2816 | int offset1, offset2; | |
2817 | ||
2818 | /* Do not use anti_adjust_stack, since we don't want to update | |
2819 | stack_pointer_delta. */ | |
2820 | temp = expand_binop (Pmode, | |
2821 | #ifdef STACK_GROWS_DOWNWARD | |
2822 | sub_optab, | |
2823 | #else | |
2824 | add_optab, | |
2825 | #endif | |
2826 | stack_pointer_rtx, | |
2827 | GEN_INT | |
2828 | (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))), | |
2829 | stack_pointer_rtx, | |
2830 | 0, | |
2831 | OPTAB_LIB_WIDEN); | |
2832 | if (temp != stack_pointer_rtx) | |
2833 | emit_move_insn (stack_pointer_rtx, temp); | |
2834 | #ifdef STACK_GROWS_DOWNWARD | |
2835 | offset1 = 0; | |
2836 | offset2 = GET_MODE_SIZE (submode); | |
2837 | #else | |
2838 | offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))); | |
2839 | offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))) | |
2840 | + GET_MODE_SIZE (submode)); | |
2841 | #endif | |
2842 | emit_move_insn (change_address (x, submode, | |
2843 | gen_rtx_PLUS (Pmode, | |
2844 | stack_pointer_rtx, | |
2845 | GEN_INT (offset1))), | |
2846 | gen_realpart (submode, y)); | |
2847 | emit_move_insn (change_address (x, submode, | |
2848 | gen_rtx_PLUS (Pmode, | |
2849 | stack_pointer_rtx, | |
2850 | GEN_INT (offset2))), | |
2851 | gen_imagpart (submode, y)); | |
2852 | } | |
e9c0bd54 | 2853 | else |
79ce92d7 | 2854 | #endif |
7308a047 RS |
2855 | /* If this is a stack, push the highpart first, so it |
2856 | will be in the argument order. | |
2857 | ||
2858 | In that case, change_address is used only to convert | |
2859 | the mode, not to change the address. */ | |
e9c0bd54 | 2860 | if (stack) |
c937357e | 2861 | { |
e33c0d66 RS |
2862 | /* Note that the real part always precedes the imag part in memory |
2863 | regardless of the machine's endianness. */ |
c937357e RS |
2864 | #ifdef STACK_GROWS_DOWNWARD |
2865 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) | |
3bdf5ad1 | 2866 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
e33c0d66 | 2867 | gen_imagpart (submode, y))); |
c937357e | 2868 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
3bdf5ad1 | 2869 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
e33c0d66 | 2870 | gen_realpart (submode, y))); |
c937357e RS |
2871 | #else |
2872 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) | |
3bdf5ad1 | 2873 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
e33c0d66 | 2874 | gen_realpart (submode, y))); |
c937357e | 2875 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
3bdf5ad1 | 2876 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
e33c0d66 | 2877 | gen_imagpart (submode, y))); |
c937357e RS |
2878 | #endif |
2879 | } | |
2880 | else | |
2881 | { | |
235ae7be DM |
2882 | rtx realpart_x, realpart_y; |
2883 | rtx imagpart_x, imagpart_y; | |
2884 | ||
405f63da MM |
2885 | /* If this is a complex value with each part being smaller than a |
2886 | word, the usual calling sequence will likely pack the pieces into | |
2887 | a single register. Unfortunately, SUBREG of hard registers only | |
2888 | deals in terms of words, so we have a problem converting input | |
2889 | arguments to the CONCAT of two registers that is used elsewhere | |
2890 | for complex values. If this is before reload, we can copy it into | |
2891 | memory and reload. FIXME, we should see about using extract and | |
2892 | insert on integer registers, but complex short and complex char | |
2893 | variables should be rarely used. */ | |
3a94c984 | 2894 | if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD |
405f63da MM |
2895 | && (reload_in_progress | reload_completed) == 0) |
2896 | { | |
2897 | int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER); | |
2898 | int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER); | |
2899 | ||
2900 | if (packed_dest_p || packed_src_p) | |
2901 | { | |
2902 | enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT) | |
2903 | ? MODE_FLOAT : MODE_INT); | |
2904 | ||
1da68f56 RK |
2905 | enum machine_mode reg_mode |
2906 | = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1); | |
405f63da MM |
2907 | |
2908 | if (reg_mode != BLKmode) | |
2909 | { | |
2910 | rtx mem = assign_stack_temp (reg_mode, | |
2911 | GET_MODE_SIZE (mode), 0); | |
405f63da MM |
2912 | rtx cmem = change_address (mem, mode, NULL_RTX); |
2913 | ||
1da68f56 RK |
2914 | cfun->cannot_inline |
2915 | = N_("function using short complex types cannot be inline"); | |
405f63da MM |
2916 | |
2917 | if (packed_dest_p) | |
2918 | { | |
2919 | rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0); | |
2920 | emit_move_insn_1 (cmem, y); | |
2921 | return emit_move_insn_1 (sreg, mem); | |
2922 | } | |
2923 | else | |
2924 | { | |
2925 | rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0); | |
2926 | emit_move_insn_1 (mem, sreg); | |
2927 | return emit_move_insn_1 (x, cmem); | |
2928 | } | |
2929 | } | |
2930 | } | |
2931 | } | |
2932 | ||
235ae7be DM |
2933 | realpart_x = gen_realpart (submode, x); |
2934 | realpart_y = gen_realpart (submode, y); | |
2935 | imagpart_x = gen_imagpart (submode, x); | |
2936 | imagpart_y = gen_imagpart (submode, y); | |
2937 | ||
2938 | /* Show the output dies here. This is necessary for SUBREGs | |
2939 | of pseudos since we cannot track their lifetimes correctly; | |
c14c6529 RH |
2940 | hard regs shouldn't appear here except as return values. |
2941 | We never want to emit such a clobber after reload. */ | |
2942 | if (x != y | |
235ae7be DM |
2943 | && ! (reload_in_progress || reload_completed) |
2944 | && (GET_CODE (realpart_x) == SUBREG | |
2945 | || GET_CODE (imagpart_x) == SUBREG)) | |
b2e7e6fb | 2946 | { |
c14c6529 | 2947 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); |
b2e7e6fb | 2948 | } |
2638126a | 2949 | |
c937357e | 2950 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
235ae7be | 2951 | (realpart_x, realpart_y)); |
c937357e | 2952 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
235ae7be | 2953 | (imagpart_x, imagpart_y)); |
c937357e | 2954 | } |
7308a047 | 2955 | |
7a1ab50a | 2956 | return get_last_insn (); |
7308a047 RS |
2957 | } |
2958 | ||
bbf6f052 RK |
2959 | /* This will handle any multi-word mode that lacks a move_insn pattern. |
2960 | However, you will get better code if you define such patterns, | |
2961 | even if they must turn into multiple assembler instructions. */ | |
a4320483 | 2962 | else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD) |
bbf6f052 RK |
2963 | { |
2964 | rtx last_insn = 0; | |
3ef1eef4 | 2965 | rtx seq, inner; |
235ae7be | 2966 | int need_clobber; |
3a94c984 | 2967 | |
a98c9f1a RK |
2968 | #ifdef PUSH_ROUNDING |
2969 | ||
2970 | /* If X is a push on the stack, do the push now and replace | |
2971 | X with a reference to the stack pointer. */ | |
2972 | if (push_operand (x, GET_MODE (x))) | |
2973 | { | |
2974 | anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x)))); | |
2975 | x = change_address (x, VOIDmode, stack_pointer_rtx); | |
2976 | } | |
2977 | #endif | |
3a94c984 | 2978 | |
3ef1eef4 RK |
2979 | /* If we are in reload, see if either operand is a MEM whose address |
2980 | is scheduled for replacement. */ | |
2981 | if (reload_in_progress && GET_CODE (x) == MEM | |
2982 | && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0)) | |
2983 | { | |
2984 | rtx new = gen_rtx_MEM (GET_MODE (x), inner); | |
2985 | ||
3ef1eef4 | 2986 | MEM_COPY_ATTRIBUTES (new, x); |
3ef1eef4 RK |
2987 | x = new; |
2988 | } | |
2989 | if (reload_in_progress && GET_CODE (y) == MEM | |
2990 | && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0)) | |
2991 | { | |
2992 | rtx new = gen_rtx_MEM (GET_MODE (y), inner); | |
2993 | ||
3ef1eef4 | 2994 | MEM_COPY_ATTRIBUTES (new, y); |
3ef1eef4 RK |
2995 | y = new; |
2996 | } | |
2997 | ||
235ae7be | 2998 | start_sequence (); |
15a7a8ec | 2999 | |
235ae7be | 3000 | need_clobber = 0; |
bbf6f052 | 3001 | for (i = 0; |
3a94c984 | 3002 | i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; |
bbf6f052 RK |
3003 | i++) |
3004 | { | |
3005 | rtx xpart = operand_subword (x, i, 1, mode); | |
3006 | rtx ypart = operand_subword (y, i, 1, mode); | |
3007 | ||
3008 | /* If we can't get a part of Y, put Y into memory if it is a | |
3009 | constant. Otherwise, force it into a register. If we still | |
3010 | can't get a part of Y, abort. */ | |
3011 | if (ypart == 0 && CONSTANT_P (y)) | |
3012 | { | |
3013 | y = force_const_mem (mode, y); | |
3014 | ypart = operand_subword (y, i, 1, mode); | |
3015 | } | |
3016 | else if (ypart == 0) | |
3017 | ypart = operand_subword_force (y, i, mode); | |
3018 | ||
3019 | if (xpart == 0 || ypart == 0) | |
3020 | abort (); | |
3021 | ||
235ae7be DM |
3022 | need_clobber |= (GET_CODE (xpart) == SUBREG); |
3023 | ||
bbf6f052 RK |
3024 | last_insn = emit_move_insn (xpart, ypart); |
3025 | } | |
6551fa4d | 3026 | |
235ae7be DM |
3027 | seq = gen_sequence (); |
3028 | end_sequence (); | |
3029 | ||
3030 | /* Show the output dies here. This is necessary for SUBREGs | |
3031 | of pseudos since we cannot track their lifetimes correctly; | |
3032 | hard regs shouldn't appear here except as return values. | |
3033 | We never want to emit such a clobber after reload. */ | |
3034 | if (x != y | |
3035 | && ! (reload_in_progress || reload_completed) | |
3036 | && need_clobber != 0) | |
3037 | { | |
3038 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); | |
3039 | } | |
3040 | ||
3041 | emit_insn (seq); | |
3042 | ||
bbf6f052 RK |
3043 | return last_insn; |
3044 | } | |
3045 | else | |
3046 | abort (); | |
3047 | } | |
3048 | \f | |
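(Aside: the word-by-word fallback above reduces to a ceiling division of the
mode size by the word size, with one word-sized move per iteration, moving
whole words even for a ragged tail. Below is a minimal standalone sketch of
that arithmetic — not GCC code; UNITS_PER_WORD's value and the move_by_words
helper are assumptions, with memcpy standing in for the per-word move insns.)

#include <stdio.h>
#include <string.h>

#define UNITS_PER_WORD 4   /* assumed word size in bytes */

/* Move SIZE bytes from Y to X one word at a time, rounding the count
   up so the ragged tail is still moved as a whole word, just as the
   operand_subword loop above does.  */
static void
move_by_words (unsigned char *x, const unsigned char *y, int size)
{
  int nwords = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  int i;

  for (i = 0; i < nwords; i++)
    memcpy (x + i * UNITS_PER_WORD, y + i * UNITS_PER_WORD,
	    UNITS_PER_WORD);
}

int
main (void)
{
  unsigned char src[12] = "hello world";
  unsigned char dst[12];

  move_by_words (dst, src, 10);	  /* 10 bytes -> 3 word moves */
  printf ("%s\n", (char *) dst);  /* the NUL rode along in word 3 */
  return 0;
}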
3049 | /* Pushing data onto the stack. */ | |
3050 | ||
3051 | /* Push a block of length SIZE (perhaps variable) | |
3052 | and return an rtx to address the beginning of the block. | |
3053 | Note that it is not possible for the value returned to be a QUEUED. | |
3054 | The value may be virtual_outgoing_args_rtx. | |
3055 | ||
3056 | EXTRA is the number of bytes of padding to push in addition to SIZE. | |
3057 | BELOW nonzero means this padding comes at low addresses; | |
3058 | otherwise, the padding comes at high addresses. */ | |
3059 | ||
3060 | rtx | |
3061 | push_block (size, extra, below) | |
3062 | rtx size; | |
3063 | int extra, below; | |
3064 | { | |
3065 | register rtx temp; | |
88f63c77 RK |
3066 | |
3067 | size = convert_modes (Pmode, ptr_mode, size, 1); | |
bbf6f052 RK |
3068 | if (CONSTANT_P (size)) |
3069 | anti_adjust_stack (plus_constant (size, extra)); | |
3070 | else if (GET_CODE (size) == REG && extra == 0) | |
3071 | anti_adjust_stack (size); | |
3072 | else | |
3073 | { | |
ce48579b | 3074 | temp = copy_to_mode_reg (Pmode, size); |
bbf6f052 | 3075 | if (extra != 0) |
906c4e36 | 3076 | temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra), |
bbf6f052 RK |
3077 | temp, 0, OPTAB_LIB_WIDEN); |
3078 | anti_adjust_stack (temp); | |
3079 | } | |
3080 | ||
f73ad30e JH |
3081 | #ifndef STACK_GROWS_DOWNWARD |
3082 | #ifdef ARGS_GROW_DOWNWARD | |
3083 | if (!ACCUMULATE_OUTGOING_ARGS) | |
bbf6f052 | 3084 | #else |
f73ad30e JH |
3085 | if (0) |
3086 | #endif | |
3087 | #else | |
3088 | if (1) | |
bbf6f052 | 3089 | #endif |
f73ad30e | 3090 | { |
f73ad30e JH |
3091 | /* Return the lowest stack address when STACK or ARGS grow downward and |
3092 | we are not accumulating outgoing arguments (the c4x port uses such |
3093 | conventions). */ | |
3094 | temp = virtual_outgoing_args_rtx; | |
3095 | if (extra != 0 && below) | |
3096 | temp = plus_constant (temp, extra); | |
3097 | } | |
3098 | else | |
3099 | { | |
3100 | if (GET_CODE (size) == CONST_INT) | |
3101 | temp = plus_constant (virtual_outgoing_args_rtx, | |
3a94c984 | 3102 | -INTVAL (size) - (below ? 0 : extra)); |
f73ad30e JH |
3103 | else if (extra != 0 && !below) |
3104 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
3bdf5ad1 | 3105 | negate_rtx (Pmode, plus_constant (size, extra))); |
f73ad30e JH |
3106 | else |
3107 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
3108 | negate_rtx (Pmode, size)); | |
3109 | } | |
bbf6f052 RK |
3110 | |
3111 | return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp); | |
3112 | } | |
3113 | ||
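(A rough standalone picture of what push_block computes for a constant SIZE,
assuming a downward-growing stack and downward-growing args; plain integers
stand in for rtx values, and the adjusted sp approximates
virtual_outgoing_args_rtx. The initial sp value is made up. Illustration
only, not GCC code.)

#include <stdio.h>

int
main (void)
{
  unsigned long sp = 0x1000;	 /* assumed stack pointer on entry */
  unsigned long size = 32, extra = 8;
  unsigned long block;
  int below = 1;		 /* padding at the low-address end */

  /* anti_adjust_stack (plus_constant (size, extra)) */
  sp -= size + extra;

  /* The block begins at the bottom of the new space; when the
     padding sits below the data, step over it.  */
  block = sp + (below ? extra : 0);

  printf ("sp %#lx, block at %#lx\n", sp, block);
  return 0;
}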
bbf6f052 | 3114 | |
921b3427 RK |
3115 | /* Return an rtx for the address of the beginning of an as-if-it-was-pushed |
3116 | block of SIZE bytes. */ | |
3117 | ||
3118 | static rtx | |
3119 | get_push_address (size) | |
3a94c984 | 3120 | int size; |
921b3427 RK |
3121 | { |
3122 | register rtx temp; | |
3123 | ||
3124 | if (STACK_PUSH_CODE == POST_DEC) | |
38a448ca | 3125 | temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size)); |
921b3427 | 3126 | else if (STACK_PUSH_CODE == POST_INC) |
38a448ca | 3127 | temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size)); |
921b3427 RK |
3128 | else |
3129 | temp = stack_pointer_rtx; | |
3130 | ||
c85f7c16 | 3131 | return copy_to_reg (temp); |
921b3427 RK |
3132 | } |
3133 | ||
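(The compensation above can be pictured with plain integers: after a push of
SIZE bytes, where the data sits relative to the current sp depends on whether
the push insn modified sp before or after storing. A sketch, not GCC's rtx
machinery; the enum and helper name are invented for illustration.)

#include <stdio.h>

enum push_code { PRE_DEC, PRE_INC, POST_DEC, POST_INC };

/* Where did the data of a SIZE-byte push land, given the current
   stack pointer?  Mirrors the three cases in get_push_address.  */
static unsigned long
pushed_data_address (enum push_code code, unsigned long sp,
		     unsigned long size)
{
  switch (code)
    {
    case POST_DEC: return sp + size; /* sp already stepped below it */
    case POST_INC: return sp - size; /* sp already stepped above it */
    default:	   return sp;	     /* pre-modify: sp points at it */
    }
}

int
main (void)
{
  printf ("%#lx\n", pushed_data_address (POST_DEC, 0x1000, 8));
  return 0;
}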
566aa174 JH |
3134 | /* Emit single push insn. */ |
3135 | static void | |
3136 | emit_single_push_insn (mode, x, type) | |
3137 | rtx x; | |
3138 | enum machine_mode mode; | |
3139 | tree type; | |
3140 | { | |
3141 | #ifdef PUSH_ROUNDING | |
3142 | rtx dest_addr; | |
3143 | int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); | |
3144 | rtx dest; | |
3145 | ||
3146 | if (GET_MODE_SIZE (mode) == rounded_size) | |
3147 | dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); | |
3148 | else | |
3149 | { | |
3150 | #ifdef STACK_GROWS_DOWNWARD | |
3151 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | |
3152 | GEN_INT (-rounded_size)); | |
3153 | #else | |
3154 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | |
3155 | GEN_INT (rounded_size)); | |
3156 | #endif | |
3157 | dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); | |
3158 | } | |
3159 | ||
3160 | dest = gen_rtx_MEM (mode, dest_addr); | |
3161 | ||
3162 | stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); | |
3163 | ||
3164 | if (type != 0) | |
3165 | { | |
3166 | set_mem_attributes (dest, type, 1); | |
3167 | /* Function incoming arguments may overlap with sibling call | |
3168 | outgoing arguments and we cannot allow reordering of reads | |
3169 | from function arguments with stores to outgoing arguments | |
3170 | of sibling calls. */ | |
3171 | MEM_ALIAS_SET (dest) = 0; | |
3172 | } | |
3173 | emit_move_insn (dest, x); | |
3174 | #else | |
3175 | abort (); |
3176 | #endif | |
3177 | } | |
3178 | ||
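(Sketch of the size arithmetic above, under an assumed PUSH_ROUNDING that
rounds to 4-byte slots on a downward-growing stack: when the mode is narrower
than what the machine pushes, sp is pre-modified by the full rounded amount
and the data is stored at the new sp. The rounding rule here is invented;
real ports define their own. Illustration only.)

#include <stdio.h>

/* Assumed rounding rule; real ports define their own PUSH_ROUNDING.  */
#define PUSH_ROUNDING(BYTES) (((BYTES) + 3) & ~3)

int
main (void)
{
  unsigned long sp = 0x1000;
  unsigned int size = 2;		   /* e.g. a 2-byte mode */
  unsigned int rounded = PUSH_ROUNDING (size);

  if (size == rounded)
    sp -= size;		  /* plain pre-decrement push */
  else
    sp -= rounded;	  /* PRE_MODIFY by the rounded amount */

  /* Either way the pushed value is stored at the new sp.  */
  printf ("data at %#lx, sp moved by %u\n", sp, rounded);
  return 0;
}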
bbf6f052 RK |
3179 | /* Generate code to push X onto the stack, assuming it has mode MODE and |
3180 | type TYPE. | |
3181 | MODE is redundant except when X is a CONST_INT (since they don't | |
3182 | carry mode info). | |
3183 | SIZE is an rtx for the size of data to be copied (in bytes), | |
3184 | needed only if X is BLKmode. | |
3185 | ||
f1eaaf73 | 3186 | ALIGN (in bits) is maximum alignment we can assume. |
bbf6f052 | 3187 | |
cd048831 RK |
3188 | If PARTIAL and REG are both nonzero, then copy that many of the first |
3189 | words of X into registers starting with REG, and push the rest of X. | |
bbf6f052 RK |
3190 | The amount of space pushed is decreased by PARTIAL words, |
3191 | rounded *down* to a multiple of PARM_BOUNDARY. | |
3192 | REG must be a hard register in this case. | |
cd048831 RK |
3193 | If REG is zero but PARTIAL is not, take all other actions for an |
3194 | argument partially in registers, but do not actually load any | |
3195 | registers. | |
bbf6f052 RK |
3196 | |
3197 | EXTRA is the amount in bytes of extra space to leave next to this arg. | |
6dc42e49 | 3198 | This is ignored if an argument block has already been allocated. |
bbf6f052 RK |
3199 | |
3200 | On a machine that lacks real push insns, ARGS_ADDR is the address of | |
3201 | the bottom of the argument block for this call. We use indexing off there | |
3202 | to store the arg. On machines with push insns, ARGS_ADDR is 0 when an |
3203 | argument block has not been preallocated. | |
3204 | ||
e5e809f4 JL |
3205 | ARGS_SO_FAR is the size of args previously pushed for this call. |
3206 | ||
3207 | REG_PARM_STACK_SPACE is nonzero if functions require stack space | |
3208 | for arguments passed in registers. If nonzero, it will be the number | |
3209 | of bytes required. */ | |
bbf6f052 RK |
3210 | |
3211 | void | |
3212 | emit_push_insn (x, mode, type, size, align, partial, reg, extra, | |
4fc026cd CM |
3213 | args_addr, args_so_far, reg_parm_stack_space, |
3214 | alignment_pad) | |
bbf6f052 RK |
3215 | register rtx x; |
3216 | enum machine_mode mode; | |
3217 | tree type; | |
3218 | rtx size; | |
729a2125 | 3219 | unsigned int align; |
bbf6f052 RK |
3220 | int partial; |
3221 | rtx reg; | |
3222 | int extra; | |
3223 | rtx args_addr; | |
3224 | rtx args_so_far; | |
e5e809f4 | 3225 | int reg_parm_stack_space; |
4fc026cd | 3226 | rtx alignment_pad; |
bbf6f052 RK |
3227 | { |
3228 | rtx xinner; | |
3229 | enum direction stack_direction | |
3230 | #ifdef STACK_GROWS_DOWNWARD | |
3231 | = downward; | |
3232 | #else | |
3233 | = upward; | |
3234 | #endif | |
3235 | ||
3236 | /* Decide where to pad the argument: `downward' for below, | |
3237 | `upward' for above, or `none' for don't pad it. | |
3238 | Default is below for small data on big-endian machines; else above. */ | |
3239 | enum direction where_pad = FUNCTION_ARG_PADDING (mode, type); | |
3240 | ||
3241 | /* Invert direction if stack is post-update. */ | |
3242 | if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC) | |
3243 | if (where_pad != none) | |
3244 | where_pad = (where_pad == downward ? upward : downward); | |
3245 | ||
3246 | xinner = x = protect_from_queue (x, 0); | |
3247 | ||
3248 | if (mode == BLKmode) | |
3249 | { | |
3250 | /* Copy a block into the stack, entirely or partially. */ | |
3251 | ||
3252 | register rtx temp; | |
3253 | int used = partial * UNITS_PER_WORD; | |
3254 | int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT); | |
3255 | int skip; | |
3a94c984 | 3256 | |
bbf6f052 RK |
3257 | if (size == 0) |
3258 | abort (); | |
3259 | ||
3260 | used -= offset; | |
3261 | ||
3262 | /* USED is now the # of bytes we need not copy to the stack | |
3263 | because registers will take care of them. */ | |
3264 | ||
3265 | if (partial != 0) | |
3266 | xinner = change_address (xinner, BLKmode, | |
3267 | plus_constant (XEXP (xinner, 0), used)); | |
3268 | ||
3269 | /* If the partial register-part of the arg counts in its stack size, | |
3270 | skip the part of stack space corresponding to the registers. | |
3271 | Otherwise, start copying to the beginning of the stack space, | |
3272 | by setting SKIP to 0. */ | |
e5e809f4 | 3273 | skip = (reg_parm_stack_space == 0) ? 0 : used; |
bbf6f052 RK |
3274 | |
3275 | #ifdef PUSH_ROUNDING | |
3276 | /* Do it with several push insns if that doesn't take lots of insns | |
3277 | and if there is no difficulty with push insns that skip bytes | |
3278 | on the stack for alignment purposes. */ | |
3279 | if (args_addr == 0 | |
f73ad30e | 3280 | && PUSH_ARGS |
bbf6f052 RK |
3281 | && GET_CODE (size) == CONST_INT |
3282 | && skip == 0 | |
15914757 | 3283 | && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align)) |
bbf6f052 RK |
3284 | /* Here we avoid the case of a structure whose weak alignment |
3285 | forces many pushes of a small amount of data, | |
3286 | and such small pushes do rounding that causes trouble. */ | |
e1565e65 | 3287 | && ((! SLOW_UNALIGNED_ACCESS (word_mode, align)) |
19caa751 | 3288 | || align >= BIGGEST_ALIGNMENT |
f1eaaf73 DE |
3289 | || (PUSH_ROUNDING (align / BITS_PER_UNIT) |
3290 | == (align / BITS_PER_UNIT))) | |
bbf6f052 RK |
3291 | && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size)) |
3292 | { | |
3293 | /* Push padding now if padding above and stack grows down, | |
3294 | or if padding below and stack grows up. | |
3295 | But if space already allocated, this has already been done. */ | |
3296 | if (extra && args_addr == 0 | |
3297 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3298 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 | 3299 | |
566aa174 | 3300 | move_by_pieces (NULL, xinner, INTVAL (size) - used, align); |
921b3427 | 3301 | |
7d384cc0 | 3302 | if (current_function_check_memory_usage && ! in_check_memory_usage) |
921b3427 RK |
3303 | { |
3304 | rtx temp; | |
3a94c984 | 3305 | |
956d6950 | 3306 | in_check_memory_usage = 1; |
3a94c984 | 3307 | temp = get_push_address (INTVAL (size) - used); |
c85f7c16 | 3308 | if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type)) |
ebb1b59a BS |
3309 | emit_library_call (chkr_copy_bitmap_libfunc, |
3310 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp, | |
3311 | Pmode, XEXP (xinner, 0), Pmode, | |
3a94c984 | 3312 | GEN_INT (INTVAL (size) - used), |
921b3427 RK |
3313 | TYPE_MODE (sizetype)); |
3314 | else | |
ebb1b59a BS |
3315 | emit_library_call (chkr_set_right_libfunc, |
3316 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp, | |
3317 | Pmode, GEN_INT (INTVAL (size) - used), | |
921b3427 | 3318 | TYPE_MODE (sizetype), |
956d6950 JL |
3319 | GEN_INT (MEMORY_USE_RW), |
3320 | TYPE_MODE (integer_type_node)); | |
3321 | in_check_memory_usage = 0; | |
921b3427 | 3322 | } |
bbf6f052 RK |
3323 | } |
3324 | else | |
3a94c984 | 3325 | #endif /* PUSH_ROUNDING */ |
bbf6f052 | 3326 | { |
7ab923cc JJ |
3327 | rtx target; |
3328 | ||
bbf6f052 RK |
3329 | /* Otherwise make space on the stack and copy the data |
3330 | to the address of that space. */ | |
3331 | ||
3332 | /* Deduct words put into registers from the size we must copy. */ | |
3333 | if (partial != 0) | |
3334 | { | |
3335 | if (GET_CODE (size) == CONST_INT) | |
906c4e36 | 3336 | size = GEN_INT (INTVAL (size) - used); |
bbf6f052 RK |
3337 | else |
3338 | size = expand_binop (GET_MODE (size), sub_optab, size, | |
906c4e36 RK |
3339 | GEN_INT (used), NULL_RTX, 0, |
3340 | OPTAB_LIB_WIDEN); | |
bbf6f052 RK |
3341 | } |
3342 | ||
3343 | /* Get the address of the stack space. | |
3344 | In this case, we do not deal with EXTRA separately. | |
3345 | A single stack adjust will do. */ | |
3346 | if (! args_addr) | |
3347 | { | |
3348 | temp = push_block (size, extra, where_pad == downward); | |
3349 | extra = 0; | |
3350 | } | |
3351 | else if (GET_CODE (args_so_far) == CONST_INT) | |
3352 | temp = memory_address (BLKmode, | |
3353 | plus_constant (args_addr, | |
3354 | skip + INTVAL (args_so_far))); | |
3355 | else | |
3356 | temp = memory_address (BLKmode, | |
38a448ca RH |
3357 | plus_constant (gen_rtx_PLUS (Pmode, |
3358 | args_addr, | |
3359 | args_so_far), | |
bbf6f052 | 3360 | skip)); |
7d384cc0 | 3361 | if (current_function_check_memory_usage && ! in_check_memory_usage) |
921b3427 | 3362 | { |
956d6950 | 3363 | in_check_memory_usage = 1; |
921b3427 | 3364 | target = copy_to_reg (temp); |
c85f7c16 | 3365 | if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type)) |
ebb1b59a BS |
3366 | emit_library_call (chkr_copy_bitmap_libfunc, |
3367 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, | |
6a9c4aed MK |
3368 | target, Pmode, |
3369 | XEXP (xinner, 0), Pmode, | |
921b3427 RK |
3370 | size, TYPE_MODE (sizetype)); |
3371 | else | |
ebb1b59a BS |
3372 | emit_library_call (chkr_set_right_libfunc, |
3373 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, | |
6a9c4aed | 3374 | target, Pmode, |
921b3427 | 3375 | size, TYPE_MODE (sizetype), |
956d6950 JL |
3376 | GEN_INT (MEMORY_USE_RW), |
3377 | TYPE_MODE (integer_type_node)); | |
3378 | in_check_memory_usage = 0; | |
921b3427 | 3379 | } |
bbf6f052 | 3380 | |
3a94c984 | 3381 | target = gen_rtx_MEM (BLKmode, temp); |
7ab923cc | 3382 | |
3a94c984 KH |
3383 | if (type != 0) |
3384 | { | |
3385 | set_mem_attributes (target, type, 1); | |
3386 | /* Function incoming arguments may overlap with sibling call | |
3387 | outgoing arguments and we cannot allow reordering of reads | |
3388 | from function arguments with stores to outgoing arguments | |
3389 | of sibling calls. */ | |
3390 | MEM_ALIAS_SET (target) = 0; | |
3391 | } | |
7ab923cc | 3392 | |
bbf6f052 RK |
3393 | /* TEMP is the address of the block. Copy the data there. */ |
3394 | if (GET_CODE (size) == CONST_INT | |
729a2125 | 3395 | && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)) |
bbf6f052 | 3396 | { |
7ab923cc | 3397 | move_by_pieces (target, xinner, INTVAL (size), align); |
bbf6f052 RK |
3398 | goto ret; |
3399 | } | |
e5e809f4 | 3400 | else |
bbf6f052 | 3401 | { |
19caa751 | 3402 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); |
e5e809f4 | 3403 | enum machine_mode mode; |
3bdf5ad1 | 3404 | |
e5e809f4 JL |
3405 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
3406 | mode != VOIDmode; | |
3407 | mode = GET_MODE_WIDER_MODE (mode)) | |
c841050e | 3408 | { |
e5e809f4 | 3409 | enum insn_code code = movstr_optab[(int) mode]; |
a995e389 | 3410 | insn_operand_predicate_fn pred; |
e5e809f4 JL |
3411 | |
3412 | if (code != CODE_FOR_nothing | |
3413 | && ((GET_CODE (size) == CONST_INT | |
3414 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
3415 | <= (GET_MODE_MASK (mode) >> 1))) | |
3416 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) | |
a995e389 RH |
3417 | && (!(pred = insn_data[(int) code].operand[0].predicate) |
3418 | || ((*pred) (target, BLKmode))) | |
3419 | && (!(pred = insn_data[(int) code].operand[1].predicate) | |
3420 | || ((*pred) (xinner, BLKmode))) | |
3421 | && (!(pred = insn_data[(int) code].operand[3].predicate) | |
3422 | || ((*pred) (opalign, VOIDmode)))) | |
e5e809f4 JL |
3423 | { |
3424 | rtx op2 = convert_to_mode (mode, size, 1); | |
3425 | rtx last = get_last_insn (); | |
3426 | rtx pat; | |
3427 | ||
a995e389 RH |
3428 | pred = insn_data[(int) code].operand[2].predicate; |
3429 | if (pred != 0 && ! (*pred) (op2, mode)) | |
e5e809f4 JL |
3430 | op2 = copy_to_mode_reg (mode, op2); |
3431 | ||
3432 | pat = GEN_FCN ((int) code) (target, xinner, | |
3433 | op2, opalign); | |
3434 | if (pat) | |
3435 | { | |
3436 | emit_insn (pat); | |
3437 | goto ret; | |
3438 | } | |
3439 | else | |
3440 | delete_insns_since (last); | |
3441 | } | |
c841050e | 3442 | } |
bbf6f052 | 3443 | } |
bbf6f052 | 3444 | |
f73ad30e JH |
3445 | if (!ACCUMULATE_OUTGOING_ARGS) |
3446 | { | |
3447 | /* If the source is referenced relative to the stack pointer, | |
3448 | copy it to another register to stabilize it. We do not need | |
3449 | to do this if we know that we won't be changing sp. */ | |
bbf6f052 | 3450 | |
f73ad30e JH |
3451 | if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp) |
3452 | || reg_mentioned_p (virtual_outgoing_args_rtx, temp)) | |
3453 | temp = copy_to_reg (temp); | |
3454 | } | |
bbf6f052 RK |
3455 | |
3456 | /* Make inhibit_defer_pop nonzero around the library call | |
3457 | to force it to pop the bcopy-arguments right away. */ | |
3458 | NO_DEFER_POP; | |
3459 | #ifdef TARGET_MEM_FUNCTIONS | |
ebb1b59a | 3460 | emit_library_call (memcpy_libfunc, LCT_NORMAL, |
bbf6f052 | 3461 | VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode, |
0fa83258 RK |
3462 | convert_to_mode (TYPE_MODE (sizetype), |
3463 | size, TREE_UNSIGNED (sizetype)), | |
26ba80fc | 3464 | TYPE_MODE (sizetype)); |
bbf6f052 | 3465 | #else |
ebb1b59a | 3466 | emit_library_call (bcopy_libfunc, LCT_NORMAL, |
bbf6f052 | 3467 | VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode, |
3b6f75e2 JW |
3468 | convert_to_mode (TYPE_MODE (integer_type_node), |
3469 | size, | |
3470 | TREE_UNSIGNED (integer_type_node)), | |
3471 | TYPE_MODE (integer_type_node)); | |
bbf6f052 RK |
3472 | #endif |
3473 | OK_DEFER_POP; | |
3474 | } | |
3475 | } | |
3476 | else if (partial > 0) | |
3477 | { | |
3478 | /* Scalar partly in registers. */ | |
3479 | ||
3480 | int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD; | |
3481 | int i; | |
3482 | int not_stack; | |
3483 | /* # words of start of argument | |
3484 | that we must make space for but need not store. */ | |
3485 | int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD); | |
3486 | int args_offset = INTVAL (args_so_far); | |
3487 | int skip; | |
3488 | ||
3489 | /* Push padding now if padding above and stack grows down, | |
3490 | or if padding below and stack grows up. | |
3491 | But if space already allocated, this has already been done. */ | |
3492 | if (extra && args_addr == 0 | |
3493 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3494 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 RK |
3495 | |
3496 | /* If we make space by pushing it, we might as well push | |
3497 | the real data. Otherwise, we can leave OFFSET nonzero | |
3498 | and leave the space uninitialized. */ | |
3499 | if (args_addr == 0) | |
3500 | offset = 0; | |
3501 | ||
3502 | /* Now NOT_STACK gets the number of words that we don't need to | |
3503 | allocate on the stack. */ | |
3504 | not_stack = partial - offset; | |
3505 | ||
3506 | /* If the partial register-part of the arg counts in its stack size, | |
3507 | skip the part of stack space corresponding to the registers. | |
3508 | Otherwise, start copying to the beginning of the stack space, | |
3509 | by setting SKIP to 0. */ | |
e5e809f4 | 3510 | skip = (reg_parm_stack_space == 0) ? 0 : not_stack; |
bbf6f052 RK |
3511 | |
3512 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) | |
3513 | x = validize_mem (force_const_mem (mode, x)); | |
3514 | ||
3515 | /* If X is a hard register in a non-integer mode, copy it into a pseudo; | |
3516 | SUBREGs of such registers are not allowed. */ | |
3517 | if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER | |
3518 | && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)) | |
3519 | x = copy_to_reg (x); | |
3520 | ||
3521 | /* Loop over all the words allocated on the stack for this arg. */ | |
3522 | /* We can do it by words, because any scalar bigger than a word | |
3523 | has a size a multiple of a word. */ | |
3524 | #ifndef PUSH_ARGS_REVERSED | |
3525 | for (i = not_stack; i < size; i++) | |
3526 | #else | |
3527 | for (i = size - 1; i >= not_stack; i--) | |
3528 | #endif | |
3529 | if (i >= not_stack + offset) | |
3530 | emit_push_insn (operand_subword_force (x, i, mode), | |
906c4e36 RK |
3531 | word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX, |
3532 | 0, args_addr, | |
3533 | GEN_INT (args_offset + ((i - not_stack + skip) | |
e5e809f4 | 3534 | * UNITS_PER_WORD)), |
4fc026cd | 3535 | reg_parm_stack_space, alignment_pad); |
bbf6f052 RK |
3536 | } |
3537 | else | |
3538 | { | |
3539 | rtx addr; | |
921b3427 | 3540 | rtx target = NULL_RTX; |
3bdf5ad1 | 3541 | rtx dest; |
bbf6f052 RK |
3542 | |
3543 | /* Push padding now if padding above and stack grows down, | |
3544 | or if padding below and stack grows up. | |
3545 | But if space already allocated, this has already been done. */ | |
3546 | if (extra && args_addr == 0 | |
3547 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3548 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 RK |
3549 | |
3550 | #ifdef PUSH_ROUNDING | |
f73ad30e | 3551 | if (args_addr == 0 && PUSH_ARGS) |
566aa174 | 3552 | emit_single_push_insn (mode, x, type); |
bbf6f052 RK |
3553 | else |
3554 | #endif | |
921b3427 RK |
3555 | { |
3556 | if (GET_CODE (args_so_far) == CONST_INT) | |
3557 | addr | |
3558 | = memory_address (mode, | |
3a94c984 | 3559 | plus_constant (args_addr, |
921b3427 | 3560 | INTVAL (args_so_far))); |
3a94c984 | 3561 | else |
38a448ca RH |
3562 | addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr, |
3563 | args_so_far)); | |
921b3427 | 3564 | target = addr; |
566aa174 JH |
3565 | dest = gen_rtx_MEM (mode, addr); |
3566 | if (type != 0) | |
3567 | { | |
3568 | set_mem_attributes (dest, type, 1); | |
3569 | /* Function incoming arguments may overlap with sibling call | |
3570 | outgoing arguments and we cannot allow reordering of reads | |
3571 | from function arguments with stores to outgoing arguments | |
3572 | of sibling calls. */ | |
3573 | MEM_ALIAS_SET (dest) = 0; | |
3574 | } | |
bbf6f052 | 3575 | |
566aa174 | 3576 | emit_move_insn (dest, x); |
3bdf5ad1 | 3577 | |
566aa174 | 3578 | } |
921b3427 | 3579 | |
7d384cc0 | 3580 | if (current_function_check_memory_usage && ! in_check_memory_usage) |
921b3427 | 3581 | { |
956d6950 | 3582 | in_check_memory_usage = 1; |
921b3427 RK |
3583 | if (target == 0) |
3584 | target = get_push_address (GET_MODE_SIZE (mode)); | |
3585 | ||
c85f7c16 | 3586 | if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type)) |
ebb1b59a BS |
3587 | emit_library_call (chkr_copy_bitmap_libfunc, |
3588 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target, | |
3589 | Pmode, XEXP (x, 0), Pmode, | |
921b3427 RK |
3590 | GEN_INT (GET_MODE_SIZE (mode)), |
3591 | TYPE_MODE (sizetype)); | |
3592 | else | |
ebb1b59a BS |
3593 | emit_library_call (chkr_set_right_libfunc, |
3594 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target, | |
3595 | Pmode, GEN_INT (GET_MODE_SIZE (mode)), | |
921b3427 | 3596 | TYPE_MODE (sizetype), |
956d6950 JL |
3597 | GEN_INT (MEMORY_USE_RW), |
3598 | TYPE_MODE (integer_type_node)); | |
3599 | in_check_memory_usage = 0; | |
921b3427 | 3600 | } |
bbf6f052 RK |
3601 | } |
3602 | ||
3603 | ret: | |
3604 | /* If part should go in registers, copy that part | |
3605 | into the appropriate registers. Do this now, at the end, | |
3606 | since mem-to-mem copies above may do function calls. */ | |
cd048831 | 3607 | if (partial > 0 && reg != 0) |
fffa9c1d JW |
3608 | { |
3609 | /* Handle calls that pass values in multiple non-contiguous locations. | |
3610 | The Irix 6 ABI has examples of this. */ | |
3611 | if (GET_CODE (reg) == PARALLEL) | |
aac5cc16 | 3612 | emit_group_load (reg, x, -1, align); /* ??? size? */ |
fffa9c1d JW |
3613 | else |
3614 | move_block_to_reg (REGNO (reg), x, partial, mode); | |
3615 | } | |
bbf6f052 RK |
3616 | |
3617 | if (extra && args_addr == 0 && where_pad == stack_direction) | |
906c4e36 | 3618 | anti_adjust_stack (GEN_INT (extra)); |
3a94c984 | 3619 | |
3ea2292a | 3620 | if (alignment_pad && args_addr == 0) |
4fc026cd | 3621 | anti_adjust_stack (alignment_pad); |
bbf6f052 RK |
3622 | } |
3623 | \f | |
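(The register/stack split at the top of the BLKmode path above is easy to
mis-read; here is a standalone rendering of the USED/OFFSET bookkeeping under
an assumed 32-bit word and 64-bit PARM_BOUNDARY. The numbers are made up;
this is illustration, not GCC code.)

#include <stdio.h>

#define UNITS_PER_WORD 4    /* assumed word size in bytes */
#define PARM_BOUNDARY  64   /* assumed argument slot alignment, bits */

int
main (void)
{
  int partial = 3;			    /* words passed in regs */
  int used = partial * UNITS_PER_WORD;	    /* 12 bytes in regs */
  int offset = used % (PARM_BOUNDARY / 8);  /* 4 ragged bytes */

  /* Only whole PARM_BOUNDARY slots are really skipped; the ragged
     tail is copied to the stack anyway.  */
  used -= offset;

  printf ("registers fully cover %d bytes; %d ragged bytes are\n"
	  "copied to the stack anyway\n", used, offset);
  return 0;
}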
296b4ed9 RK |
3624 | /* Return X if X can be used as a subtarget in a sequence of arithmetic |
3625 | operations. */ | |
3626 | ||
3627 | static rtx | |
3628 | get_subtarget (x) | |
3629 | rtx x; | |
3630 | { | |
3631 | return ((x == 0 | |
3632 | /* Only registers can be subtargets. */ | |
3633 | || GET_CODE (x) != REG | |
3634 | /* If the register is readonly, it can't be set more than once. */ | |
3635 | || RTX_UNCHANGING_P (x) | |
3636 | /* Don't use hard regs to avoid extending their life. */ | |
3637 | || REGNO (x) < FIRST_PSEUDO_REGISTER | |
3638 | /* Avoid subtargets inside loops, | |
3639 | since they hide some invariant expressions. */ | |
3640 | || preserve_subexpressions_p ()) | |
3641 | ? 0 : x); | |
3642 | } | |
3643 | ||
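(get_subtarget reads more naturally as a positive predicate. Below is a
distilled standalone version of the same tests, with a struct of flags
standing in for the rtx queries and a plain flag for the global
preserve_subexpressions_p condition; every name here is invented for
illustration.)

#include <stdbool.h>
#include <stdio.h>

struct candidate { bool is_reg, unchanging, is_hard, in_loop; };

/* Mirror of the conditions above: true when X may be reused as a
   subtarget for intermediate results.  */
static bool
usable_as_subtarget (const struct candidate *x)
{
  return x != NULL
	 && x->is_reg	       /* only registers qualify */
	 && !x->unchanging     /* read-only regs can be set only once */
	 && !x->is_hard	       /* don't extend hard-reg lifetimes */
	 && !x->in_loop;       /* subtargets hide loop invariants */
}

int
main (void)
{
  struct candidate r = { true, false, false, false };
  printf ("%d\n", usable_as_subtarget (&r));	/* prints 1 */
  return 0;
}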
bbf6f052 RK |
3644 | /* Expand an assignment that stores the value of FROM into TO. |
3645 | If WANT_VALUE is nonzero, return an rtx for the value of TO. | |
709f5be1 RS |
3646 | (This may contain a QUEUED rtx; |
3647 | if the value is constant, this rtx is a constant.) | |
3648 | Otherwise, the returned value is NULL_RTX. | |
bbf6f052 RK |
3649 | |
3650 | SUGGEST_REG is no longer actually used. | |
3651 | It used to mean, copy the value through a register | |
3652 | and return that register, if that is possible. | |
709f5be1 | 3653 | We now use WANT_VALUE to decide whether to do this. */ |
bbf6f052 RK |
3654 | |
3655 | rtx | |
3656 | expand_assignment (to, from, want_value, suggest_reg) | |
3657 | tree to, from; | |
3658 | int want_value; | |
c5c76735 | 3659 | int suggest_reg ATTRIBUTE_UNUSED; |
bbf6f052 RK |
3660 | { |
3661 | register rtx to_rtx = 0; | |
3662 | rtx result; | |
3663 | ||
3664 | /* Don't crash if the lhs of the assignment was erroneous. */ | |
3665 | ||
3666 | if (TREE_CODE (to) == ERROR_MARK) | |
709f5be1 RS |
3667 | { |
3668 | result = expand_expr (from, NULL_RTX, VOIDmode, 0); | |
3669 | return want_value ? result : NULL_RTX; | |
3670 | } | |
bbf6f052 RK |
3671 | |
3672 | /* Assignment of a structure component needs special treatment | |
3673 | if the structure component's rtx is not simply a MEM. | |
6be58303 JW |
3674 | Assignment of an array element at a constant index, and assignment of |
3675 | an array element in an unaligned packed structure field, have the same |
3676 | problem. */ | |
bbf6f052 | 3677 | |
08293add | 3678 | if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF |
b4e3fabb | 3679 | || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF) |
bbf6f052 RK |
3680 | { |
3681 | enum machine_mode mode1; | |
770ae6cc | 3682 | HOST_WIDE_INT bitsize, bitpos; |
7bb0943f | 3683 | tree offset; |
bbf6f052 RK |
3684 | int unsignedp; |
3685 | int volatilep = 0; | |
0088fcb1 | 3686 | tree tem; |
729a2125 | 3687 | unsigned int alignment; |
0088fcb1 RK |
3688 | |
3689 | push_temp_slots (); | |
839c4796 RK |
3690 | tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, |
3691 | &unsignedp, &volatilep, &alignment); | |
bbf6f052 RK |
3692 | |
3693 | /* If we are going to use store_bit_field and extract_bit_field, | |
3694 | make sure to_rtx will be safe for multiple use. */ | |
3695 | ||
3696 | if (mode1 == VOIDmode && want_value) | |
3697 | tem = stabilize_reference (tem); | |
3698 | ||
921b3427 | 3699 | to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT); |
7bb0943f RS |
3700 | if (offset != 0) |
3701 | { | |
906c4e36 | 3702 | rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); |
7bb0943f RS |
3703 | |
3704 | if (GET_CODE (to_rtx) != MEM) | |
3705 | abort (); | |
bd070e1a RH |
3706 | |
3707 | if (GET_MODE (offset_rtx) != ptr_mode) | |
3708 | { | |
3709 | #ifdef POINTERS_EXTEND_UNSIGNED | |
822a3443 | 3710 | offset_rtx = convert_memory_address (ptr_mode, offset_rtx); |
bd070e1a RH |
3711 | #else |
3712 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); | |
3713 | #endif | |
3714 | } | |
3715 | ||
9a7b9f4f JL |
3716 | /* A constant address in TO_RTX can have VOIDmode, we must not try |
3717 | to call force_reg for that case. Avoid that case. */ | |
89752202 HB |
3718 | if (GET_CODE (to_rtx) == MEM |
3719 | && GET_MODE (to_rtx) == BLKmode | |
9a7b9f4f | 3720 | && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode |
89752202 | 3721 | && bitsize |
3a94c984 | 3722 | && (bitpos % bitsize) == 0 |
89752202 | 3723 | && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 |
19caa751 | 3724 | && alignment == GET_MODE_ALIGNMENT (mode1)) |
89752202 HB |
3725 | { |
3726 | rtx temp = change_address (to_rtx, mode1, | |
3727 | plus_constant (XEXP (to_rtx, 0), | |
3728 | (bitpos / | |
3729 | BITS_PER_UNIT))); | |
3730 | if (GET_CODE (XEXP (temp, 0)) == REG) | |
3731 | to_rtx = temp; | |
3732 | else | |
3733 | to_rtx = change_address (to_rtx, mode1, | |
3734 | force_reg (GET_MODE (XEXP (temp, 0)), | |
3735 | XEXP (temp, 0))); | |
3736 | bitpos = 0; | |
3737 | } | |
3738 | ||
7bb0943f | 3739 | to_rtx = change_address (to_rtx, VOIDmode, |
38a448ca | 3740 | gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0), |
c5c76735 JL |
3741 | force_reg (ptr_mode, |
3742 | offset_rtx))); | |
7bb0943f | 3743 | } |
c5c76735 | 3744 | |
bbf6f052 RK |
3745 | if (volatilep) |
3746 | { | |
3747 | if (GET_CODE (to_rtx) == MEM) | |
01188446 JW |
3748 | { |
3749 | /* When the offset is zero, to_rtx is the address of the | |
3750 | structure we are storing into, and hence may be shared. | |
3751 | We must make a new MEM before setting the volatile bit. */ | |
3752 | if (offset == 0) | |
effbcc6a RK |
3753 | to_rtx = copy_rtx (to_rtx); |
3754 | ||
01188446 JW |
3755 | MEM_VOLATILE_P (to_rtx) = 1; |
3756 | } | |
bbf6f052 RK |
3757 | #if 0 /* This was turned off because, when a field is volatile |
3758 | in an object which is not volatile, the object may be in a register, | |
3759 | and then we would abort over here. */ | |
3760 | else | |
3761 | abort (); | |
3762 | #endif | |
3763 | } | |
3764 | ||
956d6950 JL |
3765 | if (TREE_CODE (to) == COMPONENT_REF |
3766 | && TREE_READONLY (TREE_OPERAND (to, 1))) | |
3767 | { | |
8bd6ecc2 | 3768 | if (offset == 0) |
956d6950 JL |
3769 | to_rtx = copy_rtx (to_rtx); |
3770 | ||
3771 | RTX_UNCHANGING_P (to_rtx) = 1; | |
3772 | } | |
3773 | ||
921b3427 | 3774 | /* Check the access. */ |
7d384cc0 | 3775 | if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM) |
921b3427 RK |
3776 | { |
3777 | rtx to_addr; | |
3778 | int size; | |
3779 | int best_mode_size; | |
3780 | enum machine_mode best_mode; | |
3781 | ||
3782 | best_mode = get_best_mode (bitsize, bitpos, | |
3783 | TYPE_ALIGN (TREE_TYPE (tem)), | |
3784 | mode1, volatilep); | |
3785 | if (best_mode == VOIDmode) | |
3786 | best_mode = QImode; | |
3787 | ||
3788 | best_mode_size = GET_MODE_BITSIZE (best_mode); | |
3789 | to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT)); | |
3790 | size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size); | |
3791 | size *= GET_MODE_SIZE (best_mode); | |
3792 | ||
3793 | /* Check the access right of the pointer. */ | |
ea4da9db | 3794 | in_check_memory_usage = 1; |
e9a25f70 | 3795 | if (size) |
ebb1b59a BS |
3796 | emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK, |
3797 | VOIDmode, 3, to_addr, Pmode, | |
e9a25f70 | 3798 | GEN_INT (size), TYPE_MODE (sizetype), |
956d6950 JL |
3799 | GEN_INT (MEMORY_USE_WO), |
3800 | TYPE_MODE (integer_type_node)); | |
ea4da9db | 3801 | in_check_memory_usage = 0; |
921b3427 RK |
3802 | } |
3803 | ||
a69beca1 RK |
3804 | /* If this is a varying-length object, we must get the address of |
3805 | the source and do an explicit block move. */ | |
3806 | if (bitsize < 0) | |
3807 | { | |
3808 | unsigned int from_align; | |
3809 | rtx from_rtx = expand_expr_unaligned (from, &from_align); | |
3810 | rtx inner_to_rtx | |
3811 | = change_address (to_rtx, VOIDmode, | |
3812 | plus_constant (XEXP (to_rtx, 0), | |
3813 | bitpos / BITS_PER_UNIT)); | |
3814 | ||
3815 | emit_block_move (inner_to_rtx, from_rtx, expr_size (from), | |
19caa751 | 3816 | MIN (alignment, from_align)); |
a69beca1 RK |
3817 | free_temp_slots (); |
3818 | pop_temp_slots (); | |
3819 | return to_rtx; | |
3820 | } | |
3821 | else | |
3822 | { | |
3823 | result = store_field (to_rtx, bitsize, bitpos, mode1, from, | |
3824 | (want_value | |
3825 | /* Spurious cast for HPUX compiler. */ | |
3826 | ? ((enum machine_mode) | |
3827 | TYPE_MODE (TREE_TYPE (to))) | |
3828 | : VOIDmode), | |
3829 | unsignedp, | |
a69beca1 RK |
3830 | alignment, |
3831 | int_size_in_bytes (TREE_TYPE (tem)), | |
3832 | get_alias_set (to)); | |
3833 | ||
3834 | preserve_temp_slots (result); | |
3835 | free_temp_slots (); | |
3836 | pop_temp_slots (); | |
3837 | ||
3838 | /* If the value is meaningful, convert RESULT to the proper mode. | |
3839 | Otherwise, return nothing. */ | |
3840 | return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)), | |
3841 | TYPE_MODE (TREE_TYPE (from)), | |
3842 | result, | |
3843 | TREE_UNSIGNED (TREE_TYPE (to))) | |
3844 | : NULL_RTX); | |
3845 | } | |
bbf6f052 RK |
3846 | } |
3847 | ||
cd1db108 RS |
3848 | /* If the rhs is a function call and its value is not an aggregate, |
3849 | call the function before we start to compute the lhs. | |
3850 | This is needed for correct code for cases such as | |
3851 | val = setjmp (buf) on machines where reference to val | |
1ad87b63 RK |
3852 | requires loading up part of an address in a separate insn. |
3853 | ||
1858863b JW |
3854 | Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG |
3855 | since it might be a promoted variable where the zero- or sign-extension |
3856 | needs to be done. Handling this in the normal way is safe because no | |
3857 | computation is done before the call. */ | |
1ad87b63 | 3858 | if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from) |
b35cd3c1 | 3859 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST |
1858863b JW |
3860 | && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL) |
3861 | && GET_CODE (DECL_RTL (to)) == REG)) | |
cd1db108 | 3862 | { |
0088fcb1 RK |
3863 | rtx value; |
3864 | ||
3865 | push_temp_slots (); | |
3866 | value = expand_expr (from, NULL_RTX, VOIDmode, 0); | |
cd1db108 | 3867 | if (to_rtx == 0) |
921b3427 | 3868 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO); |
aaf87c45 | 3869 | |
fffa9c1d JW |
3870 | /* Handle calls that return values in multiple non-contiguous locations. |
3871 | The Irix 6 ABI has examples of this. */ | |
3872 | if (GET_CODE (to_rtx) == PARALLEL) | |
aac5cc16 | 3873 | emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)), |
19caa751 | 3874 | TYPE_ALIGN (TREE_TYPE (from))); |
fffa9c1d | 3875 | else if (GET_MODE (to_rtx) == BLKmode) |
db3ec607 | 3876 | emit_block_move (to_rtx, value, expr_size (from), |
19caa751 | 3877 | TYPE_ALIGN (TREE_TYPE (from))); |
aaf87c45 | 3878 | else |
6419e5b0 DT |
3879 | { |
3880 | #ifdef POINTERS_EXTEND_UNSIGNED | |
ab40f612 DT |
3881 | if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE |
3882 | || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE) | |
6419e5b0 DT |
3883 | value = convert_memory_address (GET_MODE (to_rtx), value); |
3884 | #endif | |
3885 | emit_move_insn (to_rtx, value); | |
3886 | } | |
cd1db108 RS |
3887 | preserve_temp_slots (to_rtx); |
3888 | free_temp_slots (); | |
0088fcb1 | 3889 | pop_temp_slots (); |
709f5be1 | 3890 | return want_value ? to_rtx : NULL_RTX; |
cd1db108 RS |
3891 | } |
3892 | ||
bbf6f052 RK |
3893 | /* Ordinary treatment. Expand TO to get a REG or MEM rtx. |
3894 | Don't re-expand if it was expanded already (in COMPONENT_REF case). */ | |
3895 | ||
3896 | if (to_rtx == 0) | |
41472af8 MM |
3897 | { |
3898 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO); | |
3899 | if (GET_CODE (to_rtx) == MEM) | |
3900 | MEM_ALIAS_SET (to_rtx) = get_alias_set (to); | |
3901 | } | |
bbf6f052 | 3902 | |
86d38d25 | 3903 | /* Don't move directly into a return register. */ |
14a774a9 RK |
3904 | if (TREE_CODE (to) == RESULT_DECL |
3905 | && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL)) | |
86d38d25 | 3906 | { |
0088fcb1 RK |
3907 | rtx temp; |
3908 | ||
3909 | push_temp_slots (); | |
3910 | temp = expand_expr (from, 0, GET_MODE (to_rtx), 0); | |
14a774a9 RK |
3911 | |
3912 | if (GET_CODE (to_rtx) == PARALLEL) | |
3913 | emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)), | |
19caa751 | 3914 | TYPE_ALIGN (TREE_TYPE (from))); |
14a774a9 RK |
3915 | else |
3916 | emit_move_insn (to_rtx, temp); | |
3917 | ||
86d38d25 RS |
3918 | preserve_temp_slots (to_rtx); |
3919 | free_temp_slots (); | |
0088fcb1 | 3920 | pop_temp_slots (); |
709f5be1 | 3921 | return want_value ? to_rtx : NULL_RTX; |
86d38d25 RS |
3922 | } |
3923 | ||
bbf6f052 RK |
3924 | /* In case we are returning the contents of an object which overlaps |
3925 | the place the value is being stored, use a safe function when copying | |
3926 | a value through a pointer into a structure value return block. */ | |
3927 | if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF | |
3928 | && current_function_returns_struct | |
3929 | && !current_function_returns_pcc_struct) | |
3930 | { | |
0088fcb1 RK |
3931 | rtx from_rtx, size; |
3932 | ||
3933 | push_temp_slots (); | |
33a20d10 | 3934 | size = expr_size (from); |
921b3427 RK |
3935 | from_rtx = expand_expr (from, NULL_RTX, VOIDmode, |
3936 | EXPAND_MEMORY_USE_DONT); | |
3937 | ||
3938 | /* Copy the rights of the bitmap. */ | |
7d384cc0 | 3939 | if (current_function_check_memory_usage) |
ebb1b59a BS |
3940 | emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK, |
3941 | VOIDmode, 3, XEXP (to_rtx, 0), Pmode, | |
6a9c4aed | 3942 | XEXP (from_rtx, 0), Pmode, |
921b3427 RK |
3943 | convert_to_mode (TYPE_MODE (sizetype), |
3944 | size, TREE_UNSIGNED (sizetype)), | |
3945 | TYPE_MODE (sizetype)); | |
bbf6f052 RK |
3946 | |
3947 | #ifdef TARGET_MEM_FUNCTIONS | |
b215b52e | 3948 | emit_library_call (memmove_libfunc, LCT_NORMAL, |
bbf6f052 RK |
3949 | VOIDmode, 3, XEXP (to_rtx, 0), Pmode, |
3950 | XEXP (from_rtx, 0), Pmode, | |
0fa83258 RK |
3951 | convert_to_mode (TYPE_MODE (sizetype), |
3952 | size, TREE_UNSIGNED (sizetype)), | |
26ba80fc | 3953 | TYPE_MODE (sizetype)); |
bbf6f052 | 3954 | #else |
ebb1b59a | 3955 | emit_library_call (bcopy_libfunc, LCT_NORMAL, |
bbf6f052 RK |
3956 | VOIDmode, 3, XEXP (from_rtx, 0), Pmode, |
3957 | XEXP (to_rtx, 0), Pmode, | |
3b6f75e2 JW |
3958 | convert_to_mode (TYPE_MODE (integer_type_node), |
3959 | size, TREE_UNSIGNED (integer_type_node)), | |
3960 | TYPE_MODE (integer_type_node)); | |
bbf6f052 RK |
3961 | #endif |
3962 | ||
3963 | preserve_temp_slots (to_rtx); | |
3964 | free_temp_slots (); | |
0088fcb1 | 3965 | pop_temp_slots (); |
709f5be1 | 3966 | return want_value ? to_rtx : NULL_RTX; |
bbf6f052 RK |
3967 | } |
3968 | ||
3969 | /* Compute FROM and store the value in the rtx we got. */ | |
3970 | ||
0088fcb1 | 3971 | push_temp_slots (); |
bbf6f052 RK |
3972 | result = store_expr (from, to_rtx, want_value); |
3973 | preserve_temp_slots (result); | |
3974 | free_temp_slots (); | |
0088fcb1 | 3975 | pop_temp_slots (); |
709f5be1 | 3976 | return want_value ? result : NULL_RTX; |
bbf6f052 RK |
3977 | } |
3978 | ||
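(One corner of the component-ref path above is worth making concrete: when
the field's bit position is a multiple of its size and the size agrees with
the field mode's alignment, the store needs no bit-field machinery, just a
move at a byte offset. Below is a simplified standalone version of that test,
assuming 8-bit units and made-up numbers, and ignoring the extra alignment
check the real code also makes.)

#include <stdio.h>

#define BITS_PER_UNIT 8

int
main (void)
{
  int bitpos = 32, bitsize = 16, mode_align = 16;

  if (bitpos % bitsize == 0 && bitsize % mode_align == 0)
    printf ("plain store at byte offset %d\n", bitpos / BITS_PER_UNIT);
  else
    printf ("fall back to store_bit_field\n");
  return 0;
}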
3979 | /* Generate code for computing expression EXP, | |
3980 | and storing the value into TARGET. | |
bbf6f052 RK |
3981 | TARGET may contain a QUEUED rtx. |
3982 | ||
709f5be1 RS |
3983 | If WANT_VALUE is nonzero, return a copy of the value |
3984 | not in TARGET, so that we can be sure to use the proper | |
3985 | value in a containing expression even if TARGET has something | |
3986 | else stored in it. If possible, we copy the value through a pseudo | |
3987 | and return that pseudo. Or, if the value is constant, we try to | |
3988 | return the constant. In some cases, we return a pseudo | |
3989 | copied *from* TARGET. | |
3990 | ||
3991 | If the mode is BLKmode then we may return TARGET itself. | |
3992 | It turns out that in BLKmode it doesn't cause a problem, |
3993 | because C has no operators that could combine two different | |
3994 | assignments into the same BLKmode object with different values | |
3995 | with no sequence point. Will other languages need this to | |
3996 | be more thorough? | |
3997 | ||
3998 | If WANT_VALUE is 0, we return NULL, to make sure | |
3999 | to catch quickly any cases where the caller uses the value | |
4000 | and fails to set WANT_VALUE. */ | |
bbf6f052 RK |
4001 | |
4002 | rtx | |
709f5be1 | 4003 | store_expr (exp, target, want_value) |
bbf6f052 RK |
4004 | register tree exp; |
4005 | register rtx target; | |
709f5be1 | 4006 | int want_value; |
bbf6f052 RK |
4007 | { |
4008 | register rtx temp; | |
4009 | int dont_return_target = 0; | |
e5408e52 | 4010 | int dont_store_target = 0; |
bbf6f052 RK |
4011 | |
4012 | if (TREE_CODE (exp) == COMPOUND_EXPR) | |
4013 | { | |
4014 | /* Perform first part of compound expression, then assign from second | |
4015 | part. */ | |
4016 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); | |
4017 | emit_queue (); | |
709f5be1 | 4018 | return store_expr (TREE_OPERAND (exp, 1), target, want_value); |
bbf6f052 RK |
4019 | } |
4020 | else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode) | |
4021 | { | |
4022 | /* For conditional expression, get safe form of the target. Then | |
4023 | test the condition, doing the appropriate assignment on either | |
4024 | side. This avoids the creation of unnecessary temporaries. | |
4025 | For non-BLKmode, it is more efficient not to do this. */ | |
4026 | ||
4027 | rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx (); | |
4028 | ||
4029 | emit_queue (); | |
4030 | target = protect_from_queue (target, 1); | |
4031 | ||
dabf8373 | 4032 | do_pending_stack_adjust (); |
bbf6f052 RK |
4033 | NO_DEFER_POP; |
4034 | jumpifnot (TREE_OPERAND (exp, 0), lab1); | |
956d6950 | 4035 | start_cleanup_deferral (); |
709f5be1 | 4036 | store_expr (TREE_OPERAND (exp, 1), target, 0); |
956d6950 | 4037 | end_cleanup_deferral (); |
bbf6f052 RK |
4038 | emit_queue (); |
4039 | emit_jump_insn (gen_jump (lab2)); | |
4040 | emit_barrier (); | |
4041 | emit_label (lab1); | |
956d6950 | 4042 | start_cleanup_deferral (); |
709f5be1 | 4043 | store_expr (TREE_OPERAND (exp, 2), target, 0); |
956d6950 | 4044 | end_cleanup_deferral (); |
bbf6f052 RK |
4045 | emit_queue (); |
4046 | emit_label (lab2); | |
4047 | OK_DEFER_POP; | |
a3a58acc | 4048 | |
709f5be1 | 4049 | return want_value ? target : NULL_RTX; |
bbf6f052 | 4050 | } |
bbf6f052 | 4051 | else if (queued_subexp_p (target)) |
709f5be1 RS |
4052 | /* If target contains a postincrement, let's not risk |
4053 | using it as the place to generate the rhs. */ | |
bbf6f052 RK |
4054 | { |
4055 | if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode) | |
4056 | { | |
4057 | /* Expand EXP into a new pseudo. */ | |
4058 | temp = gen_reg_rtx (GET_MODE (target)); | |
4059 | temp = expand_expr (exp, temp, GET_MODE (target), 0); | |
4060 | } | |
4061 | else | |
906c4e36 | 4062 | temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0); |
709f5be1 RS |
4063 | |
4064 | /* If target is volatile, ANSI requires accessing the value | |
4065 | *from* the target, if it is accessed. So make that happen. | |
4066 | In no case return the target itself. */ | |
4067 | if (! MEM_VOLATILE_P (target) && want_value) | |
4068 | dont_return_target = 1; | |
bbf6f052 | 4069 | } |
12f06d17 CH |
4070 | else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target) |
4071 | && GET_MODE (target) != BLKmode) | |
4072 | /* If target is in memory and caller wants value in a register instead, | |
4073 | arrange that. Pass TARGET as target for expand_expr so that, | |
4074 | if EXP is another assignment, WANT_VALUE will be nonzero for it. | |
4075 | We know expand_expr will not use the target in that case. | |
4076 | Don't do this if TARGET is volatile because we are supposed | |
4077 | to write it and then read it. */ | |
4078 | { | |
1da93fe0 | 4079 | temp = expand_expr (exp, target, GET_MODE (target), 0); |
12f06d17 | 4080 | if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode) |
e5408e52 JJ |
4081 | { |
4082 | /* If TEMP is already in the desired TARGET, only copy it from | |
4083 | memory and don't store it there again. */ | |
4084 | if (temp == target | |
4085 | || (rtx_equal_p (temp, target) | |
4086 | && ! side_effects_p (temp) && ! side_effects_p (target))) | |
4087 | dont_store_target = 1; | |
4088 | temp = copy_to_reg (temp); | |
4089 | } | |
12f06d17 CH |
4090 | dont_return_target = 1; |
4091 | } | |
1499e0a8 RK |
4092 | else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) |
4093 | /* If this is a scalar in a register that is stored in a wider mode |
4094 | than the declared mode, compute the result into its declared mode | |
4095 | and then convert to the wider mode. Our value is the computed | |
4096 | expression. */ | |
4097 | { | |
5a32d038 | 4098 | /* If we don't want a value, we can do the conversion inside EXP, |
f635a84d RK |
4099 | which will often result in some optimizations. Do the conversion |
4100 | in two steps: first change the signedness, if needed, then | |
ab6c58f1 RK |
4101 | the extend. But don't do this if the type of EXP is a subtype |
4102 | of something else since then the conversion might involve | |
4103 | more than just converting modes. */ | |
4104 | if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp)) | |
4105 | && TREE_TYPE (TREE_TYPE (exp)) == 0) | |
f635a84d RK |
4106 | { |
4107 | if (TREE_UNSIGNED (TREE_TYPE (exp)) | |
4108 | != SUBREG_PROMOTED_UNSIGNED_P (target)) | |
4109 | exp | |
4110 | = convert | |
4111 | (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target), | |
4112 | TREE_TYPE (exp)), | |
4113 | exp); | |
4114 | ||
4115 | exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)), | |
4116 | SUBREG_PROMOTED_UNSIGNED_P (target)), | |
4117 | exp); | |
4118 | } | |
3a94c984 | 4119 | |
1499e0a8 | 4120 | temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); |
b258707c | 4121 | |
766f36c7 | 4122 | /* If TEMP is a volatile MEM and we want a result value, make |
f29369b9 RK |
4123 | the access now so it gets done only once. Likewise if |
4124 | it contains TARGET. */ | |
4125 | if (GET_CODE (temp) == MEM && want_value | |
4126 | && (MEM_VOLATILE_P (temp) | |
4127 | || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0)))) | |
766f36c7 RK |
4128 | temp = copy_to_reg (temp); |
4129 | ||
b258707c RS |
4130 | /* If TEMP is a VOIDmode constant, use convert_modes to make |
4131 | sure that we properly convert it. */ | |
4132 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | |
4133 | temp = convert_modes (GET_MODE (SUBREG_REG (target)), | |
4134 | TYPE_MODE (TREE_TYPE (exp)), temp, | |
4135 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
4136 | ||
1499e0a8 RK |
4137 | convert_move (SUBREG_REG (target), temp, |
4138 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
3dbecef9 JW |
4139 | |
4140 | /* If we promoted a constant, change the mode back down to match | |
4141 | target. Otherwise, the caller might get confused by a result whose | |
4142 | mode is larger than expected. */ | |
4143 | ||
4144 | if (want_value && GET_MODE (temp) != GET_MODE (target) | |
4145 | && GET_MODE (temp) != VOIDmode) | |
4146 | { | |
ddef6bc7 | 4147 | temp = gen_lowpart_SUBREG (GET_MODE (target), temp); |
3dbecef9 JW |
4148 | SUBREG_PROMOTED_VAR_P (temp) = 1; |
4149 | SUBREG_PROMOTED_UNSIGNED_P (temp) | |
4150 | = SUBREG_PROMOTED_UNSIGNED_P (target); | |
4151 | } | |
4152 | ||
709f5be1 | 4153 | return want_value ? temp : NULL_RTX; |
1499e0a8 | 4154 | } |
bbf6f052 RK |
4155 | else |
4156 | { | |
4157 | temp = expand_expr (exp, target, GET_MODE (target), 0); | |
766f36c7 | 4158 | /* Return TARGET if it's a specified hardware register. |
709f5be1 RS |
4159 | If TARGET is a volatile mem ref, either return TARGET |
4160 | or return a reg copied *from* TARGET; ANSI requires this. | |
4161 | ||
4162 | Otherwise, if TEMP is not TARGET, return TEMP | |
4163 | if it is constant (for efficiency), | |
4164 | or if we really want the correct value. */ | |
bbf6f052 RK |
4165 | if (!(target && GET_CODE (target) == REG |
4166 | && REGNO (target) < FIRST_PSEUDO_REGISTER) | |
709f5be1 | 4167 | && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)) |
effbcc6a | 4168 | && ! rtx_equal_p (temp, target) |
709f5be1 | 4169 | && (CONSTANT_P (temp) || want_value)) |
bbf6f052 RK |
4170 | dont_return_target = 1; |
4171 | } | |
4172 | ||
b258707c RS |
4173 | /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not |
4174 | the same as that of TARGET, adjust the constant. This is needed, for | |
4175 | example, in case it is a CONST_DOUBLE and we want only a word-sized | |
4176 | value. */ | |
4177 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode | |
c1da1f33 | 4178 | && TREE_CODE (exp) != ERROR_MARK |
b258707c RS |
4179 | && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) |
4180 | temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), | |
4181 | temp, TREE_UNSIGNED (TREE_TYPE (exp))); | |
4182 | ||
7d384cc0 | 4183 | if (current_function_check_memory_usage |
921b3427 RK |
4184 | && GET_CODE (target) == MEM |
4185 | && AGGREGATE_TYPE_P (TREE_TYPE (exp))) | |
4186 | { | |
ea4da9db | 4187 | in_check_memory_usage = 1; |
921b3427 | 4188 | if (GET_CODE (temp) == MEM) |
ebb1b59a BS |
4189 | emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK, |
4190 | VOIDmode, 3, XEXP (target, 0), Pmode, | |
6a9c4aed | 4191 | XEXP (temp, 0), Pmode, |
921b3427 RK |
4192 | expr_size (exp), TYPE_MODE (sizetype)); |
4193 | else | |
ebb1b59a BS |
4194 | emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK, |
4195 | VOIDmode, 3, XEXP (target, 0), Pmode, | |
921b3427 | 4196 | expr_size (exp), TYPE_MODE (sizetype), |
3a94c984 | 4197 | GEN_INT (MEMORY_USE_WO), |
956d6950 | 4198 | TYPE_MODE (integer_type_node)); |
ea4da9db | 4199 | in_check_memory_usage = 0; |
921b3427 RK |
4200 | } |
4201 | ||
bbf6f052 RK |
4202 | /* If value was not generated in the target, store it there. |
4203 | Convert the value to TARGET's type first if necessary. */ |
f3f2255a R |
4204 | /* If TEMP and TARGET compare equal according to rtx_equal_p, but |
4205 | one or both of them are volatile memory refs, we have to distinguish | |
4206 | two cases: | |
4207 | - expand_expr has used TARGET. In this case, we must not generate | |
4208 | another copy. This can be detected by TARGET being equal according | |
4209 | to == . | |
4210 | - expand_expr has not used TARGET - that means that the source just | |
4211 | happens to have the same RTX form. Since temp will have been created | |
4212 | by expand_expr, it will compare unequal according to == . | |
4213 | We must generate a copy in this case, to reach the correct number | |
4214 | of volatile memory references. */ | |
bbf6f052 | 4215 | |
6036acbb | 4216 | if ((! rtx_equal_p (temp, target) |
f3f2255a R |
4217 | || (temp != target && (side_effects_p (temp) |
4218 | || side_effects_p (target)))) | |
e5408e52 JJ |
4219 | && TREE_CODE (exp) != ERROR_MARK |
4220 | && ! dont_store_target) | |
bbf6f052 RK |
4221 | { |
4222 | target = protect_from_queue (target, 1); | |
4223 | if (GET_MODE (temp) != GET_MODE (target) | |
f0348c25 | 4224 | && GET_MODE (temp) != VOIDmode) |
bbf6f052 RK |
4225 | { |
4226 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); | |
4227 | if (dont_return_target) | |
4228 | { | |
4229 | /* In this case, we will return TEMP, | |
4230 | so make sure it has the proper mode. | |
4231 | But don't forget to store the value into TARGET. */ | |
4232 | temp = convert_to_mode (GET_MODE (target), temp, unsignedp); | |
4233 | emit_move_insn (target, temp); | |
4234 | } | |
4235 | else | |
4236 | convert_move (target, temp, unsignedp); | |
4237 | } | |
4238 | ||
4239 | else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST) | |
4240 | { | |
4241 | /* Handle copying a string constant into an array. | |
4242 | The string constant may be shorter than the array. | |
4243 | So copy just the string's actual length, and clear the rest. */ | |
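/* For example, for `char buf[10] = "abcd";' only the five bytes of
   the string constant (including its terminating null) are copied;
   the remaining five bytes of BUF are cleared by the code below.  */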
4244 | rtx size; | |
22619c3f | 4245 | rtx addr; |
bbf6f052 | 4246 | |
e87b4f3f RS |
4247 | /* Get the size of the data type of the string, |
4248 | which is actually the size of the target. */ | |
4249 | size = expr_size (exp); | |
4250 | if (GET_CODE (size) == CONST_INT | |
4251 | && INTVAL (size) < TREE_STRING_LENGTH (exp)) | |
19caa751 | 4252 | emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp))); |
e87b4f3f | 4253 | else |
bbf6f052 | 4254 | { |
e87b4f3f RS |
4255 | /* Compute the size of the data to copy from the string. */ |
4256 | tree copy_size | |
c03b7665 | 4257 | = size_binop (MIN_EXPR, |
b50d17a1 | 4258 | make_tree (sizetype, size), |
fed3cef0 | 4259 | size_int (TREE_STRING_LENGTH (exp))); |
f9e158c3 | 4260 | unsigned int align = TYPE_ALIGN (TREE_TYPE (exp)); |
906c4e36 RK |
4261 | rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX, |
4262 | VOIDmode, 0); | |
e87b4f3f RS |
4263 | rtx label = 0; |
4264 | ||
4265 | /* Copy that much. */ | |
4266 | emit_block_move (target, temp, copy_size_rtx, | |
19caa751 | 4267 | TYPE_ALIGN (TREE_TYPE (exp))); |
e87b4f3f | 4268 | |
88f63c77 RK |
4269 | /* Figure out how much is left in TARGET that we have to clear. |
4270 | Do all calculations in ptr_mode. */ | |
4271 | ||
4272 | addr = XEXP (target, 0); | |
4273 | addr = convert_modes (ptr_mode, Pmode, addr, 1); | |
4274 | ||
e87b4f3f RS |
4275 | if (GET_CODE (copy_size_rtx) == CONST_INT) |
4276 | { | |
88f63c77 | 4277 | addr = plus_constant (addr, TREE_STRING_LENGTH (exp)); |
3a94c984 | 4278 | size = plus_constant (size, -TREE_STRING_LENGTH (exp)); |
8752c357 AJ |
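/* INTVAL (copy_size_rtx) & - INTVAL (copy_size_rtx) is the largest
   power of two dividing the number of bytes already copied; the
   cleared tail starts at that offset, so its known alignment drops
   accordingly.  */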
4279 | align = MIN (align, |
4280 | (unsigned int) (BITS_PER_UNIT | |
4281 | * (INTVAL (copy_size_rtx) | |
4282 | & - INTVAL (copy_size_rtx)))); | |
e87b4f3f RS |
4283 | } |
4284 | else | |
4285 | { | |
88f63c77 RK |
4286 | addr = force_reg (ptr_mode, addr); |
4287 | addr = expand_binop (ptr_mode, add_optab, addr, | |
906c4e36 RK |
4288 | copy_size_rtx, NULL_RTX, 0, |
4289 | OPTAB_LIB_WIDEN); | |
e87b4f3f | 4290 | |
88f63c77 | 4291 | size = expand_binop (ptr_mode, sub_optab, size, |
906c4e36 RK |
4292 | copy_size_rtx, NULL_RTX, 0, |
4293 | OPTAB_LIB_WIDEN); | |
e87b4f3f | 4294 | |
2a5b96fd | 4295 | align = BITS_PER_UNIT; |
e87b4f3f | 4296 | label = gen_label_rtx (); |
c5d5d461 JL |
4297 | emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX, |
4298 | GET_MODE (size), 0, 0, label); | |
e87b4f3f | 4299 | } |
2a5b96fd | 4300 | align = MIN (align, expr_align (copy_size)); |
e87b4f3f RS |
4301 | |
4302 | if (size != const0_rtx) | |
4303 | { | |
3bdf5ad1 RK |
4304 | rtx dest = gen_rtx_MEM (BLKmode, addr); |
4305 | ||
4306 | MEM_COPY_ATTRIBUTES (dest, target); | |
4307 | ||
921b3427 | 4308 | /* Be sure we can write on ADDR. */ |
ea4da9db | 4309 | in_check_memory_usage = 1; |
7d384cc0 | 4310 | if (current_function_check_memory_usage) |
ebb1b59a BS |
4311 | emit_library_call (chkr_check_addr_libfunc, |
4312 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, | |
6a9c4aed | 4313 | addr, Pmode, |
921b3427 | 4314 | size, TYPE_MODE (sizetype), |
3a94c984 | 4315 | GEN_INT (MEMORY_USE_WO), |
956d6950 | 4316 | TYPE_MODE (integer_type_node)); |
ea4da9db | 4317 | in_check_memory_usage = 0; |
051ffad5 | 4318 | clear_storage (dest, size, align); |
e87b4f3f | 4319 | } |
22619c3f | 4320 | |
e87b4f3f RS |
4321 | if (label) |
4322 | emit_label (label); | |
bbf6f052 RK |
4323 | } |
4324 | } | |
fffa9c1d JW |
4325 | /* Handle calls that return values in multiple non-contiguous locations. |
4326 | The Irix 6 ABI has examples of this. */ | |
4327 | else if (GET_CODE (target) == PARALLEL) | |
aac5cc16 | 4328 | emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)), |
19caa751 | 4329 | TYPE_ALIGN (TREE_TYPE (exp))); |
bbf6f052 RK |
4330 | else if (GET_MODE (temp) == BLKmode) |
4331 | emit_block_move (target, temp, expr_size (exp), | |
19caa751 | 4332 | TYPE_ALIGN (TREE_TYPE (exp))); |
bbf6f052 RK |
4333 | else |
4334 | emit_move_insn (target, temp); | |
4335 | } | |
709f5be1 | 4336 | |
766f36c7 RK |
4337 | /* If we don't want a value, return NULL_RTX. */ |
4338 | if (! want_value) | |
4339 | return NULL_RTX; | |
4340 | ||
4341 | /* If we are supposed to return TEMP, do so as long as it isn't a MEM. | |
4342 | ??? The latter test doesn't seem to make sense. */ | |
4343 | else if (dont_return_target && GET_CODE (temp) != MEM) | |
bbf6f052 | 4344 | return temp; |
766f36c7 RK |
4345 | |
4346 | /* Return TARGET itself if it is a hard register. */ | |
4347 | else if (want_value && GET_MODE (target) != BLKmode | |
4348 | && ! (GET_CODE (target) == REG | |
4349 | && REGNO (target) < FIRST_PSEUDO_REGISTER)) | |
709f5be1 | 4350 | return copy_to_reg (target); |
3a94c984 | 4351 | |
766f36c7 | 4352 | else |
709f5be1 | 4353 | return target; |
bbf6f052 RK |
4354 | } |
4355 | \f | |
9de08200 RK |
4356 | /* Return 1 if EXP just contains zeros. */ |
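/* "Zeros" here covers, e.g., integer 0, the real constant 0.0, a
   complex constant with zero real and imaginary parts, and a
   CONSTRUCTOR whose elements are all themselves zeros.  */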
4357 | ||
4358 | static int | |
4359 | is_zeros_p (exp) | |
4360 | tree exp; | |
4361 | { | |
4362 | tree elt; | |
4363 | ||
4364 | switch (TREE_CODE (exp)) | |
4365 | { | |
4366 | case CONVERT_EXPR: | |
4367 | case NOP_EXPR: | |
4368 | case NON_LVALUE_EXPR: | |
4369 | return is_zeros_p (TREE_OPERAND (exp, 0)); | |
4370 | ||
4371 | case INTEGER_CST: | |
05bccae2 | 4372 | return integer_zerop (exp); |
9de08200 RK |
4373 | |
4374 | case COMPLEX_CST: | |
4375 | return | |
4376 | is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp)); | |
4377 | ||
4378 | case REAL_CST: | |
41c9120b | 4379 | return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0); |
9de08200 RK |
4380 | |
4381 | case CONSTRUCTOR: | |
e1a43f73 PB |
4382 | if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE) |
4383 | return CONSTRUCTOR_ELTS (exp) == NULL_TREE; | |
9de08200 RK |
4384 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) |
4385 | if (! is_zeros_p (TREE_VALUE (elt))) | |
4386 | return 0; | |
4387 | ||
4388 | return 1; | |
3a94c984 | 4389 | |
e9a25f70 JL |
4390 | default: |
4391 | return 0; | |
9de08200 | 4392 | } |
9de08200 RK |
4393 | } |
4394 | ||
4395 | /* Return 1 if EXP contains mostly (3/4) zeros. */ | |
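/* For instance, a CONSTRUCTOR with six zero elements out of eight
   qualifies (4 * 6 >= 3 * 8), while five out of eight does not.  */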
4396 | ||
4397 | static int | |
4398 | mostly_zeros_p (exp) | |
4399 | tree exp; | |
4400 | { | |
9de08200 RK |
4401 | if (TREE_CODE (exp) == CONSTRUCTOR) |
4402 | { | |
e1a43f73 PB |
4403 | int elts = 0, zeros = 0; |
4404 | tree elt = CONSTRUCTOR_ELTS (exp); | |
4405 | if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE) | |
4406 | { | |
4407 | /* If there are no ranges of true bits, it is all zero. */ | |
4408 | return elt == NULL_TREE; | |
4409 | } | |
4410 | for (; elt; elt = TREE_CHAIN (elt)) | |
4411 | { | |
4412 | /* We do not handle the case where the index is a RANGE_EXPR, | |
4413 | so the statistic will be somewhat inaccurate. | |
4414 | We do make a more accurate count in store_constructor itself, | |
4415 | and since this function is only used for nested array elements, |
0f41302f | 4416 | this should be close enough. */ |
e1a43f73 PB |
4417 | if (mostly_zeros_p (TREE_VALUE (elt))) |
4418 | zeros++; | |
4419 | elts++; | |
4420 | } | |
9de08200 RK |
4421 | |
4422 | return 4 * zeros >= 3 * elts; | |
4423 | } | |
4424 | ||
4425 | return is_zeros_p (exp); | |
4426 | } | |
4427 | \f | |
e1a43f73 PB |
4428 | /* Helper function for store_constructor. |
4429 | TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field. | |
4430 | TYPE is the type of the CONSTRUCTOR, not the element type. | |
c5c76735 | 4431 | ALIGN and CLEARED are as for store_constructor. |
23cb1766 | 4432 | ALIAS_SET is the alias set to use for any stores. |
23ccec44 JW |
4433 | |
4434 | This provides a recursive shortcut back to store_constructor when it isn't | |
4435 | necessary to go through store_field. This is so that we can pass through | |
4436 | the cleared field to let store_constructor know that we may not have to | |
4437 | clear a substructure if the outer structure has already been cleared. */ | |
e1a43f73 PB |
4438 | |
4439 | static void | |
4440 | store_constructor_field (target, bitsize, bitpos, | |
23cb1766 | 4441 | mode, exp, type, align, cleared, alias_set) |
e1a43f73 | 4442 | rtx target; |
770ae6cc RK |
4443 | unsigned HOST_WIDE_INT bitsize; |
4444 | HOST_WIDE_INT bitpos; | |
e1a43f73 PB |
4445 | enum machine_mode mode; |
4446 | tree exp, type; | |
729a2125 | 4447 | unsigned int align; |
e1a43f73 | 4448 | int cleared; |
23cb1766 | 4449 | int alias_set; |
e1a43f73 PB |
4450 | { |
4451 | if (TREE_CODE (exp) == CONSTRUCTOR | |
23ccec44 JW |
4452 | && bitpos % BITS_PER_UNIT == 0 |
4453 | /* If we have a non-zero bitpos for a register target, then we just | |
4454 | let store_field do the bitfield handling. This is unlikely to | |
4455 | generate unnecessary clear instructions anyway. */ |
4456 | && (bitpos == 0 || GET_CODE (target) == MEM)) | |
e1a43f73 | 4457 | { |
126e5b0d | 4458 | if (bitpos != 0) |
ce64861e RK |
4459 | target |
4460 | = change_address (target, | |
4461 | GET_MODE (target) == BLKmode | |
4462 | || 0 != (bitpos | |
4463 | % GET_MODE_ALIGNMENT (GET_MODE (target))) | |
4464 | ? BLKmode : VOIDmode, | |
4465 | plus_constant (XEXP (target, 0), | |
4466 | bitpos / BITS_PER_UNIT)); | |
23cb1766 | 4467 | |
e0339ef7 RK |
4468 | |
4469 | /* Show the alignment may no longer be what it was and update the alias | |
4470 | set, if required. */ | |
eeebb824 | 4471 | if (bitpos != 0) |
8752c357 | 4472 | align = MIN (align, (unsigned int) bitpos & - bitpos); |
832ea3b3 FS |
4473 | if (GET_CODE (target) == MEM) |
4474 | MEM_ALIAS_SET (target) = alias_set; | |
e0339ef7 | 4475 | |
b7010412 | 4476 | store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT); |
e1a43f73 PB |
4477 | } |
4478 | else | |
19caa751 | 4479 | store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align, |
23cb1766 | 4480 | int_size_in_bytes (type), alias_set); |
e1a43f73 PB |
4481 | } |
4482 | ||
bbf6f052 | 4483 | /* Store the value of constructor EXP into the rtx TARGET. |
e1a43f73 | 4484 | TARGET is either a REG or a MEM. |
19caa751 | 4485 | ALIGN is the maximum known alignment for TARGET. |
b7010412 RK |
4486 | CLEARED is true if TARGET is known to have been zero'd. |
4487 | SIZE is the number of bytes of TARGET we are allowed to modify: this | |
4488 | may not be the same as the size of EXP if we are assigning to a field | |
4489 | which has been packed to exclude padding bits. */ | |
bbf6f052 RK |
4490 | |
4491 | static void | |
b7010412 | 4492 | store_constructor (exp, target, align, cleared, size) |
bbf6f052 RK |
4493 | tree exp; |
4494 | rtx target; | |
729a2125 | 4495 | unsigned int align; |
e1a43f73 | 4496 | int cleared; |
13eb1f7f | 4497 | HOST_WIDE_INT size; |
bbf6f052 | 4498 | { |
4af3895e | 4499 | tree type = TREE_TYPE (exp); |
a5efcd63 | 4500 | #ifdef WORD_REGISTER_OPERATIONS |
13eb1f7f | 4501 | HOST_WIDE_INT exp_size = int_size_in_bytes (type); |
a5efcd63 | 4502 | #endif |
4af3895e | 4503 | |
bbf6f052 RK |
4504 | /* We know our target cannot conflict, since safe_from_p has been called. */ |
4505 | #if 0 | |
4506 | /* Don't try copying piece by piece into a hard register | |
4507 | since that is vulnerable to being clobbered by EXP. | |
4508 | Instead, construct in a pseudo register and then copy it all. */ | |
4509 | if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER) | |
4510 | { | |
4511 | rtx temp = gen_reg_rtx (GET_MODE (target)); | |
7205485e | 4512 | store_constructor (exp, temp, align, cleared, size); |
bbf6f052 RK |
4513 | emit_move_insn (target, temp); |
4514 | return; | |
4515 | } | |
4516 | #endif | |
4517 | ||
e44842fe RK |
4518 | if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE |
4519 | || TREE_CODE (type) == QUAL_UNION_TYPE) | |
bbf6f052 RK |
4520 | { |
4521 | register tree elt; | |
4522 | ||
4af3895e | 4523 | /* Inform later passes that the whole union value is dead. */ |
dd1db5ec RK |
4524 | if ((TREE_CODE (type) == UNION_TYPE |
4525 | || TREE_CODE (type) == QUAL_UNION_TYPE) | |
4526 | && ! cleared) | |
a59f8640 R |
4527 | { |
4528 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); | |
4529 | ||
4530 | /* If the constructor is empty, clear the union. */ | |
4531 | if (! CONSTRUCTOR_ELTS (exp) && ! cleared) | |
19caa751 | 4532 | clear_storage (target, expr_size (exp), TYPE_ALIGN (type)); |
a59f8640 | 4533 | } |
4af3895e JVA |
4534 | |
4535 | /* If we are building a static constructor into a register, | |
4536 | set the initial value as zero so we can fold the value into | |
67225c15 RK |
4537 | a constant. But if more than one register is involved, |
4538 | this probably loses. */ | |
4539 | else if (GET_CODE (target) == REG && TREE_STATIC (exp) | |
4540 | && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD) | |
9de08200 RK |
4541 | { |
4542 | if (! cleared) | |
e9a25f70 | 4543 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); |
4af3895e | 4544 | |
9de08200 RK |
4545 | cleared = 1; |
4546 | } | |
4547 | ||
4548 | /* If the constructor has fewer fields than the structure | |
4549 | or if we are initializing the structure to mostly zeros, | |
fcf1b822 RK |
4550 | clear the whole structure first. Don't do this if TARGET is a |
4551 | register whose mode size isn't equal to SIZE, since clear_storage |
4552 | can't handle this case. */ | |
9376fcd6 RK |
4553 | else if (size > 0 |
4554 | && ((list_length (CONSTRUCTOR_ELTS (exp)) | |
c3b247b4 | 4555 | != fields_length (type)) |
fcf1b822 RK |
4556 | || mostly_zeros_p (exp)) |
4557 | && (GET_CODE (target) != REG | |
8752c357 | 4558 | || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size)) |
9de08200 RK |
4559 | { |
4560 | if (! cleared) | |
19caa751 | 4561 | clear_storage (target, GEN_INT (size), align); |
9de08200 RK |
4562 | |
4563 | cleared = 1; | |
4564 | } | |
dd1db5ec | 4565 | else if (! cleared) |
bbf6f052 | 4566 | /* Inform later passes that the old value is dead. */ |
38a448ca | 4567 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); |
bbf6f052 RK |
4568 | |
4569 | /* Store each element of the constructor into | |
4570 | the corresponding field of TARGET. */ | |
4571 | ||
4572 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) | |
4573 | { | |
4574 | register tree field = TREE_PURPOSE (elt); | |
c5c76735 | 4575 | #ifdef WORD_REGISTER_OPERATIONS |
34c73909 | 4576 | tree value = TREE_VALUE (elt); |
c5c76735 | 4577 | #endif |
bbf6f052 | 4578 | register enum machine_mode mode; |
770ae6cc RK |
4579 | HOST_WIDE_INT bitsize; |
4580 | HOST_WIDE_INT bitpos = 0; | |
bbf6f052 | 4581 | int unsignedp; |
770ae6cc | 4582 | tree offset; |
b50d17a1 | 4583 | rtx to_rtx = target; |
bbf6f052 | 4584 | |
f32fd778 RS |
4585 | /* Just ignore missing fields. |
4586 | We cleared the whole structure, above, | |
4587 | if any fields are missing. */ | |
4588 | if (field == 0) | |
4589 | continue; | |
4590 | ||
e1a43f73 PB |
4591 | if (cleared && is_zeros_p (TREE_VALUE (elt))) |
4592 | continue; | |
9de08200 | 4593 | |
770ae6cc RK |
4594 | if (host_integerp (DECL_SIZE (field), 1)) |
4595 | bitsize = tree_low_cst (DECL_SIZE (field), 1); | |
14a774a9 RK |
4596 | else |
4597 | bitsize = -1; | |
4598 | ||
bbf6f052 RK |
4599 | unsignedp = TREE_UNSIGNED (field); |
4600 | mode = DECL_MODE (field); | |
4601 | if (DECL_BIT_FIELD (field)) | |
4602 | mode = VOIDmode; | |
4603 | ||
770ae6cc RK |
4604 | offset = DECL_FIELD_OFFSET (field); |
4605 | if (host_integerp (offset, 0) | |
4606 | && host_integerp (bit_position (field), 0)) | |
4607 | { | |
4608 | bitpos = int_bit_position (field); | |
4609 | offset = 0; | |
4610 | } | |
b50d17a1 | 4611 | else |
770ae6cc | 4612 | bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0); |
3a94c984 | 4613 | |
b50d17a1 RK |
4614 | if (offset) |
4615 | { | |
4616 | rtx offset_rtx; | |
4617 | ||
4618 | if (contains_placeholder_p (offset)) | |
7fa96708 | 4619 | offset = build (WITH_RECORD_EXPR, sizetype, |
956d6950 | 4620 | offset, make_tree (TREE_TYPE (exp), target)); |
bbf6f052 | 4621 | |
b50d17a1 RK |
4622 | offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); |
4623 | if (GET_CODE (to_rtx) != MEM) | |
4624 | abort (); | |
4625 | ||
3a94c984 KH |
4626 | if (GET_MODE (offset_rtx) != ptr_mode) |
4627 | { | |
bd070e1a | 4628 | #ifdef POINTERS_EXTEND_UNSIGNED |
822a3443 | 4629 | offset_rtx = convert_memory_address (ptr_mode, offset_rtx); |
bd070e1a RH |
4630 | #else |
4631 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); | |
4632 | #endif | |
4633 | } | |
4634 | ||
b50d17a1 RK |
4635 | to_rtx |
4636 | = change_address (to_rtx, VOIDmode, | |
38a448ca | 4637 | gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0), |
c5c76735 JL |
4638 | force_reg (ptr_mode, |
4639 | offset_rtx))); | |
7fa96708 | 4640 | align = DECL_OFFSET_ALIGN (field); |
b50d17a1 | 4641 | } |
c5c76735 | 4642 | |
cf04eb80 RK |
4643 | if (TREE_READONLY (field)) |
4644 | { | |
9151b3bf | 4645 | if (GET_CODE (to_rtx) == MEM) |
effbcc6a RK |
4646 | to_rtx = copy_rtx (to_rtx); |
4647 | ||
cf04eb80 RK |
4648 | RTX_UNCHANGING_P (to_rtx) = 1; |
4649 | } | |
4650 | ||
34c73909 R |
4651 | #ifdef WORD_REGISTER_OPERATIONS |
4652 | /* If this initializes a field that is smaller than a word, at the | |
4653 | start of a word, try to widen it to a full word. | |
4654 | This special case allows us to output C++ member function | |
4655 | initializations in a form that the optimizers can understand. */ | |
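/* For example, on a 32-bit big-endian target, storing the 8-bit
   constant 0x12 at bit position 0 of a REG is rewritten below as a
   word_mode store of 0x12000000, i.e. the value shifted left by
   BITS_PER_WORD - bitsize = 24 bits.  */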
770ae6cc | 4656 | if (GET_CODE (target) == REG |
34c73909 R |
4657 | && bitsize < BITS_PER_WORD |
4658 | && bitpos % BITS_PER_WORD == 0 | |
4659 | && GET_MODE_CLASS (mode) == MODE_INT | |
4660 | && TREE_CODE (value) == INTEGER_CST | |
13eb1f7f RK |
4661 | && exp_size >= 0 |
4662 | && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT) | |
34c73909 R |
4663 | { |
4664 | tree type = TREE_TYPE (value); | |
4665 | if (TYPE_PRECISION (type) < BITS_PER_WORD) | |
4666 | { | |
4667 | type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type)); | |
4668 | value = convert (type, value); | |
4669 | } | |
4670 | if (BYTES_BIG_ENDIAN) | |
4671 | value | |
4672 | = fold (build (LSHIFT_EXPR, type, value, | |
4673 | build_int_2 (BITS_PER_WORD - bitsize, 0))); | |
4674 | bitsize = BITS_PER_WORD; | |
4675 | mode = word_mode; | |
4676 | } | |
4677 | #endif | |
c5c76735 | 4678 | store_constructor_field (to_rtx, bitsize, bitpos, mode, |
23cb1766 | 4679 | TREE_VALUE (elt), type, align, cleared, |
963a2a84 | 4680 | (DECL_NONADDRESSABLE_P (field) |
1ccfe3fa | 4681 | && GET_CODE (to_rtx) == MEM) |
23cb1766 RK |
4682 | ? MEM_ALIAS_SET (to_rtx) |
4683 | : get_alias_set (TREE_TYPE (field))); | |
bbf6f052 RK |
4684 | } |
4685 | } | |
4af3895e | 4686 | else if (TREE_CODE (type) == ARRAY_TYPE) |
bbf6f052 RK |
4687 | { |
4688 | register tree elt; | |
4689 | register int i; | |
e1a43f73 | 4690 | int need_to_clear; |
4af3895e | 4691 | tree domain = TYPE_DOMAIN (type); |
4af3895e | 4692 | tree elttype = TREE_TYPE (type); |
85f3d674 RK |
4693 | int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0) |
4694 | && host_integerp (TYPE_MAX_VALUE (domain), 0)); | |
4695 | HOST_WIDE_INT minelt; | |
4696 | HOST_WIDE_INT maxelt; | |
4697 | ||
4698 | /* If we have constant bounds for the range of the type, get them. */ | |
4699 | if (const_bounds_p) | |
4700 | { | |
4701 | minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); | |
4702 | maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); | |
4703 | } | |
bbf6f052 | 4704 | |
e1a43f73 | 4705 | /* If the constructor has fewer elements than the array, |
38e01259 | 4706 | clear the whole array first. Similarly if this is a |
e1a43f73 PB |
4707 | static constructor of a non-BLKmode object. */ |
4708 | if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp))) | |
4709 | need_to_clear = 1; | |
4710 | else | |
4711 | { | |
4712 | HOST_WIDE_INT count = 0, zero_count = 0; | |
85f3d674 RK |
4713 | need_to_clear = ! const_bounds_p; |
4714 | ||
e1a43f73 PB |
4715 | /* This loop is a more accurate version of the loop in |
4716 | mostly_zeros_p (it handles RANGE_EXPR in an index). | |
4717 | It is also needed to check for missing elements. */ | |
4718 | for (elt = CONSTRUCTOR_ELTS (exp); | |
85f3d674 | 4719 | elt != NULL_TREE && ! need_to_clear; |
df0faff1 | 4720 | elt = TREE_CHAIN (elt)) |
e1a43f73 PB |
4721 | { |
4722 | tree index = TREE_PURPOSE (elt); | |
4723 | HOST_WIDE_INT this_node_count; | |
19caa751 | 4724 | |
e1a43f73 PB |
4725 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) |
4726 | { | |
4727 | tree lo_index = TREE_OPERAND (index, 0); | |
4728 | tree hi_index = TREE_OPERAND (index, 1); | |
05bccae2 | 4729 | |
19caa751 RK |
4730 | if (! host_integerp (lo_index, 1) |
4731 | || ! host_integerp (hi_index, 1)) | |
e1a43f73 PB |
4732 | { |
4733 | need_to_clear = 1; | |
4734 | break; | |
4735 | } | |
19caa751 RK |
4736 | |
4737 | this_node_count = (tree_low_cst (hi_index, 1) | |
4738 | - tree_low_cst (lo_index, 1) + 1); | |
e1a43f73 PB |
4739 | } |
4740 | else | |
4741 | this_node_count = 1; | |
85f3d674 | 4742 | |
e1a43f73 PB |
4743 | count += this_node_count; |
4744 | if (mostly_zeros_p (TREE_VALUE (elt))) | |
4745 | zero_count += this_node_count; | |
4746 | } | |
85f3d674 | 4747 | |
8e958f70 | 4748 | /* Clear the entire array first if there are any missing elements, |
0f41302f | 4749 | or if the incidence of zero elements is >= 75%. */ |
85f3d674 RK |
4750 | if (! need_to_clear |
4751 | && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count)) | |
e1a43f73 PB |
4752 | need_to_clear = 1; |
4753 | } | |
85f3d674 | 4754 | |
9376fcd6 | 4755 | if (need_to_clear && size > 0) |
9de08200 RK |
4756 | { |
4757 | if (! cleared) | |
19caa751 | 4758 | clear_storage (target, GEN_INT (size), align); |
9de08200 RK |
4759 | cleared = 1; |
4760 | } | |
bbf6f052 RK |
4761 | else |
4762 | /* Inform later passes that the old value is dead. */ | |
38a448ca | 4763 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); |
bbf6f052 RK |
4764 | |
4765 | /* Store each element of the constructor into | |
4766 | the corresponding element of TARGET, determined | |
4767 | by counting the elements. */ | |
4768 | for (elt = CONSTRUCTOR_ELTS (exp), i = 0; | |
4769 | elt; | |
4770 | elt = TREE_CHAIN (elt), i++) | |
4771 | { | |
4772 | register enum machine_mode mode; | |
19caa751 RK |
4773 | HOST_WIDE_INT bitsize; |
4774 | HOST_WIDE_INT bitpos; | |
bbf6f052 | 4775 | int unsignedp; |
e1a43f73 | 4776 | tree value = TREE_VALUE (elt); |
729a2125 | 4777 | unsigned int align = TYPE_ALIGN (TREE_TYPE (value)); |
03dc44a6 RS |
4778 | tree index = TREE_PURPOSE (elt); |
4779 | rtx xtarget = target; | |
bbf6f052 | 4780 | |
e1a43f73 PB |
4781 | if (cleared && is_zeros_p (value)) |
4782 | continue; | |
9de08200 | 4783 | |
bbf6f052 | 4784 | unsignedp = TREE_UNSIGNED (elttype); |
14a774a9 RK |
4785 | mode = TYPE_MODE (elttype); |
4786 | if (mode == BLKmode) | |
19caa751 RK |
4787 | bitsize = (host_integerp (TYPE_SIZE (elttype), 1) |
4788 | ? tree_low_cst (TYPE_SIZE (elttype), 1) | |
4789 | : -1); | |
14a774a9 RK |
4790 | else |
4791 | bitsize = GET_MODE_BITSIZE (mode); | |
bbf6f052 | 4792 | |
e1a43f73 PB |
4793 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) |
4794 | { | |
4795 | tree lo_index = TREE_OPERAND (index, 0); | |
4796 | tree hi_index = TREE_OPERAND (index, 1); | |
4797 | rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end; | |
4798 | struct nesting *loop; | |
05c0b405 PB |
4799 | HOST_WIDE_INT lo, hi, count; |
4800 | tree position; | |
e1a43f73 | 4801 | |
0f41302f | 4802 | /* If the range is constant and "small", unroll the loop. */ |
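/* Per the test below, "small" means: the target is not a MEM, or
   the range has at most two elements, or the elements' total size
   is known and is at most 40 bytes (40 * 8 bits).  */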
85f3d674 RK |
4803 | if (const_bounds_p |
4804 | && host_integerp (lo_index, 0) | |
19caa751 RK |
4805 | && host_integerp (hi_index, 0) |
4806 | && (lo = tree_low_cst (lo_index, 0), | |
4807 | hi = tree_low_cst (hi_index, 0), | |
05c0b405 PB |
4808 | count = hi - lo + 1, |
4809 | (GET_CODE (target) != MEM | |
4810 | || count <= 2 | |
19caa751 RK |
4811 | || (host_integerp (TYPE_SIZE (elttype), 1) |
4812 | && (tree_low_cst (TYPE_SIZE (elttype), 1) * count | |
4813 | <= 40 * 8))))) | |
e1a43f73 | 4814 | { |
05c0b405 PB |
4815 | lo -= minelt; hi -= minelt; |
4816 | for (; lo <= hi; lo++) | |
e1a43f73 | 4817 | { |
19caa751 | 4818 | bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0); |
23cb1766 RK |
4819 | store_constructor_field |
4820 | (target, bitsize, bitpos, mode, value, type, align, | |
4821 | cleared, | |
4822 | TYPE_NONALIASED_COMPONENT (type) | |
4823 | ? MEM_ALIAS_SET (target) : get_alias_set (elttype)); | |
e1a43f73 PB |
4824 | } |
4825 | } | |
4826 | else | |
4827 | { | |
4828 | hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0); | |
4829 | loop_top = gen_label_rtx (); | |
4830 | loop_end = gen_label_rtx (); | |
4831 | ||
4832 | unsignedp = TREE_UNSIGNED (domain); | |
4833 | ||
4834 | index = build_decl (VAR_DECL, NULL_TREE, domain); | |
4835 | ||
19e7881c | 4836 | index_r |
e1a43f73 PB |
4837 | = gen_reg_rtx (promote_mode (domain, DECL_MODE (index), |
4838 | &unsignedp, 0)); | |
19e7881c | 4839 | SET_DECL_RTL (index, index_r); |
e1a43f73 PB |
4840 | if (TREE_CODE (value) == SAVE_EXPR |
4841 | && SAVE_EXPR_RTL (value) == 0) | |
4842 | { | |
0f41302f MS |
4843 | /* Make sure value gets expanded once before the |
4844 | loop. */ | |
e1a43f73 PB |
4845 | expand_expr (value, const0_rtx, VOIDmode, 0); |
4846 | emit_queue (); | |
4847 | } | |
4848 | store_expr (lo_index, index_r, 0); | |
4849 | loop = expand_start_loop (0); | |
4850 | ||
0f41302f | 4851 | /* Assign value to element index. */ |
fed3cef0 RK |
4852 | position |
4853 | = convert (ssizetype, | |
4854 | fold (build (MINUS_EXPR, TREE_TYPE (index), | |
4855 | index, TYPE_MIN_VALUE (domain)))); | |
4856 | position = size_binop (MULT_EXPR, position, | |
4857 | convert (ssizetype, | |
4858 | TYPE_SIZE_UNIT (elttype))); | |
4859 | ||
e1a43f73 | 4860 | pos_rtx = expand_expr (position, 0, VOIDmode, 0); |
38a448ca | 4861 | addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx); |
e1a43f73 PB |
4862 | xtarget = change_address (target, mode, addr); |
4863 | if (TREE_CODE (value) == CONSTRUCTOR) | |
b7010412 RK |
4864 | store_constructor (value, xtarget, align, cleared, |
4865 | bitsize / BITS_PER_UNIT); | |
e1a43f73 PB |
4866 | else |
4867 | store_expr (value, xtarget, 0); | |
4868 | ||
4869 | expand_exit_loop_if_false (loop, | |
4870 | build (LT_EXPR, integer_type_node, | |
4871 | index, hi_index)); | |
4872 | ||
4873 | expand_increment (build (PREINCREMENT_EXPR, | |
4874 | TREE_TYPE (index), | |
7b8b9722 | 4875 | index, integer_one_node), 0, 0); |
e1a43f73 PB |
4876 | expand_end_loop (); |
4877 | emit_label (loop_end); | |
e1a43f73 PB |
4878 | } |
4879 | } | |
19caa751 RK |
4880 | else if ((index != 0 && ! host_integerp (index, 0)) |
4881 | || ! host_integerp (TYPE_SIZE (elttype), 1)) | |
03dc44a6 | 4882 | { |
e1a43f73 | 4883 | rtx pos_rtx, addr; |
03dc44a6 RS |
4884 | tree position; |
4885 | ||
5b6c44ff | 4886 | if (index == 0) |
fed3cef0 | 4887 | index = ssize_int (1); |
5b6c44ff | 4888 | |
e1a43f73 | 4889 | if (minelt) |
fed3cef0 RK |
4890 | index = convert (ssizetype, |
4891 | fold (build (MINUS_EXPR, index, | |
4892 | TYPE_MIN_VALUE (domain)))); | |
19caa751 | 4893 | |
fed3cef0 RK |
4894 | position = size_binop (MULT_EXPR, index, |
4895 | convert (ssizetype, | |
4896 | TYPE_SIZE_UNIT (elttype))); | |
03dc44a6 | 4897 | pos_rtx = expand_expr (position, 0, VOIDmode, 0); |
38a448ca | 4898 | addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx); |
03dc44a6 | 4899 | xtarget = change_address (target, mode, addr); |
e1a43f73 | 4900 | store_expr (value, xtarget, 0); |
03dc44a6 RS |
4901 | } |
4902 | else | |
4903 | { | |
4904 | if (index != 0) | |
19caa751 RK |
4905 | bitpos = ((tree_low_cst (index, 0) - minelt) |
4906 | * tree_low_cst (TYPE_SIZE (elttype), 1)); | |
03dc44a6 | 4907 | else |
19caa751 RK |
4908 | bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1)); |
4909 | ||
c5c76735 | 4910 | store_constructor_field (target, bitsize, bitpos, mode, value, |
23cb1766 RK |
4911 | type, align, cleared, |
4912 | TYPE_NONALIASED_COMPONENT (type) | |
831ecbd4 | 4913 | && GET_CODE (target) == MEM |
23cb1766 RK |
4914 | ? MEM_ALIAS_SET (target) : |
4915 | get_alias_set (elttype)); | |
4916 | ||
03dc44a6 | 4917 | } |
bbf6f052 RK |
4918 | } |
4919 | } | |
19caa751 | 4920 | |
3a94c984 | 4921 | /* Set constructor assignments. */ |
071a6595 PB |
4922 | else if (TREE_CODE (type) == SET_TYPE) |
4923 | { | |
e1a43f73 | 4924 | tree elt = CONSTRUCTOR_ELTS (exp); |
19caa751 | 4925 | unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits; |
071a6595 PB |
4926 | tree domain = TYPE_DOMAIN (type); |
4927 | tree domain_min, domain_max, bitlength; | |
4928 | ||
9faa82d8 | 4929 | /* The default implementation strategy is to extract the constant |
071a6595 PB |
4930 | parts of the constructor, use that to initialize the target, |
4931 | and then "or" in whatever non-constant ranges we need in addition. | |
4932 | ||
4933 | If a large set is all zero or all ones, it is | |
4934 | probably better to set it using memset (if available) or bzero. | |
4935 | Also, if a large set has just a single range, it may also be | |
4936 | better to first clear the whole set (using |
0f41302f | 4937 | bzero/memset), and then set the bits we want. */ |
3a94c984 | 4938 | |
0f41302f | 4939 | /* Check for all zeros. */ |
9376fcd6 | 4940 | if (elt == NULL_TREE && size > 0) |
071a6595 | 4941 | { |
e1a43f73 | 4942 | if (!cleared) |
19caa751 | 4943 | clear_storage (target, GEN_INT (size), TYPE_ALIGN (type)); |
071a6595 PB |
4944 | return; |
4945 | } | |
4946 | ||
071a6595 PB |
4947 | domain_min = convert (sizetype, TYPE_MIN_VALUE (domain)); |
4948 | domain_max = convert (sizetype, TYPE_MAX_VALUE (domain)); | |
4949 | bitlength = size_binop (PLUS_EXPR, | |
fed3cef0 RK |
4950 | size_diffop (domain_max, domain_min), |
4951 | ssize_int (1)); | |
071a6595 | 4952 | |
19caa751 | 4953 | nbits = tree_low_cst (bitlength, 1); |
e1a43f73 PB |
4954 | |
4955 | /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that | |
4956 | are "complicated" (more than one range), initialize (the | |
3a94c984 | 4957 | constant parts) by copying from a constant. */ |
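/* That is, the loop below expands the constant elements into
   BIT_BUFFER and emits one move per set-sized word, so such sets
   are initialized by a few immediate stores instead of library
   calls.  */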
e1a43f73 PB |
4958 | if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD |
4959 | || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE)) | |
071a6595 | 4960 | { |
19caa751 | 4961 | unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp)); |
b4ee5a72 | 4962 | enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1); |
0f41302f | 4963 | char *bit_buffer = (char *) alloca (nbits); |
b4ee5a72 | 4964 | HOST_WIDE_INT word = 0; |
19caa751 RK |
4965 | unsigned int bit_pos = 0; |
4966 | unsigned int ibit = 0; | |
4967 | unsigned int offset = 0; /* In bytes from beginning of set. */ | |
4968 | ||
e1a43f73 | 4969 | elt = get_set_constructor_bits (exp, bit_buffer, nbits); |
b4ee5a72 | 4970 | for (;;) |
071a6595 | 4971 | { |
b4ee5a72 PB |
4972 | if (bit_buffer[ibit]) |
4973 | { | |
b09f3348 | 4974 | if (BYTES_BIG_ENDIAN) |
b4ee5a72 PB |
4975 | word |= (1 << (set_word_size - 1 - bit_pos)); |
4976 | else | |
4977 | word |= 1 << bit_pos; | |
4978 | } | |
19caa751 | 4979 | |
b4ee5a72 PB |
4980 | bit_pos++; ibit++; |
4981 | if (bit_pos >= set_word_size || ibit == nbits) | |
071a6595 | 4982 | { |
e1a43f73 PB |
4983 | if (word != 0 || ! cleared) |
4984 | { | |
4985 | rtx datum = GEN_INT (word); | |
4986 | rtx to_rtx; | |
19caa751 | 4987 | |
0f41302f MS |
4988 | /* The assumption here is that it is safe to use |
4989 | XEXP if the set is multi-word, but not if | |
4990 | it's single-word. */ | |
e1a43f73 PB |
4991 | if (GET_CODE (target) == MEM) |
4992 | { | |
4993 | to_rtx = plus_constant (XEXP (target, 0), offset); | |
4994 | to_rtx = change_address (target, mode, to_rtx); | |
4995 | } | |
3a94c984 | 4996 | else if (offset == 0) |
e1a43f73 PB |
4997 | to_rtx = target; |
4998 | else | |
4999 | abort (); | |
5000 | emit_move_insn (to_rtx, datum); | |
5001 | } | |
19caa751 | 5002 | |
b4ee5a72 PB |
5003 | if (ibit == nbits) |
5004 | break; | |
5005 | word = 0; | |
5006 | bit_pos = 0; | |
5007 | offset += set_word_size / BITS_PER_UNIT; | |
071a6595 PB |
5008 | } |
5009 | } | |
071a6595 | 5010 | } |
e1a43f73 | 5011 | else if (!cleared) |
19caa751 RK |
5012 | /* Don't bother clearing storage if the set is all ones. */ |
5013 | if (TREE_CHAIN (elt) != NULL_TREE | |
5014 | || (TREE_PURPOSE (elt) == NULL_TREE | |
5015 | ? nbits != 1 | |
5016 | : ( ! host_integerp (TREE_VALUE (elt), 0) | |
5017 | || ! host_integerp (TREE_PURPOSE (elt), 0) | |
5018 | || (tree_low_cst (TREE_VALUE (elt), 0) | |
5019 | - tree_low_cst (TREE_PURPOSE (elt), 0) + 1 | |
5020 | != (HOST_WIDE_INT) nbits)))) | |
5021 | clear_storage (target, expr_size (exp), TYPE_ALIGN (type)); | |
3a94c984 | 5022 | |
e1a43f73 | 5023 | for (; elt != NULL_TREE; elt = TREE_CHAIN (elt)) |
071a6595 | 5024 | { |
3a94c984 | 5025 | /* Start of range of element or NULL. */ |
071a6595 | 5026 | tree startbit = TREE_PURPOSE (elt); |
3a94c984 | 5027 | /* End of range of element, or element value. */ |
071a6595 | 5028 | tree endbit = TREE_VALUE (elt); |
381127e8 | 5029 | #ifdef TARGET_MEM_FUNCTIONS |
071a6595 | 5030 | HOST_WIDE_INT startb, endb; |
381127e8 | 5031 | #endif |
19caa751 | 5032 | rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx; |
071a6595 PB |
5033 | |
5034 | bitlength_rtx = expand_expr (bitlength, | |
19caa751 | 5035 | NULL_RTX, MEM, EXPAND_CONST_ADDRESS); |
071a6595 | 5036 | |
3a94c984 | 5037 | /* Handle non-range tuple element like [ expr ]. */ |
071a6595 PB |
5038 | if (startbit == NULL_TREE) |
5039 | { | |
5040 | startbit = save_expr (endbit); | |
5041 | endbit = startbit; | |
5042 | } | |
19caa751 | 5043 | |
071a6595 PB |
5044 | startbit = convert (sizetype, startbit); |
5045 | endbit = convert (sizetype, endbit); | |
5046 | if (! integer_zerop (domain_min)) | |
5047 | { | |
5048 | startbit = size_binop (MINUS_EXPR, startbit, domain_min); | |
5049 | endbit = size_binop (MINUS_EXPR, endbit, domain_min); | |
5050 | } | |
3a94c984 | 5051 | startbit_rtx = expand_expr (startbit, NULL_RTX, MEM, |
071a6595 | 5052 | EXPAND_CONST_ADDRESS); |
3a94c984 | 5053 | endbit_rtx = expand_expr (endbit, NULL_RTX, MEM, |
071a6595 PB |
5054 | EXPAND_CONST_ADDRESS); |
5055 | ||
5056 | if (REG_P (target)) | |
5057 | { | |
1da68f56 RK |
5058 | targetx |
5059 | = assign_temp | |
5060 | ((build_qualified_type (type_for_mode (GET_MODE (target), 0), | |
5061 | TYPE_QUAL_CONST)), | |
5062 | 0, 1, 1); | |
071a6595 PB |
5063 | emit_move_insn (targetx, target); |
5064 | } | |
19caa751 | 5065 | |
071a6595 PB |
5066 | else if (GET_CODE (target) == MEM) |
5067 | targetx = target; | |
5068 | else | |
5069 | abort (); | |
5070 | ||
5071 | #ifdef TARGET_MEM_FUNCTIONS | |
5072 | /* Optimization: If startbit and endbit are | |
9faa82d8 | 5073 | constants divisible by BITS_PER_UNIT, |
0f41302f | 5074 | call memset instead. */ |
071a6595 PB |
5075 | if (TREE_CODE (startbit) == INTEGER_CST |
5076 | && TREE_CODE (endbit) == INTEGER_CST | |
5077 | && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0 | |
e1a43f73 | 5078 | && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0) |
071a6595 | 5079 | { |
ebb1b59a | 5080 | emit_library_call (memset_libfunc, LCT_NORMAL, |
071a6595 | 5081 | VOIDmode, 3, |
e1a43f73 PB |
5082 | plus_constant (XEXP (targetx, 0), |
5083 | startb / BITS_PER_UNIT), | |
071a6595 | 5084 | Pmode, |
3b6f75e2 | 5085 | constm1_rtx, TYPE_MODE (integer_type_node), |
071a6595 | 5086 | GEN_INT ((endb - startb) / BITS_PER_UNIT), |
3b6f75e2 | 5087 | TYPE_MODE (sizetype)); |
071a6595 PB |
5088 | } |
5089 | else | |
5090 | #endif | |
19caa751 | 5091 | emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"), |
ebb1b59a BS |
5092 | LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0), |
5093 | Pmode, bitlength_rtx, TYPE_MODE (sizetype), | |
19caa751 RK |
5094 | startbit_rtx, TYPE_MODE (sizetype), |
5095 | endbit_rtx, TYPE_MODE (sizetype)); | |
5096 | ||
071a6595 PB |
5097 | if (REG_P (target)) |
5098 | emit_move_insn (target, targetx); | |
5099 | } | |
5100 | } | |
bbf6f052 RK |
5101 | |
5102 | else | |
5103 | abort (); | |
5104 | } | |
5105 | ||
5106 | /* Store the value of EXP (an expression tree) | |
5107 | into a subfield of TARGET which has mode MODE and occupies | |
5108 | BITSIZE bits, starting BITPOS bits from the start of TARGET. | |
5109 | If MODE is VOIDmode, it means that we are storing into a bit-field. | |
5110 | ||
5111 | If VALUE_MODE is VOIDmode, return nothing in particular. | |
5112 | UNSIGNEDP is not used in this case. | |
5113 | ||
5114 | Otherwise, return an rtx for the value stored. This rtx | |
5115 | has mode VALUE_MODE if that is convenient to do. | |
5116 | In this case, UNSIGNEDP must be nonzero if the value is an unsigned type. | |
5117 | ||
19caa751 | 5118 | ALIGN is the alignment that TARGET is known to have. |
3a94c984 | 5119 | TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. |
ece32014 MM |
5120 | |
5121 | ALIAS_SET is the alias set for the destination. This value will | |
5122 | (in general) be different from that for TARGET, since TARGET is a | |
5123 | reference to the containing structure. */ | |
bbf6f052 RK |
5124 | |
5125 | static rtx | |
5126 | store_field (target, bitsize, bitpos, mode, exp, value_mode, | |
ece32014 | 5127 | unsignedp, align, total_size, alias_set) |
bbf6f052 | 5128 | rtx target; |
770ae6cc RK |
5129 | HOST_WIDE_INT bitsize; |
5130 | HOST_WIDE_INT bitpos; | |
bbf6f052 RK |
5131 | enum machine_mode mode; |
5132 | tree exp; | |
5133 | enum machine_mode value_mode; | |
5134 | int unsignedp; | |
729a2125 | 5135 | unsigned int align; |
770ae6cc | 5136 | HOST_WIDE_INT total_size; |
ece32014 | 5137 | int alias_set; |
bbf6f052 | 5138 | { |
906c4e36 | 5139 | HOST_WIDE_INT width_mask = 0; |
bbf6f052 | 5140 | |
e9a25f70 JL |
5141 | if (TREE_CODE (exp) == ERROR_MARK) |
5142 | return const0_rtx; | |
5143 | ||
2be6a7e9 RK |
5144 | /* If we have nothing to store, do nothing unless the expression has |
5145 | side-effects. */ | |
5146 | if (bitsize == 0) | |
5147 | return expand_expr (exp, const0_rtx, VOIDmode, 0); | |
5148 | ||
906c4e36 RK |
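/* WIDTH_MASK gets the low BITSIZE bits set, e.g. 7 for a 3-bit
   field; it stays zero when the field is a full host word or wider,
   which the refetch-avoidance code further down checks for.  */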
5149 | if (bitsize < HOST_BITS_PER_WIDE_INT) |
5150 | width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1; | |
bbf6f052 RK |
5151 | |
5152 | /* If we are storing into an unaligned field of an aligned union that is | |
5153 | in a register, we may have the mode of TARGET being an integer mode but | |
5154 | MODE == BLKmode. In that case, get an aligned object whose size and | |
5155 | alignment are the same as TARGET and store TARGET into it (we can avoid | |
5156 | the store if the field being stored is the entire width of TARGET). Then | |
5157 | call ourselves recursively to store the field into a BLKmode version of | |
5158 | that object. Finally, load from the object into TARGET. This is not | |
5159 | very efficient in general, but should only be slightly more expensive | |
5160 | than the otherwise-required unaligned accesses. Perhaps this can be | |
5161 | cleaned up later. */ | |
5162 | ||
5163 | if (mode == BLKmode | |
5164 | && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG)) | |
5165 | { | |
1da68f56 RK |
5166 | rtx object |
5167 | = assign_temp | |
5168 | (build_qualified_type (type_for_mode (GET_MODE (target), 0), | |
5169 | TYPE_QUAL_CONST), | |
5170 | 0, 1, 1); | |
bbf6f052 RK |
5171 | rtx blk_object = copy_rtx (object); |
5172 | ||
5173 | PUT_MODE (blk_object, BLKmode); | |
5174 | ||
8752c357 | 5175 | if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target))) |
bbf6f052 RK |
5176 | emit_move_insn (object, target); |
5177 | ||
5178 | store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, | |
ece32014 | 5179 | align, total_size, alias_set); |
bbf6f052 | 5180 | |
46093b97 RS |
5181 | /* Even though we aren't returning target, we need to |
5182 | give it the updated value. */ | |
bbf6f052 RK |
5183 | emit_move_insn (target, object); |
5184 | ||
46093b97 | 5185 | return blk_object; |
bbf6f052 | 5186 | } |
c3b247b4 JM |
5187 | |
5188 | if (GET_CODE (target) == CONCAT) | |
5189 | { | |
5190 | /* We're storing into a struct containing a single __complex. */ | |
5191 | ||
5192 | if (bitpos != 0) | |
5193 | abort (); | |
5194 | return store_expr (exp, target, 0); | |
5195 | } | |
bbf6f052 RK |
5196 | |
5197 | /* If the structure is in a register or if the component | |
5198 | is a bit field, we cannot use addressing to access it. | |
5199 | Use bit-field techniques or SUBREG to store in it. */ | |
5200 | ||
4fa52007 | 5201 | if (mode == VOIDmode |
6ab06cbb JW |
5202 | || (mode != BLKmode && ! direct_store[(int) mode] |
5203 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
5204 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) | |
4fa52007 | 5205 | || GET_CODE (target) == REG |
c980ac49 | 5206 | || GET_CODE (target) == SUBREG |
ccc98036 RS |
5207 | /* If the field isn't aligned enough to store as an ordinary memref, |
5208 | store it as a bit field. */ | |
e1565e65 | 5209 | || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align) |
19caa751 | 5210 | && (align < GET_MODE_ALIGNMENT (mode) |
14a774a9 | 5211 | || bitpos % GET_MODE_ALIGNMENT (mode))) |
e1565e65 | 5212 | || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align) |
19caa751 | 5213 | && (TYPE_ALIGN (TREE_TYPE (exp)) > align |
14a774a9 RK |
5214 | || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0)) |
5215 | /* If the RHS and field are a constant size and the size of the | |
5216 | RHS isn't the same size as the bitfield, we must use bitfield | |
5217 | operations. */ | |
05bccae2 RK |
5218 | || (bitsize >= 0 |
5219 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | |
5220 | && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)) | |
bbf6f052 | 5221 | { |
906c4e36 | 5222 | rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); |
bbd6cf73 | 5223 | |
ef19912d RK |
5224 | /* If BITSIZE is narrower than the size of the type of EXP |
5225 | we will be narrowing TEMP. Normally, what's wanted are the | |
5226 | low-order bits. However, if EXP's type is a record and this is | |
5227 | big-endian machine, we want the upper BITSIZE bits. */ | |
5228 | if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT | |
5229 | && bitsize < GET_MODE_BITSIZE (GET_MODE (temp)) | |
5230 | && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE) | |
5231 | temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp, | |
5232 | size_int (GET_MODE_BITSIZE (GET_MODE (temp)) | |
5233 | - bitsize), | |
5234 | temp, 1); | |
5235 | ||
bbd6cf73 RK |
5236 | /* Unless MODE is VOIDmode or BLKmode, convert TEMP to |
5237 | MODE. */ | |
5238 | if (mode != VOIDmode && mode != BLKmode | |
5239 | && mode != TYPE_MODE (TREE_TYPE (exp))) | |
5240 | temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1); | |
5241 | ||
a281e72d RK |
5242 | /* If the modes of TARGET and TEMP are both BLKmode, both |
5243 | must be in memory and BITPOS must be aligned on a byte | |
5244 | boundary. If so, we simply do a block copy. */ | |
5245 | if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode) | |
5246 | { | |
19caa751 | 5247 | unsigned int exp_align = expr_align (exp); |
729a2125 | 5248 | |
a281e72d RK |
5249 | if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM |
5250 | || bitpos % BITS_PER_UNIT != 0) | |
5251 | abort (); | |
5252 | ||
0086427c RK |
5253 | target = change_address (target, VOIDmode, |
5254 | plus_constant (XEXP (target, 0), | |
a281e72d RK |
5255 | bitpos / BITS_PER_UNIT)); |
5256 | ||
729a2125 RK |
5257 | /* Make sure that ALIGN is no stricter than the alignment of EXP. */ |
5258 | align = MIN (exp_align, align); | |
c297a34e | 5259 | |
14a774a9 | 5260 | /* Find an alignment that is consistent with the bit position. */ |
19caa751 | 5261 | while ((bitpos % align) != 0) |
14a774a9 RK |
5262 | align >>= 1; |
5263 | ||
a281e72d | 5264 | emit_block_move (target, temp, |
bd5dab53 RK |
5265 | bitsize == -1 ? expr_size (exp) |
5266 | : GEN_INT ((bitsize + BITS_PER_UNIT - 1) | |
5267 | / BITS_PER_UNIT), | |
14a774a9 | 5268 | align); |
a281e72d RK |
5269 | |
5270 | return value_mode == VOIDmode ? const0_rtx : target; | |
5271 | } | |
5272 | ||
bbf6f052 RK |
5273 | /* Store the value in the bitfield. */ |
5274 | store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size); | |
5275 | if (value_mode != VOIDmode) | |
5276 | { | |
5277 | /* The caller wants an rtx for the value. */ | |
5278 | /* If possible, avoid refetching from the bitfield itself. */ | |
5279 | if (width_mask != 0 | |
5280 | && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))) | |
5c4d7cfb | 5281 | { |
9074de27 | 5282 | tree count; |
5c4d7cfb | 5283 | enum machine_mode tmode; |
86a2c12a | 5284 | |
5c4d7cfb | 5285 | if (unsignedp) |
69107307 AO |
5286 | return expand_and (temp, |
5287 | GEN_INT | |
5288 | (trunc_int_for_mode | |
5289 | (width_mask, | |
5290 | GET_MODE (temp) == VOIDmode | |
5291 | ? value_mode | |
5292 | : GET_MODE (temp))), NULL_RTX); | |
5c4d7cfb | 5293 | tmode = GET_MODE (temp); |
86a2c12a RS |
5294 | if (tmode == VOIDmode) |
5295 | tmode = value_mode; | |
5c4d7cfb RS |
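/* For a signed field, shifting left and then right by
   GET_MODE_BITSIZE (tmode) - bitsize sign-extends the low BITSIZE
   bits of TEMP into the full mode.  */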
5296 | count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0); |
5297 | temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0); | |
5298 | return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0); | |
5299 | } | |
bbf6f052 | 5300 | return extract_bit_field (target, bitsize, bitpos, unsignedp, |
906c4e36 RK |
5301 | NULL_RTX, value_mode, 0, align, |
5302 | total_size); | |
bbf6f052 RK |
5303 | } |
5304 | return const0_rtx; | |
5305 | } | |
5306 | else | |
5307 | { | |
5308 | rtx addr = XEXP (target, 0); | |
5309 | rtx to_rtx; | |
5310 | ||
5311 | /* If a value is wanted, it must be the lhs; | |
5312 | so make the address stable for multiple use. */ | |
5313 | ||
5314 | if (value_mode != VOIDmode && GET_CODE (addr) != REG | |
5315 | && ! CONSTANT_ADDRESS_P (addr) | |
5316 | /* A frame-pointer reference is already stable. */ | |
5317 | && ! (GET_CODE (addr) == PLUS | |
5318 | && GET_CODE (XEXP (addr, 1)) == CONST_INT | |
5319 | && (XEXP (addr, 0) == virtual_incoming_args_rtx | |
5320 | || XEXP (addr, 0) == virtual_stack_vars_rtx))) | |
5321 | addr = copy_to_reg (addr); | |
5322 | ||
5323 | /* Now build a reference to just the desired component. */ | |
5324 | ||
effbcc6a RK |
5325 | to_rtx = copy_rtx (change_address (target, mode, |
5326 | plus_constant (addr, | |
5327 | (bitpos | |
5328 | / BITS_PER_UNIT)))); | |
c6df88cb | 5329 | MEM_SET_IN_STRUCT_P (to_rtx, 1); |
0ea834c1 MM |
5330 | /* If the address of the structure varies, then it might be on |
5331 | the stack. And, stack slots may be shared across scopes. | |
5332 | So, two different structures, of different types, can end up | |
5333 | at the same location. We will give the structures alias set | |
5334 | zero; here we must be careful not to give non-zero alias sets | |
5335 | to their fields. */ | |
5336 | if (!rtx_varies_p (addr, /*for_alias=*/0)) | |
5337 | MEM_ALIAS_SET (to_rtx) = alias_set; | |
5338 | else | |
5339 | MEM_ALIAS_SET (to_rtx) = 0; | |
bbf6f052 RK |
5340 | |
5341 | return store_expr (exp, to_rtx, value_mode != VOIDmode); | |
5342 | } | |
5343 | } | |
5344 | \f | |
5345 | /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF, | |
b4e3fabb RK |
5346 | an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these |
5347 | codes and find the ultimate containing object, which we return. | |
bbf6f052 RK |
5348 | |
5349 | We set *PBITSIZE to the size in bits that we want, *PBITPOS to the | |
5350 | bit position, and *PUNSIGNEDP to the signedness of the field. | |
7bb0943f RS |
5351 | If the position of the field is variable, we store a tree |
5352 | giving the variable offset (in units) in *POFFSET. | |
5353 | This offset is in addition to the bit position. | |
5354 | If the position is not variable, we store 0 in *POFFSET. | |
19caa751 | 5355 | We set *PALIGNMENT to the alignment of the address that will be |
839c4796 RK |
5356 | computed. This is the alignment of the thing we return if *POFFSET |
5357 | is zero, but can be less strictly aligned if *POFFSET is nonzero. |
bbf6f052 RK |
5358 | |
5359 | If any of the extraction expressions is volatile, | |
5360 | we store 1 in *PVOLATILEP. Otherwise we don't change that. | |
5361 | ||
5362 | If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it | |
5363 | is a mode that can be used to access the field. In that case, *PBITSIZE | |
e7c33f54 RK |
5364 | is redundant. |
5365 | ||
5366 | If the field describes a variable-sized object, *PMODE is set to | |
5367 | VOIDmode and *PBITSIZE is set to -1. An access cannot be made in | |
839c4796 | 5368 | this case, but the address of the object can be found. */ |
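/* A hypothetical example: for `s.f', where F is a 3-bit bit-field
   placed 2 bits into S, this returns the tree for S with *PBITSIZE
   set to 3, *PBITPOS to 2, *POFFSET to 0 and *PMODE to VOIDmode;
   the exact numbers depend on the layout of S.  */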
bbf6f052 RK |
5369 | |
5370 | tree | |
4969d05d | 5371 | get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, |
839c4796 | 5372 | punsignedp, pvolatilep, palignment) |
bbf6f052 | 5373 | tree exp; |
770ae6cc RK |
5374 | HOST_WIDE_INT *pbitsize; |
5375 | HOST_WIDE_INT *pbitpos; | |
7bb0943f | 5376 | tree *poffset; |
bbf6f052 RK |
5377 | enum machine_mode *pmode; |
5378 | int *punsignedp; | |
5379 | int *pvolatilep; | |
729a2125 | 5380 | unsigned int *palignment; |
bbf6f052 RK |
5381 | { |
5382 | tree size_tree = 0; | |
5383 | enum machine_mode mode = VOIDmode; | |
fed3cef0 | 5384 | tree offset = size_zero_node; |
770ae6cc | 5385 | tree bit_offset = bitsize_zero_node; |
c84e2712 | 5386 | unsigned int alignment = BIGGEST_ALIGNMENT; |
770ae6cc | 5387 | tree tem; |
bbf6f052 | 5388 | |
770ae6cc RK |
5389 | /* First get the mode, signedness, and size. We do this from just the |
5390 | outermost expression. */ | |
bbf6f052 RK |
5391 | if (TREE_CODE (exp) == COMPONENT_REF) |
5392 | { | |
5393 | size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); | |
5394 | if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) | |
5395 | mode = DECL_MODE (TREE_OPERAND (exp, 1)); | |
770ae6cc | 5396 | |
bbf6f052 RK |
5397 | *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1)); |
5398 | } | |
5399 | else if (TREE_CODE (exp) == BIT_FIELD_REF) | |
5400 | { | |
5401 | size_tree = TREE_OPERAND (exp, 1); | |
5402 | *punsignedp = TREE_UNSIGNED (exp); | |
5403 | } | |
5404 | else | |
5405 | { | |
5406 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
770ae6cc RK |
5407 | *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); |
5408 | ||
ab87f8c8 JL |
5409 | if (mode == BLKmode) |
5410 | size_tree = TYPE_SIZE (TREE_TYPE (exp)); | |
770ae6cc RK |
5411 | else |
5412 | *pbitsize = GET_MODE_BITSIZE (mode); | |
bbf6f052 | 5413 | } |
3a94c984 | 5414 | |
770ae6cc | 5415 | if (size_tree != 0) |
bbf6f052 | 5416 | { |
770ae6cc | 5417 | if (! host_integerp (size_tree, 1)) |
e7c33f54 RK |
5418 | mode = BLKmode, *pbitsize = -1; |
5419 | else | |
770ae6cc | 5420 | *pbitsize = tree_low_cst (size_tree, 1); |
bbf6f052 RK |
5421 | } |
5422 | ||
5423 | /* Compute cumulative bit-offset for nested component-refs and array-refs, | |
5424 | and find the ultimate containing object. */ | |
bbf6f052 RK |
5425 | while (1) |
5426 | { | |
770ae6cc RK |
5427 | if (TREE_CODE (exp) == BIT_FIELD_REF) |
5428 | bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2)); | |
5429 | else if (TREE_CODE (exp) == COMPONENT_REF) | |
bbf6f052 | 5430 | { |
770ae6cc RK |
5431 | tree field = TREE_OPERAND (exp, 1); |
5432 | tree this_offset = DECL_FIELD_OFFSET (field); | |
bbf6f052 | 5433 | |
e7f3c83f RK |
5434 | /* If this field hasn't been filled in yet, don't go |
5435 | past it. This should only happen when folding expressions | |
5436 | made during type construction. */ | |
770ae6cc | 5437 | if (this_offset == 0) |
e7f3c83f | 5438 | break; |
770ae6cc RK |
5439 | else if (! TREE_CONSTANT (this_offset) |
5440 | && contains_placeholder_p (this_offset)) | |
5441 | this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp); | |
e7f3c83f | 5442 | |
7156dead | 5443 | offset = size_binop (PLUS_EXPR, offset, this_offset); |
770ae6cc RK |
5444 | bit_offset = size_binop (PLUS_EXPR, bit_offset, |
5445 | DECL_FIELD_BIT_OFFSET (field)); | |
e6d8c385 | 5446 | |
770ae6cc RK |
5447 | if (! host_integerp (offset, 0)) |
5448 | alignment = MIN (alignment, DECL_OFFSET_ALIGN (field)); | |
bbf6f052 | 5449 | } |
7156dead | 5450 | |
b4e3fabb RK |
5451 | else if (TREE_CODE (exp) == ARRAY_REF |
5452 | || TREE_CODE (exp) == ARRAY_RANGE_REF) | |
bbf6f052 | 5453 | { |
742920c7 | 5454 | tree index = TREE_OPERAND (exp, 1); |
b4e3fabb RK |
5455 | tree array = TREE_OPERAND (exp, 0); |
5456 | tree domain = TYPE_DOMAIN (TREE_TYPE (array)); | |
770ae6cc | 5457 | tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0); |
b4e3fabb | 5458 | tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array))); |
742920c7 | 5459 | |
770ae6cc RK |
5460 | /* We assume all arrays have sizes that are a multiple of a byte. |
5461 | First subtract the lower bound, if any, in the type of the | |
5462 | index, then convert to sizetype and multiply by the size of the | |
5463 | array element. */ | |
5464 | if (low_bound != 0 && ! integer_zerop (low_bound)) | |
5465 | index = fold (build (MINUS_EXPR, TREE_TYPE (index), | |
5466 | index, low_bound)); | |
f8dac6eb | 5467 | |
7156dead RK |
5468 | /* If the index has a self-referential type, pass it to a |
5469 | WITH_RECORD_EXPR; if the component size is self-referential, |
5470 | pass our component to one. */ |
770ae6cc RK |
5471 | if (! TREE_CONSTANT (index) |
5472 | && contains_placeholder_p (index)) | |
5473 | index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp); | |
7156dead RK |
5474 | if (! TREE_CONSTANT (unit_size) |
5475 | && contains_placeholder_p (unit_size)) | |
b4e3fabb | 5476 | unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array); |
742920c7 | 5477 | |
770ae6cc RK |
5478 | offset = size_binop (PLUS_EXPR, offset, |
5479 | size_binop (MULT_EXPR, | |
5480 | convert (sizetype, index), | |
7156dead | 5481 | unit_size)); |
bbf6f052 | 5482 | } |
7156dead | 5483 | |
bbf6f052 RK |
5484 | else if (TREE_CODE (exp) != NON_LVALUE_EXPR |
5485 | && ! ((TREE_CODE (exp) == NOP_EXPR | |
5486 | || TREE_CODE (exp) == CONVERT_EXPR) | |
5487 | && (TYPE_MODE (TREE_TYPE (exp)) | |
5488 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))) | |
5489 | break; | |
7bb0943f RS |
5490 | |
5491 | /* If any reference in the chain is volatile, the effect is volatile. */ | |
5492 | if (TREE_THIS_VOLATILE (exp)) | |
5493 | *pvolatilep = 1; | |
839c4796 RK |
5494 | |
5495 | /* If the offset is non-constant already, then we can't assume any | |
5496 | alignment more than the alignment here. */ | |
770ae6cc | 5497 | if (! TREE_CONSTANT (offset)) |
839c4796 RK |
5498 | alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp))); |
5499 | ||
bbf6f052 RK |
5500 | exp = TREE_OPERAND (exp, 0); |
5501 | } | |
5502 | ||
2f939d94 | 5503 | if (DECL_P (exp)) |
839c4796 | 5504 | alignment = MIN (alignment, DECL_ALIGN (exp)); |
9293498f | 5505 | else if (TREE_TYPE (exp) != 0) |
839c4796 RK |
5506 | alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp))); |
5507 | ||
770ae6cc RK |
5508 | /* If OFFSET is constant, see if we can return the whole thing as a |
5509 | constant bit position. Otherwise, split it up. */ | |
5510 | if (host_integerp (offset, 0) | |
5511 | && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset), | |
5512 | bitsize_unit_node)) | |
5513 | && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset)) | |
5514 | && host_integerp (tem, 0)) | |
5515 | *pbitpos = tree_low_cst (tem, 0), *poffset = 0; | |
5516 | else | |
5517 | *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset; | |
b50d17a1 | 5518 | |
bbf6f052 | 5519 | *pmode = mode; |
19caa751 | 5520 | *palignment = alignment; |
bbf6f052 RK |
5521 | return exp; |
5522 | } | |
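
#if 0
/* Illustrative sketch only -- not part of the compiler.  Assuming the
   routine above is the tail of get_inner_reference (as its out-parameters
   suggest), a caller decomposes a reference like `s.f' into its
   containing object plus offsets roughly as follows; names beginning
   with `my_' are made up for the example.  */

static rtx
my_decompose_ref (exp)
     tree exp;
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep;
  unsigned int alignment;
  tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                   &mode1, &unsignedp, &volatilep,
                                   &alignment);

  /* BASE is the ultimate containing object; BITPOS is the constant bit
     position of the referenced piece within it, and OFFSET, if nonzero,
     is a tree for the variable part of the displacement.  */
  return expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
#endif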

/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
         MEMORY_USE_DONT, because they are modifiers to a call of
         expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
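
#if 0
/* Illustrative sketch only -- not part of the compiler.  The mapping
   above is what lets the memory-checking instrumentation decide whether
   an access should be recorded as a read or a write:  */

static void
my_show_memory_usage_mapping ()
{
  /* A normal read expansion is recorded as MEMORY_USE_RO ...  */
  if (get_memory_usage_from_modifier (EXPAND_NORMAL) != MEMORY_USE_RO)
    abort ();

  /* ... while expanding the destination of a store uses
     EXPAND_MEMORY_USE_WO and is recorded as a write.  */
  if (get_memory_usage_from_modifier (EXPAND_MEMORY_USE_WO)
      != MEMORY_USE_WO)
    abort ();
}
#endif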
\f
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = get_subtarget (target);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
                          force_operand (op2, NULL_RTX),
                          target, 1);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
        {
          binoptab = add_optab;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_binop (GET_MODE (value), binoptab,
                                   XEXP (XEXP (value, 0), 0), op2,
                                   subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_binop (GET_MODE (value), binoptab, temp,
                               force_operand (XEXP (XEXP (value, 0), 1), 0),
                               target, 0, OPTAB_LIB_WIDEN);
        }

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
                           force_operand (op2, NULL_RTX),
                           target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
         because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
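
#if 0
/* Illustrative sketch only -- not part of the compiler.  A typical use
   of force_operand is to turn address arithmetic built directly as a
   bare RTL expression into insns plus a register:  */

static rtx
my_force_scaled_address (base, index)
     rtx base, index;
{
  /* base + index * 4, built as an rtx expression tree.  */
  rtx addr = gen_rtx_PLUS (Pmode, base,
                           gen_rtx_MULT (Pmode, index, GEN_INT (4)));

  /* Emit the multiply and add as insns; the result is a pseudo
     register (or a simpler rtx) usable as an operand.  */
  return force_operand (addr, NULL_RTX);
}
#endif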
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
        rtx target
          = assign_temp (build_qualified_type (part_type,
                                               (TYPE_QUALS (part_type)
                                                | TYPE_QUAL_CONST)),
                         0, 1, 1);

        if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
          target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
        parts = tree_cons (to_be_saved,
                           build (RTL_EXPR, part_type, NULL_TREE,
                                  (tree) target),
                           parts);
        store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else if (TREE_PURPOSE (tail))
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
        parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
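
/* Illustrative note -- not part of the compiler.  Assuming LIST comes
   from a type's non-copied-parts information, an assignment `a = b'
   must not clobber the non-copied fields of A: save_noncopied_parts
   pairs each such field, as a COMPONENT_REF of LHS, with an RTL_EXPR
   temporary holding its old value so it can be restored after the
   block copy, while init_noncopied_parts instead pairs each field
   with the initial value the type specifies for it.  */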

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
                 != INTEGER_CST)
          && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (GET_CODE (x) == MEM
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
        TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
        return ((TREE_VALUE (exp) == 0
                 || safe_from_p (x, TREE_VALUE (exp), 0))
                && (TREE_CHAIN (exp) == 0
                    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
        return 1;	/* An already-visited SAVE_EXPR?  */
      else
        return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
              && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          return (staticp (TREE_OPERAND (exp, 0))
                  || TREE_STATIC (exp)
                  || safe_from_p (x, TREE_OPERAND (exp, 0), 0));

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
                                        get_alias_set (exp)))
            return 0;
          break;

        case CALL_EXPR:
          /* Assume that the call will clobber all hard registers and
             all of memory.  */
          if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
              || GET_CODE (x) == MEM)
            return 0;
          break;

        case RTL_EXPR:
          /* If a sequence exists, we would have to scan every instruction
             in the sequence to see if it was safe.  This is probably not
             worthwhile.  */
          if (RTL_EXPR_SEQUENCE (exp))
            return 0;

          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case CLEANUP_POINT_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          if (exp_rtl)
            break;

          /* If we've already scanned this, don't do it again.  Otherwise,
             show we've scanned it and record for clearing the flag if we're
             going on.  */
          if (TREE_PRIVATE (exp))
            return 1;

          TREE_PRIVATE (exp) = 1;
          if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            {
              TREE_PRIVATE (exp) = 0;
              return 0;
            }

          save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
          return 1;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

        case METHOD_CALL_EXPR:
          /* This takes a rtx argument, but shouldn't appear here.  */
          abort ();

        default:
          break;
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;

      /* If this is a language-specific tree code, it may require
         special handling.  */
      if ((unsigned int) TREE_CODE (exp)
          >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
          && lang_safe_from_p
          && !(*lang_safe_from_p) (x, exp))
        return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && true_dependence (exp_rtl, GET_MODE (x), x,
                                        rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
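
#if 0
/* Illustrative sketch only -- not part of the compiler.  A caller that
   is about to store into TARGET can use safe_from_p to decide whether
   the right-hand side may be evaluated directly into it; MY_RHS is a
   made-up name for the right-hand-side tree.  */

static rtx
my_expand_rhs_into (target, my_rhs)
     rtx target;
     tree my_rhs;
{
  /* Evaluate into TARGET only if MY_RHS cannot reference it.  Since
     safe_from_p errs toward returning zero, the fallback merely costs
     an extra temporary.  */
  if (safe_from_p (target, my_rhs, 1))
    return expand_expr (my_rhs, target, GET_MODE (target), EXPAND_NORMAL);

  return expand_expr (my_rhs, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
}
#endif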

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }
}
#endif
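
/* Illustrative note -- not part of the compiler.  A target that cannot
   handle integer arithmetic wider than one word might define, say,
   `#define MAX_INTEGER_COMPUTATION_MODE SImode' in its target header;
   a DImode addition in the source would then trip the checks above and
   report "unsupported wide integer operation" rather than emitting RTL
   the backend cannot match.  */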
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode;
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == REFERENCE_EXPR
                 || code == COND_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
          || code == INDIRECT_REF || code == BUFFER_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                       ro_modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
                       ro_modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                       ro_modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
                       ro_modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
                       ro_modifier);
          return const0_rtx;
        }

      target = 0;
    }
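
  /* Illustrative note -- not part of the compiler.  For a statement
     expression like `(void) (a < b);' IGNORE is set above: both
     operands are expanded with const0_rtx as the target purely for
     their side effects, no comparison insns are emitted, and
     const0_rtx is returned.  */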

#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            p->expr->x_forced_labels
              = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
                                   p->expr->x_forced_labels);
          }
        else
          {
            if (modifier == EXPAND_INITIALIZER)
              forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                                 label_rtx (exp),
                                                 forced_labels);
          }

        temp = gen_rtx_MEM (FUNCTION_MODE,
                            gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
        }

      /* Although static-storage variables start off initialized, according to
         ANSI C, a memcpy could overwrite them with uninitialized values.  So
         we check them too.  This also lets us check for read-only variables
         accessed via a non-const declaration, in case it won't be detected
         any other way (e.g., in an embedded system or OS kernel without
         memory protection).

         Aggregates are not checked here; they're handled elsewhere.  */
      if (cfun && current_function_check_memory_usage
          && code == VAR_DECL
          && GET_CODE (DECL_RTL (exp)) == MEM
          && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
        {
          enum memory_use_mode memory_usage;
          memory_usage = get_memory_usage_from_modifier (modifier);

          in_check_memory_usage = 1;
          if (memory_usage != MEMORY_USE_DONT)
            emit_library_call (chkr_check_addr_libfunc,
                               LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                               XEXP (DECL_RTL (exp), 0), Pmode,
                               GEN_INT (int_size_in_bytes (type)),
                               TYPE_MODE (sizetype),
                               GEN_INT (memory_usage),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = change_address (addr, Pmode,
                                   fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = change_address (DECL_RTL (exp), mode, addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = change_address (DECL_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);

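      /* Illustrative note -- not part of the compiler.  On a target
         whose PROMOTE_MODE widens QImode locals to SImode, DECL_RTL of
         a `char' variable is an SImode pseudo; the code above then
         returns a QImode SUBREG of that register with
         SUBREG_PROMOTED_VAR_P set, telling callers the value is
         already sign- or zero-extended.  */
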
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        const char *saved_input_filename = input_filename;
        int saved_lineno = lineno;
        input_filename = EXPR_WFL_FILENAME (exp);
        lineno = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, lineno);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (build_qualified_type (type,
                                                      (TYPE_QUALS (type)
                                                       | TYPE_QUAL_CONST)),
                                3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);

          TREE_USED (exp) = 1;
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);

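      /* Illustrative note -- not part of the compiler.  A SAVE_EXPR
         wraps an operand that must be evaluated at most once: the
         first expansion above computes it into SAVE_EXPR_RTL, and
         every later expansion of the same node simply returns that
         cached rtx, possibly as a promoted SUBREG.  */
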
    case UNSAVE_EXPR:
      {
        rtx temp;
        temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
        return temp;
      }

    case PLACEHOLDER_EXPR:
      {
        tree placeholder_expr;

        /* If there is an object on the head of the placeholder list,
           see if some object in it is of type TYPE or a pointer to it.  For
           further information, see tree.def.  */
        for (placeholder_expr = placeholder_list;
             placeholder_expr != 0;
             placeholder_expr = TREE_CHAIN (placeholder_expr))
          {
            tree need_type = TYPE_MAIN_VARIANT (type);
            tree object = 0;
            tree old_list = placeholder_list;
            tree elt;

            /* Find the outermost reference that is of the type we want.
               If none, see if any object has a type that is a pointer to
               the type we want.  */
            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
                object = elt;

            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (POINTER_TYPE_P (TREE_TYPE (elt))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                      == need_type))
                object = build1 (INDIRECT_REF, need_type, elt);

            if (object != 0)
              {
                /* Expand this object skipping the list entries before
                   it was found in case it is also a PLACEHOLDER_EXPR.
                   In that case, we want to translate it using subsequent
                   entries.  */
                placeholder_list = TREE_CHAIN (placeholder_expr);
                temp = expand_expr (object, original_target, tmode,
                                    ro_modifier);
                placeholder_list = old_list;
                return temp;
              }
          }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

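      /* Illustrative note -- not part of the compiler.  Languages with
         self-referential types (e.g. records whose field sizes depend
         on the record object itself) express such a size as
         (WITH_RECORD_EXPR size record): expanding it pushes RECORD
         onto placeholder_list, so any PLACEHOLDER_EXPR inside SIZE,
         handled in the case above, resolves to that record object.  */
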
70e6ca43 APB |
6623 | case GOTO_EXPR: |
6624 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL) | |
6625 | expand_goto (TREE_OPERAND (exp, 0)); | |
6626 | else | |
6627 | expand_computed_goto (TREE_OPERAND (exp, 0)); | |
6628 | return const0_rtx; | |
6629 | ||
bbf6f052 | 6630 | case EXIT_EXPR: |
df4ae160 | 6631 | expand_exit_loop_if_false (NULL, |
e44842fe | 6632 | invert_truthvalue (TREE_OPERAND (exp, 0))); |
bbf6f052 RK |
6633 | return const0_rtx; |
6634 | ||
f42e28dd APB |
6635 | case LABELED_BLOCK_EXPR: |
6636 | if (LABELED_BLOCK_BODY (exp)) | |
6637 | expand_expr_stmt (LABELED_BLOCK_BODY (exp)); | |
0a5fee32 PB |
6638 | /* Should perhaps use expand_label, but this is simpler and safer. */ |
6639 | do_pending_stack_adjust (); | |
f42e28dd APB |
6640 | emit_label (label_rtx (LABELED_BLOCK_LABEL (exp))); |
6641 | return const0_rtx; | |
6642 | ||
6643 | case EXIT_BLOCK_EXPR: | |
6644 | if (EXIT_BLOCK_RETURN (exp)) | |
ab87f8c8 | 6645 | sorry ("returned value in block_exit_expr"); |
f42e28dd APB |
6646 | expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp))); |
6647 | return const0_rtx; | |
6648 | ||
bbf6f052 | 6649 | case LOOP_EXPR: |
0088fcb1 | 6650 | push_temp_slots (); |
bbf6f052 RK |
6651 | expand_start_loop (1); |
6652 | expand_expr_stmt (TREE_OPERAND (exp, 0)); | |
6653 | expand_end_loop (); | |
0088fcb1 | 6654 | pop_temp_slots (); |
bbf6f052 RK |
6655 | |
6656 | return const0_rtx; | |
6657 | ||
6658 | case BIND_EXPR: | |
6659 | { | |
6660 | tree vars = TREE_OPERAND (exp, 0); | |
6661 | int vars_need_expansion = 0; | |
6662 | ||
6663 | /* Need to open a binding contour here because | |
e976b8b2 | 6664 | if there are any cleanups they must be contained here. */ |
8e91754e | 6665 | expand_start_bindings (2); |
bbf6f052 | 6666 | |
2df53c0b RS |
6667 | /* Mark the corresponding BLOCK for output in its proper place. */ |
6668 | if (TREE_OPERAND (exp, 2) != 0 | |
6669 | && ! TREE_USED (TREE_OPERAND (exp, 2))) | |
6670 | insert_block (TREE_OPERAND (exp, 2)); | |
bbf6f052 RK |
6671 | |
6672 | /* If VARS have not yet been expanded, expand them now. */ | |
6673 | while (vars) | |
6674 | { | |
19e7881c | 6675 | if (!DECL_RTL_SET_P (vars)) |
bbf6f052 RK |
6676 | { |
6677 | vars_need_expansion = 1; | |
6678 | expand_decl (vars); | |
6679 | } | |
6680 | expand_decl_init (vars); | |
6681 | vars = TREE_CHAIN (vars); | |
6682 | } | |
6683 | ||
921b3427 | 6684 | temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier); |
bbf6f052 RK |
6685 | |
6686 | expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0); | |
6687 | ||
6688 | return temp; | |
6689 | } | |
6690 | ||
6691 | case RTL_EXPR: | |
83b853c9 JM |
6692 | if (RTL_EXPR_SEQUENCE (exp)) |
6693 | { | |
6694 | if (RTL_EXPR_SEQUENCE (exp) == const0_rtx) | |
6695 | abort (); | |
6696 | emit_insns (RTL_EXPR_SEQUENCE (exp)); | |
6697 | RTL_EXPR_SEQUENCE (exp) = const0_rtx; | |
6698 | } | |
64dc53f3 MM |
6699 | preserve_rtl_expr_result (RTL_EXPR_RTL (exp)); |
6700 | free_temps_for_rtl_expr (exp); | |
bbf6f052 RK |
6701 | return RTL_EXPR_RTL (exp); |
6702 | ||
6703 | case CONSTRUCTOR: | |
dd27116b RK |
6704 | /* If we don't need the result, just ensure we evaluate any |
6705 | subexpressions. */ | |
6706 | if (ignore) | |
6707 | { | |
6708 | tree elt; | |
6709 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) | |
921b3427 RK |
6710 | expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, |
6711 | EXPAND_MEMORY_USE_BAD); | |
dd27116b RK |
6712 | return const0_rtx; |
6713 | } | |
3207b172 | 6714 | |
4af3895e JVA |
6715 | /* All elts simple constants => refer to a constant in memory. But |
6716 | if this is a non-BLKmode mode, let it store a field at a time | |
6717 | since that should make a CONST_INT or CONST_DOUBLE when we | |
3207b172 | 6718 | fold. Likewise, if we have a target we can use, it is best to |
d720b9d1 RK |
6719 | store directly into the target unless the type is large enough |
6720 | that memcpy will be used. If we are making an initializer and | |
3207b172 | 6721 | all operands are constant, put it in memory as well. */ |
dd27116b | 6722 | else if ((TREE_STATIC (exp) |
3207b172 | 6723 | && ((mode == BLKmode |
e5e809f4 | 6724 | && ! (target != 0 && safe_from_p (target, exp, 1))) |
d720b9d1 | 6725 | || TREE_ADDRESSABLE (exp) |
19caa751 | 6726 | || (host_integerp (TYPE_SIZE_UNIT (type), 1) |
3a94c984 | 6727 | && (! MOVE_BY_PIECES_P |
19caa751 RK |
6728 | (tree_low_cst (TYPE_SIZE_UNIT (type), 1), |
6729 | TYPE_ALIGN (type))) | |
9de08200 | 6730 | && ! mostly_zeros_p (exp)))) |
dd27116b | 6731 | || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp))) |
bbf6f052 | 6732 | { |
bd7cf17e | 6733 | rtx constructor = output_constant_def (exp, 1); |
19caa751 | 6734 | |
b552441b RS |
6735 | if (modifier != EXPAND_CONST_ADDRESS |
6736 | && modifier != EXPAND_INITIALIZER | |
6737 | && modifier != EXPAND_SUM | |
d6a5ac33 RK |
6738 | && (! memory_address_p (GET_MODE (constructor), |
6739 | XEXP (constructor, 0)) | |
6740 | || (flag_force_addr | |
6741 | && GET_CODE (XEXP (constructor, 0)) != REG))) | |
bbf6f052 RK |
6742 | constructor = change_address (constructor, VOIDmode, |
6743 | XEXP (constructor, 0)); | |
6744 | return constructor; | |
6745 | } | |
bbf6f052 RK |
6746 | else |
6747 | { | |
e9ac02a6 JW |
6748 | /* Handle calls that pass values in multiple non-contiguous |
6749 | locations. The Irix 6 ABI has examples of this. */ | |
e5e809f4 | 6750 | if (target == 0 || ! safe_from_p (target, exp, 1) |
e9ac02a6 | 6751 | || GET_CODE (target) == PARALLEL) |
1da68f56 RK |
6752 | target |
6753 | = assign_temp (build_qualified_type (type, | |
6754 | (TYPE_QUALS (type) | |
6755 | | (TREE_READONLY (exp) | |
6756 | * TYPE_QUAL_CONST))), | |
6757 | TREE_ADDRESSABLE (exp), 1, 1); | |
07604beb | 6758 | |
b7010412 RK |
6759 | store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0, |
6760 | int_size_in_bytes (TREE_TYPE (exp))); | |
bbf6f052 RK |
6761 | return target; |
6762 | } | |
6763 | ||
6764 | case INDIRECT_REF: | |
6765 | { | |
6766 | tree exp1 = TREE_OPERAND (exp, 0); | |
7581a30f | 6767 | tree index; |
3a94c984 KH |
6768 | tree string = string_constant (exp1, &index); |
6769 | ||
06eaa86f | 6770 | /* Try to optimize reads from const strings. */ |
7581a30f JW |
6771 | if (string |
6772 | && TREE_CODE (string) == STRING_CST | |
6773 | && TREE_CODE (index) == INTEGER_CST | |
05bccae2 | 6774 | && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0 |
7581a30f | 6775 | && GET_MODE_CLASS (mode) == MODE_INT |
06eaa86f JW |
6776 | && GET_MODE_SIZE (mode) == 1 |
6777 | && modifier != EXPAND_MEMORY_USE_WO) | |
05bccae2 RK |
6778 | return |
6779 | GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]); | |
bbf6f052 | 6780 | |
405f0da6 JW |
6781 | op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM); |
6782 | op0 = memory_address (mode, op0); | |
8c8a8e34 | 6783 | |
01d939e8 | 6784 | if (cfun && current_function_check_memory_usage |
49ad7cfa | 6785 | && ! AGGREGATE_TYPE_P (TREE_TYPE (exp))) |
921b3427 RK |
6786 | { |
6787 | enum memory_use_mode memory_usage; | |
6788 | memory_usage = get_memory_usage_from_modifier (modifier); | |
6789 | ||
6790 | if (memory_usage != MEMORY_USE_DONT) | |
c85f7c16 JL |
6791 | { |
6792 | in_check_memory_usage = 1; | |
ebb1b59a BS |
6793 | emit_library_call (chkr_check_addr_libfunc, |
6794 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0, | |
6795 | Pmode, GEN_INT (int_size_in_bytes (type)), | |
c85f7c16 JL |
6796 | TYPE_MODE (sizetype), |
6797 | GEN_INT (memory_usage), | |
6798 | TYPE_MODE (integer_type_node)); | |
6799 | in_check_memory_usage = 0; | |
6800 | } | |
921b3427 RK |
6801 | } |
6802 | ||
38a448ca | 6803 | temp = gen_rtx_MEM (mode, op0); |
3bdf5ad1 | 6804 | set_mem_attributes (temp, exp, 0); |
1125706f RK |
6805 | |
6806 | /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY | |
6807 | here, because, in C and C++, the fact that a location is accessed | |
6808 | through a pointer to const does not mean that the value there can | |
6809 | never change. Languages where it can never change should | |
6810 | also set TREE_STATIC. */ | |
5cb7a25a | 6811 | RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp); |
14a774a9 RK |
6812 | |
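/* Editor's aside -- a minimal example of the hazard the comment above
   describes: a pointer-to-const access path does not make the object
   immutable, so RTX_UNCHANGING_P must not be set from TREE_READONLY
   alone.  */

int demo (void)
{
  int x = 1;
  const int *p = &x;   /* read-only *path* to x...              */
  x = 2;               /* ...but x itself may still be assigned */
  return *p;           /* reads 2 */
}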
6813 | /* If we are writing to this object and its type is a record with | |
6814 | readonly fields, we must mark it as readonly so it will | |
6815 | conflict with readonly references to those fields. */ | |
1da68f56 | 6816 | if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type)) |
14a774a9 RK |
6817 | RTX_UNCHANGING_P (temp) = 1; |
6818 | ||
8c8a8e34 JW |
6819 | return temp; |
6820 | } | |
bbf6f052 RK |
6821 | |
6822 | case ARRAY_REF: | |
742920c7 RK |
6823 | if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE) |
6824 | abort (); | |
bbf6f052 | 6825 | |
bbf6f052 | 6826 | { |
742920c7 RK |
6827 | tree array = TREE_OPERAND (exp, 0); |
6828 | tree domain = TYPE_DOMAIN (TREE_TYPE (array)); | |
6829 | tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node; | |
fed3cef0 | 6830 | tree index = convert (sizetype, TREE_OPERAND (exp, 1)); |
08293add | 6831 | HOST_WIDE_INT i; |
b50d17a1 | 6832 | |
d4c89139 PB |
6833 | /* Optimize the special-case of a zero lower bound. |
6834 | ||
6835 | We convert the low_bound to sizetype to avoid some problems | |
6836 | with constant folding. (E.g. suppose the lower bound is 1, | |
6837 | and its mode is QI. Without the conversion, (ARRAY | |
6838 | +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) | |
fed3cef0 | 6839 | +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ |
d4c89139 | 6840 | |
742920c7 | 6841 | if (! integer_zerop (low_bound)) |
fed3cef0 | 6842 | index = size_diffop (index, convert (sizetype, low_bound)); |
742920c7 | 6843 | |
742920c7 | 6844 | /* Fold an expression like: "foo"[2]. |
ad2e7dd0 RK |
6845 | This is not done in fold so it won't happen inside & (ADDR_EXPR). |
6846 | Don't fold if this is for wide characters since it's too | |
6847 | difficult to do correctly and this is a very rare case. */ | |
742920c7 | 6848 | |
cb5fa0f8 RK |
6849 | if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER |
6850 | && TREE_CODE (array) == STRING_CST | |
742920c7 | 6851 | && TREE_CODE (index) == INTEGER_CST |
05bccae2 | 6852 | && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0 |
ad2e7dd0 RK |
6853 | && GET_MODE_CLASS (mode) == MODE_INT |
6854 | && GET_MODE_SIZE (mode) == 1) | |
05bccae2 RK |
6855 | return |
6856 | GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]); | |
bbf6f052 | 6857 | |
742920c7 RK |
6858 | /* If this is a constant index into a constant array, |
6859 | just get the value from the array. Handle both the cases when | |
6860 | we have an explicit constructor and when our operand is a variable | |
6861 | that was declared const. */ | |
4af3895e | 6862 | |
cb5fa0f8 RK |
6863 | if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER |
6864 | && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array) | |
05bccae2 | 6865 | && TREE_CODE (index) == INTEGER_CST |
3a94c984 | 6866 | && 0 > compare_tree_int (index, |
05bccae2 RK |
6867 | list_length (CONSTRUCTOR_ELTS |
6868 | (TREE_OPERAND (exp, 0))))) | |
742920c7 | 6869 | { |
05bccae2 RK |
6870 | tree elem; |
6871 | ||
6872 | for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)), | |
6873 | i = TREE_INT_CST_LOW (index); | |
6874 | elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem)) | |
6875 | ; | |
6876 | ||
6877 | if (elem) | |
6878 | return expand_expr (fold (TREE_VALUE (elem)), target, | |
6879 | tmode, ro_modifier); | |
742920c7 | 6880 | } |
3a94c984 | 6881 | |
742920c7 | 6882 | else if (optimize >= 1 |
cb5fa0f8 RK |
6883 | && modifier != EXPAND_CONST_ADDRESS |
6884 | && modifier != EXPAND_INITIALIZER | |
742920c7 RK |
6885 | && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array) |
6886 | && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array) | |
6887 | && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK) | |
6888 | { | |
08293add | 6889 | if (TREE_CODE (index) == INTEGER_CST) |
742920c7 RK |
6890 | { |
6891 | tree init = DECL_INITIAL (array); | |
6892 | ||
742920c7 RK |
6893 | if (TREE_CODE (init) == CONSTRUCTOR) |
6894 | { | |
665f2503 | 6895 | tree elem; |
742920c7 | 6896 | |
05bccae2 | 6897 | for (elem = CONSTRUCTOR_ELTS (init); |
5cb1bea4 JM |
6898 | (elem |
6899 | && !tree_int_cst_equal (TREE_PURPOSE (elem), index)); | |
05bccae2 RK |
6900 | elem = TREE_CHAIN (elem)) |
6901 | ; | |
6902 | ||
e69decfd | 6903 | if (elem && !TREE_SIDE_EFFECTS (elem)) |
742920c7 | 6904 | return expand_expr (fold (TREE_VALUE (elem)), target, |
921b3427 | 6905 | tmode, ro_modifier); |
742920c7 RK |
6906 | } |
6907 | else if (TREE_CODE (init) == STRING_CST | |
05bccae2 RK |
6908 | && 0 > compare_tree_int (index, |
6909 | TREE_STRING_LENGTH (init))) | |
5c80f6e6 JJ |
6910 | { |
6911 | tree type = TREE_TYPE (TREE_TYPE (init)); | |
6912 | enum machine_mode mode = TYPE_MODE (type); | |
6913 | ||
6914 | if (GET_MODE_CLASS (mode) == MODE_INT | |
6915 | && GET_MODE_SIZE (mode) == 1) | |
6916 | return (GEN_INT | |
6917 | (TREE_STRING_POINTER | |
6918 | (init)[TREE_INT_CST_LOW (index)])); | |
6919 | } | |
742920c7 RK |
6920 | } |
6921 | } | |
6922 | } | |
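/* Editor's aside -- a hedged sketch of the source pattern the two
   constant-folding branches above match: a constant index into an
   object declared const with a known initializer.  With optimization,
   the load is replaced by the constant itself.  */

static const int squares[4] = { 0, 1, 4, 9 };

int nine (void)
{
  return squares[3];   /* foldable to the constant 9 */
}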
3a94c984 | 6923 | /* Fall through. */ |
bbf6f052 RK |
6924 | |
6925 | case COMPONENT_REF: | |
6926 | case BIT_FIELD_REF: | |
b4e3fabb | 6927 | case ARRAY_RANGE_REF: |
4af3895e | 6928 | /* If the operand is a CONSTRUCTOR, we can just extract the |
7a0b7b9a RK |
6929 | appropriate field if it is present. Don't do this if we have |
6930 | already written the data since we want to refer to that copy | |
6931 | and varasm.c assumes that's what we'll do. */ | |
b4e3fabb | 6932 | if (code == COMPONENT_REF |
7a0b7b9a RK |
6933 | && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR |
6934 | && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0) | |
4af3895e JVA |
6935 | { |
6936 | tree elt; | |
6937 | ||
6938 | for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt; | |
6939 | elt = TREE_CHAIN (elt)) | |
86b5812c RK |
6940 | if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1) |
6941 | /* We can normally use the value of the field in the | |
6942 | CONSTRUCTOR. However, if this is a bitfield in | |
6943 | an integral mode that we can fit in a HOST_WIDE_INT, | |
6944 | we must mask only the number of bits in the bitfield, | |
6945 | since this is done implicitly by the constructor. If | |
6946 | the bitfield does not meet either of those conditions, | |
6947 | we can't do this optimization. */ | |
6948 | && (! DECL_BIT_FIELD (TREE_PURPOSE (elt)) | |
6949 | || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt))) | |
6950 | == MODE_INT) | |
6951 | && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt))) | |
6952 | <= HOST_BITS_PER_WIDE_INT)))) | |
6953 | { | |
3a94c984 | 6954 | op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier); |
86b5812c RK |
6955 | if (DECL_BIT_FIELD (TREE_PURPOSE (elt))) |
6956 | { | |
9df2c88c RK |
6957 | HOST_WIDE_INT bitsize |
6958 | = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt))); | |
86b5812c RK |
6959 | |
6960 | if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt)))) | |
6961 | { | |
6962 | op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1); | |
6963 | op0 = expand_and (op0, op1, target); | |
6964 | } | |
6965 | else | |
6966 | { | |
e5e809f4 JL |
6967 | enum machine_mode imode |
6968 | = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt))); | |
86b5812c | 6969 | tree count |
e5e809f4 JL |
6970 | = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, |
6971 | 0); | |
86b5812c RK |
6972 | |
6973 | op0 = expand_shift (LSHIFT_EXPR, imode, op0, count, | |
6974 | target, 0); | |
6975 | op0 = expand_shift (RSHIFT_EXPR, imode, op0, count, | |
6976 | target, 0); | |
6977 | } | |
6978 | } | |
6979 | ||
6980 | return op0; | |
6981 | } | |
4af3895e JVA |
6982 | } |
6983 | ||
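/* Editor's aside -- a hedged sketch of the bitfield adjustment above.
   An unsigned field is masked to BITSIZE bits; a signed field is
   shifted left and then arithmetically right, as below.  Names are
   mine, and an arithmetic right shift of a negative int is assumed,
   as on mainstream targets.  */

static int extract_signed_field (unsigned int word, int bitsize)
{
  int shift = (int) (sizeof (int) * 8) - bitsize;

  /* Move the field's sign bit into the word's sign bit, then shift
     back down, sign-extending.  */
  return ((int) (word << shift)) >> shift;
}
/* extract_signed_field (0xE, 4) == -2, since 0b1110 is -2 in 4 bits. */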
bbf6f052 RK |
6984 | { |
6985 | enum machine_mode mode1; | |
770ae6cc | 6986 | HOST_WIDE_INT bitsize, bitpos; |
7bb0943f | 6987 | tree offset; |
bbf6f052 | 6988 | int volatilep = 0; |
729a2125 | 6989 | unsigned int alignment; |
839c4796 RK |
6990 | tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, |
6991 | &mode1, &unsignedp, &volatilep, | |
6992 | &alignment); | |
bbf6f052 | 6993 | |
e7f3c83f RK |
6994 | /* If we got back the original object, something is wrong. Perhaps |
6995 | we are evaluating an expression too early. In any event, don't | |
6996 | infinitely recurse. */ | |
6997 | if (tem == exp) | |
6998 | abort (); | |
6999 | ||
3d27140a | 7000 | /* If TEM's type is a union of variable size, pass TARGET to the inner |
b74f5ff2 RK |
7001 | computation, since it will need a temporary and TARGET is known |
7002 | to suffice. This occurs in unchecked conversion in Ada. */ |
3a94c984 | 7003 | |
b74f5ff2 RK |
7004 | op0 = expand_expr (tem, |
7005 | (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE | |
7006 | && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) | |
7007 | != INTEGER_CST) | |
7008 | ? target : NULL_RTX), | |
4ed67205 | 7009 | VOIDmode, |
14a774a9 RK |
7010 | (modifier == EXPAND_INITIALIZER |
7011 | || modifier == EXPAND_CONST_ADDRESS) | |
e5e809f4 | 7012 | ? modifier : EXPAND_NORMAL); |
bbf6f052 | 7013 | |
8c8a8e34 | 7014 | /* If this is a constant, put it into a register if it is a |
14a774a9 | 7015 | legitimate constant and OFFSET is 0, and into memory if it isn't. */ |
8c8a8e34 JW |
7016 | if (CONSTANT_P (op0)) |
7017 | { | |
7018 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem)); | |
14a774a9 RK |
7019 | if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0) |
7020 | && offset == 0) | |
8c8a8e34 JW |
7021 | op0 = force_reg (mode, op0); |
7022 | else | |
7023 | op0 = validize_mem (force_const_mem (mode, op0)); | |
7024 | } | |
7025 | ||
7bb0943f RS |
7026 | if (offset != 0) |
7027 | { | |
906c4e36 | 7028 | rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); |
7bb0943f | 7029 | |
a2725049 | 7030 | /* If this object is in a register, put it into memory. |
14a774a9 RK |
7031 | This case can't occur in C, but can in Ada if we have |
7032 | unchecked conversion of an expression from a scalar type to | |
7033 | an array or record type. */ | |
7034 | if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG | |
7035 | || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF) | |
7036 | { | |
d04218c0 RK |
7037 | /* If the operand is a SAVE_EXPR, we can deal with this by |
7038 | forcing the SAVE_EXPR into memory. */ | |
7039 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR) | |
7040 | put_var_into_stack (TREE_OPERAND (exp, 0)); | |
7041 | else | |
7042 | { | |
7043 | tree nt | |
7044 | = build_qualified_type (TREE_TYPE (tem), | |
7045 | (TYPE_QUALS (TREE_TYPE (tem)) | |
7046 | | TYPE_QUAL_CONST)); | |
7047 | rtx memloc = assign_temp (nt, 1, 1, 1); | |
7048 | ||
7049 | mark_temp_addr_taken (memloc); | |
7050 | emit_move_insn (memloc, op0); | |
7051 | op0 = memloc; | |
7052 | } | |
14a774a9 RK |
7053 | } |
7054 | ||
7bb0943f RS |
7055 | if (GET_CODE (op0) != MEM) |
7056 | abort (); | |
2d48c13d JL |
7057 | |
7058 | if (GET_MODE (offset_rtx) != ptr_mode) | |
bd070e1a | 7059 | { |
2d48c13d | 7060 | #ifdef POINTERS_EXTEND_UNSIGNED |
822a3443 | 7061 | offset_rtx = convert_memory_address (ptr_mode, offset_rtx); |
2d48c13d | 7062 | #else |
bd070e1a | 7063 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); |
2d48c13d | 7064 | #endif |
bd070e1a | 7065 | } |
2d48c13d | 7066 | |
14a774a9 | 7067 | /* A constant address in OP0 can have VOIDmode; we must not try |
efd07ca7 | 7068 | to call force_reg in that case. */ |
89752202 HB |
7069 | if (GET_CODE (op0) == MEM |
7070 | && GET_MODE (op0) == BLKmode | |
efd07ca7 | 7071 | && GET_MODE (XEXP (op0, 0)) != VOIDmode |
14a774a9 | 7072 | && bitsize != 0 |
3a94c984 | 7073 | && (bitpos % bitsize) == 0 |
89752202 | 7074 | && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 |
19caa751 | 7075 | && alignment == GET_MODE_ALIGNMENT (mode1)) |
89752202 HB |
7076 | { |
7077 | rtx temp = change_address (op0, mode1, | |
7078 | plus_constant (XEXP (op0, 0), | |
7079 | (bitpos / | |
7080 | BITS_PER_UNIT))); | |
7081 | if (GET_CODE (XEXP (temp, 0)) == REG) | |
7082 | op0 = temp; | |
7083 | else | |
7084 | op0 = change_address (op0, mode1, | |
7085 | force_reg (GET_MODE (XEXP (temp, 0)), | |
7086 | XEXP (temp, 0))); | |
7087 | bitpos = 0; | |
7088 | } | |
7089 | ||
7bb0943f | 7090 | op0 = change_address (op0, VOIDmode, |
38a448ca | 7091 | gen_rtx_PLUS (ptr_mode, XEXP (op0, 0), |
c5c76735 JL |
7092 | force_reg (ptr_mode, |
7093 | offset_rtx))); | |
7bb0943f RS |
7094 | } |
7095 | ||
bbf6f052 RK |
7096 | /* Don't forget about volatility even if this is a bitfield. */ |
7097 | if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0)) | |
7098 | { | |
7099 | op0 = copy_rtx (op0); | |
7100 | MEM_VOLATILE_P (op0) = 1; | |
7101 | } | |
7102 | ||
921b3427 | 7103 | /* Check the access. */ |
32919a0d RK |
7104 | if (cfun != 0 && current_function_check_memory_usage |
7105 | && GET_CODE (op0) == MEM) | |
3a94c984 | 7106 | { |
921b3427 RK |
7107 | enum memory_use_mode memory_usage; |
7108 | memory_usage = get_memory_usage_from_modifier (modifier); | |
7109 | ||
7110 | if (memory_usage != MEMORY_USE_DONT) | |
7111 | { | |
7112 | rtx to; | |
7113 | int size; | |
7114 | ||
7115 | to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT)); | |
7116 | size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1; | |
7117 | ||
7118 | /* Check the access right of the pointer. */ | |
ea4da9db | 7119 | in_check_memory_usage = 1; |
e9a25f70 | 7120 | if (size > BITS_PER_UNIT) |
ebb1b59a BS |
7121 | emit_library_call (chkr_check_addr_libfunc, |
7122 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to, | |
7123 | Pmode, GEN_INT (size / BITS_PER_UNIT), | |
e9a25f70 | 7124 | TYPE_MODE (sizetype), |
3a94c984 | 7125 | GEN_INT (memory_usage), |
956d6950 | 7126 | TYPE_MODE (integer_type_node)); |
ea4da9db | 7127 | in_check_memory_usage = 0; |
921b3427 RK |
7128 | } |
7129 | } | |
7130 | ||
ccc98036 RS |
7131 | /* In cases where an aligned union has an unaligned object |
7132 | as a field, we might be extracting a BLKmode value from | |
7133 | an integer-mode (e.g., SImode) object. Handle this case | |
7134 | by doing the extract into an object as wide as the field | |
7135 | (which we know to be the width of a basic mode), then | |
cb5fa0f8 | 7136 | storing into memory, and changing the mode to BLKmode. */ |
bbf6f052 | 7137 | if (mode1 == VOIDmode |
ccc98036 | 7138 | || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG |
cb5fa0f8 RK |
7139 | || (mode1 != BLKmode && ! direct_load[(int) mode1] |
7140 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
7141 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) | |
7142 | /* If the field isn't aligned enough to fetch as a memref, | |
7143 | fetch it as a bit field. */ | |
7144 | || (mode1 != BLKmode | |
7145 | && SLOW_UNALIGNED_ACCESS (mode1, alignment) | |
7146 | && ((TYPE_ALIGN (TREE_TYPE (tem)) | |
7147 | < GET_MODE_ALIGNMENT (mode)) | |
7148 | || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))) | |
7149 | /* If the type and the field are a constant size and the | |
7150 | size of the type isn't the same size as the bitfield, | |
7151 | we must use bitfield operations. */ | |
7152 | || (bitsize >= 0 | |
7153 | && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) | |
7154 | == INTEGER_CST) | |
7155 | && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), | |
7156 | bitsize)) | |
7157 | || (mode == BLKmode | |
e1565e65 | 7158 | && SLOW_UNALIGNED_ACCESS (mode, alignment) |
19caa751 | 7159 | && (TYPE_ALIGN (type) > alignment |
14a774a9 | 7160 | || bitpos % TYPE_ALIGN (type) != 0))) |
bbf6f052 | 7161 | { |
bbf6f052 RK |
7162 | enum machine_mode ext_mode = mode; |
7163 | ||
14a774a9 RK |
7164 | if (ext_mode == BLKmode |
7165 | && ! (target != 0 && GET_CODE (op0) == MEM | |
7166 | && GET_CODE (target) == MEM | |
7167 | && bitpos % BITS_PER_UNIT == 0)) | |
bbf6f052 RK |
7168 | ext_mode = mode_for_size (bitsize, MODE_INT, 1); |
7169 | ||
7170 | if (ext_mode == BLKmode) | |
a281e72d RK |
7171 | { |
7172 | /* In this case, BITPOS must start at a byte boundary and | |
7173 | TARGET, if specified, must be a MEM. */ | |
7174 | if (GET_CODE (op0) != MEM | |
7175 | || (target != 0 && GET_CODE (target) != MEM) | |
7176 | || bitpos % BITS_PER_UNIT != 0) | |
7177 | abort (); | |
7178 | ||
7179 | op0 = change_address (op0, VOIDmode, | |
7180 | plus_constant (XEXP (op0, 0), | |
7181 | bitpos / BITS_PER_UNIT)); | |
7182 | if (target == 0) | |
7183 | target = assign_temp (type, 0, 1, 1); | |
7184 | ||
7185 | emit_block_move (target, op0, | |
bd5dab53 RK |
7186 | bitsize == -1 ? expr_size (exp) |
7187 | : GEN_INT ((bitsize + BITS_PER_UNIT - 1) | |
7188 | / BITS_PER_UNIT), | |
19caa751 | 7189 | BITS_PER_UNIT); |
3a94c984 | 7190 | |
a281e72d RK |
7191 | return target; |
7192 | } | |
bbf6f052 | 7193 | |
dc6d66b3 RK |
7194 | op0 = validize_mem (op0); |
7195 | ||
7196 | if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG) | |
bdb429a5 | 7197 | mark_reg_pointer (XEXP (op0, 0), alignment); |
dc6d66b3 RK |
7198 | |
7199 | op0 = extract_bit_field (op0, bitsize, bitpos, | |
bbf6f052 | 7200 | unsignedp, target, ext_mode, ext_mode, |
034f9101 | 7201 | alignment, |
bbf6f052 | 7202 | int_size_in_bytes (TREE_TYPE (tem))); |
ef19912d RK |
7203 | |
7204 | /* If the result is a record type and BITSIZE is narrower than | |
7205 | the mode of OP0, an integral mode, and this is a big endian | |
7206 | machine, we must put the field into the high-order bits. */ | |
7207 | if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN | |
7208 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT | |
7209 | && bitsize < GET_MODE_BITSIZE (GET_MODE (op0))) | |
7210 | op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0, | |
7211 | size_int (GET_MODE_BITSIZE (GET_MODE (op0)) | |
7212 | - bitsize), | |
7213 | op0, 1); | |
7214 | ||
bbf6f052 RK |
7215 | if (mode == BLKmode) |
7216 | { | |
27fb3e16 | 7217 | tree nt = build_qualified_type (type_for_mode (ext_mode, 0), |
1da68f56 RK |
7218 | TYPE_QUAL_CONST); |
7219 | rtx new = assign_temp (nt, 0, 1, 1); | |
bbf6f052 RK |
7220 | |
7221 | emit_move_insn (new, op0); | |
7222 | op0 = copy_rtx (new); | |
7223 | PUT_MODE (op0, BLKmode); | |
7224 | } | |
7225 | ||
7226 | return op0; | |
7227 | } | |
7228 | ||
05019f83 RK |
7229 | /* If the result is BLKmode, use that to access the object |
7230 | now as well. */ | |
7231 | if (mode == BLKmode) | |
7232 | mode1 = BLKmode; | |
7233 | ||
bbf6f052 RK |
7234 | /* Get a reference to just this component. */ |
7235 | if (modifier == EXPAND_CONST_ADDRESS | |
7236 | || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) | |
fe7a8445 RK |
7237 | { |
7238 | rtx new = gen_rtx_MEM (mode1, | |
7239 | plus_constant (XEXP (op0, 0), | |
7240 | (bitpos / BITS_PER_UNIT))); | |
7241 | ||
7242 | MEM_COPY_ATTRIBUTES (new, op0); | |
7243 | op0 = new; | |
7244 | } | |
bbf6f052 RK |
7245 | else |
7246 | op0 = change_address (op0, mode1, | |
7247 | plus_constant (XEXP (op0, 0), | |
7248 | (bitpos / BITS_PER_UNIT))); | |
41472af8 | 7249 | |
3bdf5ad1 | 7250 | set_mem_attributes (op0, exp, 0); |
dc6d66b3 | 7251 | if (GET_CODE (XEXP (op0, 0)) == REG) |
bdb429a5 | 7252 | mark_reg_pointer (XEXP (op0, 0), alignment); |
dc6d66b3 | 7253 | |
bbf6f052 | 7254 | MEM_VOLATILE_P (op0) |= volatilep; |
0d15e60c | 7255 | if (mode == mode1 || mode1 == BLKmode || mode1 == tmode |
08bbd316 | 7256 | || modifier == EXPAND_CONST_ADDRESS |
0d15e60c | 7257 | || modifier == EXPAND_INITIALIZER) |
bbf6f052 | 7258 | return op0; |
0d15e60c | 7259 | else if (target == 0) |
bbf6f052 | 7260 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); |
0d15e60c | 7261 | |
bbf6f052 RK |
7262 | convert_move (target, op0, unsignedp); |
7263 | return target; | |
7264 | } | |
7265 | ||
bbf6f052 RK |
7266 | /* Intended for a reference to a buffer of a file-object in Pascal. |
7267 | But it's not certain that a special tree code will really be | |
7268 | necessary for these. INDIRECT_REF might work for them. */ | |
7269 | case BUFFER_REF: | |
7270 | abort (); | |
7271 | ||
7308a047 | 7272 | case IN_EXPR: |
7308a047 | 7273 | { |
d6a5ac33 RK |
7274 | /* Pascal set IN expression. |
7275 | ||
7276 | Algorithm: | |
7277 | rlo = set_low - (set_low%bits_per_word); | |
7278 | the_word = set [ (index - rlo)/bits_per_word ]; | |
7279 | bit_index = index % bits_per_word; | |
7280 | bitmask = 1 << bit_index; | |
7281 | return !!(the_word & bitmask); */ | |
7282 | ||
7308a047 RS |
7283 | tree set = TREE_OPERAND (exp, 0); |
7284 | tree index = TREE_OPERAND (exp, 1); | |
d6a5ac33 | 7285 | int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index)); |
7308a047 | 7286 | tree set_type = TREE_TYPE (set); |
7308a047 RS |
7287 | tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type)); |
7288 | tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type)); | |
d6a5ac33 RK |
7289 | rtx index_val = expand_expr (index, 0, VOIDmode, 0); |
7290 | rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0); | |
7291 | rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0); | |
7292 | rtx setval = expand_expr (set, 0, VOIDmode, 0); | |
7293 | rtx setaddr = XEXP (setval, 0); | |
7294 | enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index)); | |
7308a047 RS |
7295 | rtx rlow; |
7296 | rtx diff, quo, rem, addr, bit, result; | |
7308a047 | 7297 | |
d6a5ac33 RK |
7298 | /* If domain is empty, answer is no. Likewise if index is constant |
7299 | and out of bounds. */ | |
51723711 | 7300 | if (((TREE_CODE (set_high_bound) == INTEGER_CST |
d6a5ac33 | 7301 | && TREE_CODE (set_low_bound) == INTEGER_CST |
51723711 | 7302 | && tree_int_cst_lt (set_high_bound, set_low_bound)) |
d6a5ac33 RK |
7303 | || (TREE_CODE (index) == INTEGER_CST |
7304 | && TREE_CODE (set_low_bound) == INTEGER_CST | |
7305 | && tree_int_cst_lt (index, set_low_bound)) | |
7306 | || (TREE_CODE (set_high_bound) == INTEGER_CST | |
7307 | && TREE_CODE (index) == INTEGER_CST | |
7308 | && tree_int_cst_lt (set_high_bound, index)))) | |
7308a047 RS |
7309 | return const0_rtx; |
7310 | ||
d6a5ac33 RK |
7311 | if (target == 0) |
7312 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); | |
7308a047 RS |
7313 | |
7314 | /* If we get here, we have to generate the code for both cases | |
7315 | (in range and out of range). */ | |
7316 | ||
7317 | op0 = gen_label_rtx (); | |
7318 | op1 = gen_label_rtx (); | |
7319 | ||
7320 | if (! (GET_CODE (index_val) == CONST_INT | |
7321 | && GET_CODE (lo_r) == CONST_INT)) | |
7322 | { | |
c5d5d461 JL |
7323 | emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX, |
7324 | GET_MODE (index_val), iunsignedp, 0, op1); | |
7308a047 RS |
7325 | } |
7326 | ||
7327 | if (! (GET_CODE (index_val) == CONST_INT | |
7328 | && GET_CODE (hi_r) == CONST_INT)) | |
7329 | { | |
c5d5d461 JL |
7330 | emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX, |
7331 | GET_MODE (index_val), iunsignedp, 0, op1); | |
7308a047 RS |
7332 | } |
7333 | ||
7334 | /* Calculate the element number of bit zero in the first word | |
7335 | of the set. */ | |
7336 | if (GET_CODE (lo_r) == CONST_INT) | |
17938e57 | 7337 | rlow = GEN_INT (INTVAL (lo_r) |
3a94c984 | 7338 | & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)); |
7308a047 | 7339 | else |
17938e57 RK |
7340 | rlow = expand_binop (index_mode, and_optab, lo_r, |
7341 | GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)), | |
d6a5ac33 | 7342 | NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN); |
7308a047 | 7343 | |
d6a5ac33 RK |
7344 | diff = expand_binop (index_mode, sub_optab, index_val, rlow, |
7345 | NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN); | |
7308a047 RS |
7346 | |
7347 | quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff, | |
d6a5ac33 | 7348 | GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp); |
7308a047 | 7349 | rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val, |
d6a5ac33 RK |
7350 | GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp); |
7351 | ||
7308a047 | 7352 | addr = memory_address (byte_mode, |
d6a5ac33 RK |
7353 | expand_binop (index_mode, add_optab, diff, |
7354 | setaddr, NULL_RTX, iunsignedp, | |
17938e57 | 7355 | OPTAB_LIB_WIDEN)); |
d6a5ac33 | 7356 | |
3a94c984 | 7357 | /* Extract the bit we want to examine. */ |
7308a047 | 7358 | bit = expand_shift (RSHIFT_EXPR, byte_mode, |
38a448ca | 7359 | gen_rtx_MEM (byte_mode, addr), |
17938e57 RK |
7360 | make_tree (TREE_TYPE (index), rem), |
7361 | NULL_RTX, 1); | |
7362 | result = expand_binop (byte_mode, and_optab, bit, const1_rtx, | |
7363 | GET_MODE (target) == byte_mode ? target : 0, | |
7308a047 | 7364 | 1, OPTAB_LIB_WIDEN); |
17938e57 RK |
7365 | |
7366 | if (result != target) | |
7367 | convert_move (target, result, 1); | |
7308a047 RS |
7368 | |
7369 | /* Output the code to handle the out-of-range case. */ | |
7370 | emit_jump (op0); | |
7371 | emit_label (op1); | |
7372 | emit_move_insn (target, const0_rtx); | |
7373 | emit_label (op0); | |
7374 | return target; | |
7375 | } | |
7376 | ||
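/* Editor's aside -- the algorithm in the IN_EXPR comment above, as a
   hedged, runnable C sketch.  The storage unit here is a byte (the
   code above works in BITS_PER_UNIT), and a zero low bound is assumed
   to keep the indexing simple.  */

static int set_contains (const unsigned char *set, int index)
{
  unsigned char the_word = set[index / 8];
  int bit_index = index % 8;
  return !!(the_word & (1u << bit_index));
}
/* With unsigned char set[2] = { 0x05, 0x80 } (members 0, 2, 15):
   set_contains (set, 2) == 1, set_contains (set, 3) == 0,
   set_contains (set, 15) == 1.  */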
bbf6f052 RK |
7377 | case WITH_CLEANUP_EXPR: |
7378 | if (RTL_EXPR_RTL (exp) == 0) | |
7379 | { | |
7380 | RTL_EXPR_RTL (exp) | |
921b3427 | 7381 | = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier); |
e976b8b2 MS |
7382 | expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2)); |
7383 | ||
bbf6f052 RK |
7384 | /* That's it for this cleanup. */ |
7385 | TREE_OPERAND (exp, 2) = 0; | |
7386 | } | |
7387 | return RTL_EXPR_RTL (exp); | |
7388 | ||
5dab5552 MS |
7389 | case CLEANUP_POINT_EXPR: |
7390 | { | |
e976b8b2 MS |
7391 | /* Start a new binding layer that will keep track of all cleanup |
7392 | actions to be performed. */ | |
8e91754e | 7393 | expand_start_bindings (2); |
e976b8b2 | 7394 | |
d93d4205 | 7395 | target_temp_slot_level = temp_slot_level; |
e976b8b2 | 7396 | |
921b3427 | 7397 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier); |
f283f66b JM |
7398 | /* If we're going to use this value, load it up now. */ |
7399 | if (! ignore) | |
7400 | op0 = force_not_mem (op0); | |
d93d4205 | 7401 | preserve_temp_slots (op0); |
e976b8b2 | 7402 | expand_end_bindings (NULL_TREE, 0, 0); |
5dab5552 MS |
7403 | } |
7404 | return op0; | |
7405 | ||
bbf6f052 RK |
7406 | case CALL_EXPR: |
7407 | /* Check for a built-in function. */ | |
7408 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR | |
d6a5ac33 RK |
7409 | && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) |
7410 | == FUNCTION_DECL) | |
bbf6f052 | 7411 | && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) |
c70eaeaf KG |
7412 | { |
7413 | if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) | |
7414 | == BUILT_IN_FRONTEND) | |
7415 | return (*lang_expand_expr) (exp, original_target, tmode, modifier); | |
7416 | else | |
7417 | return expand_builtin (exp, target, subtarget, tmode, ignore); | |
7418 | } | |
d6a5ac33 | 7419 | |
8129842c | 7420 | return expand_call (exp, target, ignore); |
bbf6f052 RK |
7421 | |
7422 | case NON_LVALUE_EXPR: | |
7423 | case NOP_EXPR: | |
7424 | case CONVERT_EXPR: | |
7425 | case REFERENCE_EXPR: | |
4a53008b | 7426 | if (TREE_OPERAND (exp, 0) == error_mark_node) |
a592f288 | 7427 | return const0_rtx; |
4a53008b | 7428 | |
bbf6f052 RK |
7429 | if (TREE_CODE (type) == UNION_TYPE) |
7430 | { | |
7431 | tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
14a774a9 RK |
7432 | |
7433 | /* If both input and output are BLKmode, this conversion | |
7434 | isn't actually doing anything unless we need to make the | |
7435 | alignment stricter. */ | |
7436 | if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode | |
7437 | && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype) | |
7438 | || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT)) | |
7439 | return expand_expr (TREE_OPERAND (exp, 0), target, tmode, | |
7440 | modifier); | |
7441 | ||
bbf6f052 | 7442 | if (target == 0) |
1da68f56 | 7443 | target = assign_temp (type, 0, 1, 1); |
d6a5ac33 | 7444 | |
bbf6f052 RK |
7445 | if (GET_CODE (target) == MEM) |
7446 | /* Store data into beginning of memory target. */ | |
7447 | store_expr (TREE_OPERAND (exp, 0), | |
1499e0a8 RK |
7448 | change_address (target, TYPE_MODE (valtype), 0), 0); |
7449 | ||
bbf6f052 RK |
7450 | else if (GET_CODE (target) == REG) |
7451 | /* Store this field into a union of the proper type. */ | |
14a774a9 RK |
7452 | store_field (target, |
7453 | MIN ((int_size_in_bytes (TREE_TYPE | |
7454 | (TREE_OPERAND (exp, 0))) | |
7455 | * BITS_PER_UNIT), | |
8752c357 | 7456 | (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), |
14a774a9 | 7457 | 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0), |
7156dead RK |
7458 | VOIDmode, 0, BITS_PER_UNIT, |
7459 | int_size_in_bytes (type), 0); | |
bbf6f052 RK |
7460 | else |
7461 | abort (); | |
7462 | ||
7463 | /* Return the entire union. */ | |
7464 | return target; | |
7465 | } | |
d6a5ac33 | 7466 | |
7f62854a RK |
7467 | if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
7468 | { | |
7469 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, | |
921b3427 | 7470 | ro_modifier); |
7f62854a RK |
7471 | |
7472 | /* If the signedness of the conversion differs and OP0 is | |
7473 | a promoted SUBREG, clear that indication since we now | |
7474 | have to do the proper extension. */ | |
7475 | if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp | |
7476 | && GET_CODE (op0) == SUBREG) | |
7477 | SUBREG_PROMOTED_VAR_P (op0) = 0; | |
7478 | ||
7479 | return op0; | |
7480 | } | |
7481 | ||
1499e0a8 | 7482 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0); |
12342f90 RS |
7483 | if (GET_MODE (op0) == mode) |
7484 | return op0; | |
12342f90 | 7485 | |
d6a5ac33 RK |
7486 | /* If OP0 is a constant, just convert it into the proper mode. */ |
7487 | if (CONSTANT_P (op0)) | |
7488 | return | |
7489 | convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), | |
7490 | op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); | |
12342f90 | 7491 | |
26fcb35a | 7492 | if (modifier == EXPAND_INITIALIZER) |
38a448ca | 7493 | return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0); |
d6a5ac33 | 7494 | |
bbf6f052 | 7495 | if (target == 0) |
d6a5ac33 RK |
7496 | return |
7497 | convert_to_mode (mode, op0, | |
7498 | TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); | |
bbf6f052 | 7499 | else |
d6a5ac33 RK |
7500 | convert_move (target, op0, |
7501 | TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); | |
bbf6f052 RK |
7502 | return target; |
7503 | ||
7504 | case PLUS_EXPR: | |
0f41302f MS |
7505 | /* We come here from MINUS_EXPR when the second operand is a |
7506 | constant. */ | |
bbf6f052 | 7507 | plus_expr: |
91ce572a CC |
7508 | this_optab = ! unsignedp && flag_trapv |
7509 | && (GET_MODE_CLASS (mode) == MODE_INT) |
7510 | ? addv_optab : add_optab; | |
bbf6f052 RK |
7511 | |
7512 | /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and | |
7513 | something else, make sure we add the register to the constant and | |
7514 | then to the other thing. This case can occur during strength | |
7515 | reduction and doing it this way will produce better code if the | |
7516 | frame pointer or argument pointer is eliminated. | |
7517 | ||
7518 | fold-const.c will ensure that the constant is always in the inner | |
7519 | PLUS_EXPR, so the only case we need to do anything about is if | |
7520 | sp, ap, or fp is our second argument, in which case we must swap | |
7521 | the innermost first argument and our second argument. */ | |
7522 | ||
7523 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR | |
7524 | && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST | |
7525 | && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR | |
7526 | && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx | |
7527 | || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx | |
7528 | || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx)) | |
7529 | { | |
7530 | tree t = TREE_OPERAND (exp, 1); | |
7531 | ||
7532 | TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
7533 | TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t; | |
7534 | } | |
7535 | ||
88f63c77 | 7536 | /* If the result is to be ptr_mode and we are adding an integer to |
bbf6f052 RK |
7537 | something, we might be forming a constant. So try to use |
7538 | plus_constant. If it produces a sum and we can't accept it, | |
7539 | use force_operand. This allows P = &ARR[const] to generate | |
7540 | efficient code on machines where a SYMBOL_REF is not a valid | |
7541 | address. | |
7542 | ||
7543 | If this is an EXPAND_SUM call, always return the sum. */ | |
c980ac49 | 7544 | if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER |
91ce572a | 7545 | || (mode == ptr_mode && (unsignedp || ! flag_trapv))) |
bbf6f052 | 7546 | { |
c980ac49 RS |
7547 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST |
7548 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
7549 | && TREE_CONSTANT (TREE_OPERAND (exp, 1))) | |
7550 | { | |
cbbc503e JL |
7551 | rtx constant_part; |
7552 | ||
c980ac49 RS |
7553 | op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, |
7554 | EXPAND_SUM); | |
cbbc503e JL |
7555 | /* Use immed_double_const to ensure that the constant is |
7556 | truncated according to the mode of OP1, then sign extended | |
7557 | to a HOST_WIDE_INT. Using the constant directly can result | |
7558 | in non-canonical RTL in a 64x32 cross compile. */ | |
7559 | constant_part | |
7560 | = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)), | |
7561 | (HOST_WIDE_INT) 0, | |
a5efcd63 | 7562 | TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))); |
7f401c74 | 7563 | op1 = plus_constant (op1, INTVAL (constant_part)); |
c980ac49 RS |
7564 | if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
7565 | op1 = force_operand (op1, target); | |
7566 | return op1; | |
7567 | } | |
bbf6f052 | 7568 | |
c980ac49 RS |
7569 | else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST |
7570 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT | |
7571 | && TREE_CONSTANT (TREE_OPERAND (exp, 0))) | |
7572 | { | |
cbbc503e JL |
7573 | rtx constant_part; |
7574 | ||
c980ac49 RS |
7575 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, |
7576 | EXPAND_SUM); | |
7577 | if (! CONSTANT_P (op0)) | |
7578 | { | |
7579 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, | |
7580 | VOIDmode, modifier); | |
709f5be1 RS |
7581 | /* Don't go to both_summands if modifier |
7582 | says it's not right to return a PLUS. */ | |
7583 | if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) | |
7584 | goto binop2; | |
c980ac49 RS |
7585 | goto both_summands; |
7586 | } | |
cbbc503e JL |
7587 | /* Use immed_double_const to ensure that the constant is |
7588 | truncated according to the mode of OP1, then sign extended | |
7589 | to a HOST_WIDE_INT. Using the constant directly can result | |
7590 | in non-canonical RTL in a 64x32 cross compile. */ | |
7591 | constant_part | |
7592 | = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)), | |
7593 | (HOST_WIDE_INT) 0, | |
2a94e396 | 7594 | TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
7f401c74 | 7595 | op0 = plus_constant (op0, INTVAL (constant_part)); |
c980ac49 RS |
7596 | if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
7597 | op0 = force_operand (op0, target); | |
7598 | return op0; | |
7599 | } | |
bbf6f052 RK |
7600 | } |
7601 | ||
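/* Editor's aside -- the "P = &ARR[const]" case from the comment above,
   as a minimal example: the whole address folds to one symbolic
   constant via plus_constant, with no run-time addition.  */

static int arr[10];

int *elem5 (void)
{
  return &arr[5];   /* one address constant: arr + 5*sizeof(int) */
}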
7602 | /* No sense saving up arithmetic to be done | |
7603 | if it's all in the wrong mode to form part of an address. | |
7604 | And force_operand won't know whether to sign-extend or | |
7605 | zero-extend. */ | |
7606 | if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) | |
88f63c77 | 7607 | || mode != ptr_mode) |
c980ac49 | 7608 | goto binop; |
bbf6f052 | 7609 | |
e5e809f4 | 7610 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
bbf6f052 RK |
7611 | subtarget = 0; |
7612 | ||
921b3427 RK |
7613 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier); |
7614 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier); | |
bbf6f052 | 7615 | |
c980ac49 | 7616 | both_summands: |
bbf6f052 RK |
7617 | /* Make sure any term that's a sum with a constant comes last. */ |
7618 | if (GET_CODE (op0) == PLUS | |
7619 | && CONSTANT_P (XEXP (op0, 1))) | |
7620 | { | |
7621 | temp = op0; | |
7622 | op0 = op1; | |
7623 | op1 = temp; | |
7624 | } | |
7625 | /* If adding to a sum including a constant, | |
7626 | associate it to put the constant outside. */ | |
7627 | if (GET_CODE (op1) == PLUS | |
7628 | && CONSTANT_P (XEXP (op1, 1))) | |
7629 | { | |
7630 | rtx constant_term = const0_rtx; | |
7631 | ||
7632 | temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0); | |
7633 | if (temp != 0) | |
7634 | op0 = temp; | |
6f90e075 JW |
7635 | /* Ensure that MULT comes first if there is one. */ |
7636 | else if (GET_CODE (op0) == MULT) | |
38a448ca | 7637 | op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0)); |
bbf6f052 | 7638 | else |
38a448ca | 7639 | op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0); |
bbf6f052 RK |
7640 | |
7641 | /* Let's also eliminate constants from op0 if possible. */ | |
7642 | op0 = eliminate_constant_term (op0, &constant_term); | |
7643 | ||
7644 | /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so | |
3a94c984 | 7645 | their sum should be a constant. Form it into OP1, since the |
bbf6f052 RK |
7646 | result we want will then be OP0 + OP1. */ |
7647 | ||
7648 | temp = simplify_binary_operation (PLUS, mode, constant_term, | |
7649 | XEXP (op1, 1)); | |
7650 | if (temp != 0) | |
7651 | op1 = temp; | |
7652 | else | |
38a448ca | 7653 | op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1)); |
bbf6f052 RK |
7654 | } |
7655 | ||
7656 | /* Put a constant term last and put a multiplication first. */ | |
7657 | if (CONSTANT_P (op0) || GET_CODE (op1) == MULT) | |
7658 | temp = op1, op1 = op0, op0 = temp; | |
7659 | ||
7660 | temp = simplify_binary_operation (PLUS, mode, op0, op1); | |
38a448ca | 7661 | return temp ? temp : gen_rtx_PLUS (mode, op0, op1); |
bbf6f052 RK |
7662 | |
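/* Editor's aside -- a hedged, source-level illustration of the
   reassociation above: when the summands carry constants, the
   constants are combined and pushed to the outside, so an address-like
   sum such as (x + 4) + (y + 6) ends up shaped roughly as
   (x + y) + 10 -- any MULT first, the lone constant last.  */

long sum_shape (long x, long y)
{
  return (x + 4) + (y + 6);   /* formed as (x + y) + 10 */
}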
7663 | case MINUS_EXPR: | |
ea87523e RK |
7664 | /* For initializers, we are allowed to return a MINUS of two |
7665 | symbolic constants. Here we handle all cases when both operands | |
7666 | are constant. */ | |
bbf6f052 RK |
7667 | /* Handle difference of two symbolic constants, |
7668 | for the sake of an initializer. */ | |
7669 | if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) | |
7670 | && really_constant_p (TREE_OPERAND (exp, 0)) | |
7671 | && really_constant_p (TREE_OPERAND (exp, 1))) | |
7672 | { | |
906c4e36 | 7673 | rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, |
921b3427 | 7674 | VOIDmode, ro_modifier); |
906c4e36 | 7675 | rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, |
921b3427 | 7676 | VOIDmode, ro_modifier); |
ea87523e | 7677 | |
ea87523e RK |
7678 | /* If the last operand is a CONST_INT, use plus_constant of |
7679 | the negated constant. Else make the MINUS. */ | |
7680 | if (GET_CODE (op1) == CONST_INT) | |
7681 | return plus_constant (op0, - INTVAL (op1)); | |
7682 | else | |
38a448ca | 7683 | return gen_rtx_MINUS (mode, op0, op1); |
bbf6f052 RK |
7684 | } |
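/* Editor's aside -- an initializer that genuinely needs a MINUS of two
   symbolic constants, modeled on the labels-as-values example in the
   GCC manual (a GNU C extension); the differences are resolved at
   assembly time, which is exactly the EXPAND_INITIALIZER case above.  */

void dispatch (int i)
{
  static const long offsets[] = { &&op_a - &&op_a, &&op_b - &&op_a };

  goto *(&&op_a + offsets[i]);

 op_a:
  return;
 op_b:
  return;
}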
7685 | /* Convert A - const to A + (-const). */ | |
7686 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) | |
7687 | { | |
ae431183 RK |
7688 | tree negated = fold (build1 (NEGATE_EXPR, type, |
7689 | TREE_OPERAND (exp, 1))); | |
7690 | ||
ae431183 | 7691 | if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated)) |
6fbfac92 JM |
7692 | /* If we can't negate the constant in TYPE, leave it alone and |
7693 | expand_binop will negate it for us. We used to try to do it | |
7694 | here in the signed version of TYPE, but that doesn't work | |
7695 | on POINTER_TYPEs. */; | |
ae431183 RK |
7696 | else |
7697 | { | |
7698 | exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated); | |
7699 | goto plus_expr; | |
7700 | } | |
bbf6f052 | 7701 | } |
91ce572a CC |
7702 | this_optab = ! unsignedp && flag_trapv |
7703 | && (GET_MODE_CLASS (mode) == MODE_INT) |
7704 | ? subv_optab : sub_optab; | |
bbf6f052 RK |
7705 | goto binop; |
7706 | ||
7707 | case MULT_EXPR: | |
bbf6f052 RK |
7708 | /* If first operand is constant, swap them. |
7709 | Thus the following special case checks need only | |
7710 | check the second operand. */ | |
7711 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST) | |
7712 | { | |
7713 | register tree t1 = TREE_OPERAND (exp, 0); | |
7714 | TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1); | |
7715 | TREE_OPERAND (exp, 1) = t1; | |
7716 | } | |
7717 | ||
7718 | /* Attempt to return something suitable for generating an | |
7719 | indexed address, for machines that support that. */ | |
7720 | ||
88f63c77 | 7721 | if (modifier == EXPAND_SUM && mode == ptr_mode |
bbf6f052 | 7722 | && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST |
906c4e36 | 7723 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) |
bbf6f052 | 7724 | { |
921b3427 RK |
7725 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, |
7726 | EXPAND_SUM); | |
bbf6f052 RK |
7727 | |
7728 | /* Apply distributive law if OP0 is x+c. */ | |
7729 | if (GET_CODE (op0) == PLUS | |
7730 | && GET_CODE (XEXP (op0, 1)) == CONST_INT) | |
c5c76735 JL |
7731 | return |
7732 | gen_rtx_PLUS | |
7733 | (mode, | |
7734 | gen_rtx_MULT | |
7735 | (mode, XEXP (op0, 0), | |
7736 | GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))), | |
7737 | GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) | |
7738 | * INTVAL (XEXP (op0, 1)))); | |
bbf6f052 RK |
7739 | |
7740 | if (GET_CODE (op0) != REG) | |
906c4e36 | 7741 | op0 = force_operand (op0, NULL_RTX); |
bbf6f052 RK |
7742 | if (GET_CODE (op0) != REG) |
7743 | op0 = copy_to_mode_reg (mode, op0); | |
7744 | ||
c5c76735 JL |
7745 | return |
7746 | gen_rtx_MULT (mode, op0, | |
7747 | GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))); | |
bbf6f052 RK |
7748 | } |
7749 | ||
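/* Editor's aside -- the distributive-law case above matches an indexed
   access whose subscript already contains a constant: (i + 2) * 4 is
   rewritten as i*4 + 8 so that one indexed address can be formed
   (sizes assume a 4-byte int target).  */

int third_after (const int *base, int i)
{
  return base[i + 2];   /* address formed as base + i*4 + 8 */
}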
e5e809f4 | 7750 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
bbf6f052 RK |
7751 | subtarget = 0; |
7752 | ||
7753 | /* Check for multiplying things that have been extended | |
7754 | from a narrower type. If this machine supports multiplying | |
7755 | in that narrower type with a result in the desired type, | |
7756 | do it that way, and avoid the explicit type-conversion. */ | |
7757 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR | |
7758 | && TREE_CODE (type) == INTEGER_TYPE | |
7759 | && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) | |
7760 | < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
7761 | && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST | |
7762 | && int_fits_type_p (TREE_OPERAND (exp, 1), | |
7763 | TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) | |
7764 | /* Don't use a widening multiply if a shift will do. */ | |
7765 | && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
906c4e36 | 7766 | > HOST_BITS_PER_WIDE_INT) |
bbf6f052 RK |
7767 | || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0)) |
7768 | || | |
7769 | (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR | |
7770 | && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) | |
7771 | == | |
7772 | TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))) | |
7773 | /* If both operands are extended, they must either both | |
7774 | be zero-extended or both be sign-extended. */ | |
7775 | && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) | |
7776 | == | |
7777 | TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))))) | |
7778 | { | |
7779 | enum machine_mode innermode | |
7780 | = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))); | |
b10af0c8 TG |
7781 | optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) |
7782 | ? smul_widen_optab : umul_widen_optab); | |
bbf6f052 RK |
7783 | this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) |
7784 | ? umul_widen_optab : smul_widen_optab); | |
b10af0c8 | 7785 | if (mode == GET_MODE_WIDER_MODE (innermode)) |
bbf6f052 | 7786 | { |
b10af0c8 TG |
7787 | if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) |
7788 | { | |
7789 | op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
7790 | NULL_RTX, VOIDmode, 0); | |
7791 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) | |
7792 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, | |
7793 | VOIDmode, 0); | |
7794 | else | |
7795 | op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0), | |
7796 | NULL_RTX, VOIDmode, 0); | |
7797 | goto binop2; | |
7798 | } | |
7799 | else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing | |
7800 | && innermode == word_mode) | |
7801 | { | |
7802 | rtx htem; | |
7803 | op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
7804 | NULL_RTX, VOIDmode, 0); | |
7805 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) | |
8c118062 GK |
7806 | op1 = convert_modes (innermode, mode, |
7807 | expand_expr (TREE_OPERAND (exp, 1), | |
7808 | NULL_RTX, VOIDmode, 0), | |
7809 | unsignedp); | |
b10af0c8 TG |
7810 | else |
7811 | op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0), | |
7812 | NULL_RTX, VOIDmode, 0); | |
7813 | temp = expand_binop (mode, other_optab, op0, op1, target, | |
7814 | unsignedp, OPTAB_LIB_WIDEN); | |
7815 | htem = expand_mult_highpart_adjust (innermode, | |
7816 | gen_highpart (innermode, temp), | |
7817 | op0, op1, | |
7818 | gen_highpart (innermode, temp), | |
7819 | unsignedp); | |
7820 | emit_move_insn (gen_highpart (innermode, temp), htem); | |
7821 | return temp; | |
7822 | } | |
bbf6f052 RK |
7823 | } |
7824 | } | |
7825 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
906c4e36 | 7826 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
7827 | return expand_mult (mode, op0, op1, target, unsignedp); |
7828 | ||
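/* Editor's aside -- the widening-multiply match above fires for source
   like this, letting a machine with a 32x32->64 multiply emit it
   directly instead of extending both operands to 64 bits first.  */

long long wide_mul (int a, int b)
{
  return (long long) a * b;   /* both operands are NOP_EXPR extensions */
}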
7829 | case TRUNC_DIV_EXPR: | |
7830 | case FLOOR_DIV_EXPR: | |
7831 | case CEIL_DIV_EXPR: | |
7832 | case ROUND_DIV_EXPR: | |
7833 | case EXACT_DIV_EXPR: | |
e5e809f4 | 7834 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
bbf6f052 RK |
7835 | subtarget = 0; |
7836 | /* Possible optimization: compute the dividend with EXPAND_SUM | |
7837 | then if the divisor is constant can optimize the case | |
7838 | where some terms of the dividend have coeffs divisible by it. */ | |
7839 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
906c4e36 | 7840 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
7841 | return expand_divmod (0, code, mode, op0, op1, target, unsignedp); |
7842 | ||
7843 | case RDIV_EXPR: | |
7844 | this_optab = flodiv_optab; | |
7845 | goto binop; | |
7846 | ||
7847 | case TRUNC_MOD_EXPR: | |
7848 | case FLOOR_MOD_EXPR: | |
7849 | case CEIL_MOD_EXPR: | |
7850 | case ROUND_MOD_EXPR: | |
e5e809f4 | 7851 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
bbf6f052 RK |
7852 | subtarget = 0; |
7853 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
906c4e36 | 7854 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
7855 | return expand_divmod (1, code, mode, op0, op1, target, unsignedp); |
7856 | ||
7857 | case FIX_ROUND_EXPR: | |
7858 | case FIX_FLOOR_EXPR: | |
7859 | case FIX_CEIL_EXPR: | |
7860 | abort (); /* Not used for C. */ | |
7861 | ||
7862 | case FIX_TRUNC_EXPR: | |
906c4e36 | 7863 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
7864 | if (target == 0) |
7865 | target = gen_reg_rtx (mode); | |
7866 | expand_fix (target, op0, unsignedp); | |
7867 | return target; | |
7868 | ||
7869 | case FLOAT_EXPR: | |
906c4e36 | 7870 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
7871 | if (target == 0) |
7872 | target = gen_reg_rtx (mode); | |
7873 | /* expand_float can't figure out what to do if FROM has VOIDmode. | |
7874 | So give it the correct mode. With -O, cse will optimize this. */ | |
7875 | if (GET_MODE (op0) == VOIDmode) | |
7876 | op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), | |
7877 | op0); | |
7878 | expand_float (target, op0, | |
7879 | TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); | |
7880 | return target; | |
7881 | ||
7882 | case NEGATE_EXPR: | |
5b22bee8 | 7883 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); |
91ce572a CC |
7884 | temp = expand_unop (mode, |
7885 | ! unsignedp && flag_trapv | |
7886 | && (GET_MODE_CLASS (mode) == MODE_INT) |
7887 | ? negv_optab : neg_optab, op0, target, 0); | |
bbf6f052 RK |
7888 | if (temp == 0) |
7889 | abort (); | |
7890 | return temp; | |
7891 | ||
7892 | case ABS_EXPR: | |
7893 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
7894 | ||
2d7050fd | 7895 | /* Handle complex values specially. */ |
d6a5ac33 RK |
7896 | if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT |
7897 | || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT) | |
7898 | return expand_complex_abs (mode, op0, target, unsignedp); | |
2d7050fd | 7899 | |
bbf6f052 RK |
7900 | /* Unsigned abs is simply the operand. Testing here means we don't |
7901 | risk generating incorrect code below. */ | |
7902 | if (TREE_UNSIGNED (type)) | |
7903 | return op0; | |
7904 | ||
91ce572a | 7905 | return expand_abs (mode, op0, target, unsignedp, |
e5e809f4 | 7906 | safe_from_p (target, TREE_OPERAND (exp, 0), 1)); |
bbf6f052 RK |
7907 | |
7908 | case MAX_EXPR: | |
7909 | case MIN_EXPR: | |
7910 | target = original_target; | |
e5e809f4 | 7911 | if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1) |
fc155707 | 7912 | || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)) |
d6a5ac33 | 7913 | || GET_MODE (target) != mode |
bbf6f052 RK |
7914 | || (GET_CODE (target) == REG |
7915 | && REGNO (target) < FIRST_PSEUDO_REGISTER)) | |
7916 | target = gen_reg_rtx (mode); | |
906c4e36 | 7917 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
7918 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0); |
7919 | ||
7920 | /* First try to do it with a special MIN or MAX instruction. | |
7921 | If that does not win, use a conditional jump to select the proper | |
7922 | value. */ | |
7923 | this_optab = (TREE_UNSIGNED (type) | |
7924 | ? (code == MIN_EXPR ? umin_optab : umax_optab) | |
7925 | : (code == MIN_EXPR ? smin_optab : smax_optab)); | |
7926 | ||
7927 | temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, | |
7928 | OPTAB_WIDEN); | |
7929 | if (temp != 0) | |
7930 | return temp; | |
7931 | ||
fa2981d8 JW |
7932 | /* At this point, a MEM target is no longer useful; we will get better |
7933 | code without it. */ | |
3a94c984 | 7934 | |
fa2981d8 JW |
7935 | if (GET_CODE (target) == MEM) |
7936 | target = gen_reg_rtx (mode); | |
7937 | ||
ee456b1c RK |
7938 | if (target != op0) |
7939 | emit_move_insn (target, op0); | |
d6a5ac33 | 7940 | |
bbf6f052 | 7941 | op0 = gen_label_rtx (); |
d6a5ac33 | 7942 | |
f81497d9 RS |
7943 | /* If this mode is an integer too wide to compare properly, |
7944 | compare word by word. Rely on cse to optimize constant cases. */ | |
1eb8759b RH |
7945 | if (GET_MODE_CLASS (mode) == MODE_INT |
7946 | && ! can_compare_p (GE, mode, ccp_jump)) | |
bbf6f052 | 7947 | { |
f81497d9 | 7948 | if (code == MAX_EXPR) |
d6a5ac33 RK |
7949 | do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), |
7950 | target, op1, NULL_RTX, op0); | |
bbf6f052 | 7951 | else |
d6a5ac33 RK |
7952 | do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), |
7953 | op1, target, NULL_RTX, op0); | |
bbf6f052 | 7954 | } |
f81497d9 RS |
7955 | else |
7956 | { | |
b30f05db BS |
7957 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))); |
7958 | do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE, | |
7959 | unsignedp, mode, NULL_RTX, 0, NULL_RTX, | |
7960 | op0); | |
f81497d9 | 7961 | } |
b30f05db | 7962 | emit_move_insn (target, op1); |
bbf6f052 RK |
7963 | emit_label (op0); |
7964 | return target; | |
7965 | ||
bbf6f052 RK |
7966 | case BIT_NOT_EXPR: |
7967 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
7968 | temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); | |
7969 | if (temp == 0) | |
7970 | abort (); | |
7971 | return temp; | |
7972 | ||
7973 | case FFS_EXPR: | |
7974 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
7975 | temp = expand_unop (mode, ffs_optab, op0, target, 1); | |
7976 | if (temp == 0) | |
7977 | abort (); | |
7978 | return temp; | |
7979 | ||
d6a5ac33 RK |
7980 | /* ??? Can optimize bitwise operations with one arg constant. |
7981 | Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b) | |
7982 | and (a bitwise1 b) bitwise2 b (etc) | |
7983 | but that is probably not worthwhile. */ |
7984 | ||
7985 | /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two | |
7986 | boolean values when we want in all cases to compute both of them. In | |
7987 | general it is fastest to do TRUTH_AND_EXPR by computing both operands | |
7988 | as actual zero-or-1 values and then bitwise anding. In cases where | |
7989 | there cannot be any side effects, better code would be made by | |
7990 | treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is | |
7991 | how to recognize those cases. */ | |
7992 | ||
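/* Editor's aside -- the distinction drawn above, in C terms:
   TRUTH_ANDIF_EXPR is the short-circuit form, while TRUTH_AND_EXPR
   computes both operands as 0-or-1 values and ands them bitwise.  */

int andif (int a, int b)
{
  return a && b;                 /* b is not evaluated when a == 0 */
}

int and_both (int a, int b)
{
  return (a != 0) & (b != 0);    /* both sides always evaluated */
}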
bbf6f052 RK |
7993 | case TRUTH_AND_EXPR: |
7994 | case BIT_AND_EXPR: | |
7995 | this_optab = and_optab; | |
7996 | goto binop; | |
7997 | ||
bbf6f052 RK |
7998 | case TRUTH_OR_EXPR: |
7999 | case BIT_IOR_EXPR: | |
8000 | this_optab = ior_optab; | |
8001 | goto binop; | |
8002 | ||
874726a8 | 8003 | case TRUTH_XOR_EXPR: |
bbf6f052 RK |
8004 | case BIT_XOR_EXPR: |
8005 | this_optab = xor_optab; | |
8006 | goto binop; | |
8007 | ||
8008 | case LSHIFT_EXPR: | |
8009 | case RSHIFT_EXPR: | |
8010 | case LROTATE_EXPR: | |
8011 | case RROTATE_EXPR: | |
e5e809f4 | 8012 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
bbf6f052 RK |
8013 | subtarget = 0; |
8014 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
8015 | return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target, | |
8016 | unsignedp); | |
8017 | ||
d6a5ac33 RK |
8018 | /* Could determine the answer when only additive constants differ. Also, |
8019 | the addition of one can be handled by changing the condition. */ | |
bbf6f052 RK |
8020 | case LT_EXPR: |
8021 | case LE_EXPR: | |
8022 | case GT_EXPR: | |
8023 | case GE_EXPR: | |
8024 | case EQ_EXPR: | |
8025 | case NE_EXPR: | |
1eb8759b RH |
8026 | case UNORDERED_EXPR: |
8027 | case ORDERED_EXPR: | |
8028 | case UNLT_EXPR: | |
8029 | case UNLE_EXPR: | |
8030 | case UNGT_EXPR: | |
8031 | case UNGE_EXPR: | |
8032 | case UNEQ_EXPR: | |
bbf6f052 RK |
8033 | temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0); |
8034 | if (temp != 0) | |
8035 | return temp; | |
d6a5ac33 | 8036 | |
0f41302f | 8037 | /* For foo != 0, load foo, and if it is nonzero load 1 instead. */ |
bbf6f052 RK |
8038 | if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1)) |
8039 | && original_target | |
8040 | && GET_CODE (original_target) == REG | |
8041 | && (GET_MODE (original_target) | |
8042 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) | |
8043 | { | |
d6a5ac33 RK |
8044 | temp = expand_expr (TREE_OPERAND (exp, 0), original_target, |
8045 | VOIDmode, 0); | |
8046 | ||
bbf6f052 RK |
8047 | if (temp != original_target) |
8048 | temp = copy_to_reg (temp); | |
d6a5ac33 | 8049 | |
bbf6f052 | 8050 | op1 = gen_label_rtx (); |
c5d5d461 JL |
8051 | emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX, |
8052 | GET_MODE (temp), unsignedp, 0, op1); | |
bbf6f052 RK |
8053 | emit_move_insn (temp, const1_rtx); |
8054 | emit_label (op1); | |
8055 | return temp; | |
8056 | } | |
d6a5ac33 | 8057 | |
bbf6f052 RK |
8058 | /* If no set-flag instruction, must generate a conditional |
8059 | store into a temporary variable. Drop through | |
8060 | and handle this like && and ||. */ | |
8061 | ||
8062 | case TRUTH_ANDIF_EXPR: | |
8063 | case TRUTH_ORIF_EXPR: | |
e44842fe | 8064 | if (! ignore |
e5e809f4 | 8065 | && (target == 0 || ! safe_from_p (target, exp, 1) |
e44842fe RK |
8066 | /* Make sure we don't have a hard reg (such as function's return |
8067 | value) live across basic blocks, if not optimizing. */ | |
8068 | || (!optimize && GET_CODE (target) == REG | |
8069 | && REGNO (target) < FIRST_PSEUDO_REGISTER))) | |
bbf6f052 | 8070 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); |
e44842fe RK |
8071 | |
8072 | if (target) | |
8073 | emit_clr_insn (target); | |
8074 | ||
bbf6f052 RK |
8075 | op1 = gen_label_rtx (); |
8076 | jumpifnot (exp, op1); | |
e44842fe RK |
8077 | |
8078 | if (target) | |
8079 | emit_0_to_1_insn (target); | |
8080 | ||
bbf6f052 | 8081 | emit_label (op1); |
e44842fe | 8082 | return ignore ? const0_rtx : target; |
bbf6f052 RK |
8083 | |
8084 | case TRUTH_NOT_EXPR: | |
8085 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0); | |
8086 | /* The parser is careful to generate TRUTH_NOT_EXPR | |
8087 | only with operands that are always zero or one. */ | |
906c4e36 | 8088 | temp = expand_binop (mode, xor_optab, op0, const1_rtx, |
bbf6f052 RK |
8089 | target, 1, OPTAB_LIB_WIDEN); |
8090 | if (temp == 0) | |
8091 | abort (); | |
8092 | return temp; | |
8093 | ||
8094 | case COMPOUND_EXPR: | |
8095 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); | |
8096 | emit_queue (); | |
8097 | return expand_expr (TREE_OPERAND (exp, 1), | |
8098 | (ignore ? const0_rtx : target), | |
8099 | VOIDmode, 0); | |
8100 | ||
8101 | case COND_EXPR: | |
ac01eace RK |
8102 | /* If we would have a "singleton" (see below) were it not for a |
8103 | conversion in each arm, bring that conversion back out. */ | |
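| /* For example, "p ? (long) (q + r) : (long) q" arrives with a NOP_EXPR | |
| in each arm; we rebuild it as "(long) (p ? q + r : q)" so the | |
| singleton logic below can see the matching q + r / q pair. */ | |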
8104 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR | |
8105 | && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR | |
8106 | && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) | |
8107 | == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0)))) | |
8108 | { | |
d6edb99e ZW |
8109 | tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0); |
8110 | tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0); | |
8111 | ||
8112 | if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2' | |
8113 | && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0)) | |
8114 | || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2' | |
8115 | && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)) | |
8116 | || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1' | |
8117 | && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0)) | |
8118 | || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1' | |
8119 | && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))) | |
ac01eace | 8120 | return expand_expr (build1 (NOP_EXPR, type, |
d6edb99e | 8121 | build (COND_EXPR, TREE_TYPE (iftrue), |
ac01eace | 8122 | TREE_OPERAND (exp, 0), |
d6edb99e | 8123 | iftrue, iffalse)), |
ac01eace RK |
8124 | target, tmode, modifier); |
8125 | } | |
8126 | ||
bbf6f052 RK |
8127 | { |
8128 | /* Note that COND_EXPRs whose type is a structure or union | |
8129 | are required to be constructed to contain assignments of | |
8130 | a temporary variable, so that we can evaluate them here | |
8131 | for side effect only. If type is void, we must do likewise. */ | |
8132 | ||
8133 | /* If an arm of the branch requires a cleanup, | |
8134 | only that cleanup is performed. */ | |
8135 | ||
8136 | tree singleton = 0; | |
8137 | tree binary_op = 0, unary_op = 0; | |
bbf6f052 RK |
8138 | |
8139 | /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and | |
8140 | convert it to our mode, if necessary. */ | |
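| /* E.g., "a < b ? 1 : 0" reduces to expanding the comparison a < b | |
| itself, which yields a 0-or-1 store-flag value directly. */ | |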
8141 | if (integer_onep (TREE_OPERAND (exp, 1)) | |
8142 | && integer_zerop (TREE_OPERAND (exp, 2)) | |
8143 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<') | |
8144 | { | |
dd27116b RK |
8145 | if (ignore) |
8146 | { | |
8147 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, | |
921b3427 | 8148 | ro_modifier); |
dd27116b RK |
8149 | return const0_rtx; |
8150 | } | |
8151 | ||
921b3427 | 8152 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier); |
bbf6f052 RK |
8153 | if (GET_MODE (op0) == mode) |
8154 | return op0; | |
d6a5ac33 | 8155 | |
bbf6f052 RK |
8156 | if (target == 0) |
8157 | target = gen_reg_rtx (mode); | |
8158 | convert_move (target, op0, unsignedp); | |
8159 | return target; | |
8160 | } | |
8161 | ||
ac01eace RK |
8162 | /* Check for X ? A + B : A. If we have this, we can copy A to the |
8163 | output and conditionally add B. Similarly for unary operations. | |
8164 | Don't do this if X has side-effects because those side effects | |
8165 | might affect A or B and the "?" operation is a sequence point in | |
8166 | ANSI. (operand_equal_p tests for side effects.) */ | |
bbf6f052 RK |
8167 | |
8168 | if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2' | |
8169 | && operand_equal_p (TREE_OPERAND (exp, 2), | |
8170 | TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0)) | |
8171 | singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1); | |
8172 | else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2' | |
8173 | && operand_equal_p (TREE_OPERAND (exp, 1), | |
8174 | TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0)) | |
8175 | singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2); | |
8176 | else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1' | |
8177 | && operand_equal_p (TREE_OPERAND (exp, 2), | |
8178 | TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0)) | |
8179 | singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1); | |
8180 | else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1' | |
8181 | && operand_equal_p (TREE_OPERAND (exp, 1), | |
8182 | TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0)) | |
8183 | singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2); | |
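| /* For instance, in "x ? a + b : a" SINGLETON is "a" and BINARY_OP is | |
| "a + b"; in "x ? a : -a" SINGLETON is "a" and UNARY_OP is "-a". */ | |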
8184 | ||
01c8a7c8 RK |
8185 | /* If we are not to produce a result, we have no target. Otherwise, |
8186 | if a target was specified, use it; it will not be used as an | |
3a94c984 | 8187 | intermediate target unless it is safe. If no target, use a |
01c8a7c8 RK |
8188 | temporary. */ |
8189 | ||
8190 | if (ignore) | |
8191 | temp = 0; | |
8192 | else if (original_target | |
e5e809f4 | 8193 | && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1) |
01c8a7c8 RK |
8194 | || (singleton && GET_CODE (original_target) == REG |
8195 | && REGNO (original_target) >= FIRST_PSEUDO_REGISTER | |
8196 | && original_target == var_rtx (singleton))) | |
8197 | && GET_MODE (original_target) == mode | |
7c00d1fe RK |
8198 | #ifdef HAVE_conditional_move |
8199 | && (! can_conditionally_move_p (mode) | |
8200 | || GET_CODE (original_target) == REG | |
8201 | || TREE_ADDRESSABLE (type)) | |
8202 | #endif | |
01c8a7c8 RK |
8203 | && ! (GET_CODE (original_target) == MEM |
8204 | && MEM_VOLATILE_P (original_target))) | |
8205 | temp = original_target; | |
8206 | else if (TREE_ADDRESSABLE (type)) | |
8207 | abort (); | |
8208 | else | |
8209 | temp = assign_temp (type, 0, 0, 1); | |
8210 | ||
ac01eace RK |
8211 | /* If we had X ? A + C : A, with C a constant power of 2, and we can |
8212 | do the test of X as a store-flag operation, do this as | |
8213 | A + ((X != 0) << log C). Similarly for other simple binary | |
8214 | operators. Only do for C == 1 if BRANCH_COST is low. */ | |
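| /* E.g., with BRANCH_COST >= 3, "x ? a + 8 : a" becomes | |
| a + ((x != 0) << 3), with no branch at all. */ | |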
dd27116b | 8215 | if (temp && singleton && binary_op |
bbf6f052 RK |
8216 | && (TREE_CODE (binary_op) == PLUS_EXPR |
8217 | || TREE_CODE (binary_op) == MINUS_EXPR | |
8218 | || TREE_CODE (binary_op) == BIT_IOR_EXPR | |
9fbd9f58 | 8219 | || TREE_CODE (binary_op) == BIT_XOR_EXPR) |
ac01eace RK |
8220 | && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1)) |
8221 | : integer_onep (TREE_OPERAND (binary_op, 1))) | |
bbf6f052 RK |
8222 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<') |
8223 | { | |
8224 | rtx result; | |
91ce572a CC |
8225 | optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR |
8226 | ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op)) | |
8227 | ? addv_optab : add_optab) | |
8228 | : TREE_CODE (binary_op) == MINUS_EXPR | |
8229 | ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op)) | |
8230 | ? subv_optab : sub_optab) | |
8231 | : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab | |
8232 | : xor_optab); | |
bbf6f052 RK |
8233 | |
8234 | /* If we had X ? A : A + 1, do this as A + (X == 0). | |
8235 | ||
8236 | We have to invert the truth value here and then put it | |
8237 | back later if do_store_flag fails. We cannot simply copy | |
8238 | TREE_OPERAND (exp, 0) to another variable and modify that | |
8239 | because invert_truthvalue can modify the tree pointed to | |
8240 | by its argument. */ | |
8241 | if (singleton == TREE_OPERAND (exp, 1)) | |
8242 | TREE_OPERAND (exp, 0) | |
8243 | = invert_truthvalue (TREE_OPERAND (exp, 0)); | |
8244 | ||
8245 | result = do_store_flag (TREE_OPERAND (exp, 0), | |
e5e809f4 | 8246 | (safe_from_p (temp, singleton, 1) |
906c4e36 | 8247 | ? temp : NULL_RTX), |
bbf6f052 RK |
8248 | mode, BRANCH_COST <= 1); |
8249 | ||
ac01eace RK |
8250 | if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1))) |
8251 | result = expand_shift (LSHIFT_EXPR, mode, result, | |
8252 | build_int_2 (tree_log2 | |
8253 | (TREE_OPERAND | |
8254 | (binary_op, 1)), | |
8255 | 0), | |
e5e809f4 | 8256 | (safe_from_p (temp, singleton, 1) |
ac01eace RK |
8257 | ? temp : NULL_RTX), 0); |
8258 | ||
bbf6f052 RK |
8259 | if (result) |
8260 | { | |
906c4e36 | 8261 | op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
8262 | return expand_binop (mode, boptab, op1, result, temp, |
8263 | unsignedp, OPTAB_LIB_WIDEN); | |
8264 | } | |
8265 | else if (singleton == TREE_OPERAND (exp, 1)) | |
8266 | TREE_OPERAND (exp, 0) | |
8267 | = invert_truthvalue (TREE_OPERAND (exp, 0)); | |
8268 | } | |
3a94c984 | 8269 | |
dabf8373 | 8270 | do_pending_stack_adjust (); |
bbf6f052 RK |
8271 | NO_DEFER_POP; |
8272 | op0 = gen_label_rtx (); | |
8273 | ||
8274 | if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))) | |
8275 | { | |
8276 | if (temp != 0) | |
8277 | { | |
8278 | /* If the target conflicts with the other operand of the | |
8279 | binary op, we can't use it. Also, we can't use the target | |
8280 | if it is a hard register, because evaluating the condition | |
8281 | might clobber it. */ | |
8282 | if ((binary_op | |
e5e809f4 | 8283 | && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1)) |
bbf6f052 RK |
8284 | || (GET_CODE (temp) == REG |
8285 | && REGNO (temp) < FIRST_PSEUDO_REGISTER)) | |
8286 | temp = gen_reg_rtx (mode); | |
8287 | store_expr (singleton, temp, 0); | |
8288 | } | |
8289 | else | |
906c4e36 | 8290 | expand_expr (singleton, |
2937cf87 | 8291 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
8292 | if (singleton == TREE_OPERAND (exp, 1)) |
8293 | jumpif (TREE_OPERAND (exp, 0), op0); | |
8294 | else | |
8295 | jumpifnot (TREE_OPERAND (exp, 0), op0); | |
8296 | ||
956d6950 | 8297 | start_cleanup_deferral (); |
bbf6f052 RK |
8298 | if (binary_op && temp == 0) |
8299 | /* Just touch the other operand. */ | |
8300 | expand_expr (TREE_OPERAND (binary_op, 1), | |
906c4e36 | 8301 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
8302 | else if (binary_op) |
8303 | store_expr (build (TREE_CODE (binary_op), type, | |
8304 | make_tree (type, temp), | |
8305 | TREE_OPERAND (binary_op, 1)), | |
8306 | temp, 0); | |
8307 | else | |
8308 | store_expr (build1 (TREE_CODE (unary_op), type, | |
8309 | make_tree (type, temp)), | |
8310 | temp, 0); | |
8311 | op1 = op0; | |
bbf6f052 | 8312 | } |
bbf6f052 RK |
8313 | /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any |
8314 | comparison operator. If we have one of these cases, set the | |
8315 | output to A, branch on A (cse will merge these two references), | |
8316 | then set the output to FOO. */ | |
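| /* E.g., for "i > 0 ? i : -i" we store i into TEMP, branch on i > 0 | |
| (cse merges that use of i with the store), and on the other path | |
| store -i into TEMP. */ | |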
8317 | else if (temp | |
8318 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<' | |
8319 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) | |
8320 | && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
8321 | TREE_OPERAND (exp, 1), 0) | |
e9a25f70 JL |
8322 | && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)) |
8323 | || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR) | |
e5e809f4 | 8324 | && safe_from_p (temp, TREE_OPERAND (exp, 2), 1)) |
bbf6f052 | 8325 | { |
3a94c984 KH |
8326 | if (GET_CODE (temp) == REG |
8327 | && REGNO (temp) < FIRST_PSEUDO_REGISTER) | |
bbf6f052 RK |
8328 | temp = gen_reg_rtx (mode); |
8329 | store_expr (TREE_OPERAND (exp, 1), temp, 0); | |
8330 | jumpif (TREE_OPERAND (exp, 0), op0); | |
5dab5552 | 8331 | |
956d6950 | 8332 | start_cleanup_deferral (); |
bbf6f052 RK |
8333 | store_expr (TREE_OPERAND (exp, 2), temp, 0); |
8334 | op1 = op0; | |
8335 | } | |
8336 | else if (temp | |
8337 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<' | |
8338 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) | |
8339 | && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
8340 | TREE_OPERAND (exp, 2), 0) | |
e9a25f70 JL |
8341 | && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)) |
8342 | || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR) | |
e5e809f4 | 8343 | && safe_from_p (temp, TREE_OPERAND (exp, 1), 1)) |
bbf6f052 | 8344 | { |
3a94c984 KH |
8345 | if (GET_CODE (temp) == REG |
8346 | && REGNO (temp) < FIRST_PSEUDO_REGISTER) | |
bbf6f052 RK |
8347 | temp = gen_reg_rtx (mode); |
8348 | store_expr (TREE_OPERAND (exp, 2), temp, 0); | |
8349 | jumpifnot (TREE_OPERAND (exp, 0), op0); | |
5dab5552 | 8350 | |
956d6950 | 8351 | start_cleanup_deferral (); |
bbf6f052 RK |
8352 | store_expr (TREE_OPERAND (exp, 1), temp, 0); |
8353 | op1 = op0; | |
8354 | } | |
8355 | else | |
8356 | { | |
8357 | op1 = gen_label_rtx (); | |
8358 | jumpifnot (TREE_OPERAND (exp, 0), op0); | |
5dab5552 | 8359 | |
956d6950 | 8360 | start_cleanup_deferral (); |
3a94c984 | 8361 | |
2ac84cfe | 8362 | /* One branch of the cond can be void, if it never returns. For |
3a94c984 | 8363 | example, A ? throw : E. */ | |
2ac84cfe | 8364 | if (temp != 0 |
3a94c984 | 8365 | && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node) |
bbf6f052 RK |
8366 | store_expr (TREE_OPERAND (exp, 1), temp, 0); |
8367 | else | |
906c4e36 RK |
8368 | expand_expr (TREE_OPERAND (exp, 1), |
8369 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); | |
956d6950 | 8370 | end_cleanup_deferral (); |
bbf6f052 RK |
8371 | emit_queue (); |
8372 | emit_jump_insn (gen_jump (op1)); | |
8373 | emit_barrier (); | |
8374 | emit_label (op0); | |
956d6950 | 8375 | start_cleanup_deferral (); |
2ac84cfe | 8376 | if (temp != 0 |
3a94c984 | 8377 | && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node) |
bbf6f052 RK |
8378 | store_expr (TREE_OPERAND (exp, 2), temp, 0); |
8379 | else | |
906c4e36 RK |
8380 | expand_expr (TREE_OPERAND (exp, 2), |
8381 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); | |
bbf6f052 RK |
8382 | } |
8383 | ||
956d6950 | 8384 | end_cleanup_deferral (); |
bbf6f052 RK |
8385 | |
8386 | emit_queue (); | |
8387 | emit_label (op1); | |
8388 | OK_DEFER_POP; | |
5dab5552 | 8389 | |
bbf6f052 RK |
8390 | return temp; |
8391 | } | |
8392 | ||
8393 | case TARGET_EXPR: | |
8394 | { | |
8395 | /* Something needs to be initialized, but we didn't know | |
8396 | where that thing was when building the tree. For example, | |
8397 | it could be the return value of a function, or a parameter | |
8398 | to a function which is laid out on the stack, or a temporary | |
8399 | variable which must be passed by reference. | |
8400 | ||
8401 | We guarantee that the expression will either be constructed | |
8402 | or copied into our original target. */ | |
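| /* A typical case: for the C++ declaration "S s = f ();" the slot is | |
| s itself, and f constructs its return value directly into s | |
| instead of into a temporary that would then be copied. */ | |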
8403 | ||
8404 | tree slot = TREE_OPERAND (exp, 0); | |
2a888d4c | 8405 | tree cleanups = NULL_TREE; |
5c062816 | 8406 | tree exp1; |
bbf6f052 RK |
8407 | |
8408 | if (TREE_CODE (slot) != VAR_DECL) | |
8409 | abort (); | |
8410 | ||
9c51f375 RK |
8411 | if (! ignore) |
8412 | target = original_target; | |
8413 | ||
6fbfac92 JM |
8414 | /* Set this here so that if we get a target that refers to a |
8415 | register variable that's already been used, put_reg_into_stack | |
3a94c984 | 8416 | knows that it should fix up those uses. */ |
6fbfac92 JM |
8417 | TREE_USED (slot) = 1; |
8418 | ||
bbf6f052 RK |
8419 | if (target == 0) |
8420 | { | |
19e7881c | 8421 | if (DECL_RTL_SET_P (slot)) |
ac993f4f MS |
8422 | { |
8423 | target = DECL_RTL (slot); | |
5c062816 | 8424 | /* If we have already expanded the slot, don't do | |
ac993f4f | 8425 | it again. (mrs) */ |
5c062816 MS |
8426 | if (TREE_OPERAND (exp, 1) == NULL_TREE) |
8427 | return target; | |
ac993f4f | 8428 | } |
bbf6f052 RK |
8429 | else |
8430 | { | |
e9a25f70 | 8431 | target = assign_temp (type, 2, 0, 1); |
bbf6f052 RK |
8432 | /* All temp slots at this level must not conflict. */ |
8433 | preserve_temp_slots (target); | |
19e7881c | 8434 | SET_DECL_RTL (slot, target); |
e9a25f70 | 8435 | if (TREE_ADDRESSABLE (slot)) |
4361b41d | 8436 | put_var_into_stack (slot); |
bbf6f052 | 8437 | |
e287fd6e RK |
8438 | /* Since SLOT is not known to the called function |
8439 | to belong to its stack frame, we must build an explicit | |
8440 | cleanup. This case occurs when we must build up a reference | |
8441 | to pass the reference as an argument. In this case, | |
8442 | it is very likely that such a reference need not be | |
8443 | built here. */ | |
8444 | ||
8445 | if (TREE_OPERAND (exp, 2) == 0) | |
8446 | TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot); | |
2a888d4c | 8447 | cleanups = TREE_OPERAND (exp, 2); |
e287fd6e | 8448 | } |
bbf6f052 RK |
8449 | } |
8450 | else | |
8451 | { | |
8452 | /* This case does occur, when expanding a parameter which | |
8453 | needs to be constructed on the stack. The target | |
8454 | is the actual stack address that we want to initialize. | |
8455 | The function we call will perform the cleanup in this case. */ | |
8456 | ||
8c042b47 RS |
8457 | /* If we have already assigned it space, use that space, |
8458 | not the target that we were passed in, as our target | |
8459 | parameter is only a hint. */ | |
19e7881c | 8460 | if (DECL_RTL_SET_P (slot)) |
3a94c984 KH |
8461 | { |
8462 | target = DECL_RTL (slot); | |
8463 | /* If we have already expanded the slot, don't do | |
8c042b47 | 8464 | it again. (mrs) */ |
3a94c984 KH |
8465 | if (TREE_OPERAND (exp, 1) == NULL_TREE) |
8466 | return target; | |
8c042b47 | 8467 | } |
21002281 JW |
8468 | else |
8469 | { | |
19e7881c | 8470 | SET_DECL_RTL (slot, target); |
21002281 JW |
8471 | /* If we must have an addressable slot, then make sure that |
8472 | the RTL that we just stored in slot is OK. */ | |
8473 | if (TREE_ADDRESSABLE (slot)) | |
4361b41d | 8474 | put_var_into_stack (slot); |
21002281 | 8475 | } |
bbf6f052 RK |
8476 | } |
8477 | ||
4847c938 | 8478 | exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1); |
5c062816 MS |
8479 | /* Mark it as expanded. */ |
8480 | TREE_OPERAND (exp, 1) = NULL_TREE; | |
8481 | ||
41531e5b | 8482 | store_expr (exp1, target, 0); |
61d6b1cc | 8483 | |
e976b8b2 | 8484 | expand_decl_cleanup (NULL_TREE, cleanups); |
3a94c984 | 8485 | |
41531e5b | 8486 | return target; |
bbf6f052 RK |
8487 | } |
8488 | ||
8489 | case INIT_EXPR: | |
8490 | { | |
8491 | tree lhs = TREE_OPERAND (exp, 0); | |
8492 | tree rhs = TREE_OPERAND (exp, 1); | |
8493 | tree noncopied_parts = 0; | |
8494 | tree lhs_type = TREE_TYPE (lhs); | |
8495 | ||
8496 | temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0); | |
8497 | if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs)) | |
b4e3fabb RK |
8498 | noncopied_parts |
8499 | = init_noncopied_parts (stabilize_reference (lhs), | |
8500 | TYPE_NONCOPIED_PARTS (lhs_type)); | |
8501 | ||
bbf6f052 RK |
8502 | while (noncopied_parts != 0) |
8503 | { | |
8504 | expand_assignment (TREE_VALUE (noncopied_parts), | |
8505 | TREE_PURPOSE (noncopied_parts), 0, 0); | |
8506 | noncopied_parts = TREE_CHAIN (noncopied_parts); | |
8507 | } | |
8508 | return temp; | |
8509 | } | |
8510 | ||
8511 | case MODIFY_EXPR: | |
8512 | { | |
8513 | /* If lhs is complex, expand calls in rhs before computing it. | |
8514 | That's so we don't compute a pointer and save it over a call. | |
8515 | If lhs is simple, compute it first so we can give it as a | |
8516 | target if the rhs is just a call. This avoids an extra temp and copy | |
8517 | and that prevents a partial-subsumption which makes bad code. | |
8518 | Actually we could treat component_ref's of vars like vars. */ | |
8519 | ||
8520 | tree lhs = TREE_OPERAND (exp, 0); | |
8521 | tree rhs = TREE_OPERAND (exp, 1); | |
8522 | tree noncopied_parts = 0; | |
8523 | tree lhs_type = TREE_TYPE (lhs); | |
8524 | ||
8525 | temp = 0; | |
8526 | ||
bbf6f052 RK |
8527 | /* Check for |= or &= of a bitfield of size one into another bitfield |
8528 | of size 1. In this case, (unless we need the result of the | |
8529 | assignment) we can do this more efficiently with a | |
8530 | test followed by an assignment, if necessary. | |
8531 | ||
8532 | ??? At this point, we can't get a BIT_FIELD_REF here. But if | |
8533 | things change so we do, this code should be enhanced to | |
8534 | support it. */ | |
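| /* E.g., for "s.a |= s.b" with two 1-bit fields and the result unused, | |
| we emit "if (s.b == 0) goto L; s.a = 1; L:" -- a test plus a | |
| conditional store instead of a load/or/store sequence. */ | |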
8535 | if (ignore | |
8536 | && TREE_CODE (lhs) == COMPONENT_REF | |
8537 | && (TREE_CODE (rhs) == BIT_IOR_EXPR | |
8538 | || TREE_CODE (rhs) == BIT_AND_EXPR) | |
8539 | && TREE_OPERAND (rhs, 0) == lhs | |
8540 | && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF | |
05bccae2 RK |
8541 | && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) |
8542 | && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) | |
bbf6f052 RK |
8543 | { |
8544 | rtx label = gen_label_rtx (); | |
8545 | ||
8546 | do_jump (TREE_OPERAND (rhs, 1), | |
8547 | TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0, | |
8548 | TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0); | |
8549 | expand_assignment (lhs, convert (TREE_TYPE (rhs), | |
8550 | (TREE_CODE (rhs) == BIT_IOR_EXPR | |
8551 | ? integer_one_node | |
8552 | : integer_zero_node)), | |
8553 | 0, 0); | |
e7c33f54 | 8554 | do_pending_stack_adjust (); |
bbf6f052 RK |
8555 | emit_label (label); |
8556 | return const0_rtx; | |
8557 | } | |
8558 | ||
8559 | if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 | |
8560 | && ! (fixed_type_p (lhs) && fixed_type_p (rhs))) | |
b4e3fabb RK |
8561 | noncopied_parts |
8562 | = save_noncopied_parts (stabilize_reference (lhs), | |
8563 | TYPE_NONCOPIED_PARTS (lhs_type)); | |
bbf6f052 RK |
8564 | |
8565 | temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0); | |
8566 | while (noncopied_parts != 0) | |
8567 | { | |
8568 | expand_assignment (TREE_PURPOSE (noncopied_parts), | |
8569 | TREE_VALUE (noncopied_parts), 0, 0); | |
8570 | noncopied_parts = TREE_CHAIN (noncopied_parts); | |
8571 | } | |
8572 | return temp; | |
8573 | } | |
8574 | ||
6e7f84a7 APB |
8575 | case RETURN_EXPR: |
8576 | if (!TREE_OPERAND (exp, 0)) | |
8577 | expand_null_return (); | |
8578 | else | |
8579 | expand_return (TREE_OPERAND (exp, 0)); | |
8580 | return const0_rtx; | |
8581 | ||
bbf6f052 RK |
8582 | case PREINCREMENT_EXPR: |
8583 | case PREDECREMENT_EXPR: | |
7b8b9722 | 8584 | return expand_increment (exp, 0, ignore); |
bbf6f052 RK |
8585 | |
8586 | case POSTINCREMENT_EXPR: | |
8587 | case POSTDECREMENT_EXPR: | |
8588 | /* Faster to treat as pre-increment if result is not used. */ | |
7b8b9722 | 8589 | return expand_increment (exp, ! ignore, ignore); |
bbf6f052 RK |
8590 | |
8591 | case ADDR_EXPR: | |
987c71d9 | 8592 | /* If nonzero, TEMP will be set to the address of something that might |
0f41302f | 8593 | be a MEM corresponding to a stack slot. */ |
987c71d9 RK |
8594 | temp = 0; |
8595 | ||
bbf6f052 RK |
8596 | /* Are we taking the address of a nested function? */ |
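| /* If so, we need a trampoline: a small stub built at run time that | |
| loads the static chain and jumps to the function; its address is | |
| what we return. */ | |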
8597 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL | |
38ee6ed9 | 8598 | && decl_function_context (TREE_OPERAND (exp, 0)) != 0 |
e5e809f4 JL |
8599 | && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)) |
8600 | && ! TREE_STATIC (exp)) | |
bbf6f052 RK |
8601 | { |
8602 | op0 = trampoline_address (TREE_OPERAND (exp, 0)); | |
8603 | op0 = force_operand (op0, target); | |
8604 | } | |
682ba3a6 RK |
8605 | /* If we are taking the address of something erroneous, just |
8606 | return a zero. */ | |
8607 | else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK) | |
8608 | return const0_rtx; | |
bbf6f052 RK |
8609 | else |
8610 | { | |
e287fd6e RK |
8611 | /* We make sure to pass const0_rtx down if we came in with |
8612 | ignore set, to avoid doing the cleanups twice for something. */ | |
8613 | op0 = expand_expr (TREE_OPERAND (exp, 0), | |
8614 | ignore ? const0_rtx : NULL_RTX, VOIDmode, | |
bbf6f052 RK |
8615 | (modifier == EXPAND_INITIALIZER |
8616 | ? modifier : EXPAND_CONST_ADDRESS)); | |
896102d0 | 8617 | |
119af78a RK |
8618 | /* If we are going to ignore the result, OP0 will have been set |
8619 | to const0_rtx, so just return it. Don't get confused and | |
8620 | think we are taking the address of the constant. */ | |
8621 | if (ignore) | |
8622 | return op0; | |
8623 | ||
73b7f58c BS |
8624 | /* Pass 1 for MODIFY, so that protect_from_queue doesn't get |
8625 | clever and returns a REG when given a MEM. */ | |
8626 | op0 = protect_from_queue (op0, 1); | |
3539e816 | 8627 | |
c5c76735 JL |
8628 | /* We would like the object in memory. If it is a constant, we can |
8629 | have it be statically allocated into memory. For a non-constant, | |
8630 | we need to allocate some memory and store the value into it. */ | |
896102d0 RK |
8631 | |
8632 | if (CONSTANT_P (op0)) | |
8633 | op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), | |
8634 | op0); | |
987c71d9 | 8635 | else if (GET_CODE (op0) == MEM) |
af5b53ed RK |
8636 | { |
8637 | mark_temp_addr_taken (op0); | |
8638 | temp = XEXP (op0, 0); | |
8639 | } | |
896102d0 | 8640 | |
682ba3a6 | 8641 | else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG |
df6018fd JJ |
8642 | || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF |
8643 | || GET_CODE (op0) == PARALLEL) | |
896102d0 RK |
8644 | { |
8645 | /* If this object is in a register, it must not | |
0f41302f | 8646 | be BLKmode. */ |
896102d0 | 8647 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); |
1da68f56 RK |
8648 | tree nt = build_qualified_type (inner_type, |
8649 | (TYPE_QUALS (inner_type) | |
8650 | | TYPE_QUAL_CONST)); | |
8651 | rtx memloc = assign_temp (nt, 1, 1, 1); | |
896102d0 | 8652 | |
7a0b7b9a | 8653 | mark_temp_addr_taken (memloc); |
df6018fd JJ |
8654 | if (GET_CODE (op0) == PARALLEL) |
8655 | /* Handle calls that pass values in multiple non-contiguous | |
8656 | locations. The Irix 6 ABI has examples of this. */ | |
8657 | emit_group_store (memloc, op0, | |
8658 | int_size_in_bytes (inner_type), | |
8659 | TYPE_ALIGN (inner_type)); | |
8660 | else | |
8661 | emit_move_insn (memloc, op0); | |
896102d0 RK |
8662 | op0 = memloc; |
8663 | } | |
8664 | ||
bbf6f052 RK |
8665 | if (GET_CODE (op0) != MEM) |
8666 | abort (); | |
3a94c984 | 8667 | |
bbf6f052 | 8668 | if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) |
88f63c77 RK |
8669 | { |
8670 | temp = XEXP (op0, 0); | |
8671 | #ifdef POINTERS_EXTEND_UNSIGNED | |
8672 | if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode | |
8673 | && mode == ptr_mode) | |
9fcfcce7 | 8674 | temp = convert_memory_address (ptr_mode, temp); |
88f63c77 RK |
8675 | #endif |
8676 | return temp; | |
8677 | } | |
987c71d9 | 8678 | |
bbf6f052 RK |
8679 | op0 = force_operand (XEXP (op0, 0), target); |
8680 | } | |
987c71d9 | 8681 | |
bbf6f052 | 8682 | if (flag_force_addr && GET_CODE (op0) != REG) |
987c71d9 RK |
8683 | op0 = force_reg (Pmode, op0); |
8684 | ||
dc6d66b3 RK |
8685 | if (GET_CODE (op0) == REG |
8686 | && ! REG_USERVAR_P (op0)) | |
bdb429a5 | 8687 | mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type))); |
987c71d9 RK |
8688 | |
8689 | /* If we might have had a temp slot, add an equivalent address | |
8690 | for it. */ | |
8691 | if (temp != 0) | |
8692 | update_temp_slot_address (temp, op0); | |
8693 | ||
88f63c77 RK |
8694 | #ifdef POINTERS_EXTEND_UNSIGNED |
8695 | if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode | |
8696 | && mode == ptr_mode) | |
9fcfcce7 | 8697 | op0 = convert_memory_address (ptr_mode, op0); |
88f63c77 RK |
8698 | #endif |
8699 | ||
bbf6f052 RK |
8700 | return op0; |
8701 | ||
8702 | case ENTRY_VALUE_EXPR: | |
8703 | abort (); | |
8704 | ||
7308a047 RS |
8705 | /* COMPLEX type for Extended Pascal & Fortran */ |
8706 | case COMPLEX_EXPR: | |
8707 | { | |
8708 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); | |
6551fa4d | 8709 | rtx insns; |
7308a047 RS |
8710 | |
8711 | /* Get the rtx code of the operands. */ | |
8712 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); | |
8713 | op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0); | |
8714 | ||
8715 | if (! target) | |
8716 | target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); | |
8717 | ||
6551fa4d | 8718 | start_sequence (); |
7308a047 RS |
8719 | |
8720 | /* Move the real (op0) and imaginary (op1) parts to their location. */ | |
2d7050fd RS |
8721 | emit_move_insn (gen_realpart (mode, target), op0); |
8722 | emit_move_insn (gen_imagpart (mode, target), op1); | |
7308a047 | 8723 | |
6551fa4d JW |
8724 | insns = get_insns (); |
8725 | end_sequence (); | |
8726 | ||
7308a047 | 8727 | /* Complex construction should appear as a single unit. */ |
6551fa4d JW |
8728 | /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS, |
8729 | each with a separate pseudo as destination. | |
8730 | It's not correct for flow to treat them as a unit. */ | |
6d6e61ce | 8731 | if (GET_CODE (target) != CONCAT) |
6551fa4d JW |
8732 | emit_no_conflict_block (insns, target, op0, op1, NULL_RTX); |
8733 | else | |
8734 | emit_insns (insns); | |
7308a047 RS |
8735 | |
8736 | return target; | |
8737 | } | |
8738 | ||
8739 | case REALPART_EXPR: | |
2d7050fd RS |
8740 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); |
8741 | return gen_realpart (mode, op0); | |
3a94c984 | 8742 | |
7308a047 | 8743 | case IMAGPART_EXPR: |
2d7050fd RS |
8744 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); |
8745 | return gen_imagpart (mode, op0); | |
7308a047 RS |
8746 | |
8747 | case CONJ_EXPR: | |
8748 | { | |
62acb978 | 8749 | enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); |
7308a047 | 8750 | rtx imag_t; |
6551fa4d | 8751 | rtx insns; |
3a94c984 KH |
8752 | |
8753 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); | |
7308a047 RS |
8754 | |
8755 | if (! target) | |
d6a5ac33 | 8756 | target = gen_reg_rtx (mode); |
3a94c984 | 8757 | |
6551fa4d | 8758 | start_sequence (); |
7308a047 RS |
8759 | |
8760 | /* Store the realpart and the negated imagpart to target. */ | |
62acb978 RK |
8761 | emit_move_insn (gen_realpart (partmode, target), |
8762 | gen_realpart (partmode, op0)); | |
7308a047 | 8763 | |
62acb978 | 8764 | imag_t = gen_imagpart (partmode, target); |
91ce572a CC |
8765 | temp = expand_unop (partmode, |
8766 | ! unsignedp && flag_trapv | |
8767 | && (GET_MODE_CLASS(partmode) == MODE_INT) | |
8768 | ? negv_optab : neg_optab, | |
3a94c984 | 8769 | gen_imagpart (partmode, op0), imag_t, 0); |
7308a047 RS |
8770 | if (temp != imag_t) |
8771 | emit_move_insn (imag_t, temp); | |
8772 | ||
6551fa4d JW |
8773 | insns = get_insns (); |
8774 | end_sequence (); | |
8775 | ||
3a94c984 | 8776 | /* Conjugate should appear as a single unit. | |
d6a5ac33 | 8777 | If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS, |
6551fa4d JW |
8778 | each with a separate pseudo as destination. |
8779 | It's not correct for flow to treat them as a unit. */ | |
6d6e61ce | 8780 | if (GET_CODE (target) != CONCAT) |
6551fa4d JW |
8781 | emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX); |
8782 | else | |
8783 | emit_insns (insns); | |
7308a047 RS |
8784 | |
8785 | return target; | |
8786 | } | |
8787 | ||
e976b8b2 MS |
8788 | case TRY_CATCH_EXPR: |
8789 | { | |
8790 | tree handler = TREE_OPERAND (exp, 1); | |
8791 | ||
8792 | expand_eh_region_start (); | |
8793 | ||
8794 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); | |
8795 | ||
52a11cbf | 8796 | expand_eh_region_end_cleanup (handler); |
e976b8b2 MS |
8797 | |
8798 | return op0; | |
8799 | } | |
8800 | ||
b335b813 PB |
8801 | case TRY_FINALLY_EXPR: |
8802 | { | |
8803 | tree try_block = TREE_OPERAND (exp, 0); | |
8804 | tree finally_block = TREE_OPERAND (exp, 1); | |
8805 | rtx finally_label = gen_label_rtx (); | |
8806 | rtx done_label = gen_label_rtx (); | |
8807 | rtx return_link = gen_reg_rtx (Pmode); | |
8808 | tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node, | |
8809 | (tree) finally_label, (tree) return_link); | |
8810 | TREE_SIDE_EFFECTS (cleanup) = 1; | |
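| /* The code laid out below has the shape: | |
| <try block> -- with CLEANUP registered, so every exit | |
| path first calls the finally code | |
| goto done_label; | |
| finally_label: <finally block>; jump *return_link; | |
| done_label: */ | |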
8811 | ||
8812 | /* Start a new binding layer that will keep track of all cleanup | |
8813 | actions to be performed. */ | |
8e91754e | 8814 | expand_start_bindings (2); |
b335b813 PB |
8815 | |
8816 | target_temp_slot_level = temp_slot_level; | |
8817 | ||
8818 | expand_decl_cleanup (NULL_TREE, cleanup); | |
8819 | op0 = expand_expr (try_block, target, tmode, modifier); | |
8820 | ||
8821 | preserve_temp_slots (op0); | |
8822 | expand_end_bindings (NULL_TREE, 0, 0); | |
8823 | emit_jump (done_label); | |
8824 | emit_label (finally_label); | |
8825 | expand_expr (finally_block, const0_rtx, VOIDmode, 0); | |
8826 | emit_indirect_jump (return_link); | |
8827 | emit_label (done_label); | |
8828 | return op0; | |
8829 | } | |
8830 | ||
3a94c984 | 8831 | case GOTO_SUBROUTINE_EXPR: |
b335b813 PB |
8832 | { |
8833 | rtx subr = (rtx) TREE_OPERAND (exp, 0); | |
8834 | rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1); | |
8835 | rtx return_address = gen_label_rtx (); | |
3a94c984 KH |
8836 | emit_move_insn (return_link, |
8837 | gen_rtx_LABEL_REF (Pmode, return_address)); | |
b335b813 PB |
8838 | emit_jump (subr); |
8839 | emit_label (return_address); | |
8840 | return const0_rtx; | |
8841 | } | |
8842 | ||
d3707adb RH |
8843 | case VA_ARG_EXPR: |
8844 | return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type); | |
8845 | ||
52a11cbf | 8846 | case EXC_PTR_EXPR: |
86c99549 | 8847 | return get_exception_pointer (cfun); |
52a11cbf | 8848 | |
bbf6f052 | 8849 | default: |
90764a87 | 8850 | return (*lang_expand_expr) (exp, original_target, tmode, modifier); |
bbf6f052 RK |
8851 | } |
8852 | ||
8853 | /* Here to do an ordinary binary operator, generating an instruction | |
8854 | from the optab already placed in `this_optab'. */ | |
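| /* E.g., BIT_AND_EXPR jumps here with this_optab == and_optab; both | |
| operands are expanded, and expand_binop picks an insn, widens the | |
| mode, or falls back to a library call (OPTAB_LIB_WIDEN). */ | |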
8855 | binop: | |
e5e809f4 | 8856 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
bbf6f052 RK |
8857 | subtarget = 0; |
8858 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
906c4e36 | 8859 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
bbf6f052 RK |
8860 | binop2: |
8861 | temp = expand_binop (mode, this_optab, op0, op1, target, | |
8862 | unsignedp, OPTAB_LIB_WIDEN); | |
8863 | if (temp == 0) | |
8864 | abort (); | |
8865 | return temp; | |
8866 | } | |
b93a436e | 8867 | \f |
14a774a9 RK |
8868 | /* Similar to expand_expr, except that we don't specify a target, target |
8869 | mode, or modifier and we return the alignment of the inner type. This is | |
8870 | used in cases where it is not necessary to align the result to the | |
8871 | alignment of its type as long as we know the alignment of the result, for | |
8872 | example for comparisons of BLKmode values. */ | |
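| /* E.g., when comparing two BLKmode aggregates, the block-compare code | |
| only needs the operands' actual alignment, which may be smaller | |
| than TYPE_ALIGN of their declared type. */ | |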
8873 | ||
8874 | static rtx | |
8875 | expand_expr_unaligned (exp, palign) | |
8876 | register tree exp; | |
729a2125 | 8877 | unsigned int *palign; |
14a774a9 RK |
8878 | { |
8879 | register rtx op0; | |
8880 | tree type = TREE_TYPE (exp); | |
8881 | register enum machine_mode mode = TYPE_MODE (type); | |
8882 | ||
8883 | /* Default the alignment we return to that of the type. */ | |
8884 | *palign = TYPE_ALIGN (type); | |
8885 | ||
8886 | /* The only cases in which we do anything special is if the resulting mode | |
8887 | is BLKmode. */ | |
8888 | if (mode != BLKmode) | |
8889 | return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL); | |
8890 | ||
8891 | switch (TREE_CODE (exp)) | |
8892 | { | |
8893 | case CONVERT_EXPR: | |
8894 | case NOP_EXPR: | |
8895 | case NON_LVALUE_EXPR: | |
8896 | /* Conversions between BLKmode values don't change the underlying | |
8897 | alignment or value. */ | |
8898 | if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode) | |
8899 | return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign); | |
8900 | break; | |
8901 | ||
8902 | case ARRAY_REF: | |
8903 | /* Much of the code for this case is copied directly from expand_expr. | |
8904 | We need to duplicate it here because we will do something different | |
8905 | in the fall-through case, so we need to handle the same exceptions | |
8906 | it does. */ | |
8907 | { | |
8908 | tree array = TREE_OPERAND (exp, 0); | |
8909 | tree domain = TYPE_DOMAIN (TREE_TYPE (array)); | |
8910 | tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node; | |
fed3cef0 | 8911 | tree index = convert (sizetype, TREE_OPERAND (exp, 1)); |
14a774a9 RK |
8912 | HOST_WIDE_INT i; |
8913 | ||
8914 | if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE) | |
8915 | abort (); | |
8916 | ||
8917 | /* Optimize the special-case of a zero lower bound. | |
8918 | ||
8919 | We convert the low_bound to sizetype to avoid some problems | |
8920 | with constant folding. (E.g. suppose the lower bound is 1, | |
8921 | and its mode is QI. Without the conversion, (ARRAY | |
8922 | +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) | |
fed3cef0 | 8923 | +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ |
14a774a9 RK |
8924 | |
8925 | if (! integer_zerop (low_bound)) | |
fed3cef0 | 8926 | index = size_diffop (index, convert (sizetype, low_bound)); |
14a774a9 RK |
8927 | |
8928 | /* If this is a constant index into a constant array, | |
8929 | just get the value from the array. Handle both the cases when | |
8930 | we have an explicit constructor and when our operand is a variable | |
8931 | that was declared const. */ | |
8932 | ||
05bccae2 | 8933 | if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array) |
235783d1 | 8934 | && host_integerp (index, 0) |
3a94c984 | 8935 | && 0 > compare_tree_int (index, |
05bccae2 RK |
8936 | list_length (CONSTRUCTOR_ELTS |
8937 | (TREE_OPERAND (exp, 0))))) | |
14a774a9 | 8938 | { |
05bccae2 RK |
8939 | tree elem; |
8940 | ||
8941 | for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)), | |
235783d1 | 8942 | i = tree_low_cst (index, 0); |
05bccae2 RK |
8943 | elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem)) |
8944 | ; | |
8945 | ||
8946 | if (elem) | |
8947 | return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign); | |
14a774a9 | 8948 | } |
3a94c984 | 8949 | |
14a774a9 RK |
8950 | else if (optimize >= 1 |
8951 | && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array) | |
8952 | && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array) | |
8953 | && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK) | |
8954 | { | |
8955 | if (TREE_CODE (index) == INTEGER_CST) | |
8956 | { | |
8957 | tree init = DECL_INITIAL (array); | |
8958 | ||
14a774a9 RK |
8959 | if (TREE_CODE (init) == CONSTRUCTOR) |
8960 | { | |
05bccae2 RK |
8961 | tree elem; |
8962 | ||
8963 | for (elem = CONSTRUCTOR_ELTS (init); | |
8964 | ! tree_int_cst_equal (TREE_PURPOSE (elem), index); | |
8965 | elem = TREE_CHAIN (elem)) | |
8966 | ; | |
14a774a9 | 8967 | |
14a774a9 RK |
8968 | if (elem) |
8969 | return expand_expr_unaligned (fold (TREE_VALUE (elem)), | |
8970 | palign); | |
8971 | } | |
8972 | } | |
8973 | } | |
8974 | } | |
3a94c984 | 8975 | /* Fall through. */ |
14a774a9 RK |
8976 | |
8977 | case COMPONENT_REF: | |
8978 | case BIT_FIELD_REF: | |
b4e3fabb | 8979 | case ARRAY_RANGE_REF: |
14a774a9 RK |
8980 | /* If the operand is a CONSTRUCTOR, we can just extract the |
8981 | appropriate field if it is present. Don't do this if we have | |
8982 | already written the data since we want to refer to that copy | |
8983 | and varasm.c assumes that's what we'll do. */ | |
b4e3fabb | 8984 | if (TREE_CODE (exp) == COMPONENT_REF |
14a774a9 RK |
8985 | && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR |
8986 | && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0) | |
8987 | { | |
8988 | tree elt; | |
8989 | ||
8990 | for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt; | |
8991 | elt = TREE_CHAIN (elt)) | |
8992 | if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)) | |
8993 | /* Note that unlike the case in expand_expr, we know this is | |
8994 | BLKmode and hence not an integer. */ | |
8995 | return expand_expr_unaligned (TREE_VALUE (elt), palign); | |
8996 | } | |
8997 | ||
8998 | { | |
8999 | enum machine_mode mode1; | |
770ae6cc | 9000 | HOST_WIDE_INT bitsize, bitpos; |
14a774a9 RK |
9001 | tree offset; |
9002 | int volatilep = 0; | |
729a2125 | 9003 | unsigned int alignment; |
14a774a9 RK |
9004 | int unsignedp; |
9005 | tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, | |
9006 | &mode1, &unsignedp, &volatilep, | |
9007 | &alignment); | |
9008 | ||
9009 | /* If we got back the original object, something is wrong. Perhaps | |
9010 | we are evaluating an expression too early. In any event, don't | |
9011 | infinitely recurse. */ | |
9012 | if (tem == exp) | |
9013 | abort (); | |
9014 | ||
9015 | op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL); | |
9016 | ||
9017 | /* If this is a constant, put it into a register if it is a | |
9018 | legitimate constant and OFFSET is 0 and memory if it isn't. */ | |
9019 | if (CONSTANT_P (op0)) | |
9020 | { | |
9021 | enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem)); | |
9022 | ||
9023 | if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0) | |
9024 | && offset == 0) | |
9025 | op0 = force_reg (inner_mode, op0); | |
9026 | else | |
9027 | op0 = validize_mem (force_const_mem (inner_mode, op0)); | |
9028 | } | |
9029 | ||
9030 | if (offset != 0) | |
9031 | { | |
9032 | rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); | |
9033 | ||
9034 | /* If this object is in a register, put it into memory. | |
9035 | This case can't occur in C, but can in Ada if we have | |
9036 | unchecked conversion of an expression from a scalar type to | |
9037 | an array or record type. */ | |
9038 | if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG | |
9039 | || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF) | |
9040 | { | |
1da68f56 RK |
9041 | tree nt = build_qualified_type (TREE_TYPE (tem), |
9042 | (TYPE_QUALS (TREE_TYPE (tem)) | |
9043 | | TYPE_QUAL_CONST)); | |
9044 | rtx memloc = assign_temp (nt, 1, 1, 1); | |
14a774a9 RK |
9045 | |
9046 | mark_temp_addr_taken (memloc); | |
9047 | emit_move_insn (memloc, op0); | |
9048 | op0 = memloc; | |
9049 | } | |
9050 | ||
9051 | if (GET_CODE (op0) != MEM) | |
9052 | abort (); | |
9053 | ||
9054 | if (GET_MODE (offset_rtx) != ptr_mode) | |
9055 | { | |
9056 | #ifdef POINTERS_EXTEND_UNSIGNED | |
9057 | offset_rtx = convert_memory_address (ptr_mode, offset_rtx); | |
9058 | #else | |
9059 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); | |
9060 | #endif | |
9061 | } | |
9062 | ||
9063 | op0 = change_address (op0, VOIDmode, | |
9064 | gen_rtx_PLUS (ptr_mode, XEXP (op0, 0), | |
9065 | force_reg (ptr_mode, | |
9066 | offset_rtx))); | |
9067 | } | |
9068 | ||
9069 | /* Don't forget about volatility even if this is a bitfield. */ | |
9070 | if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0)) | |
9071 | { | |
9072 | op0 = copy_rtx (op0); | |
9073 | MEM_VOLATILE_P (op0) = 1; | |
9074 | } | |
9075 | ||
9076 | /* Check the access. */ | |
9077 | if (current_function_check_memory_usage && GET_CODE (op0) == MEM) | |
3a94c984 | 9078 | { |
14a774a9 RK |
9079 | rtx to; |
9080 | int size; | |
9081 | ||
9082 | to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT)); | |
9083 | size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1; | |
9084 | ||
9085 | /* Check the access right of the pointer. */ | |
ea4da9db | 9086 | in_check_memory_usage = 1; |
14a774a9 | 9087 | if (size > BITS_PER_UNIT) |
ebb1b59a BS |
9088 | emit_library_call (chkr_check_addr_libfunc, |
9089 | LCT_CONST_MAKE_BLOCK, VOIDmode, 3, | |
14a774a9 RK |
9090 | to, ptr_mode, GEN_INT (size / BITS_PER_UNIT), |
9091 | TYPE_MODE (sizetype), | |
3a94c984 | 9092 | GEN_INT (MEMORY_USE_RO), |
14a774a9 | 9093 | TYPE_MODE (integer_type_node)); |
ea4da9db | 9094 | in_check_memory_usage = 0; |
14a774a9 RK |
9095 | } |
9096 | ||
a2b99161 RK |
9097 | /* In cases where an aligned union has an unaligned object |
9098 | as a field, we might be extracting a BLKmode value from | |
9099 | an integer-mode (e.g., SImode) object. Handle this case | |
9100 | by doing the extract into an object as wide as the field | |
9101 | (which we know to be the width of a basic mode), then | |
9102 | storing into memory, and changing the mode to BLKmode. | |
9103 | If we ultimately want the address (EXPAND_CONST_ADDRESS or | |
9104 | EXPAND_INITIALIZER), then we must not copy to a temporary. */ | |
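| /* E.g., a BLKmode field packed inside an SImode union member is | |
| extracted with extract_bit_field into an SImode temporary, | |
| spilled to a stack slot, and the slot's mode set to BLKmode. */ | |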
9105 | if (mode1 == VOIDmode | |
9106 | || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG | |
e1565e65 | 9107 | || (SLOW_UNALIGNED_ACCESS (mode1, alignment) |
19caa751 | 9108 | && (TYPE_ALIGN (type) > alignment |
a2b99161 RK |
9109 | || bitpos % TYPE_ALIGN (type) != 0))) |
9110 | { | |
9111 | enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1); | |
9112 | ||
9113 | if (ext_mode == BLKmode) | |
9114 | { | |
9115 | /* In this case, BITPOS must start at a byte boundary. */ | |
9116 | if (GET_CODE (op0) != MEM | |
9117 | || bitpos % BITS_PER_UNIT != 0) | |
9118 | abort (); | |
9119 | ||
9120 | op0 = change_address (op0, VOIDmode, | |
9121 | plus_constant (XEXP (op0, 0), | |
9122 | bitpos / BITS_PER_UNIT)); | |
9123 | } | |
9124 | else | |
9125 | { | |
1da68f56 RK |
9126 | tree nt = build_qualified_type (type_for_mode (ext_mode, 0), |
9127 | TYPE_QUAL_CONST); | |
9128 | rtx new = assign_temp (nt, 0, 1, 1); | |
a2b99161 RK |
9129 | |
9130 | op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos, | |
9131 | unsignedp, NULL_RTX, ext_mode, | |
9132 | ext_mode, alignment, | |
9133 | int_size_in_bytes (TREE_TYPE (tem))); | |
9134 | ||
9135 | /* If the result is a record type and BITSIZE is narrower than | |
9136 | the mode of OP0, an integral mode, and this is a big endian | |
9137 | machine, we must put the field into the high-order bits. */ | |
9138 | if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN | |
9139 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT | |
9140 | && bitsize < GET_MODE_BITSIZE (GET_MODE (op0))) | |
9141 | op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0, | |
9142 | size_int (GET_MODE_BITSIZE | |
9143 | (GET_MODE (op0)) | |
9144 | - bitsize), | |
9145 | op0, 1); | |
9146 | ||
a2b99161 RK |
9147 | emit_move_insn (new, op0); |
9148 | op0 = copy_rtx (new); | |
9149 | PUT_MODE (op0, BLKmode); | |
9150 | } | |
9151 | } | |
9152 | else | |
9153 | /* Get a reference to just this component. */ | |
9154 | op0 = change_address (op0, mode1, | |
3a94c984 KH |
9155 | plus_constant (XEXP (op0, 0), |
9156 | (bitpos / BITS_PER_UNIT))); | |
14a774a9 RK |
9157 | |
9158 | MEM_ALIAS_SET (op0) = get_alias_set (exp); | |
9159 | ||
9160 | /* Adjust the alignment in case the bit position is not | |
9161 | a multiple of the alignment of the inner object. */ | |
9162 | while (bitpos % alignment != 0) | |
9163 | alignment >>= 1; | |
9164 | ||
9165 | if (GET_CODE (XEXP (op0, 0)) == REG) | |
bdb429a5 | 9166 | mark_reg_pointer (XEXP (op0, 0), alignment); |
14a774a9 RK |
9167 | |
9168 | MEM_IN_STRUCT_P (op0) = 1; | |
9169 | MEM_VOLATILE_P (op0) |= volatilep; | |
9170 | ||
9171 | *palign = alignment; | |
9172 | return op0; | |
9173 | } | |
9174 | ||
9175 | default: | |
9176 | break; | |
9177 | ||
9178 | } | |
9179 | ||
9180 | return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL); | |
9181 | } | |
9182 | \f | |
fed3cef0 RK |
9183 | /* Return the tree node if ARG corresponds to a string constant, or zero | |
9184 | if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset | |
9185 | in bytes within the string that ARG is accessing. The type of the | |
9186 | offset will be `sizetype'. */ | |
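| /* For example, for the tree "hello" + 3 (a PLUS_EXPR of an ADDR_EXPR | |
| of a STRING_CST and the constant 3) we return the STRING_CST and | |
| set *PTR_OFFSET to 3. */ | |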
b93a436e | 9187 | |
28f4ec01 | 9188 | tree |
b93a436e JL |
9189 | string_constant (arg, ptr_offset) |
9190 | tree arg; | |
9191 | tree *ptr_offset; | |
9192 | { | |
9193 | STRIP_NOPS (arg); | |
9194 | ||
9195 | if (TREE_CODE (arg) == ADDR_EXPR | |
9196 | && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) | |
9197 | { | |
fed3cef0 | 9198 | *ptr_offset = size_zero_node; |
b93a436e JL |
9199 | return TREE_OPERAND (arg, 0); |
9200 | } | |
9201 | else if (TREE_CODE (arg) == PLUS_EXPR) | |
9202 | { | |
9203 | tree arg0 = TREE_OPERAND (arg, 0); | |
9204 | tree arg1 = TREE_OPERAND (arg, 1); | |
9205 | ||
9206 | STRIP_NOPS (arg0); | |
9207 | STRIP_NOPS (arg1); | |
9208 | ||
9209 | if (TREE_CODE (arg0) == ADDR_EXPR | |
9210 | && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST) | |
bbf6f052 | 9211 | { |
fed3cef0 | 9212 | *ptr_offset = convert (sizetype, arg1); |
b93a436e | 9213 | return TREE_OPERAND (arg0, 0); |
bbf6f052 | 9214 | } |
b93a436e JL |
9215 | else if (TREE_CODE (arg1) == ADDR_EXPR |
9216 | && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST) | |
bbf6f052 | 9217 | { |
fed3cef0 | 9218 | *ptr_offset = convert (sizetype, arg0); |
b93a436e | 9219 | return TREE_OPERAND (arg1, 0); |
bbf6f052 | 9220 | } |
b93a436e | 9221 | } |
ca695ac9 | 9222 | |
b93a436e JL |
9223 | return 0; |
9224 | } | |
ca695ac9 | 9225 | \f |
b93a436e JL |
9226 | /* Expand code for a post- or pre- increment or decrement |
9227 | and return the RTX for the result. | |
9228 | POST is 1 for postinc/decrements and 0 for preinc/decrements. */ | |
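| /* For "i++" used as a value, POST is 1 and the old value of i is | |
| returned; for "++i", POST is 0 and the incremented value is | |
| returned. If IGNORE is set, both reduce to a plain increment. */ | |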
1499e0a8 | 9229 | |
b93a436e JL |
9230 | static rtx |
9231 | expand_increment (exp, post, ignore) | |
9232 | register tree exp; | |
9233 | int post, ignore; | |
ca695ac9 | 9234 | { |
b93a436e JL |
9235 | register rtx op0, op1; |
9236 | register rtx temp, value; | |
9237 | register tree incremented = TREE_OPERAND (exp, 0); | |
9238 | optab this_optab = add_optab; | |
9239 | int icode; | |
9240 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | |
9241 | int op0_is_copy = 0; | |
9242 | int single_insn = 0; | |
9243 | /* 1 means we can't store into OP0 directly, | |
9244 | because it is a subreg narrower than a word, | |
9245 | and we don't dare clobber the rest of the word. */ | |
9246 | int bad_subreg = 0; | |
1499e0a8 | 9247 | |
b93a436e JL |
9248 | /* Stabilize any component ref that might need to be |
9249 | evaluated more than once below. */ | |
9250 | if (!post | |
9251 | || TREE_CODE (incremented) == BIT_FIELD_REF | |
9252 | || (TREE_CODE (incremented) == COMPONENT_REF | |
9253 | && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF | |
9254 | || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1))))) | |
9255 | incremented = stabilize_reference (incremented); | |
9256 | /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost | |
9257 | ones into save exprs so that they don't accidentally get evaluated | |
9258 | more than once by the code below. */ | |
9259 | if (TREE_CODE (incremented) == PREINCREMENT_EXPR | |
9260 | || TREE_CODE (incremented) == PREDECREMENT_EXPR) | |
9261 | incremented = save_expr (incremented); | |
e9a25f70 | 9262 | |
b93a436e JL |
9263 | /* Compute the operands as RTX. |
9264 | Note whether OP0 is the actual lvalue or a copy of it: | |
9265 | I believe it is a copy iff it is a register or subreg | |
9266 | and insns were generated in computing it. */ | |
e9a25f70 | 9267 | |
b93a436e JL |
9268 | temp = get_last_insn (); |
9269 | op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW); | |
e9a25f70 | 9270 | |
b93a436e JL |
9271 | /* If OP0 is a SUBREG made for a promoted variable, we cannot increment |
9272 | in place but instead must do sign- or zero-extension during assignment, | |
9273 | so we copy it into a new register and let the code below use it as | |
9274 | a copy. | |
e9a25f70 | 9275 | |
b93a436e JL |
9276 | Note that we can safely modify this SUBREG since it is known not to be | |
9277 | shared (it was made by the expand_expr call above). */ | |
9278 | ||
9279 | if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0)) | |
9280 | { | |
9281 | if (post) | |
9282 | SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0)); | |
9283 | else | |
9284 | bad_subreg = 1; | |
9285 | } | |
9286 | else if (GET_CODE (op0) == SUBREG | |
9287 | && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD) | |
9288 | { | |
9289 | /* We cannot increment this SUBREG in place. If we are | |
9290 | post-incrementing, get a copy of the old value. Otherwise, | |
9291 | just mark that we cannot increment in place. */ | |
9292 | if (post) | |
9293 | op0 = copy_to_reg (op0); | |
9294 | else | |
9295 | bad_subreg = 1; | |
e9a25f70 JL |
9296 | } |
9297 | ||
b93a436e JL |
9298 | op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG) |
9299 | && temp != get_last_insn ()); | |
9300 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, | |
9301 | EXPAND_MEMORY_USE_BAD); | |
1499e0a8 | 9302 | |
b93a436e JL |
9303 | /* Decide whether incrementing or decrementing. */ |
9304 | if (TREE_CODE (exp) == POSTDECREMENT_EXPR | |
9305 | || TREE_CODE (exp) == PREDECREMENT_EXPR) | |
9306 | this_optab = sub_optab; | |
9307 | ||
9308 | /* Convert decrement by a constant into a negative increment. */ | |
9309 | if (this_optab == sub_optab | |
9310 | && GET_CODE (op1) == CONST_INT) | |
ca695ac9 | 9311 | { |
3a94c984 | 9312 | op1 = GEN_INT (-INTVAL (op1)); |
b93a436e | 9313 | this_optab = add_optab; |
ca695ac9 | 9314 | } |
1499e0a8 | 9315 | |
91ce572a CC |
9316 | if (TYPE_TRAP_SIGNED (TREE_TYPE (exp))) |
9317 | this_optab = this_optab == add_optab ? addv_optab : subv_optab; | |
9318 | ||
b93a436e JL |
9319 | /* For a preincrement, see if we can do this with a single instruction. */ |
9320 | if (!post) | |
9321 | { | |
9322 | icode = (int) this_optab->handlers[(int) mode].insn_code; | |
9323 | if (icode != (int) CODE_FOR_nothing | |
9324 | /* Make sure that OP0 is valid for operands 0 and 1 | |
9325 | of the insn we want to queue. */ | |
a995e389 RH |
9326 | && (*insn_data[icode].operand[0].predicate) (op0, mode) |
9327 | && (*insn_data[icode].operand[1].predicate) (op0, mode) | |
9328 | && (*insn_data[icode].operand[2].predicate) (op1, mode)) | |
b93a436e JL |
9329 | single_insn = 1; |
9330 | } | |
bbf6f052 | 9331 | |
b93a436e JL |
9332 | /* If OP0 is not the actual lvalue, but rather a copy in a register, |
9333 | then we cannot just increment OP0. We must therefore contrive to | |
9334 | increment the original value. Then, for postincrement, we can return | |
9335 | OP0 since it is a copy of the old value. For preincrement, expand here | |
9336 | unless we can do it with a single insn. | |
bbf6f052 | 9337 | |
b93a436e JL |
9338 | Likewise if storing directly into OP0 would clobber high bits |
9339 | we need to preserve (bad_subreg). */ | |
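| /* In effect we rewrite the increment as an ordinary tree-level | |
| assignment -- "v++" becomes "v = v + 1" -- and let | |
| expand_assignment store into the real lvalue. */ | |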
9340 | if (op0_is_copy || (!post && !single_insn) || bad_subreg) | |
a358cee0 | 9341 | { |
b93a436e JL |
9342 | /* This is the easiest way to increment the value wherever it is. |
9343 | Problems with multiple evaluation of INCREMENTED are prevented | |
9344 | because either (1) it is a component_ref or preincrement, | |
9345 | in which case it was stabilized above, or (2) it is an array_ref | |
9346 | with constant index in an array in a register, which is | |
9347 | safe to reevaluate. */ | |
9348 | tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR | |
9349 | || TREE_CODE (exp) == PREDECREMENT_EXPR) | |
9350 | ? MINUS_EXPR : PLUS_EXPR), | |
9351 | TREE_TYPE (exp), | |
9352 | incremented, | |
9353 | TREE_OPERAND (exp, 1)); | |
a358cee0 | 9354 | |
b93a436e JL |
9355 | while (TREE_CODE (incremented) == NOP_EXPR |
9356 | || TREE_CODE (incremented) == CONVERT_EXPR) | |
9357 | { | |
9358 | newexp = convert (TREE_TYPE (incremented), newexp); | |
9359 | incremented = TREE_OPERAND (incremented, 0); | |
9360 | } | |
bbf6f052 | 9361 | |
b93a436e JL |
9362 | temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0); | |
9363 | return post ? op0 : temp; | |
9364 | } | |
bbf6f052 | 9365 | |
b93a436e JL |
9366 | if (post) |
9367 | { | |
9368 | /* We have a true reference to the value in OP0. | |
9369 | If there is an insn to add or subtract in this mode, queue it. | |
9370 | Queueing the increment insn avoids the register shuffling | |
9371 | that often results if we must increment now and first save | |
9372 | the old value for subsequent use. */ | |
bbf6f052 | 9373 | |
b93a436e JL |
9374 | #if 0 /* Turned off to avoid making extra insn for indexed memref. */ |
9375 | op0 = stabilize (op0); | |
9376 | #endif | |
41dfd40c | 9377 | |
b93a436e JL |
9378 | icode = (int) this_optab->handlers[(int) mode].insn_code; |
9379 | if (icode != (int) CODE_FOR_nothing | |
9380 | /* Make sure that OP0 is valid for operands 0 and 1 | |
9381 | of the insn we want to queue. */ | |
a995e389 RH |
9382 | && (*insn_data[icode].operand[0].predicate) (op0, mode) |
9383 | && (*insn_data[icode].operand[1].predicate) (op0, mode)) | |
b93a436e | 9384 | { |
a995e389 | 9385 | if (! (*insn_data[icode].operand[2].predicate) (op1, mode)) |
b93a436e | 9386 | op1 = force_reg (mode, op1); |
bbf6f052 | 9387 | |
b93a436e JL |
9388 | return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1)); |
9389 | } | |
9390 | if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM) | |
9391 | { | |
9392 | rtx addr = (general_operand (XEXP (op0, 0), mode) | |
9393 | ? force_reg (Pmode, XEXP (op0, 0)) | |
9394 | : copy_to_reg (XEXP (op0, 0))); | |
9395 | rtx temp, result; | |
ca695ac9 | 9396 | |
b93a436e JL |
9397 | op0 = change_address (op0, VOIDmode, addr); |
9398 | temp = force_reg (GET_MODE (op0), op0); | |
a995e389 | 9399 | if (! (*insn_data[icode].operand[2].predicate) (op1, mode)) |
b93a436e | 9400 | op1 = force_reg (mode, op1); |
ca695ac9 | 9401 | |
b93a436e JL |
9402 | /* The increment queue is LIFO; thus we have to `queue' | |
9403 | the instructions in reverse order. */ | |
9404 | enqueue_insn (op0, gen_move_insn (op0, temp)); | |
9405 | result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1)); | |
9406 | return result; | |
bbf6f052 RK |
9407 | } |
9408 | } | |
ca695ac9 | 9409 | |
b93a436e JL |
9410 | /* Preincrement, or we can't increment with one simple insn. */ |
9411 | if (post) | |
9412 | /* Save a copy of the value before inc or dec, to return it later. */ | |
9413 | temp = value = copy_to_reg (op0); | |
9414 | else | |
9415 | /* Arrange to return the incremented value. */ | |
9416 | /* Copy the rtx because expand_binop will protect from the queue, | |
9417 | and the results of that would be invalid for us to return | |
9418 | if our caller does emit_queue before using our result. */ | |
9419 | temp = copy_rtx (value = op0); | |
bbf6f052 | 9420 | |
b93a436e JL |
9421 | /* Increment however we can. */ |
9422 | op1 = expand_binop (mode, this_optab, value, op1, | |
3a94c984 | 9423 | current_function_check_memory_usage ? NULL_RTX : op0, |
b93a436e JL |
9424 | TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN); |
9425 | /* Make sure the value is stored into OP0. */ | |
9426 | if (op1 != op0) | |
9427 | emit_move_insn (op0, op1); | |
5718612f | 9428 | |
b93a436e JL |
9429 | return temp; |
9430 | } | |
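/* Editor's illustration (plain C, not part of this file) of the contract
   the increment expansion above implements: postincrement must yield the
   old value and preincrement the new one, while the lvalue itself is
   updated exactly once however the expansion is done.  */

static int
demo_post_inc (p)
     int *p;
{
  int old = *p;			/* save a copy, as copy_to_reg does */
  *p = *p + 1;			/* store the incremented value back */
  return old;			/* postincrement yields the copy */
}

static int
demo_pre_inc (p)
     int *p;
{
  *p = *p + 1;			/* increment in place */
  return *p;			/* preincrement yields the new value */
}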
9431 | \f | |
b93a436e JL |
9432 | /* At the start of a function, record that we have no previously-pushed |
9433 | arguments waiting to be popped. */ | |
bbf6f052 | 9434 | |
b93a436e JL |
9435 | void |
9436 | init_pending_stack_adjust () | |
9437 | { | |
9438 | pending_stack_adjust = 0; | |
9439 | } | |
bbf6f052 | 9440 | |
b93a436e | 9441 | /* When exiting from function, if safe, clear out any pending stack adjust |
060fbabf JL |
9442 | so the adjustment won't get done. |
9443 | ||
9444 | Note, if the current function calls alloca, then it must have a | |
9445 | frame pointer regardless of the value of flag_omit_frame_pointer. */ | |
bbf6f052 | 9446 | |
b93a436e JL |
9447 | void |
9448 | clear_pending_stack_adjust () | |
9449 | { | |
9450 | #ifdef EXIT_IGNORE_STACK | |
9451 | if (optimize > 0 | |
060fbabf JL |
9452 | && (! flag_omit_frame_pointer || current_function_calls_alloca) |
9453 | && EXIT_IGNORE_STACK | |
b93a436e JL |
9454 | && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline) |
9455 | && ! flag_inline_functions) | |
1503a7ec JH |
9456 | { |
9457 | stack_pointer_delta -= pending_stack_adjust; | |
9458 | pending_stack_adjust = 0; | |
9459 | } | |
b93a436e JL |
9460 | #endif |
9461 | } | |
bbf6f052 | 9462 | |
b93a436e JL |
9463 | /* Pop any previously-pushed arguments that have not been popped yet. */ |
9464 | ||
9465 | void | |
9466 | do_pending_stack_adjust () | |
9467 | { | |
9468 | if (inhibit_defer_pop == 0) | |
ca695ac9 | 9469 | { |
b93a436e JL |
9470 | if (pending_stack_adjust != 0) |
9471 | adjust_stack (GEN_INT (pending_stack_adjust)); | |
9472 | pending_stack_adjust = 0; | |
bbf6f052 | 9473 | } |
bbf6f052 RK |
9474 | } |
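/* Editor's sketch (hypothetical names, not from this file) of the
   bookkeeping the three routines above implement: callers credit the
   bytes of arguments left on the stack to a counter instead of emitting
   an adjustment after every call, and one combined adjustment is
   emitted when the counter is finally flushed.  */

static int demo_pending_bytes;	/* plays the role of pending_stack_adjust */

static void
demo_note_pop (nbytes)
     int nbytes;
{
  demo_pending_bytes += nbytes;	/* defer the pop */
}

static void
demo_flush_pending ()
{
  if (demo_pending_bytes != 0)
    {
      /* a single "add sp, demo_pending_bytes" would be emitted here */
      demo_pending_bytes = 0;
    }
}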
9475 | \f | |
b93a436e | 9476 | /* Expand conditional expressions. */ |
bbf6f052 | 9477 | |
b93a436e JL |
9478 | /* Generate code to evaluate EXP and jump to LABEL if the value is zero. |
9479 | LABEL is an rtx of code CODE_LABEL, in this function and all the | |
9480 | functions here. */ | |
bbf6f052 | 9481 | |
b93a436e JL |
9482 | void |
9483 | jumpifnot (exp, label) | |
ca695ac9 | 9484 | tree exp; |
b93a436e | 9485 | rtx label; |
bbf6f052 | 9486 | { |
b93a436e JL |
9487 | do_jump (exp, label, NULL_RTX); |
9488 | } | |
bbf6f052 | 9489 | |
b93a436e | 9490 | /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */ |
ca695ac9 | 9491 | |
b93a436e JL |
9492 | void |
9493 | jumpif (exp, label) | |
9494 | tree exp; | |
9495 | rtx label; | |
9496 | { | |
9497 | do_jump (exp, NULL_RTX, label); | |
9498 | } | |
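/* Editor's note: a typical (hypothetical) use of these two wrappers when
   lowering `if (COND) BODY;':

	rtx else_label = gen_label_rtx ();
	jumpifnot (cond, else_label);
	... expand BODY ...
	emit_label (else_label);

   jumpif is used the same way when the branch sense is inverted, as for
   the back edge of `do ... while (COND)'.  */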
ca695ac9 | 9499 | |
b93a436e JL |
9500 | /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if |
9501 | the result is zero, or IF_TRUE_LABEL if the result is one. | |
9502 | Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero, | |
9503 | meaning fall through in that case. | |
ca695ac9 | 9504 | |
b93a436e JL |
9505 | do_jump always does any pending stack adjust except when it does not |
9506 | actually perform a jump. An example where there is no jump | |
9507 | is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. | |
ca695ac9 | 9508 | |
b93a436e JL |
9509 | This function is responsible for optimizing cases such as |
9510 | &&, || and comparison operators in EXP. */ | |
5718612f | 9511 | |
b93a436e JL |
9512 | void |
9513 | do_jump (exp, if_false_label, if_true_label) | |
9514 | tree exp; | |
9515 | rtx if_false_label, if_true_label; | |
9516 | { | |
9517 | register enum tree_code code = TREE_CODE (exp); | |
9518 | /* Some cases need to create a label to jump to | |
9519 | in order to properly fall through. | |
9520 | These cases set DROP_THROUGH_LABEL nonzero. */ | |
9521 | rtx drop_through_label = 0; | |
9522 | rtx temp; | |
b93a436e JL |
9523 | int i; |
9524 | tree type; | |
9525 | enum machine_mode mode; | |
ca695ac9 | 9526 | |
dbecbbe4 JL |
9527 | #ifdef MAX_INTEGER_COMPUTATION_MODE |
9528 | check_max_integer_computation_mode (exp); | |
9529 | #endif | |
9530 | ||
b93a436e | 9531 | emit_queue (); |
ca695ac9 | 9532 | |
b93a436e | 9533 | switch (code) |
ca695ac9 | 9534 | { |
b93a436e | 9535 | case ERROR_MARK: |
ca695ac9 | 9536 | break; |
bbf6f052 | 9537 | |
b93a436e JL |
9538 | case INTEGER_CST: |
9539 | temp = integer_zerop (exp) ? if_false_label : if_true_label; | |
9540 | if (temp) | |
9541 | emit_jump (temp); | |
9542 | break; | |
bbf6f052 | 9543 | |
b93a436e JL |
9544 | #if 0 |
9545 | /* This is not true with #pragma weak */ | |
9546 | case ADDR_EXPR: | |
9547 | /* The address of something can never be zero. */ | |
9548 | if (if_true_label) | |
9549 | emit_jump (if_true_label); | |
9550 | break; | |
9551 | #endif | |
bbf6f052 | 9552 | |
b93a436e JL |
9553 | case NOP_EXPR: |
9554 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF | |
9555 | || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF | |
b4e3fabb RK |
9556 | || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF |
9557 | || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF) | |
b93a436e JL |
9558 | goto normal; |
9559 | case CONVERT_EXPR: | |
9560 | /* If we are narrowing the operand, we have to do the compare in the | |
9561 | narrower mode. */ | |
9562 | if ((TYPE_PRECISION (TREE_TYPE (exp)) | |
9563 | < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))) | |
9564 | goto normal; | |
9565 | case NON_LVALUE_EXPR: | |
9566 | case REFERENCE_EXPR: | |
9567 | case ABS_EXPR: | |
9568 | case NEGATE_EXPR: | |
9569 | case LROTATE_EXPR: | |
9570 | case RROTATE_EXPR: | |
9571 | /* These cannot change zero->non-zero or vice versa. */ | |
9572 | do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); | |
9573 | break; | |
bbf6f052 | 9574 | |
14a774a9 RK |
9575 | case WITH_RECORD_EXPR: |
9576 | /* Put the object on the placeholder list, recurse through our first | |
9577 | operand, and pop the list. */ | |
9578 | placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE, | |
9579 | placeholder_list); | |
9580 | do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); | |
9581 | placeholder_list = TREE_CHAIN (placeholder_list); | |
9582 | break; | |
9583 | ||
b93a436e JL |
9584 | #if 0 |
9585 | /* This is never less insns than evaluating the PLUS_EXPR followed by | |
9586 | a test and can be longer if the test is eliminated. */ | |
9587 | case PLUS_EXPR: | |
9588 | /* Reduce to minus. */ | |
9589 | exp = build (MINUS_EXPR, TREE_TYPE (exp), | |
9590 | TREE_OPERAND (exp, 0), | |
9591 | fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)), | |
9592 | TREE_OPERAND (exp, 1)))); | |
9593 | /* Process as MINUS. */ | |
ca695ac9 | 9594 | #endif |
bbf6f052 | 9595 | |
b93a436e JL |
9596 | case MINUS_EXPR: |
9597 | /* Non-zero iff operands of minus differ. */ | |
b30f05db BS |
9598 | do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp), |
9599 | TREE_OPERAND (exp, 0), | |
9600 | TREE_OPERAND (exp, 1)), | |
9601 | NE, NE, if_false_label, if_true_label); | |
b93a436e | 9602 | break; |
bbf6f052 | 9603 | |
b93a436e JL |
9604 | case BIT_AND_EXPR: |
9605 | /* If we are AND'ing with a small constant, do this comparison in the | |
9606 | smallest type that fits. If the machine doesn't have comparisons | |
9607 | that small, it will be converted back to the wider comparison. | |
9608 | This helps if we are testing the sign bit of a narrower object. | |
9609 | combine can't do this for us because it can't know whether a | |
9610 | ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */ | |
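      /* Editor's example of the narrowing described above: for
	 `if (x & 0x80)' with a 32-bit int X, i is 7, so MODE is QImode
	 and the whole test is recast as
	 `if ((unsigned char) (x & 0x80))' -- a byte-wide compare that
	 doubles as a sign-bit test of the low byte.  */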
bbf6f052 | 9611 | |
b93a436e JL |
9612 | if (! SLOW_BYTE_ACCESS |
9613 | && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST | |
9614 | && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT | |
05bccae2 | 9615 | && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0 |
b93a436e JL |
9616 | && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode |
9617 | && (type = type_for_mode (mode, 1)) != 0 | |
9618 | && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)) | |
9619 | && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code | |
9620 | != CODE_FOR_nothing)) | |
9621 | { | |
9622 | do_jump (convert (type, exp), if_false_label, if_true_label); | |
9623 | break; | |
9624 | } | |
9625 | goto normal; | |
bbf6f052 | 9626 | |
b93a436e JL |
9627 | case TRUTH_NOT_EXPR: |
9628 | do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); | |
9629 | break; | |
bbf6f052 | 9630 | |
b93a436e JL |
9631 | case TRUTH_ANDIF_EXPR: |
9632 | if (if_false_label == 0) | |
9633 | if_false_label = drop_through_label = gen_label_rtx (); | |
9634 | do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX); | |
9635 | start_cleanup_deferral (); | |
9636 | do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); | |
9637 | end_cleanup_deferral (); | |
9638 | break; | |
bbf6f052 | 9639 | |
b93a436e JL |
9640 | case TRUTH_ORIF_EXPR: |
9641 | if (if_true_label == 0) | |
9642 | if_true_label = drop_through_label = gen_label_rtx (); | |
9643 | do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label); | |
9644 | start_cleanup_deferral (); | |
9645 | do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); | |
9646 | end_cleanup_deferral (); | |
9647 | break; | |
bbf6f052 | 9648 | |
b93a436e JL |
9649 | case COMPOUND_EXPR: |
9650 | push_temp_slots (); | |
9651 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); | |
9652 | preserve_temp_slots (NULL_RTX); | |
9653 | free_temp_slots (); | |
9654 | pop_temp_slots (); | |
9655 | emit_queue (); | |
9656 | do_pending_stack_adjust (); | |
9657 | do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); | |
9658 | break; | |
bbf6f052 | 9659 | |
b93a436e JL |
9660 | case COMPONENT_REF: |
9661 | case BIT_FIELD_REF: | |
9662 | case ARRAY_REF: | |
b4e3fabb | 9663 | case ARRAY_RANGE_REF: |
b93a436e | 9664 | { |
770ae6cc RK |
9665 | HOST_WIDE_INT bitsize, bitpos; |
9666 | int unsignedp; | |
b93a436e JL |
9667 | enum machine_mode mode; |
9668 | tree type; | |
9669 | tree offset; | |
9670 | int volatilep = 0; | |
729a2125 | 9671 | unsigned int alignment; |
bbf6f052 | 9672 | |
b93a436e JL |
9673 | /* Get description of this reference. We don't actually care |
9674 | about the underlying object here. */ | |
19caa751 RK |
9675 | get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode, |
9676 | &unsignedp, &volatilep, &alignment); | |
bbf6f052 | 9677 | |
b93a436e JL |
9678 | type = type_for_size (bitsize, unsignedp); |
9679 | if (! SLOW_BYTE_ACCESS | |
9680 | && type != 0 && bitsize >= 0 | |
9681 | && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)) | |
9682 | && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code | |
9683 | != CODE_FOR_nothing)) | |
9684 | { | |
9685 | do_jump (convert (type, exp), if_false_label, if_true_label); | |
9686 | break; | |
9687 | } | |
9688 | goto normal; | |
9689 | } | |
bbf6f052 | 9690 | |
b93a436e JL |
9691 | case COND_EXPR: |
9692 | /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */ | |
9693 | if (integer_onep (TREE_OPERAND (exp, 1)) | |
9694 | && integer_zerop (TREE_OPERAND (exp, 2))) | |
9695 | do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); | |
bbf6f052 | 9696 | |
b93a436e JL |
9697 | else if (integer_zerop (TREE_OPERAND (exp, 1)) |
9698 | && integer_onep (TREE_OPERAND (exp, 2))) | |
9699 | do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); | |
bbf6f052 | 9700 | |
b93a436e JL |
9701 | else |
9702 | { | |
9703 | register rtx label1 = gen_label_rtx (); | |
9704 | drop_through_label = gen_label_rtx (); | |
bbf6f052 | 9705 | |
b93a436e | 9706 | do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX); |
bbf6f052 | 9707 | |
b93a436e JL |
9708 | start_cleanup_deferral (); |
9709 | /* Now the THEN-expression. */ | |
9710 | do_jump (TREE_OPERAND (exp, 1), | |
9711 | if_false_label ? if_false_label : drop_through_label, | |
9712 | if_true_label ? if_true_label : drop_through_label); | |
9713 | /* In case the do_jump just above never jumps. */ | |
9714 | do_pending_stack_adjust (); | |
9715 | emit_label (label1); | |
bbf6f052 | 9716 | |
b93a436e JL |
9717 | /* Now the ELSE-expression. */ |
9718 | do_jump (TREE_OPERAND (exp, 2), | |
9719 | if_false_label ? if_false_label : drop_through_label, | |
9720 | if_true_label ? if_true_label : drop_through_label); | |
9721 | end_cleanup_deferral (); | |
9722 | } | |
9723 | break; | |
bbf6f052 | 9724 | |
b93a436e JL |
9725 | case EQ_EXPR: |
9726 | { | |
9727 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
bbf6f052 | 9728 | |
9ec36da5 JL |
9729 | if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT |
9730 | || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT) | |
8d62b411 AS |
9731 | { |
9732 | tree exp0 = save_expr (TREE_OPERAND (exp, 0)); | |
9733 | tree exp1 = save_expr (TREE_OPERAND (exp, 1)); | |
9734 | do_jump | |
9735 | (fold | |
9736 | (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp), | |
9737 | fold (build (EQ_EXPR, TREE_TYPE (exp), | |
9738 | fold (build1 (REALPART_EXPR, | |
9739 | TREE_TYPE (inner_type), | |
9740 | exp0)), | |
9741 | fold (build1 (REALPART_EXPR, | |
9742 | TREE_TYPE (inner_type), | |
9743 | exp1)))), | |
9744 | fold (build (EQ_EXPR, TREE_TYPE (exp), | |
9745 | fold (build1 (IMAGPART_EXPR, | |
9746 | TREE_TYPE (inner_type), | |
9747 | exp0)), | |
9748 | fold (build1 (IMAGPART_EXPR, | |
9749 | TREE_TYPE (inner_type), | |
9750 | exp1)))))), | |
9751 | if_false_label, if_true_label); | |
9752 | } | |
9ec36da5 JL |
9753 | |
9754 | else if (integer_zerop (TREE_OPERAND (exp, 1))) | |
9755 | do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); | |
9756 | ||
b93a436e | 9757 | else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT |
1eb8759b | 9758 | && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump)) |
b93a436e JL |
9759 | do_jump_by_parts_equality (exp, if_false_label, if_true_label); |
9760 | else | |
b30f05db | 9761 | do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label); |
b93a436e JL |
9762 | break; |
9763 | } | |
bbf6f052 | 9764 | |
b93a436e JL |
9765 | case NE_EXPR: |
9766 | { | |
9767 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
bbf6f052 | 9768 | |
9ec36da5 JL |
9769 | if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT |
9770 | || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT) | |
8d62b411 AS |
9771 | { |
9772 | tree exp0 = save_expr (TREE_OPERAND (exp, 0)); | |
9773 | tree exp1 = save_expr (TREE_OPERAND (exp, 1)); | |
9774 | do_jump | |
9775 | (fold | |
9776 | (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), | |
9777 | fold (build (NE_EXPR, TREE_TYPE (exp), | |
9778 | fold (build1 (REALPART_EXPR, | |
9779 | TREE_TYPE (inner_type), | |
9780 | exp0)), | |
9781 | fold (build1 (REALPART_EXPR, | |
9782 | TREE_TYPE (inner_type), | |
9783 | exp1)))), | |
9784 | fold (build (NE_EXPR, TREE_TYPE (exp), | |
9785 | fold (build1 (IMAGPART_EXPR, | |
9786 | TREE_TYPE (inner_type), | |
9787 | exp0)), | |
9788 | fold (build1 (IMAGPART_EXPR, | |
9789 | TREE_TYPE (inner_type), | |
9790 | exp1)))))), | |
9791 | if_false_label, if_true_label); | |
9792 | } | |
9ec36da5 JL |
9793 | |
9794 | else if (integer_zerop (TREE_OPERAND (exp, 1))) | |
9795 | do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); | |
9796 | ||
b93a436e | 9797 | else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT |
1eb8759b | 9798 | && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump)) |
b93a436e JL |
9799 | do_jump_by_parts_equality (exp, if_true_label, if_false_label); |
9800 | else | |
b30f05db | 9801 | do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label); |
b93a436e JL |
9802 | break; |
9803 | } | |
bbf6f052 | 9804 | |
b93a436e | 9805 | case LT_EXPR: |
1c0290ea BS |
9806 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9807 | if (GET_MODE_CLASS (mode) == MODE_INT | |
1eb8759b | 9808 | && ! can_compare_p (LT, mode, ccp_jump)) |
b93a436e JL |
9809 | do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label); |
9810 | else | |
b30f05db | 9811 | do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label); |
b93a436e | 9812 | break; |
bbf6f052 | 9813 | |
b93a436e | 9814 | case LE_EXPR: |
1c0290ea BS |
9815 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9816 | if (GET_MODE_CLASS (mode) == MODE_INT | |
1eb8759b | 9817 | && ! can_compare_p (LE, mode, ccp_jump)) |
b93a436e JL |
9818 | do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label); |
9819 | else | |
b30f05db | 9820 | do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label); |
b93a436e | 9821 | break; |
bbf6f052 | 9822 | |
b93a436e | 9823 | case GT_EXPR: |
1c0290ea BS |
9824 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9825 | if (GET_MODE_CLASS (mode) == MODE_INT | |
1eb8759b | 9826 | && ! can_compare_p (GT, mode, ccp_jump)) |
b93a436e JL |
9827 | do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label); |
9828 | else | |
b30f05db | 9829 | do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label); |
b93a436e | 9830 | break; |
bbf6f052 | 9831 | |
b93a436e | 9832 | case GE_EXPR: |
1c0290ea BS |
9833 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9834 | if (GET_MODE_CLASS (mode) == MODE_INT | |
1eb8759b | 9835 | && ! can_compare_p (GE, mode, ccp_jump)) |
b93a436e JL |
9836 | do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label); |
9837 | else | |
b30f05db | 9838 | do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label); |
b93a436e | 9839 | break; |
bbf6f052 | 9840 | |
1eb8759b RH |
9841 | case UNORDERED_EXPR: |
9842 | case ORDERED_EXPR: | |
9843 | { | |
9844 | enum rtx_code cmp, rcmp; | |
9845 | int do_rev; | |
9846 | ||
9847 | if (code == UNORDERED_EXPR) | |
9848 | cmp = UNORDERED, rcmp = ORDERED; | |
9849 | else | |
9850 | cmp = ORDERED, rcmp = UNORDERED; | |
3a94c984 | 9851 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
1eb8759b RH |
9852 | |
9853 | do_rev = 0; | |
9854 | if (! can_compare_p (cmp, mode, ccp_jump) | |
9855 | && (can_compare_p (rcmp, mode, ccp_jump) | |
9856 | /* If the target doesn't provide either UNORDERED or ORDERED | |
9857 | comparisons, canonicalize on UNORDERED for the library. */ | |
9858 | || rcmp == UNORDERED)) | |
9859 | do_rev = 1; | |
9860 | ||
9861 | if (! do_rev) | |
9862 | do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label); | |
9863 | else | |
9864 | do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label); | |
9865 | } | |
9866 | break; | |
9867 | ||
9868 | { | |
9869 | enum rtx_code rcode1; | |
9870 | enum tree_code tcode2; | |
9871 | ||
9872 | case UNLT_EXPR: | |
9873 | rcode1 = UNLT; | |
9874 | tcode2 = LT_EXPR; | |
9875 | goto unordered_bcc; | |
9876 | case UNLE_EXPR: | |
9877 | rcode1 = UNLE; | |
9878 | tcode2 = LE_EXPR; | |
9879 | goto unordered_bcc; | |
9880 | case UNGT_EXPR: | |
9881 | rcode1 = UNGT; | |
9882 | tcode2 = GT_EXPR; | |
9883 | goto unordered_bcc; | |
9884 | case UNGE_EXPR: | |
9885 | rcode1 = UNGE; | |
9886 | tcode2 = GE_EXPR; | |
9887 | goto unordered_bcc; | |
9888 | case UNEQ_EXPR: | |
9889 | rcode1 = UNEQ; | |
9890 | tcode2 = EQ_EXPR; | |
9891 | goto unordered_bcc; | |
7913f3d0 | 9892 | |
1eb8759b RH |
9893 | unordered_bcc: |
9894 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
9895 | if (can_compare_p (rcode1, mode, ccp_jump)) | |
9896 | do_compare_and_jump (exp, rcode1, rcode1, if_false_label, | |
9897 | if_true_label); | |
9898 | else | |
9899 | { | |
9900 | tree op0 = save_expr (TREE_OPERAND (exp, 0)); | |
9901 | tree op1 = save_expr (TREE_OPERAND (exp, 1)); | |
9902 | tree cmp0, cmp1; | |
9903 | ||
3a94c984 | 9904 | /* If the target doesn't support combined unordered |
1eb8759b RH |
9905 | compares, decompose into UNORDERED + comparison. */ |
9906 | cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1)); | |
9907 | cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1)); | |
9908 | exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1); | |
9909 | do_jump (exp, if_false_label, if_true_label); | |
9910 | } | |
9911 | } | |
9912 | break; | |
9913 | ||
b93a436e JL |
9914 | default: |
9915 | normal: | |
9916 | temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); | |
9917 | #if 0 | |
9918 | /* This is not needed any more and causes poor code since it causes | |
9919 | comparisons and tests from non-SI objects to have different code | |
9920 | sequences. */ | |
9921 | /* Copy to register to avoid generating bad insns by cse | |
9922 | from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */ | |
9923 | if (!cse_not_expected && GET_CODE (temp) == MEM) | |
9924 | temp = copy_to_reg (temp); | |
ca695ac9 | 9925 | #endif |
b93a436e | 9926 | do_pending_stack_adjust (); |
b30f05db BS |
9927 | /* Do any postincrements in the expression that was tested. */ |
9928 | emit_queue (); | |
9929 | ||
998a298e GK |
9930 | if (GET_CODE (temp) == CONST_INT |
9931 | || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode) | |
9932 | || GET_CODE (temp) == LABEL_REF) | |
b30f05db BS |
9933 | { |
9934 | rtx target = temp == const0_rtx ? if_false_label : if_true_label; | |
9935 | if (target) | |
9936 | emit_jump (target); | |
9937 | } | |
b93a436e | 9938 | else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT |
1eb8759b | 9939 | && ! can_compare_p (NE, GET_MODE (temp), ccp_jump)) |
b93a436e JL |
9940 | /* Note swapping the labels gives us not-equal. */ |
9941 | do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label); | |
9942 | else if (GET_MODE (temp) != VOIDmode) | |
b30f05db BS |
9943 | do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)), |
9944 | NE, TREE_UNSIGNED (TREE_TYPE (exp)), | |
9945 | GET_MODE (temp), NULL_RTX, 0, | |
9946 | if_false_label, if_true_label); | |
b93a436e JL |
9947 | else |
9948 | abort (); | |
9949 | } | |
bbf6f052 | 9950 | |
b93a436e JL |
9951 | if (drop_through_label) |
9952 | { | |
9953 | /* If do_jump produces code that might be jumped around, | |
9954 | do any stack adjusts from that code, before the place | |
9955 | where control merges in. */ | |
9956 | do_pending_stack_adjust (); | |
9957 | emit_label (drop_through_label); | |
9958 | } | |
bbf6f052 | 9959 | } |
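/* Editor's hand-written C analogue (not from this file) of the control
   flow do_jump produces for TRUTH_ANDIF_EXPR, i.e. `if (a && b)': each
   operand gets its own conditional branch and no 0/1 boolean value is
   ever materialized.  */

static int
demo_andif_lowering (a, b)
     int a, b;
{
  if (a == 0)
    goto if_false;	/* first do_jump: jump if operand 0 is zero */
  if (b == 0)
    goto if_false;	/* second do_jump on operand 1 */
  return 1;		/* the "true" arm */
 if_false:
  return 0;
}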
b93a436e JL |
9960 | \f |
9961 | /* Given a comparison expression EXP for values too wide to be compared | |
9962 | with one insn, test the comparison and jump to the appropriate label. | |
9963 | The code of EXP is ignored; we always test GT if SWAP is 0, | |
9964 | and LT if SWAP is 1. */ | |
bbf6f052 | 9965 | |
b93a436e JL |
9966 | static void |
9967 | do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label) | |
9968 | tree exp; | |
9969 | int swap; | |
9970 | rtx if_false_label, if_true_label; | |
9971 | { | |
9972 | rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0); | |
9973 | rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0); | |
9974 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
b93a436e | 9975 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))); |
bbf6f052 | 9976 | |
b30f05db | 9977 | do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label); |
f81497d9 RS |
9978 | } |
9979 | ||
b93a436e JL |
9980 | /* Compare OP0 with OP1, word at a time, in mode MODE. |
9981 | UNSIGNEDP says to do unsigned comparison. | |
9982 | Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */ | |
f81497d9 | 9983 | |
b93a436e JL |
9984 | void |
9985 | do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label) | |
9986 | enum machine_mode mode; | |
9987 | int unsignedp; | |
9988 | rtx op0, op1; | |
9989 | rtx if_false_label, if_true_label; | |
f81497d9 | 9990 | { |
b93a436e JL |
9991 | int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); |
9992 | rtx drop_through_label = 0; | |
9993 | int i; | |
f81497d9 | 9994 | |
b93a436e JL |
9995 | if (! if_true_label || ! if_false_label) |
9996 | drop_through_label = gen_label_rtx (); | |
9997 | if (! if_true_label) | |
9998 | if_true_label = drop_through_label; | |
9999 | if (! if_false_label) | |
10000 | if_false_label = drop_through_label; | |
f81497d9 | 10001 | |
b93a436e JL |
10002 | /* Compare a word at a time, high order first. */ |
10003 | for (i = 0; i < nwords; i++) | |
10004 | { | |
b93a436e | 10005 | rtx op0_word, op1_word; |
bbf6f052 | 10006 | |
b93a436e JL |
10007 | if (WORDS_BIG_ENDIAN) |
10008 | { | |
10009 | op0_word = operand_subword_force (op0, i, mode); | |
10010 | op1_word = operand_subword_force (op1, i, mode); | |
10011 | } | |
10012 | else | |
10013 | { | |
10014 | op0_word = operand_subword_force (op0, nwords - 1 - i, mode); | |
10015 | op1_word = operand_subword_force (op1, nwords - 1 - i, mode); | |
10016 | } | |
bbf6f052 | 10017 | |
b93a436e | 10018 | /* All but high-order word must be compared as unsigned. */ |
b30f05db BS |
10019 | do_compare_rtx_and_jump (op0_word, op1_word, GT, |
10020 | (unsignedp || i > 0), word_mode, NULL_RTX, 0, | |
10021 | NULL_RTX, if_true_label); | |
bbf6f052 | 10022 | |
b93a436e | 10023 | /* Consider lower words only if these are equal. */ |
b30f05db BS |
10024 | do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode, |
10025 | NULL_RTX, 0, NULL_RTX, if_false_label); | |
b93a436e | 10026 | } |
bbf6f052 | 10027 | |
b93a436e JL |
10028 | if (if_false_label) |
10029 | emit_jump (if_false_label); | |
10030 | if (drop_through_label) | |
10031 | emit_label (drop_through_label); | |
bbf6f052 RK |
10032 | } |
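/* Editor's standalone C model (not GCC code) of the word-at-a-time
   greater-than comparison emitted above, for a double-word value split
   into HI and LO words: high-order words are compared first, all but
   the highest word compare unsigned, and lower words are consulted
   only when the higher ones are equal.  */

static int
demo_gt_by_parts (hi0, lo0, hi1, lo1, unsignedp)
     unsigned int hi0, lo0, hi1, lo1;
     int unsignedp;
{
  if (unsignedp ? hi0 > hi1 : (int) hi0 > (int) hi1)
    return 1;			/* jump to if_true_label */
  if (hi0 != hi1)
    return 0;			/* jump to if_false_label */
  return lo0 > lo1;		/* low words are always unsigned */
}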
10033 | ||
b93a436e JL |
10034 | /* Given an EQ_EXPR expression EXP for values too wide to be compared |
10035 | with one insn, test the comparison and jump to the appropriate label. */ | |
bbf6f052 | 10036 | |
b93a436e JL |
10037 | static void |
10038 | do_jump_by_parts_equality (exp, if_false_label, if_true_label) | |
10039 | tree exp; | |
10040 | rtx if_false_label, if_true_label; | |
bbf6f052 | 10041 | { |
b93a436e JL |
10042 | rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
10043 | rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); | |
10044 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
10045 | int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); | |
10046 | int i; | |
10047 | rtx drop_through_label = 0; | |
bbf6f052 | 10048 | |
b93a436e JL |
10049 | if (! if_false_label) |
10050 | drop_through_label = if_false_label = gen_label_rtx (); | |
bbf6f052 | 10051 | |
b93a436e | 10052 | for (i = 0; i < nwords; i++) |
b30f05db BS |
10053 | do_compare_rtx_and_jump (operand_subword_force (op0, i, mode), |
10054 | operand_subword_force (op1, i, mode), | |
10055 | EQ, TREE_UNSIGNED (TREE_TYPE (exp)), | |
10056 | word_mode, NULL_RTX, 0, if_false_label, | |
10057 | NULL_RTX); | |
bbf6f052 | 10058 | |
b93a436e JL |
10059 | if (if_true_label) |
10060 | emit_jump (if_true_label); | |
10061 | if (drop_through_label) | |
10062 | emit_label (drop_through_label); | |
bbf6f052 | 10063 | } |
b93a436e JL |
10064 | \f |
10065 | /* Jump according to whether OP0 is 0. | |
10066 | We assume that OP0 has an integer mode that is too wide | |
10067 | for the available compare insns. */ | |
bbf6f052 | 10068 | |
f5963e61 | 10069 | void |
b93a436e JL |
10070 | do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label) |
10071 | rtx op0; | |
10072 | rtx if_false_label, if_true_label; | |
ca695ac9 | 10073 | { |
b93a436e JL |
10074 | int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD; |
10075 | rtx part; | |
10076 | int i; | |
10077 | rtx drop_through_label = 0; | |
bbf6f052 | 10078 | |
b93a436e JL |
10079 | /* The fastest way of doing this comparison on almost any machine is to |
10080 | "or" all the words and compare the result. If all have to be loaded | |
10081 | from memory and this is a very wide item, it's possible this may | |
10082 | be slower, but that's highly unlikely. */ | |
bbf6f052 | 10083 | |
b93a436e JL |
10084 | part = gen_reg_rtx (word_mode); |
10085 | emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0))); | |
10086 | for (i = 1; i < nwords && part != 0; i++) | |
10087 | part = expand_binop (word_mode, ior_optab, part, | |
10088 | operand_subword_force (op0, i, GET_MODE (op0)), | |
10089 | part, 1, OPTAB_WIDEN); | |
bbf6f052 | 10090 | |
b93a436e JL |
10091 | if (part != 0) |
10092 | { | |
b30f05db BS |
10093 | do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode, |
10094 | NULL_RTX, 0, if_false_label, if_true_label); | |
bbf6f052 | 10095 | |
b93a436e JL |
10096 | return; |
10097 | } | |
bbf6f052 | 10098 | |
b93a436e JL |
10099 | /* If we couldn't do the "or" simply, do this with a series of compares. */ |
10100 | if (! if_false_label) | |
10101 | drop_through_label = if_false_label = gen_label_rtx (); | |
bbf6f052 | 10102 | |
b93a436e | 10103 | for (i = 0; i < nwords; i++) |
b30f05db BS |
10104 | do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)), |
10105 | const0_rtx, EQ, 1, word_mode, NULL_RTX, 0, | |
10106 | if_false_label, NULL_RTX); | |
bbf6f052 | 10107 | |
b93a436e JL |
10108 | if (if_true_label) |
10109 | emit_jump (if_true_label); | |
0f41302f | 10110 | |
b93a436e JL |
10111 | if (drop_through_label) |
10112 | emit_label (drop_through_label); | |
bbf6f052 | 10113 | } |
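/* Editor's C model of the "or all the words" zero test above, for a
   four-word value: the OR chain reduces the test to one comparison.  */

static int
demo_zero_by_parts (w)
     unsigned int *w;
{
  unsigned int part = w[0] | w[1] | w[2] | w[3];	/* the ior_optab chain */
  return part == 0;		/* a single compare replaces four */
}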
b93a436e | 10114 | \f |
b30f05db | 10115 | /* Generate code for a comparison of OP0 and OP1 with rtx code CODE. |
b93a436e JL |
10116 | (including code to compute the values to be compared) |
10117 | and set (CC0) according to the result. | |
b30f05db | 10118 | The decision as to signed or unsigned comparison must be made by the caller. |
bbf6f052 | 10119 | |
b93a436e | 10120 | We force a stack adjustment unless there are currently |
b30f05db | 10121 | things pushed on the stack that aren't yet used. |
ca695ac9 | 10122 | |
b30f05db BS |
10123 | If MODE is BLKmode, SIZE is an RTX giving the size of the objects being |
10124 | compared. | |
10125 | ||
10126 | If ALIGN is non-zero, it is the alignment of this type; if zero, the | |
10127 | size of MODE should be used. */ | |
10128 | ||
10129 | rtx | |
10130 | compare_from_rtx (op0, op1, code, unsignedp, mode, size, align) | |
10131 | register rtx op0, op1; | |
10132 | enum rtx_code code; | |
10133 | int unsignedp; | |
10134 | enum machine_mode mode; | |
10135 | rtx size; | |
729a2125 | 10136 | unsigned int align; |
b93a436e | 10137 | { |
b30f05db | 10138 | rtx tem; |
76bbe028 | 10139 | |
b30f05db BS |
10140 | /* If one operand is constant, make it the second one. Only do this |
10141 | if the other operand is not constant as well. */ | |
ca695ac9 | 10142 | |
8c9864f3 | 10143 | if (swap_commutative_operands_p (op0, op1)) |
bbf6f052 | 10144 | { |
b30f05db BS |
10145 | tem = op0; |
10146 | op0 = op1; | |
10147 | op1 = tem; | |
10148 | code = swap_condition (code); | |
ca695ac9 | 10149 | } |
bbf6f052 | 10150 | |
b30f05db | 10151 | if (flag_force_mem) |
b93a436e | 10152 | { |
b30f05db BS |
10153 | op0 = force_not_mem (op0); |
10154 | op1 = force_not_mem (op1); | |
10155 | } | |
bbf6f052 | 10156 | |
b30f05db BS |
10157 | do_pending_stack_adjust (); |
10158 | ||
10159 | if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT | |
10160 | && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0) | |
10161 | return tem; | |
10162 | ||
10163 | #if 0 | |
10164 | /* There's no need to do this now that combine.c can eliminate lots of | |
10165 | sign extensions. This can be less efficient in certain cases on other | |
10166 | machines. */ | |
10167 | ||
10168 | /* If this is a signed equality comparison, we can do it as an | |
10169 | unsigned comparison since zero-extension is cheaper than sign | |
10170 | extension and comparisons with zero are done as unsigned. This is | |
10171 | the case even on machines that can do fast sign extension, since | |
10172 | zero-extension is easier to combine with other operations than | |
10173 | sign-extension is. If we are comparing against a constant, we must | |
10174 | convert it to what it would look like unsigned. */ | |
10175 | if ((code == EQ || code == NE) && ! unsignedp | |
10176 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) | |
10177 | { | |
10178 | if (GET_CODE (op1) == CONST_INT | |
10179 | && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) | |
10180 | op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); | |
10181 | unsignedp = 1; | |
b93a436e JL |
10182 | } |
10183 | #endif | |
3a94c984 | 10184 | |
b30f05db | 10185 | emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align); |
0f41302f | 10186 | |
b30f05db | 10187 | return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx); |
ca695ac9 | 10188 | } |
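/* Editor's sketch of the operand canonicalization used above and again
   in do_compare_rtx_and_jump: when a constant is moved to the second
   position the condition must be swapped so the test keeps its meaning,
   e.g. (5 < x) becomes (x > 5).  The enum and function are hypothetical
   stand-ins; the real swap_condition works on enum rtx_code.  */

enum demo_cmp { DEMO_EQ, DEMO_NE, DEMO_LT, DEMO_GT, DEMO_LE, DEMO_GE };

static enum demo_cmp
demo_swap_condition (code)
     enum demo_cmp code;
{
  switch (code)
    {
    case DEMO_LT: return DEMO_GT;
    case DEMO_GT: return DEMO_LT;
    case DEMO_LE: return DEMO_GE;
    case DEMO_GE: return DEMO_LE;
    default: return code;	/* EQ and NE are their own swaps */
    }
}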
bbf6f052 | 10189 | |
b30f05db | 10190 | /* Like do_compare_and_jump but expects the values to compare as two rtx's. |
b93a436e | 10191 | The decision as to signed or unsigned comparison must be made by the caller. |
bbf6f052 | 10192 | |
b93a436e JL |
10193 | If MODE is BLKmode, SIZE is an RTX giving the size of the objects being |
10194 | compared. | |
bbf6f052 | 10195 | |
b93a436e JL |
10196 | If ALIGN is non-zero, it is the alignment of this type; if zero, the |
10197 | size of MODE should be used. */ | |
ca695ac9 | 10198 | |
b30f05db BS |
10199 | void |
10200 | do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align, | |
10201 | if_false_label, if_true_label) | |
b93a436e JL |
10202 | register rtx op0, op1; |
10203 | enum rtx_code code; | |
10204 | int unsignedp; | |
10205 | enum machine_mode mode; | |
10206 | rtx size; | |
729a2125 | 10207 | unsigned int align; |
b30f05db | 10208 | rtx if_false_label, if_true_label; |
bbf6f052 | 10209 | { |
b93a436e | 10210 | rtx tem; |
b30f05db BS |
10211 | int dummy_true_label = 0; |
10212 | ||
10213 | /* Reverse the comparison if that is safe and we want to jump if it is | |
10214 | false. */ | |
10215 | if (! if_true_label && ! FLOAT_MODE_P (mode)) | |
10216 | { | |
10217 | if_true_label = if_false_label; | |
10218 | if_false_label = 0; | |
10219 | code = reverse_condition (code); | |
10220 | } | |
bbf6f052 | 10221 | |
b93a436e JL |
10222 | /* If one operand is constant, make it the second one. Only do this |
10223 | if the other operand is not constant as well. */ | |
e7c33f54 | 10224 | |
8c9864f3 | 10225 | if (swap_commutative_operands_p (op0, op1)) |
ca695ac9 | 10226 | { |
b93a436e JL |
10227 | tem = op0; |
10228 | op0 = op1; | |
10229 | op1 = tem; | |
10230 | code = swap_condition (code); | |
10231 | } | |
bbf6f052 | 10232 | |
b93a436e JL |
10233 | if (flag_force_mem) |
10234 | { | |
10235 | op0 = force_not_mem (op0); | |
10236 | op1 = force_not_mem (op1); | |
10237 | } | |
bbf6f052 | 10238 | |
b93a436e | 10239 | do_pending_stack_adjust (); |
ca695ac9 | 10240 | |
b93a436e JL |
10241 | if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT |
10242 | && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0) | |
b30f05db BS |
10243 | { |
10244 | if (tem == const_true_rtx) | |
10245 | { | |
10246 | if (if_true_label) | |
10247 | emit_jump (if_true_label); | |
10248 | } | |
10249 | else | |
10250 | { | |
10251 | if (if_false_label) | |
10252 | emit_jump (if_false_label); | |
10253 | } | |
10254 | return; | |
10255 | } | |
ca695ac9 | 10256 | |
b93a436e JL |
10257 | #if 0 |
10258 | /* There's no need to do this now that combine.c can eliminate lots of | |
10259 | sign extensions. This can be less efficient in certain cases on other | |
10260 | machines. */ | |
ca695ac9 | 10261 | |
b93a436e JL |
10262 | /* If this is a signed equality comparison, we can do it as an |
10263 | unsigned comparison since zero-extension is cheaper than sign | |
10264 | extension and comparisons with zero are done as unsigned. This is | |
10265 | the case even on machines that can do fast sign extension, since | |
10266 | zero-extension is easier to combine with other operations than | |
10267 | sign-extension is. If we are comparing against a constant, we must | |
10268 | convert it to what it would look like unsigned. */ | |
10269 | if ((code == EQ || code == NE) && ! unsignedp | |
10270 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) | |
10271 | { | |
10272 | if (GET_CODE (op1) == CONST_INT | |
10273 | && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) | |
10274 | op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); | |
10275 | unsignedp = 1; | |
10276 | } | |
10277 | #endif | |
ca695ac9 | 10278 | |
b30f05db BS |
10279 | if (! if_true_label) |
10280 | { | |
10281 | dummy_true_label = 1; | |
10282 | if_true_label = gen_label_rtx (); | |
10283 | } | |
10284 | ||
10285 | emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align, | |
10286 | if_true_label); | |
10287 | ||
10288 | if (if_false_label) | |
10289 | emit_jump (if_false_label); | |
10290 | if (dummy_true_label) | |
10291 | emit_label (if_true_label); | |
10292 | } | |
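/* Editor's note on the FLOAT_MODE_P guard above: reversing a branch is
   only valid when a condition and its reverse are exhaustive.  For
   integers !(a < b) is exactly (a >= b); if either operand is a NaN,
   both (a < b) and (a >= b) are false, so floating-point jumps are not
   reversed here.  Illustration in plain C:  */

static int
demo_reverse_ok_for_ints (a, b)
     int a, b;
{
  return (!(a < b)) == (a >= b);	/* always 1 for integers */
}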
10293 | ||
10294 | /* Generate code for a comparison expression EXP (including code to compute | |
10295 | the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or | |
10296 | IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the | |
10297 | generated code will drop through. | |
10298 | SIGNED_CODE should be the rtx operation for this comparison for | |
10299 | signed data; UNSIGNED_CODE, likewise for use if data is unsigned. | |
10300 | ||
10301 | We force a stack adjustment unless there are currently | |
10302 | things pushed on the stack that aren't yet used. */ | |
10303 | ||
10304 | static void | |
10305 | do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label, | |
10306 | if_true_label) | |
10307 | register tree exp; | |
10308 | enum rtx_code signed_code, unsigned_code; | |
10309 | rtx if_false_label, if_true_label; | |
10310 | { | |
729a2125 | 10311 | unsigned int align0, align1; |
b30f05db BS |
10312 | register rtx op0, op1; |
10313 | register tree type; | |
10314 | register enum machine_mode mode; | |
10315 | int unsignedp; | |
10316 | enum rtx_code code; | |
10317 | ||
10318 | /* Don't crash if the comparison was erroneous. */ | |
14a774a9 | 10319 | op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0); |
b30f05db BS |
10320 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK) |
10321 | return; | |
10322 | ||
14a774a9 | 10323 | op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1); |
6b16805e JJ |
10324 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK) |
10325 | return; | |
10326 | ||
b30f05db BS |
10327 | type = TREE_TYPE (TREE_OPERAND (exp, 0)); |
10328 | mode = TYPE_MODE (type); | |
6b16805e JJ |
10329 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST |
10330 | && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST | |
10331 | || (GET_MODE_BITSIZE (mode) | |
31a7659b JDA |
10332 | > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, |
10333 | 1))))))) | |
6b16805e JJ |
10334 | { |
10335 | /* op0 might have been replaced by promoted constant, in which | |
10336 | case the type of second argument should be used. */ | |
10337 | type = TREE_TYPE (TREE_OPERAND (exp, 1)); | |
10338 | mode = TYPE_MODE (type); | |
10339 | } | |
b30f05db BS |
10340 | unsignedp = TREE_UNSIGNED (type); |
10341 | code = unsignedp ? unsigned_code : signed_code; | |
10342 | ||
10343 | #ifdef HAVE_canonicalize_funcptr_for_compare | |
10344 | /* If function pointers need to be "canonicalized" before they can | |
10345 | be reliably compared, then canonicalize them. */ | |
10346 | if (HAVE_canonicalize_funcptr_for_compare | |
10347 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE | |
10348 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
10349 | == FUNCTION_TYPE)) | |
10350 | { | |
10351 | rtx new_op0 = gen_reg_rtx (mode); | |
10352 | ||
10353 | emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0)); | |
10354 | op0 = new_op0; | |
10355 | } | |
10356 | ||
10357 | if (HAVE_canonicalize_funcptr_for_compare | |
10358 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE | |
10359 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
10360 | == FUNCTION_TYPE)) | |
10361 | { | |
10362 | rtx new_op1 = gen_reg_rtx (mode); | |
10363 | ||
10364 | emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1)); | |
10365 | op1 = new_op1; | |
10366 | } | |
10367 | #endif | |
10368 | ||
10369 | /* Do any postincrements in the expression that was tested. */ | |
10370 | emit_queue (); | |
10371 | ||
10372 | do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, | |
10373 | ((mode == BLKmode) | |
10374 | ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX), | |
19caa751 | 10375 | MIN (align0, align1), |
b30f05db | 10376 | if_false_label, if_true_label); |
b93a436e JL |
10377 | } |
10378 | \f | |
10379 | /* Generate code to calculate EXP using a store-flag instruction | |
10380 | and return an rtx for the result. EXP is either a comparison | |
10381 | or a TRUTH_NOT_EXPR whose operand is a comparison. | |
ca695ac9 | 10382 | |
b93a436e | 10383 | If TARGET is nonzero, store the result there if convenient. |
ca695ac9 | 10384 | |
b93a436e JL |
10385 | If ONLY_CHEAP is non-zero, only do this if it is likely to be very |
10386 | cheap. | |
ca695ac9 | 10387 | |
b93a436e JL |
10388 | Return zero if there is no suitable set-flag instruction |
10389 | available on this machine. | |
ca695ac9 | 10390 | |
b93a436e JL |
10391 | Once expand_expr has been called on the arguments of the comparison, |
10392 | we are committed to doing the store flag, since it is not safe to | |
10393 | re-evaluate the expression. We emit the store-flag insn by calling | |
10394 | emit_store_flag, but only expand the arguments if we have a reason | |
10395 | to believe that emit_store_flag will be successful. If we think that | |
10396 | it will, but it isn't, we have to simulate the store-flag with a | |
10397 | set/jump/set sequence. */ | |
ca695ac9 | 10398 | |
b93a436e JL |
10399 | static rtx |
10400 | do_store_flag (exp, target, mode, only_cheap) | |
10401 | tree exp; | |
10402 | rtx target; | |
10403 | enum machine_mode mode; | |
10404 | int only_cheap; | |
10405 | { | |
10406 | enum rtx_code code; | |
10407 | tree arg0, arg1, type; | |
10408 | tree tem; | |
10409 | enum machine_mode operand_mode; | |
10410 | int invert = 0; | |
10411 | int unsignedp; | |
10412 | rtx op0, op1; | |
10413 | enum insn_code icode; | |
10414 | rtx subtarget = target; | |
381127e8 | 10415 | rtx result, label; |
ca695ac9 | 10416 | |
b93a436e JL |
10417 | /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the |
10418 | result at the end. We can't simply invert the test since it would | |
10419 | have already been inverted if it were valid. This case occurs for | |
10420 | some floating-point comparisons. */ | |
ca695ac9 | 10421 | |
b93a436e JL |
10422 | if (TREE_CODE (exp) == TRUTH_NOT_EXPR) |
10423 | invert = 1, exp = TREE_OPERAND (exp, 0); | |
ca695ac9 | 10424 | |
b93a436e JL |
10425 | arg0 = TREE_OPERAND (exp, 0); |
10426 | arg1 = TREE_OPERAND (exp, 1); | |
5129d2ce AH |
10427 | |
10428 | /* Don't crash if the comparison was erroneous. */ | |
10429 | if (arg0 == error_mark_node || arg1 == error_mark_node) | |
10430 | return const0_rtx; | |
10431 | ||
b93a436e JL |
10432 | type = TREE_TYPE (arg0); |
10433 | operand_mode = TYPE_MODE (type); | |
10434 | unsignedp = TREE_UNSIGNED (type); | |
ca695ac9 | 10435 | |
b93a436e JL |
10436 | /* We won't bother with BLKmode store-flag operations because it would mean |
10437 | passing a lot of information to emit_store_flag. */ | |
10438 | if (operand_mode == BLKmode) | |
10439 | return 0; | |
ca695ac9 | 10440 | |
b93a436e JL |
10441 | /* We won't bother with store-flag operations involving function pointers |
10442 | when function pointers must be canonicalized before comparisons. */ | |
10443 | #ifdef HAVE_canonicalize_funcptr_for_compare | |
10444 | if (HAVE_canonicalize_funcptr_for_compare | |
10445 | && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE | |
10446 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
10447 | == FUNCTION_TYPE)) | |
10448 | || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE | |
10449 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
10450 | == FUNCTION_TYPE)))) | |
10451 | return 0; | |
ca695ac9 JB |
10452 | #endif |
10453 | ||
b93a436e JL |
10454 | STRIP_NOPS (arg0); |
10455 | STRIP_NOPS (arg1); | |
ca695ac9 | 10456 | |
b93a436e JL |
10457 | /* Get the rtx comparison code to use. We know that EXP is a comparison |
10458 | operation of some type. Some comparisons against 1 and -1 can be | |
10459 | converted to comparisons with zero. Do so here so that the tests | |
10460 | below will be aware that we have a comparison with zero. These | |
10461 | tests will not catch constants in the first operand, but constants | |
10462 | are rarely passed as the first operand. */ | |
ca695ac9 | 10463 | |
b93a436e JL |
10464 | switch (TREE_CODE (exp)) |
10465 | { | |
10466 | case EQ_EXPR: | |
10467 | code = EQ; | |
bbf6f052 | 10468 | break; |
b93a436e JL |
10469 | case NE_EXPR: |
10470 | code = NE; | |
bbf6f052 | 10471 | break; |
b93a436e JL |
10472 | case LT_EXPR: |
10473 | if (integer_onep (arg1)) | |
10474 | arg1 = integer_zero_node, code = unsignedp ? LEU : LE; | |
10475 | else | |
10476 | code = unsignedp ? LTU : LT; | |
ca695ac9 | 10477 | break; |
b93a436e JL |
10478 | case LE_EXPR: |
10479 | if (! unsignedp && integer_all_onesp (arg1)) | |
10480 | arg1 = integer_zero_node, code = LT; | |
10481 | else | |
10482 | code = unsignedp ? LEU : LE; | |
ca695ac9 | 10483 | break; |
b93a436e JL |
10484 | case GT_EXPR: |
10485 | if (! unsignedp && integer_all_onesp (arg1)) | |
10486 | arg1 = integer_zero_node, code = GE; | |
10487 | else | |
10488 | code = unsignedp ? GTU : GT; | |
10489 | break; | |
10490 | case GE_EXPR: | |
10491 | if (integer_onep (arg1)) | |
10492 | arg1 = integer_zero_node, code = unsignedp ? GTU : GT; | |
10493 | else | |
10494 | code = unsignedp ? GEU : GE; | |
ca695ac9 | 10495 | break; |
1eb8759b RH |
10496 | |
10497 | case UNORDERED_EXPR: | |
10498 | code = UNORDERED; | |
10499 | break; | |
10500 | case ORDERED_EXPR: | |
10501 | code = ORDERED; | |
10502 | break; | |
10503 | case UNLT_EXPR: | |
10504 | code = UNLT; | |
10505 | break; | |
10506 | case UNLE_EXPR: | |
10507 | code = UNLE; | |
10508 | break; | |
10509 | case UNGT_EXPR: | |
10510 | code = UNGT; | |
10511 | break; | |
10512 | case UNGE_EXPR: | |
10513 | code = UNGE; | |
10514 | break; | |
10515 | case UNEQ_EXPR: | |
10516 | code = UNEQ; | |
10517 | break; | |
1eb8759b | 10518 | |
ca695ac9 | 10519 | default: |
b93a436e | 10520 | abort (); |
bbf6f052 | 10521 | } |
bbf6f052 | 10522 | |
b93a436e JL |
10523 | /* Put a constant second. */ |
10524 | if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST) | |
10525 | { | |
10526 | tem = arg0; arg0 = arg1; arg1 = tem; | |
10527 | code = swap_condition (code); | |
ca695ac9 | 10528 | } |
bbf6f052 | 10529 | |
b93a436e JL |
10530 | /* If this is an equality or inequality test of a single bit, we can |
10531 | do this by shifting the bit being tested to the low-order bit and | |
10532 | masking the result with the constant 1. If the condition was EQ, | |
10533 | we xor it with 1. This does not require an scc insn and is faster | |
10534 | than an scc insn even if we have it. */ | |
d39985fa | 10535 | |
b93a436e JL |
10536 | if ((code == NE || code == EQ) |
10537 | && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) | |
10538 | && integer_pow2p (TREE_OPERAND (arg0, 1))) | |
10539 | { | |
10540 | tree inner = TREE_OPERAND (arg0, 0); | |
10541 | int bitnum = tree_log2 (TREE_OPERAND (arg0, 1)); | |
10542 | int ops_unsignedp; | |
bbf6f052 | 10543 | |
b93a436e JL |
10544 | /* If INNER is a right shift of a constant and it plus BITNUM does |
10545 | not overflow, adjust BITNUM and INNER. */ | |
ca695ac9 | 10546 | |
b93a436e JL |
10547 | if (TREE_CODE (inner) == RSHIFT_EXPR |
10548 | && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST | |
10549 | && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0 | |
05bccae2 RK |
10550 | && bitnum < TYPE_PRECISION (type) |
10551 | && 0 > compare_tree_int (TREE_OPERAND (inner, 1), | |
10552 | bitnum - TYPE_PRECISION (type))) | |
ca695ac9 | 10553 | { |
b93a436e JL |
10554 | bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)); |
10555 | inner = TREE_OPERAND (inner, 0); | |
ca695ac9 | 10556 | } |
ca695ac9 | 10557 | |
b93a436e JL |
10558 | /* If we are going to be able to omit the AND below, we must do our |
10559 | operations as unsigned. If we must use the AND, we have a choice. | |
10560 | Normally unsigned is faster, but for some machines signed is. */ | |
10561 | ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1 | |
10562 | #ifdef LOAD_EXTEND_OP | |
10563 | : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1) | |
10564 | #else | |
10565 | : 1 | |
10566 | #endif | |
10567 | ); | |
bbf6f052 | 10568 | |
296b4ed9 | 10569 | if (! get_subtarget (subtarget) |
a47fed55 | 10570 | || GET_MODE (subtarget) != operand_mode |
e5e809f4 | 10571 | || ! safe_from_p (subtarget, inner, 1)) |
b93a436e | 10572 | subtarget = 0; |
bbf6f052 | 10573 | |
b93a436e | 10574 | op0 = expand_expr (inner, subtarget, VOIDmode, 0); |
bbf6f052 | 10575 | |
b93a436e | 10576 | if (bitnum != 0) |
681cb233 | 10577 | op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0, |
b93a436e | 10578 | size_int (bitnum), subtarget, ops_unsignedp); |
bbf6f052 | 10579 | |
b93a436e JL |
10580 | if (GET_MODE (op0) != mode) |
10581 | op0 = convert_to_mode (mode, op0, ops_unsignedp); | |
bbf6f052 | 10582 | |
b93a436e JL |
10583 | if ((code == EQ && ! invert) || (code == NE && invert)) |
10584 | op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget, | |
10585 | ops_unsignedp, OPTAB_LIB_WIDEN); | |
bbf6f052 | 10586 | |
b93a436e JL |
10587 | /* Put the AND last so it can combine with more things. */ |
10588 | if (bitnum != TYPE_PRECISION (type) - 1) | |
10589 | op0 = expand_and (op0, const1_rtx, subtarget); | |
bbf6f052 | 10590 | |
b93a436e JL |
10591 | return op0; |
10592 | } | |
bbf6f052 | 10593 | |
b93a436e | 10594 | /* Now see if we are likely to be able to do this. Return if not. */ |
1eb8759b | 10595 | if (! can_compare_p (code, operand_mode, ccp_store_flag)) |
b93a436e | 10596 | return 0; |
1eb8759b | 10597 | |
b93a436e JL |
10598 | icode = setcc_gen_code[(int) code]; |
10599 | if (icode == CODE_FOR_nothing | |
a995e389 | 10600 | || (only_cheap && insn_data[(int) icode].operand[0].mode != mode)) |
ca695ac9 | 10601 | { |
b93a436e JL |
10602 | /* We can only do this if it is one of the special cases that |
10603 | can be handled without an scc insn. */ | |
10604 | if ((code == LT && integer_zerop (arg1)) | |
10605 | || (! only_cheap && code == GE && integer_zerop (arg1))) | |
10606 | ; | |
10607 | else if (BRANCH_COST >= 0 | |
10608 | && ! only_cheap && (code == NE || code == EQ) | |
10609 | && TREE_CODE (type) != REAL_TYPE | |
10610 | && ((abs_optab->handlers[(int) operand_mode].insn_code | |
10611 | != CODE_FOR_nothing) | |
10612 | || (ffs_optab->handlers[(int) operand_mode].insn_code | |
10613 | != CODE_FOR_nothing))) | |
10614 | ; | |
10615 | else | |
10616 | return 0; | |
ca695ac9 | 10617 | } |
3a94c984 | 10618 | |
296b4ed9 | 10619 | if (! get_subtarget (target) |
a47fed55 | 10620 | || GET_MODE (subtarget) != operand_mode |
e5e809f4 | 10621 | || ! safe_from_p (subtarget, arg1, 1)) |
b93a436e JL |
10622 | subtarget = 0; |
10623 | ||
10624 | op0 = expand_expr (arg0, subtarget, VOIDmode, 0); | |
10625 | op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); | |
10626 | ||
10627 | if (target == 0) | |
10628 | target = gen_reg_rtx (mode); | |
10629 | ||
10630 | /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe | |
10631 | because, if emit_store_flag does anything, it will succeed and | |
10632 | OP0 and OP1 will not be used subsequently. */ | |
ca695ac9 | 10633 | |
b93a436e JL |
10634 | result = emit_store_flag (target, code, |
10635 | queued_subexp_p (op0) ? copy_rtx (op0) : op0, | |
10636 | queued_subexp_p (op1) ? copy_rtx (op1) : op1, | |
10637 | operand_mode, unsignedp, 1); | |
ca695ac9 | 10638 | |
b93a436e JL |
10639 | if (result) |
10640 | { | |
10641 | if (invert) | |
10642 | result = expand_binop (mode, xor_optab, result, const1_rtx, | |
10643 | result, 0, OPTAB_LIB_WIDEN); | |
10644 | return result; | |
ca695ac9 | 10645 | } |
bbf6f052 | 10646 | |
b93a436e JL |
10647 | /* If this failed, we have to do this with set/compare/jump/set code. */ |
10648 | if (GET_CODE (target) != REG | |
10649 | || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1)) | |
10650 | target = gen_reg_rtx (GET_MODE (target)); | |
10651 | ||
10652 | emit_move_insn (target, invert ? const0_rtx : const1_rtx); | |
10653 | result = compare_from_rtx (op0, op1, code, unsignedp, | |
10654 | operand_mode, NULL_RTX, 0); | |
10655 | if (GET_CODE (result) == CONST_INT) | |
10656 | return (((result == const0_rtx && ! invert) | |
10657 | || (result != const0_rtx && invert)) | |
10658 | ? const0_rtx : const1_rtx); | |
ca695ac9 | 10659 | |
b93a436e JL |
10660 | label = gen_label_rtx (); |
10661 | if (bcc_gen_fctn[(int) code] == 0) | |
10662 | abort (); | |
0f41302f | 10663 | |
b93a436e JL |
10664 | emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label)); |
10665 | emit_move_insn (target, invert ? const1_rtx : const0_rtx); | |
10666 | emit_label (label); | |
bbf6f052 | 10667 | |
b93a436e | 10668 | return target; |
ca695ac9 | 10669 | } |
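/* Editor's C-level model of the single-bit shortcut above: a test of
   one bit is done with a shift and a mask, and the EQ sense is obtained
   with a final XOR, so no store-flag (scc) instruction is needed.  */

static unsigned int
demo_bit_is_set (x, bitnum)	/* (x & (1 << bitnum)) != 0 */
     unsigned int x;
     int bitnum;
{
  return (x >> bitnum) & 1;
}

static unsigned int
demo_bit_is_clear (x, bitnum)	/* (x & (1 << bitnum)) == 0 */
     unsigned int x;
     int bitnum;
{
  return ((x >> bitnum) & 1) ^ 1;
}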
b93a436e JL |
10670 | \f |
10671 | /* Generate a tablejump instruction (used for switch statements). */ | |
10672 | ||
10673 | #ifdef HAVE_tablejump | |
e87b4f3f | 10674 | |
b93a436e JL |
10675 | /* INDEX is the value being switched on, with the lowest value |
10676 | in the table already subtracted. | |
10677 | MODE is its expected mode (needed if INDEX is constant). | |
10678 | RANGE is the length of the jump table. | |
10679 | TABLE_LABEL is a CODE_LABEL rtx for the table itself. | |
88d3b7f0 | 10680 | |
b93a436e JL |
10681 | DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the |
10682 | index value is out of range. */ | |
0f41302f | 10683 | |
ca695ac9 | 10684 | void |
b93a436e JL |
10685 | do_tablejump (index, mode, range, table_label, default_label) |
10686 | rtx index, range, table_label, default_label; | |
10687 | enum machine_mode mode; | |
ca695ac9 | 10688 | { |
b93a436e | 10689 | register rtx temp, vector; |
88d3b7f0 | 10690 | |
b93a436e JL |
10691 | /* Do an unsigned comparison (in the proper mode) between the index |
10692 | expression and the value which represents the length of the range. | |
10693 | Since we just finished subtracting the lower bound of the range | |
10694 | from the index expression, this comparison allows us to simultaneously | |
10695 | check that the original index expression value is both greater than | |
10696 | or equal to the minimum value of the range and less than or equal to | |
10697 | the maximum value of the range. */ | |
709f5be1 | 10698 | |
c5d5d461 JL |
10699 | emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1, |
10700 | 0, default_label); | |
bbf6f052 | 10701 | |
b93a436e JL |
10702 | /* If index is in range, it must fit in Pmode. |
10703 | Convert to Pmode so we can index with it. */ | |
10704 | if (mode != Pmode) | |
10705 | index = convert_to_mode (Pmode, index, 1); | |
bbf6f052 | 10706 | |
b93a436e JL |
10707 | /* Don't let a MEM slip through, because then the INDEX that comes | |
10708 | out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, | |
10709 | and break_out_memory_refs will go to work on it and mess it up. */ | |
10710 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
10711 | if (flag_pic && GET_CODE (index) != REG) | |
10712 | index = copy_to_mode_reg (Pmode, index); | |
10713 | #endif | |
ca695ac9 | 10714 | |
b93a436e JL |
10715 | /* If flag_force_addr were to affect this address |
10716 | it could interfere with the tricky assumptions made | |
10717 | about addresses that contain label-refs, | |
10718 | which may be valid only very near the tablejump itself. */ | |
10719 | /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the | |
10720 | GET_MODE_SIZE, because this indicates how large insns are. The other | |
10721 | uses should all be Pmode, because they are addresses. This code | |
10722 | could fail if addresses and insns are not the same size. */ | |
10723 | index = gen_rtx_PLUS (Pmode, | |
10724 | gen_rtx_MULT (Pmode, index, | |
10725 | GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))), | |
10726 | gen_rtx_LABEL_REF (Pmode, table_label)); | |
10727 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
10728 | if (flag_pic) | |
10729 | index = PIC_CASE_VECTOR_ADDRESS (index); | |
10730 | else | |
bbf6f052 | 10731 | #endif |
b93a436e JL |
10732 | index = memory_address_noforce (CASE_VECTOR_MODE, index); |
10733 | temp = gen_reg_rtx (CASE_VECTOR_MODE); | |
10734 | vector = gen_rtx_MEM (CASE_VECTOR_MODE, index); | |
10735 | RTX_UNCHANGING_P (vector) = 1; | |
10736 | convert_move (temp, vector, 0); | |
10737 | ||
10738 | emit_jump_insn (gen_tablejump (temp, table_label)); | |
10739 | ||
10740 | /* If we are generating PIC code or if the table is PC-relative, the | |
10741 | table and JUMP_INSN must be adjacent, so don't output a BARRIER. */ | |
10742 | if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic) | |
10743 | emit_barrier (); | |
bbf6f052 | 10744 | } |
b93a436e | 10745 | |
3a94c984 | 10746 | #endif /* HAVE_tablejump */ |
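/* Editor's hand-written analogue (GNU C computed goto, illustrative
   only) of the sequence do_tablejump emits for a dense switch over
   cases 4, 5 and 6: subtract the lowest case value, make ONE unsigned
   comparison that catches both ends of the range, then index into the
   label table.  */

static int
demo_dispatch (x)
     unsigned int x;
{
  static const void *table[] = { &&case4, &&case5, &&case6 };
  unsigned int index = x - 4;	/* subtract the lowest case value */

  if (index > 2)		/* GTU: also catches x < 4, by wraparound */
    goto deflt;
  goto *table[index];		/* the tablejump proper */

 case4: return 40;
 case5: return 50;
 case6: return 60;
 deflt: return -1;
}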