]> gcc.gnu.org Git - gcc.git/blob - gcc/java/jcf-write.c
jcf-write.c (generate_bytecode_insns): Emit invokeinterface bytecodes in the correct...
[gcc.git] / gcc / java / jcf-write.c
1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
19
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "jcf.h"
27 #include "tree.h"
28 #include "java-tree.h"
29 #include "obstack.h"
30 #undef AND
31 #include "rtl.h"
32 #include "flags.h"
33 #include "java-opcodes.h"
34 #include "parse.h" /* for BLOCK_EXPR_BODY */
35 #include "buffer.h"
36 #include "toplev.h"
37
38 #ifndef DIR_SEPARATOR
39 #define DIR_SEPARATOR '/'
40 #endif
41
42 extern struct obstack temporary_obstack;
43
44 /* Base directory in which `.class' files should be written.
45 NULL means to put the file into the same directory as the
46 corresponding .java file. */
47 char *jcf_write_base_directory = NULL;
48
/* Make sure bytecode.data is big enough for at least N more bytes.
   Note: this and the following macros assume a variable `state'
   (a struct jcf_partial *) is in scope at the point of use. */

#define RESERVE(N) \
  do { CHECK_OP(state); \
    if (state->bytecode.ptr + (N) > state->bytecode.limit) \
    buffer_grow (&state->bytecode, N); } while (0)

/* Add a 1-byte instruction/operand I to bytecode.data,
   assuming space has already been RESERVE'd. */

#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))

/* Like OP1, but I is a 2-byte big endian integer. */

#define OP2(I) \
  do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Like OP1, but I is a 4-byte big endian integer. */

#define OP4(I) \
  do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
       OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Macro to call each time we push I words on the JVM stack.
   Tracks the running depth and its high-water mark (code_SP_max). */

#define NOTE_PUSH(I) \
  do { state->code_SP += (I); \
    if (state->code_SP > state->code_SP_max) \
      state->code_SP_max = state->code_SP; } while (0)

/* Macro to call each time we pop I words from the JVM stack.
   A negative resulting depth indicates a bookkeeping bug, so abort. */

#define NOTE_POP(I) \
  do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
83
/* A chunk or segment of a .class file. */

struct chunk
{
  /* The next segment of this .class file. */
  struct chunk *next;

  /* The actual data in this segment to be written to the .class file. */
  unsigned char *data;

  /* The size (in bytes) of the segment to be written to the .class file. */
  int size;
};
97
98 #define PENDING_CLEANUP_PC (-3)
99 #define PENDING_EXIT_PC (-2)
100 #define UNDEFINED_PC (-1)
101
/* Each "block" represents a label plus the bytecode instructions following.
   There may be branches out of the block, but no incoming jumps, except
   to the beginning of the block.

   If (pc < 0), the jcf_block is not an actual block (i.e. it has no
   associated code yet), but it is an undefined label.
*/

struct jcf_block
{
  /* For blocks that are defined, the next block (in pc order).
     For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
     or a cleanup expression (from a WITH_CLEANUP_EXPR),
     this is the next (outer) such end label, in a stack headed by
     labeled_blocks in jcf_partial. */
  struct jcf_block *next;

  /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR,
     pc is PENDING_EXIT_PC.
     In the not-yet-defined end label for a pending cleanup subroutine,
     pc is PENDING_CLEANUP_PC.
     For other not-yet-defined labels, pc is UNDEFINED_PC.

     If the label has been defined:
     Until perform_relocations is finished, this is the maximum possible
     value of the bytecode offset at the beginning of this block.
     After perform_relocations, it is the actual offset (pc). */
  int pc;

  /* Source line number for this block, or -1 if none. */
  int linenumber;

  /* After finish_jcf_block is called, the actual instructions contained
     in this block.  Before then it is NULL, and the instructions are
     in state->bytecode. */
  union {
    struct chunk *chunk;

    /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
       covered by the cleanup. */
    struct jcf_block *start_label;
  } v;

  union {
    /* Set of relocations (in reverse offset order) for this block. */
    struct jcf_relocation *relocations;

    /* If this block is that of the not-yet-defined end label of
       a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
       If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
    tree labeled_block;
  } u;
};
153
/* A "relocation" type for the 0-3 bytes of padding at the start
   of a tableswitch or a lookupswitch. */
#define SWITCH_ALIGN_RELOC 4

/* A relocation type for the labels in a tableswitch or a lookupswitch;
   these are relative to the start of the instruction, but (due to
   the 0-3 bytes of padding), we don't know the offset before relocation. */
#define BLOCK_START_RELOC 1

struct jcf_relocation
{
  /* Next relocation for the current jcf_block. */
  struct jcf_relocation *next;

  /* The (byte) offset within the current block that needs to be relocated. */
  HOST_WIDE_INT offset;

  /* 0 if offset is a 4-byte relative offset.
     4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
     for proper alignment in tableswitch/lookupswitch instructions.
     1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
     to the start of the containing block.
     -1 if offset is a 2-byte relative offset.
     < -1 if offset is the address of an instruction with a 2-byte offset
     that does not have a corresponding 4-byte offset version, in which
     case the absolute value of kind is the inverted opcode.
     > 4 if offset is the address of an instruction (such as jsr) with a
     2-byte offset that does have a corresponding 4-byte offset version,
     in which case kind is the opcode of the 4-byte version (such as jsr_w). */
  int kind;

  /* The label the relocation wants to actually transfer to. */
  struct jcf_block *label;
};
188
189 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
190 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
191
/* State for a single catch clause. */

struct jcf_handler
{
  /* Next handler in the current method's handler chain. */
  struct jcf_handler *next;

  /* The range of instructions protected by this handler, and the
     label where control transfers when the exception is caught. */
  struct jcf_block *start_label;
  struct jcf_block *end_label;
  struct jcf_block *handler_label;

  /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
  tree type;
};
205
/* State for the current switch statement. */

struct jcf_switch_state
{
  /* The enclosing switch statement, if any (for nested switches). */
  struct jcf_switch_state *prev;
  struct jcf_block *default_label;

  /* Chain of relocations, one per case label, with their count. */
  struct jcf_relocation *cases;
  int num_cases;
  /* Smallest and largest case values. */
  HOST_WIDE_INT min_case, max_case;
};
217
/* This structure is used to contain the various pieces that will
   become a .class file. */

struct jcf_partial
{
  /* The first and current (last) chunk of the .class file being built. */
  struct chunk *first;
  struct chunk *chunk;
  /* Obstack on which chunks, blocks, handlers, etc. are allocated. */
  struct obstack *chunk_obstack;
  /* The method currently being generated. */
  tree current_method;

  /* List of basic blocks for the current method. */
  struct jcf_block *blocks;
  struct jcf_block *last_block;

  /* Chain of local-variable debug records, its tail, and its length. */
  struct localvar_info *first_lvar;
  struct localvar_info *last_lvar;
  int lvar_count;

  /* The constant pool of the class being generated. */
  CPool cpool;

  /* Number of line-number entries recorded for the current method. */
  int linenumber_count;

  /* Until perform_relocations, this is an upper bound on the number
     of bytes (so far) in the instructions for the current method. */
  int code_length;

  /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
  struct jcf_block *labeled_blocks;

  /* The current stack size (stack pointer) in the current method. */
  int code_SP;

  /* The largest extent of stack size (stack pointer) in the current method. */
  int code_SP_max;

  /* Contains a mapping from local var slot number to localvar_info. */
  struct buffer localvars;

  /* The buffer allocated for bytecode for the current jcf_block. */
  struct buffer bytecode;

  /* Chain of exception handlers for the current method. */
  struct jcf_handler *handlers;

  /* Last element in handlers chain. */
  struct jcf_handler *last_handler;

  /* Number of exception handlers for the current method. */
  int num_handlers;

  /* Number of finalizers we are currently nested within. */
  int num_finalizers;

  /* If non-NULL, use this for the return value. */
  tree return_value_decl;

  /* Information about the current switch statement. */
  struct jcf_switch_state *sw_state;
};
277
278 static void generate_bytecode_insns PROTO ((tree, int, struct jcf_partial *));
279 static struct chunk * alloc_chunk PROTO ((struct chunk *, unsigned char *,
280 int, struct obstack *));
281 static unsigned char * append_chunk PROTO ((unsigned char *, int,
282 struct jcf_partial *));
283 static void append_chunk_copy PROTO ((unsigned char *, int,
284 struct jcf_partial *));
285 static struct jcf_block * gen_jcf_label PROTO ((struct jcf_partial *));
286 static void finish_jcf_block PROTO ((struct jcf_partial *));
287 static void define_jcf_label PROTO ((struct jcf_block *,
288 struct jcf_partial *));
289 static struct jcf_block * get_jcf_label_here PROTO ((struct jcf_partial *));
290 static void put_linenumber PROTO ((int, struct jcf_partial *));
291 static void localvar_alloc PROTO ((tree, struct jcf_partial *));
292 static void localvar_free PROTO ((tree, struct jcf_partial *));
293 static int get_access_flags PROTO ((tree));
294 static void write_chunks PROTO ((FILE *, struct chunk *));
295 static int adjust_typed_op PROTO ((tree, int));
296 static void generate_bytecode_conditional PROTO ((tree, struct jcf_block *,
297 struct jcf_block *, int,
298 struct jcf_partial *));
299 static void generate_bytecode_return PROTO ((tree, struct jcf_partial *));
300 static void perform_relocations PROTO ((struct jcf_partial *));
301 static void init_jcf_state PROTO ((struct jcf_partial *, struct obstack *));
302 static void init_jcf_method PROTO ((struct jcf_partial *, tree));
303 static void release_jcf_state PROTO ((struct jcf_partial *));
304 static struct chunk * generate_classfile PROTO ((tree, struct jcf_partial *));
305 static struct jcf_handler *alloc_handler PROTO ((struct jcf_block *,
306 struct jcf_block *,
307 struct jcf_partial *));
308 static void emit_iinc PROTO ((tree, HOST_WIDE_INT, struct jcf_partial *));
309 static void emit_reloc PROTO ((HOST_WIDE_INT, int, struct jcf_block *,
310 struct jcf_partial *));
311 static void push_constant1 PROTO ((HOST_WIDE_INT, struct jcf_partial *));
312 static void push_constant2 PROTO ((HOST_WIDE_INT, struct jcf_partial *));
313 static void push_int_const PROTO ((HOST_WIDE_INT, struct jcf_partial *));
314 static int find_constant_wide PROTO ((HOST_WIDE_INT, HOST_WIDE_INT,
315 struct jcf_partial *));
316 static void push_long_const PROTO ((HOST_WIDE_INT, HOST_WIDE_INT,
317 struct jcf_partial *));
318 static int find_constant_index PROTO ((tree, struct jcf_partial *));
319 static void push_long_const PROTO ((HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *));
321 static void field_op PROTO ((tree, int, struct jcf_partial *));
322 static void maybe_wide PROTO ((int, int, struct jcf_partial *));
323 static void emit_dup PROTO ((int, int, struct jcf_partial *));
324 static void emit_pop PROTO ((int, struct jcf_partial *));
325 static void emit_load_or_store PROTO ((tree, int, struct jcf_partial *));
326 static void emit_load PROTO ((tree, struct jcf_partial *));
327 static void emit_store PROTO ((tree, struct jcf_partial *));
328 static void emit_unop PROTO ((enum java_opcode, tree, struct jcf_partial *));
329 static void emit_binop PROTO ((enum java_opcode, tree, struct jcf_partial *));
330 static void emit_reloc PROTO ((HOST_WIDE_INT, int, struct jcf_block *,
331 struct jcf_partial *));
332 static void emit_switch_reloc PROTO ((struct jcf_block *,
333 struct jcf_partial *));
334 static void emit_case_reloc PROTO ((struct jcf_relocation *,
335 struct jcf_partial *));
336 static void emit_if PROTO ((struct jcf_block *, int, int,
337 struct jcf_partial *));
338 static void emit_goto PROTO ((struct jcf_block *, struct jcf_partial *));
339 static void emit_jsr PROTO ((struct jcf_block *, struct jcf_partial *));
340 static void call_cleanups PROTO ((struct jcf_block *, struct jcf_partial *));
341 static char *make_class_file_name PROTO ((tree));
342
/* Utility macros for appending (big-endian) data to a buffer.
   We assume a local variable 'ptr' points into where we want to
   write next, and we assume enough space has been allocated. */
346
#ifdef ENABLE_CHECKING
/* Consistency check: verify that writing I bytes at PTR stays within
   the bounds of STATE's current chunk.  Calls fatal() on violation.
   Returns 0 so it can be used inside comma expressions (see PUT1). */
int
CHECK_PUT(ptr, state, i)
     void *ptr;
     struct jcf_partial *state;
     int i;
{
  if (ptr < state->chunk->data
      || (char*)ptr + i > state->chunk->data + state->chunk->size)
    fatal ("internal error - CHECK_PUT failed");
  return 0;
}
#else
/* Checking disabled: the macro expands to nothing. */
#define CHECK_PUT(PTR, STATE, I) ((void)0)
#endif
362
363 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
364 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
365 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
366 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
367
368 /* There are some cases below where CHECK_PUT is guaranteed to fail.
369 Use the following macros in those specific cases. */
370 #define UNSAFE_PUT1(X) (*ptr++ = (X))
371 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
372 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
373 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
374
375 \f
376 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
377 Set the data and size fields to DATA and SIZE, respectively.
378 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
379
380 static struct chunk *
381 alloc_chunk (last, data, size, work)
382 struct chunk *last;
383 unsigned char *data;
384 int size;
385 struct obstack *work;
386 {
387 struct chunk *chunk = (struct chunk *)
388 obstack_alloc (work, sizeof(struct chunk));
389
390 if (data == NULL && size > 0)
391 data = obstack_alloc (work, size);
392
393 chunk->next = NULL;
394 chunk->data = data;
395 chunk->size = size;
396 if (last != NULL)
397 last->next = chunk;
398 return chunk;
399 }
400
#ifdef ENABLE_CHECKING
/* Consistency check: verify that the bytecode output pointer has not
   run past the end of the bytecode buffer.  Calls fatal() on overflow.
   Returns 0 so it can be used inside comma expressions (see OP1). */
int
CHECK_OP(struct jcf_partial *state)
{
  if (state->bytecode.ptr > state->bytecode.limit)
    {
      fatal("internal error - CHECK_OP failed");
    }
  return 0;
}
#else
/* Checking disabled: the macro expands to nothing. */
#define CHECK_OP(STATE) ((void)0)
#endif
414
415 static unsigned char *
416 append_chunk (data, size, state)
417 unsigned char *data;
418 int size;
419 struct jcf_partial *state;
420 {
421 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
422 if (state->first == NULL)
423 state->first = state->chunk;
424 return state->chunk->data;
425 }
426
/* Append a new chunk to STATE and fill it with a copy of the SIZE
   bytes at DATA. */

static void
append_chunk_copy (data, size, state)
     unsigned char *data;
     int size;
     struct jcf_partial *state;
{
  unsigned char *dest;

  dest = append_chunk ((unsigned char *) NULL, size, state);
  memcpy (dest, data, size);
}
436 \f
437 static struct jcf_block *
438 gen_jcf_label (state)
439 struct jcf_partial *state;
440 {
441 struct jcf_block *block = (struct jcf_block *)
442 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
443 block->next = NULL;
444 block->linenumber = -1;
445 block->pc = UNDEFINED_PC;
446 return block;
447 }
448
/* Finish the current (last) block: move its pending bytecode from
   state->bytecode into a new chunk, and bump state->code_length up to
   the largest size the code could reach once relocations expand. */

static void
finish_jcf_block (state)
     struct jcf_partial *state;
{
  struct jcf_block *block = state->last_block;
  struct jcf_relocation *reloc;
  int code_length = BUFFER_LENGTH (&state->bytecode);
  int pc = state->code_length;
  append_chunk_copy (state->bytecode.data, code_length, state);
  BUFFER_RESET (&state->bytecode);
  block->v.chunk = state->chunk;

  /* Calculate code_length to the maximum value it can have. */
  pc += block->v.chunk->size;
  for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
    {
      int kind = reloc->kind;
      if (kind == SWITCH_ALIGN_RELOC)
	pc += 3;  /* Up to 3 bytes of tableswitch/lookupswitch padding. */
      else if (kind > BLOCK_START_RELOC)
	pc += 2; /* 2-byte offset may grow to 4-byte offset */
      else if (kind < -1)
	pc += 5; /* May need to add a goto_w. */
    }
  state->code_length = pc;
}
475
476 static void
477 define_jcf_label (label, state)
478 struct jcf_block *label;
479 struct jcf_partial *state;
480 {
481 if (state->last_block != NULL)
482 finish_jcf_block (state);
483 label->pc = state->code_length;
484 if (state->blocks == NULL)
485 state->blocks = label;
486 else
487 state->last_block->next = label;
488 state->last_block = label;
489 label->next = NULL;
490 label->u.relocations = NULL;
491 }
492
493 static struct jcf_block *
494 get_jcf_label_here (state)
495 struct jcf_partial *state;
496 {
497 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
498 return state->last_block;
499 else
500 {
501 struct jcf_block *label = gen_jcf_label (state);
502 define_jcf_label (label, state);
503 return label;
504 }
505 }
506
507 /* Note a line number entry for the current PC and given LINE. */
508
509 static void
510 put_linenumber (line, state)
511 int line;
512 struct jcf_partial *state;
513 {
514 struct jcf_block *label = get_jcf_label_here (state);
515 if (label->linenumber > 0)
516 {
517 label = gen_jcf_label (state);
518 define_jcf_label (label, state);
519 }
520 label->linenumber = line;
521 state->linenumber_count++;
522 }
523
524 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
525 in the range (START_LABEL, END_LABEL). */
526
527 static struct jcf_handler *
528 alloc_handler (start_label, end_label, state)
529 struct jcf_block *start_label;
530 struct jcf_block *end_label;
531 struct jcf_partial *state;
532 {
533 struct jcf_handler *handler = (struct jcf_handler *)
534 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
535 handler->start_label = start_label;
536 handler->end_label = end_label;
537 handler->handler_label = get_jcf_label_here (state);
538 if (state->handlers == NULL)
539 state->handlers = handler;
540 else
541 state->last_handler->next = handler;
542 state->last_handler = handler;
543 handler->next = NULL;
544 state->num_handlers++;
545 return handler;
546 }
547
548 \f
/* The index of jvm local variable allocated for this DECL.
   This is assigned when generating .class files;
   contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
   (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */

#define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)

/* Debug record for one local variable: its declaration and the range
   of blocks over which its slot is live. */

struct localvar_info
{
  struct localvar_info *next;

  tree decl;
  struct jcf_block *start_label;
  struct jcf_block *end_label;
};

/* The table mapping local-variable slot numbers to localvar_info,
   and the number of slots currently in use (assumes `state' in scope). */
#define localvar_buffer ((struct localvar_info**) state->localvars.data)
#define localvar_max \
  ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
568
/* Assign a JVM local-variable slot to DECL, reusing the first free
   slot if possible (two consecutive free slots for a two-word type),
   growing the slot table when none is available.  Records debugging
   info for the variable when enabled. */

static void
localvar_alloc (decl, state)
     tree decl;
     struct jcf_partial *state;
{
  struct jcf_block *start_label = get_jcf_label_here (state);
  int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
  int index;
  register struct localvar_info *info;
  register struct localvar_info **ptr = localvar_buffer;
  register struct localvar_info **limit
    = (struct localvar_info**) state->localvars.ptr;
  /* Scan for the first free slot (free pair, for a wide type). */
  for (index = 0; ptr < limit; index++, ptr++)
    {
      if (ptr[0] == NULL
	  && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
	break;
    }
  if (ptr == limit)
    {
      /* No free slot found: extend the table.  buffer_grow may move
	 the underlying data, so recompute ptr from localvars.data. */
      buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
      ptr = (struct localvar_info**) state->localvars.data + index;
      state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
    }
  info = (struct localvar_info *)
    obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
  ptr[0] = info;
  /* Mark the second slot of a two-word local with an all-ones sentinel;
     localvar_free checks for it. */
  if (wide)
    ptr[1] = (struct localvar_info *)(~0);
  DECL_LOCAL_INDEX (decl) = index;
  info->decl = decl;
  info->start_label = start_label;

  if (debug_info_level > DINFO_LEVEL_TERSE
      && DECL_NAME (decl) != NULL_TREE)
    {
      /* Generate debugging info. */
      info->next = NULL;
      if (state->last_lvar != NULL)
	state->last_lvar->next = info;
      else
	state->first_lvar = info;
      state->last_lvar = info;
      state->lvar_count++;
    }
}
615
616 static void
617 localvar_free (decl, state)
618 tree decl;
619 struct jcf_partial *state;
620 {
621 struct jcf_block *end_label = get_jcf_label_here (state);
622 int index = DECL_LOCAL_INDEX (decl);
623 register struct localvar_info **ptr = &localvar_buffer [index];
624 register struct localvar_info *info = *ptr;
625 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
626
627 info->end_label = end_label;
628
629 if (info->decl != decl)
630 abort ();
631 ptr[0] = NULL;
632 if (wide)
633 {
634 if (ptr[1] != (struct localvar_info *)(~0))
635 abort ();
636 ptr[1] = NULL;
637 }
638 }
639
640 \f
641 #define STACK_TARGET 1
642 #define IGNORE_TARGET 2
643
/* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
   a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */

static int
get_access_flags (decl)
     tree decl;
{
  int flags = 0;
  int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
  /* PUBLIC and FINAL share a representation across decl kinds. */
  if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
    flags |= ACC_PUBLIC;
  if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
    flags |= ACC_FINAL;
  if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (TREE_PROTECTED (decl))
	flags |= ACC_PROTECTED;
      if (TREE_PRIVATE (decl))
	flags |= ACC_PRIVATE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      if (CLASS_SUPER (decl))
	flags |= ACC_SUPER;
      if (CLASS_ABSTRACT (decl))
	flags |= ACC_ABSTRACT;
      if (CLASS_INTERFACE (decl))
	flags |= ACC_INTERFACE;
    }
  else
    fatal ("internal error - bad argument to get_access_flags");
  /* Method-only flags. */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (METHOD_NATIVE (decl))
	flags |= ACC_NATIVE;
      if (METHOD_STATIC (decl))
	flags |= ACC_STATIC;
      if (METHOD_SYNCHRONIZED (decl))
	flags |= ACC_SYNCHRONIZED;
      if (METHOD_ABSTRACT (decl))
	flags |= ACC_ABSTRACT;
    }
  /* Field-only flags. */
  if (isfield)
    {
      if (FIELD_STATIC (decl))
	flags |= ACC_STATIC;
      if (FIELD_VOLATILE (decl))
	flags |= ACC_VOLATILE;
      if (FIELD_TRANSIENT (decl))
	flags |= ACC_TRANSIENT;
    }
  return flags;
}
697
698 /* Write the list of segments starting at CHUNKS to STREAM. */
699
700 static void
701 write_chunks (stream, chunks)
702 FILE* stream;
703 struct chunk *chunks;
704 {
705 for (; chunks != NULL; chunks = chunks->next)
706 fwrite (chunks->data, chunks->size, 1, stream);
707 }
708
709 /* Push a 1-word constant in the constant pool at the given INDEX.
710 (Caller is responsible for doing NOTE_PUSH.) */
711
712 static void
713 push_constant1 (index, state)
714 HOST_WIDE_INT index;
715 struct jcf_partial *state;
716 {
717 RESERVE (3);
718 if (index < 256)
719 {
720 OP1 (OPCODE_ldc);
721 OP1 (index);
722 }
723 else
724 {
725 OP1 (OPCODE_ldc_w);
726 OP2 (index);
727 }
728 }
729
/* Push a 2-word constant in the constant pool at the given INDEX.
   Only the wide form (ldc2_w) exists for 2-word constants.
   (Caller is responsible for doing NOTE_PUSH.) */

static void
push_constant2 (index, state)
     HOST_WIDE_INT index;
     struct jcf_partial *state;
{
  RESERVE (3);
  OP1 (OPCODE_ldc2_w);
  OP2 (index);
}
742
743 /* Push 32-bit integer constant on VM stack.
744 Caller is responsible for doing NOTE_PUSH. */
745
746 static void
747 push_int_const (i, state)
748 HOST_WIDE_INT i;
749 struct jcf_partial *state;
750 {
751 RESERVE(3);
752 if (i >= -1 && i <= 5)
753 OP1(OPCODE_iconst_0 + i);
754 else if (i >= -128 && i < 128)
755 {
756 OP1(OPCODE_bipush);
757 OP1(i);
758 }
759 else if (i >= -32768 && i < 32768)
760 {
761 OP1(OPCODE_sipush);
762 OP2(i);
763 }
764 else
765 {
766 i = find_constant1 (&state->cpool, CONSTANT_Integer,
767 (jword)(i & 0xFFFFFFFF));
768 push_constant1 (i, state);
769 }
770 }
771
/* Find or allocate a CONSTANT_Long pool entry for the 64-bit value
   whose low and high words are LO and HI; return its pool index. */

static int
find_constant_wide (lo, hi, state)
     HOST_WIDE_INT lo, hi;
     struct jcf_partial *state;
{
  HOST_WIDE_INT w1, w2;
  /* Arithmetic right shift by 32 extracts the high word into w1. */
  lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
  return find_constant2 (&state->cpool, CONSTANT_Long,
			 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
}
782
/* Find or allocate a constant pool entry for the given VALUE.
   Return the index in the constant pool.  VALUE must be an
   INTEGER_CST, REAL_CST or STRING_CST; anything else is fatal. */

static int
find_constant_index (value, state)
     tree value;
     struct jcf_partial *state;
{
  if (TREE_CODE (value) == INTEGER_CST)
    {
      if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
	return find_constant1 (&state->cpool, CONSTANT_Integer,
			       (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
      else
	return find_constant_wide (TREE_INT_CST_LOW (value),
				   TREE_INT_CST_HIGH (value), state);
    }
  else if (TREE_CODE (value) == REAL_CST)
    {
      long words[2];
      if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
	{
	  /* Convert the host real value to target IEEE single format. */
	  words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
	  return find_constant1 (&state->cpool, CONSTANT_Float,
				 (jword)words[0]);
	}
      else
	{
	  /* Convert to target IEEE double; pick high/low word by
	     the target's float word order. */
	  etardouble (TREE_REAL_CST (value), words);
	  return find_constant2 (&state->cpool, CONSTANT_Double,
				 (jword)(words[1-FLOAT_WORDS_BIG_ENDIAN] &
					 0xFFFFFFFF),
				 (jword)(words[FLOAT_WORDS_BIG_ENDIAN] &
					 0xFFFFFFFF));
	}
    }
  else if (TREE_CODE (value) == STRING_CST)
    {
      return find_string_constant (&state->cpool, value);
    }
  else
    fatal ("find_constant_index - bad type");
}
826
827 /* Push 64-bit long constant on VM stack.
828 Caller is responsible for doing NOTE_PUSH. */
829
830 static void
831 push_long_const (lo, hi, state)
832 HOST_WIDE_INT lo, hi;
833 struct jcf_partial *state;
834 {
835 if (hi == 0 && lo >= 0 && lo <= 1)
836 {
837 RESERVE(1);
838 OP1(OPCODE_lconst_0 + lo);
839 }
840 else if ((hi == 0 && lo < 32768) || (hi == -1 && lo >= -32768))
841 {
842 push_int_const (lo, state);
843 RESERVE (1);
844 OP1 (OPCODE_i2l);
845 }
846 else
847 push_constant2 (find_constant_wide (lo, hi, state), state);
848 }
849
850 static void
851 field_op (field, opcode, state)
852 tree field;
853 int opcode;
854 struct jcf_partial *state;
855 {
856 int index = find_fieldref_index (&state->cpool, field);
857 RESERVE (3);
858 OP1 (opcode);
859 OP2 (index);
860 }
861
/* Returns an integer in the range 0 (for 'int') through 4 (for object
   reference) to 7 (for 'short') which matches the pattern of how JVM
   opcodes typically depend on the operand type.  MAX caps the result:
   sub-int types whose own code exceeds MAX fall back to 0 (int). */

static int
adjust_typed_op (type, max)
     tree type;
     int max;
{
  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case RECORD_TYPE:   return 4;  /* Object reference. */
    case BOOLEAN_TYPE:
      return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
    case CHAR_TYPE:
      return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
    case INTEGER_TYPE:
      switch (TYPE_PRECISION (type))
	{
	case 8:       return max < 5 ? 0 : 5;  /* byte */
	case 16:      return max < 7 ? 0 : 7;  /* short */
	case 32:      return 0;                /* int */
	case 64:      return 1;                /* long */
	}
      break;
    case REAL_TYPE:
      switch (TYPE_PRECISION (type))
	{
	case 32:      return 2;                /* float */
	case 64:      return 3;                /* double */
	}
      break;
    default:
      break;
    }
  /* Any other type has no JVM opcode pattern. */
  abort ();
}
900
901 static void
902 maybe_wide (opcode, index, state)
903 int opcode, index;
904 struct jcf_partial *state;
905 {
906 if (index >= 256)
907 {
908 RESERVE (4);
909 OP1 (OPCODE_wide);
910 OP1 (opcode);
911 OP2 (index);
912 }
913 else
914 {
915 RESERVE (2);
916 OP1 (opcode);
917 OP1 (index);
918 }
919 }
920
/* Compile code to duplicate with offset, where
   SIZE is the size of the stack item to duplicate (1 or 2), and
   OFFSET is where to insert the result (must be 0, 1, or 2).
   (The new words get inserted at stack[SP-size-offset].) */
925
926 static void
927 emit_dup (size, offset, state)
928 int size, offset;
929 struct jcf_partial *state;
930 {
931 int kind;
932 if (size == 0)
933 return;
934 RESERVE(1);
935 if (offset == 0)
936 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
937 else if (offset == 1)
938 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
939 else if (offset == 2)
940 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
941 else
942 abort();
943 OP1 (kind);
944 NOTE_PUSH (size);
945 }
946
/* Emit a `pop' (SIZE == 1) or `pop2' (SIZE == 2) instruction.
   (Caller is responsible for doing NOTE_POP.) */

static void
emit_pop (size, state)
     int size;
     struct jcf_partial *state;
{
  RESERVE (1);
  /* Relies on OPCODE_pop2 == OPCODE_pop + 1. */
  OP1 (OPCODE_pop - 1 + size);
}
955
956 static void
957 emit_iinc (var, value, state)
958 tree var;
959 HOST_WIDE_INT value;
960 struct jcf_partial *state;
961 {
962 int slot = DECL_LOCAL_INDEX (var);
963
964 if (value < -128 || value > 127 || slot >= 256)
965 {
966 RESERVE (6);
967 OP1 (OPCODE_wide);
968 OP1 (OPCODE_iinc);
969 OP2 (slot);
970 OP2 (value);
971 }
972 else
973 {
974 RESERVE (3);
975 OP1 (OPCODE_iinc);
976 OP1 (slot);
977 OP1 (value);
978 }
979 }
980
/* Emit an [ilfda]load or [ilfda]store instruction for local variable VAR.
   OPCODE is the base opcode; the emitted opcode is adjusted for VAR's
   type, and the short one-byte forms are used for slots 0-3. */

static void
emit_load_or_store (var, opcode, state)
     tree var;    /* Variable to load from or store into. */
     int opcode;  /* Either OPCODE_iload or OPCODE_istore. */
     struct jcf_partial *state;
{
  tree type = TREE_TYPE (var);
  int kind = adjust_typed_op (type, 4);
  int index = DECL_LOCAL_INDEX (var);
  if (index <= 3)
    {
      RESERVE (1);
      OP1 (opcode + 5 + 4 * kind + index);    /* [ilfda]{load,store}_[0123] */
    }
  else
    maybe_wide (opcode + kind, index, state);	/* [ilfda]{load,store} */
}
998
999 static void
1000 emit_load (var, state)
1001 tree var;
1002 struct jcf_partial *state;
1003 {
1004 emit_load_or_store (var, OPCODE_iload, state);
1005 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1006 }
1007
1008 static void
1009 emit_store (var, state)
1010 tree var;
1011 struct jcf_partial *state;
1012 {
1013 emit_load_or_store (var, OPCODE_istore, state);
1014 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1015 }
1016
/* Emit the single-byte instruction OPCODE.  TYPE is unused here, and
   no stack-depth accounting is done (a unary op leaves depth net
   unchanged). */

static void
emit_unop (opcode, type, state)
     enum java_opcode opcode;
     tree type ATTRIBUTE_UNUSED;
     struct jcf_partial *state;
{
  RESERVE(1);
  OP1 (opcode);
}
1026
1027 static void
1028 emit_binop (opcode, type, state)
1029 enum java_opcode opcode;
1030 tree type;
1031 struct jcf_partial *state;
1032 {
1033 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1034 RESERVE(1);
1035 OP1 (opcode);
1036 NOTE_POP (size);
1037 }
1038
/* Record a relocation of the given KIND against label TARGET at the
   current position in the current block, and emit VALUE as the 4-byte
   or 2-byte placeholder operand as appropriate for KIND (nothing is
   emitted for SWITCH_ALIGN_RELOC).  Note: OP2/OP4 assume the caller
   has already RESERVE'd space for the operand bytes. */

static void
emit_reloc (value, kind, target, state)
     HOST_WIDE_INT value;
     int kind;
     struct jcf_block *target;
     struct jcf_partial *state;
{
  struct jcf_relocation *reloc = (struct jcf_relocation *)
    obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
  struct jcf_block *block = state->last_block;
  reloc->next = block->u.relocations;
  block->u.relocations = reloc;
  reloc->offset = BUFFER_LENGTH (&state->bytecode);
  reloc->label = target;
  reloc->kind = kind;
  if (kind == 0 || kind == BLOCK_START_RELOC)
    OP4 (value);
  else if (kind != SWITCH_ALIGN_RELOC)
    OP2 (value);
}
1059
/* Emit a relocated 4-byte placeholder referring to LABEL, as used for
   the default and per-case entries of tableswitch/lookupswitch
   instructions (see the SWITCH_EXPR handling below).  */

static void
emit_switch_reloc (label, state)
     struct jcf_block *label;
     struct jcf_partial *state;
{
  emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
}
1067
/* Similar to emit_switch_reloc, but re-uses an existing case reloc
   (RELOC was built while collecting CASE_EXPRs, carrying the case
   value in its offset field; that field is repurposed here to record
   the bytecode position of the placeholder).  */

static void
emit_case_reloc (reloc, state)
     struct jcf_relocation *reloc;
     struct jcf_partial *state;
{
  struct jcf_block *block = state->last_block;
  /* Chain RELOC onto the current block's relocation list.  */
  reloc->next = block->u.relocations;
  block->u.relocations = reloc;
  reloc->offset = BUFFER_LENGTH (&state->bytecode);
  reloc->kind = BLOCK_START_RELOC;
  /* 4-byte placeholder; the caller has RESERVEd room for the whole
     switch instruction.  */
  OP4 (0);
}
1083
1084 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1085 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1086
1087 static void
1088 emit_if (target, opcode, inv_opcode, state)
1089 struct jcf_block *target;
1090 int opcode, inv_opcode;
1091 struct jcf_partial *state;
1092 {
1093 OP1 (opcode);
1094 /* value is 1 byte from reloc back to start of instruction. */
1095 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1096 }
1097
1098 static void
1099 emit_goto (target, state)
1100 struct jcf_block *target;
1101 struct jcf_partial *state;
1102 {
1103 OP1 (OPCODE_goto);
1104 /* Value is 1 byte from reloc back to start of instruction. */
1105 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1106 }
1107
1108 static void
1109 emit_jsr (target, state)
1110 struct jcf_block *target;
1111 struct jcf_partial *state;
1112 {
1113 OP1 (OPCODE_jsr);
1114 /* Value is 1 byte from reloc back to start of instruction. */
1115 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1116 }
1117
/* Generate code to evaluate EXP.  If the result is true,
   branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
   TRUE_BRANCH_FIRST is a code generation hint that the
   TRUE_LABEL may follow right after this.  (The idea is that we
   may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */

static void
generate_bytecode_conditional (exp, true_label, false_label,
			       true_branch_first, state)
     tree exp;
     struct jcf_block *true_label;
     struct jcf_block *false_label;
     int true_branch_first;
     struct jcf_partial *state;
{
  tree exp0, exp1, type;
  int save_SP = state->code_SP;	/* For the consistency check at the end.  */
  enum java_opcode op, negop;
  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* Constant condition: unconditional jump to the right label.  */
      emit_goto (integer_zerop (exp) ? false_label : true_label, state);
      break;
    case COND_EXPR:
      {
	/* (e0 ? e1 : e2) used as a condition: evaluate e0, then
	   evaluate e1 or e2 as a condition in turn.  Both arms must
	   leave the tracked stack at the same depth.  */
	struct jcf_block *then_label = gen_jcf_label (state);
	struct jcf_block *else_label = gen_jcf_label (state);
	int save_SP_before, save_SP_after;
	generate_bytecode_conditional (TREE_OPERAND (exp, 0),
				       then_label, else_label, 1, state);
	define_jcf_label (then_label, state);
	save_SP_before = state->code_SP;
	generate_bytecode_conditional (TREE_OPERAND (exp, 1),
				       true_label, false_label, 1, state);
	save_SP_after = state->code_SP;
	state->code_SP = save_SP_before;
	define_jcf_label (else_label, state);
	generate_bytecode_conditional (TREE_OPERAND (exp, 2),
				       true_label, false_label,
				       true_branch_first, state);
	if (state->code_SP != save_SP_after)
	  fatal ("internal error non-matching SP");
      }
      break;
    case TRUTH_NOT_EXPR:
      /* Logical negation: just swap the two target labels.  */
      generate_bytecode_conditional (TREE_OPERAND (exp, 0),
				     false_label, true_label,
				     ! true_branch_first, state);
      break;
    case TRUTH_ANDIF_EXPR:
      {
	/* Short-circuit AND: evaluate the second operand only if the
	   first one was true.  */
	struct jcf_block *next_label = gen_jcf_label (state);
	generate_bytecode_conditional (TREE_OPERAND (exp, 0),
				       next_label, false_label, 1, state);
	define_jcf_label (next_label, state);
	generate_bytecode_conditional (TREE_OPERAND (exp, 1),
				       true_label, false_label, 1, state);
      }
      break;
    case TRUTH_ORIF_EXPR:
      {
	/* Short-circuit OR: evaluate the second operand only if the
	   first one was false.  */
	struct jcf_block *next_label = gen_jcf_label (state);
	generate_bytecode_conditional (TREE_OPERAND (exp, 0),
				       true_label, next_label, 1, state);
	define_jcf_label (next_label, state);
	generate_bytecode_conditional (TREE_OPERAND (exp, 1),
				       true_label, false_label, 1, state);
      }
      break;
    compare_1:
      /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
	 set it to the corresponding 1-operand if<COND> instructions.  */
      op = op - 6;
      /* FALLTHROUGH */
    compare_2:
      /* The opcodes with their inverses are allocated in pairs.
	 E.g. The inverse of if_icmplt (161) is if_icmpge (162).  */
      negop = (op & 1) ? op + 1 : op - 1;
    compare_2_ptr:
      /* Emit the branch in whichever orientation lets the hinted label
	 fall through.  */
      if (true_branch_first)
	{
	  emit_if (false_label, negop, op, state);
	  emit_goto (true_label, state);
	}
      else
	{
	  emit_if (true_label, op, negop, state);
	  emit_goto (false_label, state);
	}
      break;
    case EQ_EXPR:
      op = OPCODE_if_icmpeq;
      goto compare;
    case NE_EXPR:
      op = OPCODE_if_icmpne;
      goto compare;
    case GT_EXPR:
      op = OPCODE_if_icmpgt;
      goto compare;
    case LT_EXPR:
      op = OPCODE_if_icmplt;
      goto compare;
    case GE_EXPR:
      op = OPCODE_if_icmpge;
      goto compare;
    case LE_EXPR:
      op = OPCODE_if_icmple;
      goto compare;
    compare:
      exp0 = TREE_OPERAND (exp, 0);
      exp1 = TREE_OPERAND (exp, 1);
      type = TREE_TYPE (exp0);
      switch (TREE_CODE (type))
	{
	  int opf;
	case POINTER_TYPE: case RECORD_TYPE:
	  /* Reference comparison: only == and != are meaningful.  */
	  switch (TREE_CODE (exp))
	    {
	    case EQ_EXPR: op = OPCODE_if_acmpeq; break;
	    case NE_EXPR: op = OPCODE_if_acmpne; break;
	    default: abort();
	    }
	  if (integer_zerop (exp1) || integer_zerop (exp0))
	    {
	      /* Comparison against null: push the non-null operand and
		 use ifnull/ifnonnull instead of a 2-operand compare.  */
	      generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp1,
				       STACK_TARGET, state);
	      op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
	      negop = (op & 1) ? op - 1 : op + 1;
	      NOTE_POP (1);
	      goto compare_2_ptr;
	    }
	  generate_bytecode_insns (exp0, STACK_TARGET, state);
	  generate_bytecode_insns (exp1, STACK_TARGET, state);
	  NOTE_POP (2);
	  goto compare_2;
	case REAL_TYPE:
	  /* Float/double comparison: fcmpl/fcmpg (dcmpl/dcmpg for
	     doubles) then branch on the integer result.  The 'g'
	     variant is picked for < and <= -- presumably so that a
	     NaN operand makes the comparison come out false.  */
	  generate_bytecode_insns (exp0, STACK_TARGET, state);
	  generate_bytecode_insns (exp1, STACK_TARGET, state);
	  if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
	    opf = OPCODE_fcmpg;
	  else
	    opf = OPCODE_fcmpl;
	  if (TYPE_PRECISION (type) > 32)
	    {
	      opf += 2;		/* fcmp<x> -> dcmp<x>.  */
	      NOTE_POP (4);
	    }
	  else
	    NOTE_POP (2);
	  RESERVE (1);
	  OP1 (opf);
	  goto compare_1;
	case INTEGER_TYPE:
	  if (TYPE_PRECISION (type) > 32)
	    {
	      /* 64-bit integers: lcmp, then branch on the result.  */
	      generate_bytecode_insns (exp0, STACK_TARGET, state);
	      generate_bytecode_insns (exp1, STACK_TARGET, state);
	      NOTE_POP (4);
	      RESERVE (1);
	      OP1 (OPCODE_lcmp);
	      goto compare_1;
	    }
	  /* FALLTHROUGH */
	default:
	  if (integer_zerop (exp1))
	    {
	      /* Comparison against zero: 1-operand if<COND> form.  */
	      generate_bytecode_insns (exp0, STACK_TARGET, state);
	      NOTE_POP (1);
	      goto compare_1;
	    }
	  if (integer_zerop (exp0))
	    {
	      /* Zero on the left: swap the sense of the comparison
		 (lt<->gt, le<->ge) so the 1-operand form still works.  */
	      switch (op)
		{
		case OPCODE_if_icmplt:
		case OPCODE_if_icmpge:
		  op += 2;
		  break;
		case OPCODE_if_icmpgt:
		case OPCODE_if_icmple:
		  op -= 2;
		  break;
		default:
		  break;
		}
	      generate_bytecode_insns (exp1, STACK_TARGET, state);
	      NOTE_POP (1);
	      goto compare_1;
	    }
	  generate_bytecode_insns (exp0, STACK_TARGET, state);
	  generate_bytecode_insns (exp1, STACK_TARGET, state);
	  NOTE_POP (2);
	  goto compare_2;
	}

    default:
      /* Anything else: evaluate EXP to an int on the stack, then
	 branch on whether it is zero.  */
      generate_bytecode_insns (exp, STACK_TARGET, state);
      NOTE_POP (1);
      if (true_branch_first)
	{
	  emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
	  emit_goto (true_label, state);
	}
      else
	{
	  emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
	  emit_goto (false_label, state);
	}
      break;
    }
  /* Either branch leaves the operand stack empty of our operands.  */
  if (save_SP != state->code_SP)
    fatal ("internal error - SP mismatch");
}
1331
1332 /* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs
1333 but only as far out as LIMIT (since we are about to jump to the
1334 emit label that is LIMIT). */
1335
1336 static void
1337 call_cleanups (limit, state)
1338 struct jcf_block *limit;
1339 struct jcf_partial *state;
1340 {
1341 struct jcf_block *block = state->labeled_blocks;
1342 for (; block != limit; block = block->next)
1343 {
1344 if (block->pc == PENDING_CLEANUP_PC)
1345 emit_jsr (block, state);
1346 }
1347 }
1348
/* Emit code to evaluate EXP (the already-unwrapped return value, or
   NULL), run any pending cleanups, and return from the current method
   with the appropriate [ilfda]return or return opcode.  */

static void
generate_bytecode_return (exp, state)
     tree exp;
     struct jcf_partial *state;
{
  tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
  int returns_void = TREE_CODE (return_type) == VOID_TYPE;
  int op;
 again:
  if (exp != NULL)
    {
      switch (TREE_CODE (exp))
	{
	case COMPOUND_EXPR:
	  /* Evaluate the first half for effect, then loop on the
	     second half (avoids recursion for long chains).  */
	  generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
				   state);
	  exp = TREE_OPERAND (exp, 1);
	  goto again;
	case COND_EXPR:
	  {
	    /* return (c ? a : b): emit a separate return in each arm.  */
	    struct jcf_block *then_label = gen_jcf_label (state);
	    struct jcf_block *else_label = gen_jcf_label (state);
	    generate_bytecode_conditional (TREE_OPERAND (exp, 0),
					   then_label, else_label, 1, state);
	    define_jcf_label (then_label, state);
	    generate_bytecode_return (TREE_OPERAND (exp, 1), state);
	    define_jcf_label (else_label, state);
	    generate_bytecode_return (TREE_OPERAND (exp, 2), state);
	  }
	  return;
	default:
	  generate_bytecode_insns (exp,
				   returns_void ? IGNORE_TARGET
				   : STACK_TARGET, state);
	}
    }
  if (returns_void)
    {
      op = OPCODE_return;
      call_cleanups (NULL_PTR, state);
    }
  else
    {
      op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
      if (state->num_finalizers > 0)
	{
	  /* Cleanups may clobber the stacked return value, so park it
	     in a dedicated local across the cleanup calls.  */
	  if (state->return_value_decl == NULL_TREE)
	    {
	      state->return_value_decl
		= build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
	      localvar_alloc (state->return_value_decl, state);
	    }
	  emit_store (state->return_value_decl, state);
	  call_cleanups (NULL_PTR, state);
	  emit_load (state->return_value_decl, state);
	  /* If we call localvar_free (state->return_value_decl, state),
	     then we risk the save decl erroneously re-used in the
	     finalizer.  Instead, we keep the state->return_value_decl
	     allocated through the rest of the method.  This is not
	     the greatest solution, but it is at least simple and safe.  */
	}
    }
  RESERVE (1);
  OP1 (op);
}
1414
1415 /* Generate bytecode for sub-expression EXP of METHOD.
1416 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1417
1418 static void
1419 generate_bytecode_insns (exp, target, state)
1420 tree exp;
1421 int target;
1422 struct jcf_partial *state;
1423 {
1424 tree type;
1425 enum java_opcode jopcode;
1426 int op;
1427 HOST_WIDE_INT value;
1428 int post_op;
1429 int size;
1430 int offset;
1431
1432 if (exp == NULL && target == IGNORE_TARGET)
1433 return;
1434
1435 type = TREE_TYPE (exp);
1436
1437 switch (TREE_CODE (exp))
1438 {
1439 case BLOCK:
1440 if (BLOCK_EXPR_BODY (exp))
1441 {
1442 tree local;
1443 tree body = BLOCK_EXPR_BODY (exp);
1444 for (local = BLOCK_EXPR_DECLS (exp); local; )
1445 {
1446 tree next = TREE_CHAIN (local);
1447 localvar_alloc (local, state);
1448 local = next;
1449 }
1450 /* Avoid deep recursion for long blocks. */
1451 while (TREE_CODE (body) == COMPOUND_EXPR)
1452 {
1453 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1454 body = TREE_OPERAND (body, 1);
1455 }
1456 generate_bytecode_insns (body, target, state);
1457 for (local = BLOCK_EXPR_DECLS (exp); local; )
1458 {
1459 tree next = TREE_CHAIN (local);
1460 localvar_free (local, state);
1461 local = next;
1462 }
1463 }
1464 break;
1465 case COMPOUND_EXPR:
1466 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1467 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1468 break;
1469 case EXPR_WITH_FILE_LOCATION:
1470 {
1471 char *saved_input_filename = input_filename;
1472 tree body = EXPR_WFL_NODE (exp);
1473 int saved_lineno = lineno;
1474 if (body == empty_stmt_node)
1475 break;
1476 input_filename = EXPR_WFL_FILENAME (exp);
1477 lineno = EXPR_WFL_LINENO (exp);
1478 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1479 && debug_info_level > DINFO_LEVEL_NONE)
1480 put_linenumber (lineno, state);
1481 generate_bytecode_insns (body, target, state);
1482 input_filename = saved_input_filename;
1483 lineno = saved_lineno;
1484 }
1485 break;
1486 case INTEGER_CST:
1487 if (target == IGNORE_TARGET) ; /* do nothing */
1488 else if (TREE_CODE (type) == POINTER_TYPE)
1489 {
1490 if (! integer_zerop (exp))
1491 abort();
1492 RESERVE(1);
1493 OP1 (OPCODE_aconst_null);
1494 NOTE_PUSH (1);
1495 }
1496 else if (TYPE_PRECISION (type) <= 32)
1497 {
1498 push_int_const (TREE_INT_CST_LOW (exp), state);
1499 NOTE_PUSH (1);
1500 }
1501 else
1502 {
1503 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1504 state);
1505 NOTE_PUSH (2);
1506 }
1507 break;
1508 case REAL_CST:
1509 {
1510 int prec = TYPE_PRECISION (type) >> 5;
1511 RESERVE(1);
1512 if (real_zerop (exp))
1513 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1514 else if (real_onep (exp))
1515 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1516 /* FIXME Should also use fconst_2 for 2.0f.
1517 Also, should use iconst_2/ldc followed by i2f/i2d
1518 for other float/double when the value is a small integer. */
1519 else
1520 {
1521 offset = find_constant_index (exp, state);
1522 if (prec == 1)
1523 push_constant1 (offset, state);
1524 else
1525 push_constant2 (offset, state);
1526 }
1527 NOTE_PUSH (prec);
1528 }
1529 break;
1530 case STRING_CST:
1531 push_constant1 (find_string_constant (&state->cpool, exp), state);
1532 NOTE_PUSH (1);
1533 break;
1534 case VAR_DECL:
1535 if (TREE_STATIC (exp))
1536 {
1537 field_op (exp, OPCODE_getstatic, state);
1538 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1539 break;
1540 }
1541 /* ... fall through ... */
1542 case PARM_DECL:
1543 emit_load (exp, state);
1544 break;
1545 case NON_LVALUE_EXPR:
1546 case INDIRECT_REF:
1547 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1548 break;
1549 case ARRAY_REF:
1550 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1551 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1552 if (target != IGNORE_TARGET)
1553 {
1554 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1555 RESERVE(1);
1556 OP1 (jopcode);
1557 if (! TYPE_IS_WIDE (type))
1558 NOTE_POP (1);
1559 }
1560 break;
1561 case COMPONENT_REF:
1562 {
1563 tree obj = TREE_OPERAND (exp, 0);
1564 tree field = TREE_OPERAND (exp, 1);
1565 int is_static = FIELD_STATIC (field);
1566 generate_bytecode_insns (obj,
1567 is_static ? IGNORE_TARGET : target, state);
1568 if (target != IGNORE_TARGET)
1569 {
1570 if (DECL_NAME (field) == length_identifier_node && !is_static
1571 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1572 {
1573 RESERVE (1);
1574 OP1 (OPCODE_arraylength);
1575 }
1576 else
1577 {
1578 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1579 state);
1580 if (! is_static)
1581 NOTE_POP (1);
1582 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1583 }
1584 }
1585 }
1586 break;
1587 case TRUTH_ANDIF_EXPR:
1588 case TRUTH_ORIF_EXPR:
1589 case EQ_EXPR:
1590 case NE_EXPR:
1591 case GT_EXPR:
1592 case LT_EXPR:
1593 case GE_EXPR:
1594 case LE_EXPR:
1595 {
1596 struct jcf_block *then_label = gen_jcf_label (state);
1597 struct jcf_block *else_label = gen_jcf_label (state);
1598 struct jcf_block *end_label = gen_jcf_label (state);
1599 generate_bytecode_conditional (exp,
1600 then_label, else_label, 1, state);
1601 define_jcf_label (then_label, state);
1602 push_int_const (1, state);
1603 emit_goto (end_label, state);
1604 define_jcf_label (else_label, state);
1605 push_int_const (0, state);
1606 define_jcf_label (end_label, state);
1607 NOTE_PUSH (1);
1608 }
1609 break;
1610 case COND_EXPR:
1611 {
1612 struct jcf_block *then_label = gen_jcf_label (state);
1613 struct jcf_block *else_label = gen_jcf_label (state);
1614 struct jcf_block *end_label = gen_jcf_label (state);
1615 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1616 then_label, else_label, 1, state);
1617 define_jcf_label (then_label, state);
1618 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1619 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1620 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1621 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1622 emit_goto (end_label, state);
1623 define_jcf_label (else_label, state);
1624 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1625 define_jcf_label (end_label, state);
1626
1627 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1628 if (TREE_TYPE (exp) != void_type_node)
1629 NOTE_POP (TYPE_PRECISION (TREE_TYPE (exp)) > 32 ? 2 : 1);
1630 }
1631 break;
1632 case CASE_EXPR:
1633 {
1634 struct jcf_switch_state *sw_state = state->sw_state;
1635 struct jcf_relocation *reloc = (struct jcf_relocation *)
1636 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1637 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1638 reloc->kind = 0;
1639 reloc->label = get_jcf_label_here (state);
1640 reloc->offset = case_value;
1641 reloc->next = sw_state->cases;
1642 sw_state->cases = reloc;
1643 if (sw_state->num_cases == 0)
1644 {
1645 sw_state->min_case = case_value;
1646 sw_state->max_case = case_value;
1647 }
1648 else
1649 {
1650 if (case_value < sw_state->min_case)
1651 sw_state->min_case = case_value;
1652 if (case_value > sw_state->max_case)
1653 sw_state->max_case = case_value;
1654 }
1655 sw_state->num_cases++;
1656 }
1657 break;
1658 case DEFAULT_EXPR:
1659 state->sw_state->default_label = get_jcf_label_here (state);
1660 break;
1661
1662 case SWITCH_EXPR:
1663 {
1664 /* The SWITCH_EXPR has three parts, generated in the following order:
1665 1. the switch_expression (the value used to select the correct case);
1666 2. the switch_body;
1667 3. the switch_instruction (the tableswitch/loopupswitch instruction.).
1668 After code generation, we will re-order then in the order 1, 3, 2.
1669 This is to avoid an extra GOTOs. */
1670 struct jcf_switch_state sw_state;
1671 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1672 struct jcf_block *body_last; /* Last block of the switch_body. */
1673 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1674 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1675 struct jcf_block *body_block;
1676 int switch_length;
1677 sw_state.prev = state->sw_state;
1678 state->sw_state = &sw_state;
1679 sw_state.cases = NULL;
1680 sw_state.num_cases = 0;
1681 sw_state.default_label = NULL;
1682 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1683 expression_last = state->last_block;
1684 body_block = get_jcf_label_here (state); /* Force a new block here. */
1685 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1686 body_last = state->last_block;
1687
1688 switch_instruction = gen_jcf_label (state);
1689 define_jcf_label (switch_instruction, state);
1690 if (sw_state.default_label == NULL)
1691 sw_state.default_label = gen_jcf_label (state);
1692
1693 if (sw_state.num_cases <= 1)
1694 {
1695 if (sw_state.num_cases == 0)
1696 {
1697 emit_pop (1, state);
1698 NOTE_POP (1);
1699 }
1700 else
1701 {
1702 push_int_const (sw_state.cases->offset, state);
1703 emit_if (sw_state.cases->label,
1704 OPCODE_ifeq, OPCODE_ifne, state);
1705 }
1706 emit_goto (sw_state.default_label, state);
1707 }
1708 else
1709 {
1710 HOST_WIDE_INT i;
1711 /* Copy the chain of relocs into a sorted array. */
1712 struct jcf_relocation **relocs = (struct jcf_relocation **)
1713 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1714 /* The relocs arrays is a buffer with a gap.
1715 The assumption is that cases will normally come in "runs". */
1716 int gap_start = 0;
1717 int gap_end = sw_state.num_cases;
1718 struct jcf_relocation *reloc;
1719 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1720 {
1721 HOST_WIDE_INT case_value = reloc->offset;
1722 while (gap_end < sw_state.num_cases)
1723 {
1724 struct jcf_relocation *end = relocs[gap_end];
1725 if (case_value <= end->offset)
1726 break;
1727 relocs[gap_start++] = end;
1728 gap_end++;
1729 }
1730 while (gap_start > 0)
1731 {
1732 struct jcf_relocation *before = relocs[gap_start-1];
1733 if (case_value >= before->offset)
1734 break;
1735 relocs[--gap_end] = before;
1736 gap_start--;
1737 }
1738 relocs[gap_start++] = reloc;
1739 /* Note we don't check for duplicates. FIXME! */
1740 }
1741
1742 if (2 * sw_state.num_cases
1743 >= sw_state.max_case - sw_state.min_case)
1744 { /* Use tableswitch. */
1745 int index = 0;
1746 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1747 OP1 (OPCODE_tableswitch);
1748 emit_reloc (RELOCATION_VALUE_0,
1749 SWITCH_ALIGN_RELOC, NULL, state);
1750 emit_switch_reloc (sw_state.default_label, state);
1751 OP4 (sw_state.min_case);
1752 OP4 (sw_state.max_case);
1753 for (i = sw_state.min_case; ; )
1754 {
1755 reloc = relocs[index];
1756 if (i == reloc->offset)
1757 {
1758 emit_case_reloc (reloc, state);
1759 if (i == sw_state.max_case)
1760 break;
1761 index++;
1762 }
1763 else
1764 emit_switch_reloc (sw_state.default_label, state);
1765 i++;
1766 }
1767 }
1768 else
1769 { /* Use lookupswitch. */
1770 RESERVE(9 + 8 * sw_state.num_cases);
1771 OP1 (OPCODE_lookupswitch);
1772 emit_reloc (RELOCATION_VALUE_0,
1773 SWITCH_ALIGN_RELOC, NULL, state);
1774 emit_switch_reloc (sw_state.default_label, state);
1775 OP4 (sw_state.num_cases);
1776 for (i = 0; i < sw_state.num_cases; i++)
1777 {
1778 struct jcf_relocation *reloc = relocs[i];
1779 OP4 (reloc->offset);
1780 emit_case_reloc (reloc, state);
1781 }
1782 }
1783 free (relocs);
1784 }
1785
1786 instruction_last = state->last_block;
1787 if (sw_state.default_label->pc < 0)
1788 define_jcf_label (sw_state.default_label, state);
1789 else /* Force a new block. */
1790 sw_state.default_label = get_jcf_label_here (state);
1791 /* Now re-arrange the blocks so the switch_instruction
1792 comes before the switch_body. */
1793 switch_length = state->code_length - switch_instruction->pc;
1794 switch_instruction->pc = body_block->pc;
1795 instruction_last->next = body_block;
1796 instruction_last->v.chunk->next = body_block->v.chunk;
1797 expression_last->next = switch_instruction;
1798 expression_last->v.chunk->next = switch_instruction->v.chunk;
1799 body_last->next = sw_state.default_label;
1800 body_last->v.chunk->next = NULL;
1801 state->chunk = body_last->v.chunk;
1802 for (; body_block != sw_state.default_label; body_block = body_block->next)
1803 body_block->pc += switch_length;
1804
1805 state->sw_state = sw_state.prev;
1806 break;
1807 }
1808
1809 case RETURN_EXPR:
1810 exp = TREE_OPERAND (exp, 0);
1811 if (exp == NULL_TREE)
1812 exp = empty_stmt_node;
1813 else if (TREE_CODE (exp) != MODIFY_EXPR)
1814 abort ();
1815 else
1816 exp = TREE_OPERAND (exp, 1);
1817 generate_bytecode_return (exp, state);
1818 break;
1819 case LABELED_BLOCK_EXPR:
1820 {
1821 struct jcf_block *end_label = gen_jcf_label (state);
1822 end_label->next = state->labeled_blocks;
1823 state->labeled_blocks = end_label;
1824 end_label->pc = PENDING_EXIT_PC;
1825 end_label->u.labeled_block = exp;
1826 if (LABELED_BLOCK_BODY (exp))
1827 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1828 if (state->labeled_blocks != end_label)
1829 abort();
1830 state->labeled_blocks = end_label->next;
1831 define_jcf_label (end_label, state);
1832 }
1833 break;
1834 case LOOP_EXPR:
1835 {
1836 tree body = TREE_OPERAND (exp, 0);
1837 #if 0
1838 if (TREE_CODE (body) == COMPOUND_EXPR
1839 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1840 {
1841 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1842 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1843 struct jcf_block *head_label;
1844 struct jcf_block *body_label;
1845 struct jcf_block *end_label = gen_jcf_label (state);
1846 struct jcf_block *exit_label = state->labeled_blocks;
1847 head_label = gen_jcf_label (state);
1848 emit_goto (head_label, state);
1849 body_label = get_jcf_label_here (state);
1850 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1851 define_jcf_label (head_label, state);
1852 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1853 end_label, body_label, 1, state);
1854 define_jcf_label (end_label, state);
1855 }
1856 else
1857 #endif
1858 {
1859 struct jcf_block *head_label = get_jcf_label_here (state);
1860 generate_bytecode_insns (body, IGNORE_TARGET, state);
1861 emit_goto (head_label, state);
1862 }
1863 }
1864 break;
1865 case EXIT_EXPR:
1866 {
1867 struct jcf_block *label = state->labeled_blocks;
1868 struct jcf_block *end_label = gen_jcf_label (state);
1869 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1870 label, end_label, 0, state);
1871 define_jcf_label (end_label, state);
1872 }
1873 break;
1874 case EXIT_BLOCK_EXPR:
1875 {
1876 struct jcf_block *label = state->labeled_blocks;
1877 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1878 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1879 label = label->next;
1880 call_cleanups (label, state);
1881 emit_goto (label, state);
1882 }
1883 break;
1884
1885 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1886 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1887 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1888 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1889 increment:
1890
1891 exp = TREE_OPERAND (exp, 0);
1892 type = TREE_TYPE (exp);
1893 size = TYPE_IS_WIDE (type) ? 2 : 1;
1894 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1895 && ! TREE_STATIC (exp)
1896 && TREE_CODE (type) == INTEGER_TYPE
1897 && TYPE_PRECISION (type) == 32)
1898 {
1899 if (target != IGNORE_TARGET && post_op)
1900 emit_load (exp, state);
1901 emit_iinc (exp, value, state);
1902 if (target != IGNORE_TARGET && ! post_op)
1903 emit_load (exp, state);
1904 break;
1905 }
1906 if (TREE_CODE (exp) == COMPONENT_REF)
1907 {
1908 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1909 emit_dup (1, 0, state);
1910 /* Stack: ..., objectref, objectref. */
1911 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1912 NOTE_PUSH (size-1);
1913 /* Stack: ..., objectref, oldvalue. */
1914 offset = 1;
1915 }
1916 else if (TREE_CODE (exp) == ARRAY_REF)
1917 {
1918 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1919 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1920 emit_dup (2, 0, state);
1921 /* Stack: ..., array, index, array, index. */
1922 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1923 RESERVE(1);
1924 OP1 (jopcode);
1925 NOTE_POP (2-size);
1926 /* Stack: ..., array, index, oldvalue. */
1927 offset = 2;
1928 }
1929 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1930 {
1931 generate_bytecode_insns (exp, STACK_TARGET, state);
1932 /* Stack: ..., oldvalue. */
1933 offset = 0;
1934 }
1935 else
1936 abort ();
1937
1938 if (target != IGNORE_TARGET && post_op)
1939 emit_dup (size, offset, state);
1940 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1941 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1942 /* Stack, otherwise: ..., [result, ] oldvalue. */
1943 if (size == 1)
1944 push_int_const (value, state);
1945 else
1946 push_long_const (value, (HOST_WIDE_INT)(value >= 0 ? 0 : -1), state);
1947 NOTE_PUSH (size);
1948 emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state);
1949 if (target != IGNORE_TARGET && ! post_op)
1950 emit_dup (size, offset, state);
1951 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1952 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1953 /* Stack, otherwise: ..., [result, ] newvalue. */
1954 goto finish_assignment;
1955
1956 case MODIFY_EXPR:
1957 {
1958 tree lhs = TREE_OPERAND (exp, 0);
1959 tree rhs = TREE_OPERAND (exp, 1);
1960 int offset = 0;
1961
1962 /* See if we can use the iinc instruction. */
1963 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1964 && ! TREE_STATIC (lhs)
1965 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1966 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1967 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1968 {
1969 tree arg0 = TREE_OPERAND (rhs, 0);
1970 tree arg1 = TREE_OPERAND (rhs, 1);
1971 HOST_WIDE_INT min_value = -32768;
1972 HOST_WIDE_INT max_value = 32767;
1973 if (TREE_CODE (rhs) == MINUS_EXPR)
1974 {
1975 min_value++;
1976 max_value++;
1977 }
1978 else if (arg1 == lhs)
1979 {
1980 arg0 = arg1;
1981 arg1 = TREE_OPERAND (rhs, 0);
1982 }
1983 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1984 {
1985 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1986 value = TREE_INT_CST_LOW (arg1);
1987 if ((hi_value == 0 && value <= max_value)
1988 || (hi_value == -1 && value >= min_value))
1989 {
1990 if (TREE_CODE (rhs) == MINUS_EXPR)
1991 value = -value;
1992 emit_iinc (lhs, value, state);
1993 break;
1994 }
1995 }
1996 }
1997
1998 if (TREE_CODE (lhs) == COMPONENT_REF)
1999 {
2000 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2001 STACK_TARGET, state);
2002 offset = 1;
2003 }
2004 else if (TREE_CODE (lhs) == ARRAY_REF)
2005 {
2006 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2007 STACK_TARGET, state);
2008 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2009 STACK_TARGET, state);
2010 offset = 2;
2011 }
2012 else
2013 offset = 0;
2014 generate_bytecode_insns (rhs, STACK_TARGET, state);
2015 if (target != IGNORE_TARGET)
2016 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2017 exp = lhs;
2018 }
2019 /* FALLTHOUGH */
2020
2021 finish_assignment:
2022 if (TREE_CODE (exp) == COMPONENT_REF)
2023 {
2024 tree field = TREE_OPERAND (exp, 1);
2025 if (! FIELD_STATIC (field))
2026 NOTE_POP (1);
2027 field_op (field,
2028 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2029 state);
2030
2031 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2032 }
2033 else if (TREE_CODE (exp) == VAR_DECL
2034 || TREE_CODE (exp) == PARM_DECL)
2035 {
2036 if (FIELD_STATIC (exp))
2037 {
2038 field_op (exp, OPCODE_putstatic, state);
2039 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2040 }
2041 else
2042 emit_store (exp, state);
2043 }
2044 else if (TREE_CODE (exp) == ARRAY_REF)
2045 {
2046 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2047 RESERVE(1);
2048 OP1 (jopcode);
2049 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2050 }
2051 else
2052 fatal ("internal error (bad lhs to MODIFY_EXPR)");
2053 break;
2054 case PLUS_EXPR:
2055 jopcode = OPCODE_iadd;
2056 goto binop;
2057 case MINUS_EXPR:
2058 jopcode = OPCODE_isub;
2059 goto binop;
2060 case MULT_EXPR:
2061 jopcode = OPCODE_imul;
2062 goto binop;
2063 case TRUNC_DIV_EXPR:
2064 case RDIV_EXPR:
2065 jopcode = OPCODE_idiv;
2066 goto binop;
2067 case TRUNC_MOD_EXPR:
2068 jopcode = OPCODE_irem;
2069 goto binop;
2070 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2071 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2072 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2073 case TRUTH_AND_EXPR:
2074 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2075 case TRUTH_OR_EXPR:
2076 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2077 case TRUTH_XOR_EXPR:
2078 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2079 binop:
2080 {
2081 tree arg0 = TREE_OPERAND (exp, 0);
2082 tree arg1 = TREE_OPERAND (exp, 1);
2083 jopcode += adjust_typed_op (type, 3);
2084 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2085 {
2086 /* fold may (e.g) convert 2*x to x+x. */
2087 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2088 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2089 }
2090 else
2091 {
2092 generate_bytecode_insns (arg0, target, state);
2093 generate_bytecode_insns (arg1, target, state);
2094 }
2095 /* For most binary operations, both operands and the result have the
2096 same type. Shift operations are different. Using arg1's type
 2097            gets us the correct SP adjustment in all cases. */
2098 if (target == STACK_TARGET)
2099 emit_binop (jopcode, TREE_TYPE (arg1), state);
2100 break;
2101 }
2102 case TRUTH_NOT_EXPR:
2103 case BIT_NOT_EXPR:
2104 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2105 if (target == STACK_TARGET)
2106 {
2107 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2108 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2109 RESERVE (2);
2110 if (is_long)
2111 OP1 (OPCODE_i2l);
2112 NOTE_PUSH (1 + is_long);
2113 OP1 (OPCODE_ixor + is_long);
2114 NOTE_POP (1 + is_long);
2115 }
2116 break;
2117 case NEGATE_EXPR:
2118 jopcode = OPCODE_ineg;
2119 jopcode += adjust_typed_op (type, 3);
2120 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2121 if (target == STACK_TARGET)
2122 emit_unop (jopcode, type, state);
2123 break;
2124 case INSTANCEOF_EXPR:
2125 {
2126 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2127 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2128 RESERVE (3);
2129 OP1 (OPCODE_instanceof);
2130 OP2 (index);
2131 }
2132 break;
2133 case CONVERT_EXPR:
2134 case NOP_EXPR:
2135 case FLOAT_EXPR:
2136 case FIX_TRUNC_EXPR:
2137 {
2138 tree src = TREE_OPERAND (exp, 0);
2139 tree src_type = TREE_TYPE (src);
2140 tree dst_type = TREE_TYPE (exp);
2141 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2142 if (target == IGNORE_TARGET || src_type == dst_type)
2143 break;
2144 if (TREE_CODE (dst_type) == POINTER_TYPE)
2145 {
2146 if (TREE_CODE (exp) == CONVERT_EXPR)
2147 {
2148 int index = find_class_constant (&state->cpool,
2149 TREE_TYPE (dst_type));
2150 RESERVE (3);
2151 OP1 (OPCODE_checkcast);
2152 OP2 (index);
2153 }
2154 }
2155 else /* Convert numeric types. */
2156 {
2157 int wide_src = TYPE_PRECISION (src_type) > 32;
2158 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2159 NOTE_POP (1 + wide_src);
2160 RESERVE (1);
2161 if (TREE_CODE (dst_type) == REAL_TYPE)
2162 {
2163 if (TREE_CODE (src_type) == REAL_TYPE)
2164 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2165 else if (TYPE_PRECISION (src_type) == 64)
2166 OP1 (OPCODE_l2f + wide_dst);
2167 else
2168 OP1 (OPCODE_i2f + wide_dst);
2169 }
2170 else /* Convert to integral type. */
2171 {
2172 if (TREE_CODE (src_type) == REAL_TYPE)
2173 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2174 else if (wide_dst)
2175 OP1 (OPCODE_i2l);
2176 else if (wide_src)
2177 OP1 (OPCODE_l2i);
2178 if (TYPE_PRECISION (dst_type) < 32)
2179 {
2180 RESERVE (1);
2181 /* Already converted to int, if needed. */
2182 if (TYPE_PRECISION (dst_type) <= 8)
2183 OP1 (OPCODE_i2b);
2184 else if (TREE_UNSIGNED (dst_type))
2185 OP1 (OPCODE_i2c);
2186 else
2187 OP1 (OPCODE_i2s);
2188 }
2189 }
2190 NOTE_PUSH (1 + wide_dst);
2191 }
2192 }
2193 break;
2194
2195 case CLEANUP_POINT_EXPR:
2196 {
2197 struct jcf_block *save_labeled_blocks = state->labeled_blocks;
2198 int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0));
2199 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2200 if (target != IGNORE_TARGET)
2201 abort ();
2202 while (state->labeled_blocks != save_labeled_blocks)
2203 {
2204 struct jcf_block *finished_label = NULL;
2205 tree return_link;
2206 tree exception_type = build_pointer_type (throwable_type_node);
2207 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
2208 exception_type);
2209 struct jcf_block *end_label = get_jcf_label_here (state);
2210 struct jcf_block *label = state->labeled_blocks;
2211 struct jcf_handler *handler;
2212 tree cleanup = label->u.labeled_block;
2213 state->labeled_blocks = label->next;
2214 state->num_finalizers--;
2215 if (can_complete)
2216 {
2217 finished_label = gen_jcf_label (state);
2218 emit_jsr (label, state);
2219 emit_goto (finished_label, state);
2220 if (! CAN_COMPLETE_NORMALLY (cleanup))
2221 can_complete = 0;
2222 }
2223 handler = alloc_handler (label->v.start_label, end_label, state);
2224 handler->type = NULL_TREE;
2225 localvar_alloc (exception_decl, state);
2226 NOTE_PUSH (1);
2227 emit_store (exception_decl, state);
2228 emit_jsr (label, state);
2229 emit_load (exception_decl, state);
2230 RESERVE (1);
2231 OP1 (OPCODE_athrow);
2232 NOTE_POP (1);
2233
2234 /* The finally block. */
2235 return_link = build_decl (VAR_DECL, NULL_TREE,
2236 return_address_type_node);
2237 define_jcf_label (label, state);
2238 NOTE_PUSH (1);
2239 localvar_alloc (return_link, state);
2240 emit_store (return_link, state);
2241 generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
2242 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2243 localvar_free (return_link, state);
2244 localvar_free (exception_decl, state);
2245 if (finished_label != NULL)
2246 define_jcf_label (finished_label, state);
2247 }
2248 }
2249 break;
2250
2251 case WITH_CLEANUP_EXPR:
2252 {
2253 struct jcf_block *label;
2254 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2255 label = gen_jcf_label (state);
2256 label->pc = PENDING_CLEANUP_PC;
2257 label->next = state->labeled_blocks;
2258 state->labeled_blocks = label;
2259 state->num_finalizers++;
2260 label->u.labeled_block = TREE_OPERAND (exp, 2);
2261 label->v.start_label = get_jcf_label_here (state);
2262 if (target != IGNORE_TARGET)
2263 abort ();
2264 }
2265 break;
2266
2267 case TRY_EXPR:
2268 {
2269 tree try_clause = TREE_OPERAND (exp, 0);
2270 struct jcf_block *start_label = get_jcf_label_here (state);
2271 struct jcf_block *end_label; /* End of try clause. */
2272 struct jcf_block *finished_label = gen_jcf_label (state);
2273 tree clause = TREE_OPERAND (exp, 1);
2274 if (target != IGNORE_TARGET)
2275 abort ();
2276 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2277 end_label = get_jcf_label_here (state);
2278 if (CAN_COMPLETE_NORMALLY (try_clause))
2279 emit_goto (finished_label, state);
2280 while (clause != NULL_TREE)
2281 {
2282 tree catch_clause = TREE_OPERAND (clause, 0);
2283 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2284 struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
2285 if (exception_decl == NULL_TREE)
2286 handler->type = NULL_TREE;
2287 else
2288 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2289 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2290 clause = TREE_CHAIN (clause);
2291 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2292 emit_goto (finished_label, state);
2293 }
2294 define_jcf_label (finished_label, state);
2295 }
2296 break;
2297 case TRY_FINALLY_EXPR:
2298 {
2299 tree try_block = TREE_OPERAND (exp, 0);
2300 tree finally = TREE_OPERAND (exp, 1);
2301 struct jcf_block *finished_label = gen_jcf_label (state);
2302 struct jcf_block *finally_label = gen_jcf_label (state);
2303 struct jcf_block *start_label = get_jcf_label_here (state);
2304 tree return_link = build_decl (VAR_DECL, NULL_TREE,
2305 return_address_type_node);
2306 tree exception_type = build_pointer_type (throwable_type_node);
2307 tree exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2308 struct jcf_handler *handler;
2309
2310 finally_label->pc = PENDING_CLEANUP_PC;
2311 finally_label->next = state->labeled_blocks;
2312 state->labeled_blocks = finally_label;
2313 state->num_finalizers++;
2314
2315 generate_bytecode_insns (try_block, target, state);
2316 if (state->labeled_blocks != finally_label)
2317 abort();
2318 state->labeled_blocks = finally_label->next;
2319 emit_jsr (finally_label, state);
2320 if (CAN_COMPLETE_NORMALLY (try_block))
2321 emit_goto (finished_label, state);
2322
2323 /* Handle exceptions. */
2324 localvar_alloc (return_link, state);
2325 handler = alloc_handler (start_label, NULL_PTR, state);
2326 handler->end_label = handler->handler_label;
2327 handler->type = NULL_TREE;
2328 localvar_alloc (exception_decl, state);
2329 NOTE_PUSH (1);
2330 emit_store (exception_decl, state);
2331 emit_jsr (finally_label, state);
2332 emit_load (exception_decl, state);
2333 RESERVE (1);
2334 OP1 (OPCODE_athrow);
2335 NOTE_POP (1);
2336 localvar_free (exception_decl, state);
2337
2338 /* The finally block. First save return PC into return_link. */
2339 define_jcf_label (finally_label, state);
2340 NOTE_PUSH (1);
2341 emit_store (return_link, state);
2342
2343 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2344 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2345 localvar_free (return_link, state);
2346 define_jcf_label (finished_label, state);
2347 }
2348 break;
2349 case THROW_EXPR:
2350 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2351 RESERVE (1);
2352 OP1 (OPCODE_athrow);
2353 break;
2354 case NEW_ARRAY_INIT:
2355 {
2356 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2357 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2358 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2359 HOST_WIDE_INT length = java_array_type_length (array_type);
2360 if (target == IGNORE_TARGET)
2361 {
2362 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2363 generate_bytecode_insns (TREE_VALUE (values), target, state);
2364 break;
2365 }
2366 push_int_const (length, state);
2367 NOTE_PUSH (1);
2368 RESERVE (3);
2369 if (JPRIMITIVE_TYPE_P (element_type))
2370 {
2371 int atype = encode_newarray_type (element_type);
2372 OP1 (OPCODE_newarray);
2373 OP1 (atype);
2374 }
2375 else
2376 {
2377 int index = find_class_constant (&state->cpool,
2378 TREE_TYPE (element_type));
2379 OP1 (OPCODE_anewarray);
2380 OP2 (index);
2381 }
2382 offset = 0;
2383 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2384 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2385 {
2386 int save_SP = state->code_SP;
2387 emit_dup (1, 0, state);
2388 push_int_const (offset, state);
2389 NOTE_PUSH (1);
2390 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2391 RESERVE (1);
2392 OP1 (jopcode);
2393 state->code_SP = save_SP;
2394 }
2395 }
2396 break;
2397 case NEW_CLASS_EXPR:
2398 {
2399 tree class = TREE_TYPE (TREE_TYPE (exp));
2400 int need_result = target != IGNORE_TARGET;
2401 int index = find_class_constant (&state->cpool, class);
2402 RESERVE (4);
2403 OP1 (OPCODE_new);
2404 OP2 (index);
2405 if (need_result)
2406 OP1 (OPCODE_dup);
2407 NOTE_PUSH (1 + need_result);
2408 }
2409 /* ... fall though ... */
2410 case CALL_EXPR:
2411 {
2412 tree f = TREE_OPERAND (exp, 0);
2413 tree x = TREE_OPERAND (exp, 1);
2414 int save_SP = state->code_SP;
2415 int nargs;
2416 if (TREE_CODE (f) == ADDR_EXPR)
2417 f = TREE_OPERAND (f, 0);
2418 if (f == soft_newarray_node)
2419 {
2420 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2421 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2422 STACK_TARGET, state);
2423 RESERVE (2);
2424 OP1 (OPCODE_newarray);
2425 OP1 (type_code);
2426 break;
2427 }
2428 else if (f == soft_multianewarray_node)
2429 {
2430 int ndims;
2431 int idim;
2432 int index = find_class_constant (&state->cpool,
2433 TREE_TYPE (TREE_TYPE (exp)));
2434 x = TREE_CHAIN (x); /* Skip class argument. */
2435 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2436 for (idim = ndims; --idim >= 0; )
2437 {
2438 x = TREE_CHAIN (x);
2439 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2440 }
2441 RESERVE (4);
2442 OP1 (OPCODE_multianewarray);
2443 OP2 (index);
2444 OP1 (ndims);
2445 break;
2446 }
2447 else if (f == soft_anewarray_node)
2448 {
2449 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2450 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2451 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2452 RESERVE (3);
2453 OP1 (OPCODE_anewarray);
2454 OP2 (index);
2455 break;
2456 }
2457 else if (f == soft_monitorenter_node
2458 || f == soft_monitorexit_node
2459 || f == throw_node[0]
2460 || f == throw_node[1])
2461 {
2462 if (f == soft_monitorenter_node)
2463 op = OPCODE_monitorenter;
2464 else if (f == soft_monitorexit_node)
2465 op = OPCODE_monitorexit;
2466 else
2467 op = OPCODE_athrow;
2468 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2469 RESERVE (1);
2470 OP1 (op);
2471 NOTE_POP (1);
2472 break;
2473 }
2474 else if (exp == soft_exceptioninfo_call_node)
2475 {
2476 NOTE_PUSH (1); /* Pushed by exception system. */
2477 break;
2478 }
2479 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2480 {
2481 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2482 }
2483 nargs = state->code_SP - save_SP;
2484 state->code_SP = save_SP;
2485 if (f == soft_fmod_node)
2486 {
2487 RESERVE (1);
2488 OP1 (OPCODE_drem);
2489 NOTE_PUSH (2);
2490 break;
2491 }
2492 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2493 NOTE_POP (1); /* Pop implicit this. */
2494 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2495 {
2496 int index = find_methodref_index (&state->cpool, f);
2497 int interface = 0;
2498 RESERVE (5);
2499 if (METHOD_STATIC (f))
2500 OP1 (OPCODE_invokestatic);
2501 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2502 || METHOD_PRIVATE (f))
2503 OP1 (OPCODE_invokespecial);
2504 else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f))))
2505 {
2506 OP1 (OPCODE_invokeinterface);
2507 interface = 1;
2508 }
2509 else
2510 OP1 (OPCODE_invokevirtual);
2511 OP2 (index);
2512 if (interface)
2513 {
2514 OP1 (nargs);
2515 OP1 (0);
2516 }
2517 f = TREE_TYPE (TREE_TYPE (f));
2518 if (TREE_CODE (f) != VOID_TYPE)
2519 {
2520 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2521 if (target == IGNORE_TARGET)
2522 emit_pop (size, state);
2523 else
2524 NOTE_PUSH (size);
2525 }
2526 break;
2527 }
2528 }
2529 /* fall through */
2530 notimpl:
2531 default:
2532 error("internal error - tree code not implemented: %s",
2533 tree_code_name [(int) TREE_CODE (exp)]);
2534 }
2535 }
2536
2537 static void
2538 perform_relocations (state)
2539 struct jcf_partial *state;
2540 {
2541 struct jcf_block *block;
2542 struct jcf_relocation *reloc;
2543 int pc;
2544 int shrink;
2545
2546 /* Before we start, the pc field of each block is an upper bound on
2547 the block's start pc (it may be less, if previous blocks need less
2548 than their maximum).
2549
2550 The minimum size of each block is in the block's chunk->size. */
2551
2552 /* First, figure out the actual locations of each block. */
2553 pc = 0;
2554 shrink = 0;
2555 for (block = state->blocks; block != NULL; block = block->next)
2556 {
2557 int block_size = block->v.chunk->size;
2558
2559 block->pc = pc;
2560
2561 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2562 Assumes relocations are in reverse order. */
2563 reloc = block->u.relocations;
2564 while (reloc != NULL
2565 && reloc->kind == OPCODE_goto_w
2566 && reloc->label->pc == block->next->pc
2567 && reloc->offset + 2 == block_size)
2568 {
2569 reloc = reloc->next;
2570 block->u.relocations = reloc;
2571 block->v.chunk->size -= 3;
2572 block_size -= 3;
2573 shrink += 3;
2574 }
2575
2576 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2577 {
2578 if (reloc->kind == SWITCH_ALIGN_RELOC)
2579 {
2580 /* We assume this is the first relocation in this block,
2581 so we know its final pc. */
2582 int where = pc + reloc->offset;
2583 int pad = ((where + 3) & ~3) - where;
2584 block_size += pad;
2585 }
2586 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2587 {
2588 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2589 int expand = reloc->kind > 0 ? 2 : 5;
2590
2591 if (delta > 0)
2592 delta -= shrink;
2593 if (delta >= -32768 && delta <= 32767)
2594 {
2595 shrink += expand;
2596 reloc->kind = -1;
2597 }
2598 else
2599 block_size += expand;
2600 }
2601 }
2602 pc += block_size;
2603 }
2604
2605 for (block = state->blocks; block != NULL; block = block->next)
2606 {
2607 struct chunk *chunk = block->v.chunk;
2608 int old_size = chunk->size;
2609 int next_pc = block->next == NULL ? pc : block->next->pc;
2610 int new_size = next_pc - block->pc;
2611 unsigned char *new_ptr;
2612 unsigned char *old_buffer = chunk->data;
2613 unsigned char *old_ptr = old_buffer + old_size;
2614 if (new_size != old_size)
2615 {
2616 chunk->data = (unsigned char *)
2617 obstack_alloc (state->chunk_obstack, new_size);
2618 chunk->size = new_size;
2619 }
2620 new_ptr = chunk->data + new_size;
2621
2622 /* We do the relocations from back to front, because
2623 the relocations are in reverse order. */
2624 for (reloc = block->u.relocations; ; reloc = reloc->next)
2625 {
2626 /* new_ptr and old_ptr point into the old and new buffers,
2627 respectively. (If no relocations cause the buffer to
2628 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2629 The bytes at higher adress have been copied and relocations
2630 handled; those at lower addresses remain to process. */
2631
2632 /* Lower old index of piece to be copied with no relocation.
2633 I.e. high index of the first piece that does need relocation. */
2634 int start = reloc == NULL ? 0
2635 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2636 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2637 ? reloc->offset + 4
2638 : reloc->offset + 2;
2639 int32 value;
2640 int new_offset;
2641 int n = (old_ptr - old_buffer) - start;
2642 new_ptr -= n;
2643 old_ptr -= n;
2644 if (n > 0)
2645 memcpy (new_ptr, old_ptr, n);
2646 if (old_ptr == old_buffer)
2647 break;
2648
2649 new_offset = new_ptr - chunk->data;
2650 new_offset -= (reloc->kind == -1 ? 2 : 4);
2651 if (reloc->kind == 0)
2652 {
2653 old_ptr -= 4;
2654 value = GET_u4 (old_ptr);
2655 }
2656 else if (reloc->kind == BLOCK_START_RELOC)
2657 {
2658 old_ptr -= 4;
2659 value = 0;
2660 new_offset = 0;
2661 }
2662 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2663 {
2664 int where = block->pc + reloc->offset;
2665 int pad = ((where + 3) & ~3) - where;
2666 while (--pad >= 0)
2667 *--new_ptr = 0;
2668 continue;
2669 }
2670 else
2671 {
2672 old_ptr -= 2;
2673 value = GET_u2 (old_ptr);
2674 }
2675 value += reloc->label->pc - (block->pc + new_offset);
2676 *--new_ptr = (unsigned char) value; value >>= 8;
2677 *--new_ptr = (unsigned char) value; value >>= 8;
2678 if (reloc->kind != -1)
2679 {
2680 *--new_ptr = (unsigned char) value; value >>= 8;
2681 *--new_ptr = (unsigned char) value;
2682 }
2683 if (reloc->kind > BLOCK_START_RELOC)
2684 {
2685 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2686 --old_ptr;
2687 *--new_ptr = reloc->kind;
2688 }
2689 else if (reloc->kind < -1)
2690 {
2691 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2692 --old_ptr;
2693 *--new_ptr = OPCODE_goto_w;
2694 *--new_ptr = 3;
2695 *--new_ptr = 0;
2696 *--new_ptr = - reloc->kind;
2697 }
2698 }
2699 if (new_ptr != chunk->data)
2700 fatal ("internal error - perform_relocations");
2701 }
2702 state->code_length = pc;
2703 }
2704
2705 static void
2706 init_jcf_state (state, work)
2707 struct jcf_partial *state;
2708 struct obstack *work;
2709 {
2710 state->chunk_obstack = work;
2711 state->first = state->chunk = NULL;
2712 CPOOL_INIT (&state->cpool);
2713 BUFFER_INIT (&state->localvars);
2714 BUFFER_INIT (&state->bytecode);
2715 }
2716
2717 static void
2718 init_jcf_method (state, method)
2719 struct jcf_partial *state;
2720 tree method;
2721 {
2722 state->current_method = method;
2723 state->blocks = state->last_block = NULL;
2724 state->linenumber_count = 0;
2725 state->first_lvar = state->last_lvar = NULL;
2726 state->lvar_count = 0;
2727 state->labeled_blocks = NULL;
2728 state->code_length = 0;
2729 BUFFER_RESET (&state->bytecode);
2730 BUFFER_RESET (&state->localvars);
2731 state->code_SP = 0;
2732 state->code_SP_max = 0;
2733 state->handlers = NULL;
2734 state->last_handler = NULL;
2735 state->num_handlers = 0;
2736 state->num_finalizers = 0;
2737 state->return_value_decl = NULL_TREE;
2738 }
2739
/* Release all memory used by STATE: the constant pool, and every
   chunk allocated on the chunk obstack.  */

static void
release_jcf_state (state)
     struct jcf_partial *state;
{
  CPOOL_FINISH (&state->cpool);
  /* Freeing the first chunk also releases everything allocated on
     the obstack after it (i.e. all later chunks).  */
  obstack_free (state->chunk_obstack, state->first);
}
2747
/* Generate and return a list of chunks containing the class CLAS
   in the .class file representation.  The list can be written to a
   .class file using write_chunks.  Allocate chunks from obstack WORK.

   Layout follows the JVM class file format: magic/version header,
   constant pool (filled in last, once all constants are known),
   access flags, this/super/interfaces, fields, methods (each with
   optional Code and Exceptions attributes), and a SourceFile
   attribute.  All multi-byte values are emitted big-endian via the
   PUT2/PUT4 macros, which advance `ptr'.  */

static struct chunk *
generate_classfile (clas, state)
     tree clas;
     struct jcf_partial *state;
{
  struct chunk *cpool_chunk;
  char *source_file;
  char *ptr;
  int i;
  char *fields_count_ptr;       /* Back-patched once fields are counted.  */
  int fields_count = 0;
  char *methods_count_ptr;      /* Back-patched once methods are counted.  */
  int methods_count = 0;
  static tree SourceFile_node = NULL_TREE;
  tree part;
  int total_supers
    = clas == object_type_node ? 0
    : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));

  ptr = append_chunk (NULL, 8, state);
  PUT4 (0xCafeBabe);  /* Magic number */
  PUT2 (3);           /* Minor version */
  PUT2 (45);          /* Major version */

  /* Reserve an empty chunk for the constant pool; its contents are
     generated at the very end, after all constants have been interned.  */
  append_chunk (NULL, 0, state);
  cpool_chunk = state->chunk;

  /* Next allocate the chunk containing access_flags through fields_count.  */
  if (clas == object_type_node)
    i = 10;
  else
    i = 8 + 2 * total_supers;
  ptr = append_chunk (NULL, i, state);
  i = get_access_flags (TYPE_NAME (clas));
  if (! (i & ACC_INTERFACE))
    i |= ACC_SUPER;
  PUT2 (i); /* access_flags */
  i = find_class_constant (&state->cpool, clas);  PUT2 (i);  /* this_class */
  if (clas == object_type_node)
    {
      PUT2(0);  /* super_class */
      PUT2(0);  /* interfaces_count */
    }
  else
    {
      /* Basetype 0 is the superclass; the rest are interfaces.  */
      tree basetypes = TYPE_BINFO_BASETYPES (clas);
      tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
      int j = find_class_constant (&state->cpool, base);
      PUT2 (j);  /* super_class */
      PUT2 (total_supers - 1);  /* interfaces_count */
      for (i = 1; i < total_supers; i++)
	{
	  base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
	  j = find_class_constant (&state->cpool, base);
	  PUT2 (j);
	}
    }
  fields_count_ptr = ptr;

  /* Emit the fields, counting as we go.  */
  for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
    {
      int have_value;
      if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
	continue;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, DECL_NAME (part));  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (TREE_TYPE (part)));
      PUT2(i);
      /* Static fields with an initializer get a ConstantValue attribute.  */
      have_value = DECL_INITIAL (part) != NULL_TREE && FIELD_STATIC (part);
      PUT2 (have_value);  /* attributes_count */
      if (have_value)
	{
	  tree init = DECL_INITIAL (part);
	  static tree ConstantValue_node = NULL_TREE;
	  ptr = append_chunk (NULL, 8, state);
	  if (ConstantValue_node == NULL_TREE)
	    ConstantValue_node = get_identifier ("ConstantValue");
	  i = find_utf8_constant (&state->cpool, ConstantValue_node);
	  PUT2 (i);  /* attribute_name_index */
	  PUT4 (2);  /* attribute_length */
	  i = find_constant_index (init, state);  PUT2 (i);
	}
      fields_count++;
    }
  ptr = fields_count_ptr;  UNSAFE_PUT2 (fields_count);

  ptr = methods_count_ptr = append_chunk (NULL, 2, state);
  PUT2 (0);

  /* Emit the methods, generating bytecode for each body.  */
  for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
    {
      struct jcf_block *block;
      tree function_body = DECL_FUNCTION_BODY (part);
      tree body = function_body == NULL_TREE ? NULL_TREE
	: BLOCK_EXPR_BODY (function_body);
      tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
	: DECL_NAME (part);
      tree type = TREE_TYPE (part);
      tree save_function = current_function_decl;
      current_function_decl = part;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (type));
      PUT2 (i);
      i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
      PUT2 (i);   /* attributes_count */
      if (body != NULL_TREE)
	{
	  /* Generate the Code attribute.  */
	  int code_attributes_count = 0;
	  static tree Code_node = NULL_TREE;
	  tree t;
	  char *attr_len_ptr;
	  struct jcf_handler *handler;
	  if (Code_node == NULL_TREE)
	    Code_node = get_identifier ("Code");
	  ptr = append_chunk (NULL, 14, state);
	  i = find_utf8_constant (&state->cpool, Code_node);  PUT2 (i);
	  /* Lengths and stack/locals maxima are back-patched below,
	     once the code has been generated and relocated.  */
	  attr_len_ptr = ptr;
	  init_jcf_method (state, part);
	  get_jcf_label_here (state);  /* Force a first block. */
	  for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
	    localvar_alloc (t, state);
	  generate_bytecode_insns (body, IGNORE_TARGET, state);
	  if (CAN_COMPLETE_NORMALLY (body))
	    {
	      /* Falling off the end is only legal for void methods.  */
	      if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
		abort();
	      RESERVE (1);
	      OP1 (OPCODE_return);
	    }
	  for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
	    localvar_free (t, state);
	  if (state->return_value_decl != NULL_TREE)
	    localvar_free (state->return_value_decl, state);
	  finish_jcf_block (state);
	  perform_relocations (state);

	  /* Back-patch the Code attribute header.  */
	  ptr = attr_len_ptr;
	  i = 8 + state->code_length + 4 + 8 * state->num_handlers;
	  if (state->linenumber_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 4 * state->linenumber_count;
	    }
	  if (state->lvar_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 10 * state->lvar_count;
	    }
	  UNSAFE_PUT4 (i);  /* attribute_length */
	  UNSAFE_PUT2 (state->code_SP_max);  /* max_stack */
	  UNSAFE_PUT2 (localvar_max);  /* max_locals */
	  UNSAFE_PUT4 (state->code_length);

	  /* Emit the exception table.  */
	  ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
	  PUT2 (state->num_handlers);  /* exception_table_length */
	  handler = state->handlers;
	  for (; handler != NULL; handler = handler->next)
	    {
	      int type_index;
	      PUT2 (handler->start_label->pc);
	      PUT2 (handler->end_label->pc);
	      PUT2 (handler->handler_label->pc);
	      if (handler->type == NULL_TREE)
		type_index = 0;   /* catch-all (finally) handler */
	      else
		type_index = find_class_constant (&state->cpool,
						  handler->type);
	      PUT2 (type_index);
	    }

	  ptr = append_chunk (NULL, 2, state);
	  PUT2 (code_attributes_count);

	  /* Write the LineNumberTable attribute.  */
	  if (state->linenumber_count > 0)
	    {
	      static tree LineNumberTable_node = NULL_TREE;
	      ptr = append_chunk (NULL, 8 + 4 * state->linenumber_count, state);
	      if (LineNumberTable_node == NULL_TREE)
		LineNumberTable_node = get_identifier ("LineNumberTable");
	      i = find_utf8_constant (&state->cpool, LineNumberTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2+4*state->linenumber_count;  PUT4(i);  /* attribute_length */
	      i = state->linenumber_count;  PUT2 (i);
	      for (block = state->blocks; block != NULL; block = block->next)
		{
		  int line = block->linenumber;
		  if (line > 0)
		    {
		      PUT2 (block->pc);
		      PUT2 (line);
		    }
		}
	    }

	  /* Write the LocalVariableTable attribute.  */
	  if (state->lvar_count > 0)
	    {
	      static tree LocalVariableTable_node = NULL_TREE;
	      struct localvar_info *lvar = state->first_lvar;
	      ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
	      if (LocalVariableTable_node == NULL_TREE)
		LocalVariableTable_node = get_identifier("LocalVariableTable");
	      i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2 + 10 * state->lvar_count;  PUT4 (i);  /* attribute_length */
	      i = state->lvar_count;  PUT2 (i);
	      for ( ; lvar != NULL; lvar = lvar->next)
		{
		  tree name = DECL_NAME (lvar->decl);
		  tree sig = build_java_signature (TREE_TYPE (lvar->decl));
		  i = lvar->start_label->pc;  PUT2 (i);
		  i = lvar->end_label->pc - i;  PUT2 (i);  /* length */
		  i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
		  i = find_utf8_constant (&state->cpool, sig);  PUT2 (i);
		  i = DECL_LOCAL_INDEX (lvar->decl);  PUT2 (i);
		}
	    }
	}
      if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
	{
	  /* Generate the Exceptions (throws clause) attribute.  */
	  tree t = DECL_FUNCTION_THROWS (part);
	  int throws_count = list_length (t);
	  static tree Exceptions_node = NULL_TREE;
	  if (Exceptions_node == NULL_TREE)
	    Exceptions_node = get_identifier ("Exceptions");
	  ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
	  i = find_utf8_constant (&state->cpool, Exceptions_node);
	  PUT2 (i);  /* attribute_name_index */
	  i = 2 + 2 * throws_count;  PUT4(i);  /* attribute_length */
	  i = throws_count;  PUT2 (i);
	  for (; t != NULL_TREE; t = TREE_CHAIN (t))
	    {
	      i = find_class_constant (&state->cpool, TREE_VALUE (t));
	      PUT2 (i);
	    }
	}
      methods_count++;
      current_function_decl = save_function;
    }
  ptr = methods_count_ptr;  UNSAFE_PUT2 (methods_count);

  /* Strip any directory components from the source file name.  */
  source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
  for (ptr = source_file; ; ptr++)
    {
      char ch = *ptr;
      if (ch == '\0')
	break;
      if (ch == '/' || ch == '\\')
	source_file = ptr+1;
    }
  ptr = append_chunk (NULL, 10, state);
  PUT2 (1);  /* attributes_count */

  /* Generate the SourceFile attribute.  */
  if (SourceFile_node == NULL_TREE)
    SourceFile_node = get_identifier ("SourceFile");
  i = find_utf8_constant (&state->cpool, SourceFile_node);
  PUT2 (i);  /* attribute_name_index */
  PUT4 (2);  /* attribute_length */
  i = find_utf8_constant (&state->cpool, get_identifier (source_file));
  PUT2 (i);

  /* Now, finally, generate the contents of the constant pool chunk.  */
  i = count_constant_pool_bytes (&state->cpool);
  ptr = obstack_alloc (state->chunk_obstack, i);
  cpool_chunk->data = ptr;
  cpool_chunk->size = i;
  write_constant_pool (&state->cpool, ptr, i);
  return state->first;
}
3027
/* Return a freshly malloc'd file name for the .class file of CLAS.
   If jcf_write_base_directory is set, the name is rooted there and
   any missing subdirectories are created; otherwise the file goes
   into the same directory as the corresponding .java source file.
   Returns NULL on failure to create a directory (but see note below:
   `fatal' does not return, so that path is effectively unreachable).  */

static char *
make_class_file_name (clas)
     tree clas;
{
  const char *dname, *slash;
  char *cname, *r;
  struct stat sb;

  /* Mangle the class name: dots become directory separators, and a
     ".class" suffix is appended.  */
  cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
						"", '.', DIR_SEPARATOR,
						".class"));
  if (jcf_write_base_directory == NULL)
    {
      /* Make sure we put the class file into the .java file's
	 directory, and not into some subdirectory thereof.  */
      char *t;
      dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
      slash = strrchr (dname, DIR_SEPARATOR);
      if (! slash)
	{
	  /* Source file has no directory component; use ".".  */
	  dname = ".";
	  slash = dname + 1;
	}
      /* Drop any package directories from the mangled class name.  */
      t = strrchr (cname, DIR_SEPARATOR);
      if (t)
	cname = t + 1;
    }
  else
    {
      dname = jcf_write_base_directory;
      slash = dname + strlen (dname);
    }

  /* Build "<dname>/<cname>" (+1 for the separator, +1 for NUL).  */
  r = xmalloc (slash - dname + strlen (cname) + 2);
  strncpy (r, dname, slash - dname);
  r[slash - dname] = DIR_SEPARATOR;
  strcpy (&r[slash - dname + 1], cname);

  /* We try to make new directories when we need them.  We only do
     this for directories which "might not" exist.  For instance, we
     assume the `-d' directory exists, but we don't assume that any
     subdirectory below it exists.  It might be worthwhile to keep
     track of which directories we've created to avoid gratuitous
     stat()s.  */
  dname = r + (slash - dname) + 1;
  while (1)
    {
      /* Temporarily truncate R at each separator so we can stat/mkdir
	 each intermediate directory in turn.  */
      cname = strchr (dname, DIR_SEPARATOR);
      if (cname == NULL)
	break;
      *cname = '\0';
      if (stat (r, &sb) == -1)
	{
	  /* Try to make it.  */
	  if (mkdir (r, 0755) == -1)
	    {
	      fatal ("failed to create directory `%s'", r);
	      /* NOTE(review): `fatal' does not return, so the cleanup
		 below is dead code; kept for safety.  */
	      free (r);
	      return NULL;
	    }
	}
      *cname = DIR_SEPARATOR;
      /* Skip consecutive separators.  */
      for (dname = cname + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
	;
    }

  return r;
}
3097
3098 /* Write out the contens of a class (RECORD_TYPE) CLAS, as a .class file.
3099 The output .class file name is make_class_file_name(CLAS). */
3100
3101 void
3102 write_classfile (clas)
3103 tree clas;
3104 {
3105 struct obstack *work = &temporary_obstack;
3106 struct jcf_partial state[1];
3107 char *class_file_name = make_class_file_name (clas);
3108 struct chunk *chunks;
3109
3110 if (class_file_name != NULL)
3111 {
3112 FILE* stream = fopen (class_file_name, "wb");
3113 if (stream == NULL)
3114 fatal ("failed to open `%s' for writing", class_file_name);
3115 jcf_dependency_add_target (class_file_name);
3116 init_jcf_state (state, work);
3117 chunks = generate_classfile (clas, state);
3118 write_chunks (stream, chunks);
3119 if (fclose (stream))
3120 fatal ("failed to close after writing `%s'", class_file_name);
3121 free (class_file_name);
3122 }
3123 release_jcf_state (state);
3124 }
3125
3126 /* TODO:
3127 string concatenation
3128 synchronized statement
3129 */
This page took 0.17138 seconds and 6 git commands to generate.