gcc.gnu.org Git - [gcc.git] / gcc / java / jcf-write.c
jcf-write.c (push_long_const): lo, hi: New variables.
1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
19
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "jcf.h"
27 #include "tree.h"
28 #include "real.h"
29 #include "java-tree.h"
30 #include "obstack.h"
31 #undef AND
32 #include "rtl.h"
33 #include "flags.h"
34 #include "java-opcodes.h"
35 #include "parse.h" /* for BLOCK_EXPR_BODY */
36 #include "buffer.h"
37 #include "toplev.h"
38 #include "ggc.h"
39
40 #ifndef DIR_SEPARATOR
41 #define DIR_SEPARATOR '/'
42 #endif
43
44 extern struct obstack temporary_obstack;
45
46 /* Base directory in which `.class' files should be written.
47 NULL means to put the file into the same directory as the
48 corresponding .java file. */
49 char *jcf_write_base_directory = NULL;
50
51 /* Make sure bytecode.data is big enough for at least N more bytes. */
52
53 #define RESERVE(N) \
54 do { CHECK_OP(state); \
55 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
56 buffer_grow (&state->bytecode, N); } while (0)
57
58 /* Add a 1-byte instruction/operand I to bytecode.data,
59 assuming space has already been RESERVE'd. */
60
61 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62
63 /* Like OP1, but I is a 2-byte big endian integer. */
64
65 #define OP2(I) \
66 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67
68 /* Like OP1, but I is a 4-byte big endian integer. */
69
70 #define OP4(I) \
71 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
72 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73
74 /* Macro to call each time we push I words on the JVM stack. */
75
76 #define NOTE_PUSH(I) \
77 do { state->code_SP += (I); \
78 if (state->code_SP > state->code_SP_max) \
79 state->code_SP_max = state->code_SP; } while (0)
80
81 /* Macro to call each time we pop I words from the JVM stack. */
82
83 #define NOTE_POP(I) \
84 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
85
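/* Illustrative sketch (not part of the original source): the emitters
   below combine these macros in a fixed pattern.  For example, pushing
   a 16-bit constant looks like

       RESERVE (3);           reserve room for opcode + 2 operand bytes
       OP1 (OPCODE_sipush);   1-byte opcode
       OP2 (value);           2-byte big-endian operand
       NOTE_PUSH (1);         one word is now on the JVM operand stack

   push_int_const below follows exactly this pattern.  */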
86 /* A chunk or segment of a .class file. */
87
88 struct chunk
89 {
90 /* The next segment of this .class file. */
91 struct chunk *next;
92
93 /* The actual data in this segment to be written to the .class file. */
94 unsigned char *data;
95
96 /* The size of the segment to be written to the .class file. */
97 int size;
98 };
99
100 #define PENDING_CLEANUP_PC (-3)
101 #define PENDING_EXIT_PC (-2)
102 #define UNDEFINED_PC (-1)
103
104 /* Each "block" represents a label plus the bytecode instructions following.
105 There may be branches out of the block, but no incoming jumps, except
106 to the beginning of the block.
107
108 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
109 associated code yet), but it is an undefined label.
110 */
111
112 struct jcf_block
113 {
114 /* For blocks that are defined, the next block (in pc order).
115 For blocks that are not-yet-defined the end label of a LABELED_BLOCK_EXPR
116 or a cleanup expression (from a TRY_FINALLY_EXPR),
117 this is the next (outer) such end label, in a stack headed by
118 labeled_blocks in jcf_partial. */
119 struct jcf_block *next;
120
121 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
122 pc is PENDING_EXIT_PC.
123 In the not-yet-defined end label for pending cleanup subroutine,
124 pc is PENDING_CLEANUP_PC.
125 For other not-yet-defined labels, pc is UNDEFINED_PC.
126
127 If the label has been defined:
128 Until perform_relocations is finished, this is the maximum possible
129 value of the bytecode offset at the beginning of this block.
130 After perform_relocations, it is the actual offset (pc). */
131 int pc;
132
133 int linenumber;
134
135 /* After finish_jcf_block is called, the actual instructions
136 contained in this block. Before that NULL, and the instructions
137 are in state->bytecode. */
138 union {
139 struct chunk *chunk;
140
141 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
142 covered by the cleanup. */
143 struct jcf_block *start_label;
144 } v;
145
146 union {
147 /* Set of relocations (in reverse offset order) for this block. */
148 struct jcf_relocation *relocations;
149
150 /* If this block is that of the not-yet-defined end label of
151 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
152 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
153 tree labeled_block;
154 } u;
155 };
156
157 /* A "relocation" type for the 0-3 bytes of padding at the start
158 of a tableswitch or a lookupswitch. */
159 #define SWITCH_ALIGN_RELOC 4
160
161 /* A relocation type for the labels in a tableswitch or a lookupswitch;
162 these are relative to the start of the instruction, but (due to
163 the 0-3 bytes of padding), we don't know the offset before relocation. */
164 #define BLOCK_START_RELOC 1
165
166 struct jcf_relocation
167 {
168 /* Next relocation for the current jcf_block. */
169 struct jcf_relocation *next;
170
171 /* The (byte) offset within the current block that needs to be relocated. */
172 HOST_WIDE_INT offset;
173
174 /* 0 if offset is a 4-byte relative offset.
175 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
176 for proper alignment in tableswitch/lookupswitch instructions.
177 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
178 to the start of the containing block.
179 -1 if offset is a 2-byte relative offset.
180 < -1 if offset is the address of an instruction with a 2-byte offset
181 that does not have a corresponding 4-byte offset version, in which
182 case the absolute value of kind is the inverted opcode.
183 > 4 if offset is the address of an instruction (such as jsr) with a
184 2-byte offset that does have a corresponding 4-byte offset version,
185 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
186 int kind;
187
188 /* The label the relocation wants to actually transfer to. */
189 struct jcf_block *label;
190 };
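/* Illustrative examples of the `kind' encodings above (not part of the
   original source; the values are the ones used by the emitters later in
   this file):

       emit_goto records kind = OPCODE_goto_w  (> 4: goto has a wide form)
       emit_jsr  records kind = OPCODE_jsr_w   (> 4: jsr has a wide form)
       emit_if (target, OPCODE_if_icmplt, OPCODE_if_icmpge, state)
                 records kind = -OPCODE_if_icmpge  (< -1: no wide form, so
                 the relocator can invert the test and use a goto_w)
       tableswitch/lookupswitch padding uses SWITCH_ALIGN_RELOC, and each
                 case label uses BLOCK_START_RELOC.  */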
191
192 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
193 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194
195 /* State for single catch clause. */
196
197 struct jcf_handler
198 {
199 struct jcf_handler *next;
200
201 struct jcf_block *start_label;
202 struct jcf_block *end_label;
203 struct jcf_block *handler_label;
204
205 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
206 tree type;
207 };
208
209 /* State for the current switch statement. */
210
211 struct jcf_switch_state
212 {
213 struct jcf_switch_state *prev;
214 struct jcf_block *default_label;
215
216 struct jcf_relocation *cases;
217 int num_cases;
218 HOST_WIDE_INT min_case, max_case;
219 };
220
221 /* This structure is used to contain the various pieces that will
222 become a .class file. */
223
224 struct jcf_partial
225 {
226 struct chunk *first;
227 struct chunk *chunk;
228 struct obstack *chunk_obstack;
229 tree current_method;
230
231 /* List of basic blocks for the current method. */
232 struct jcf_block *blocks;
233 struct jcf_block *last_block;
234
235 struct localvar_info *first_lvar;
236 struct localvar_info *last_lvar;
237 int lvar_count;
238
239 CPool cpool;
240
241 int linenumber_count;
242
243 /* Until perform_relocations, this is an upper bound on the number
244 of bytes (so far) in the instructions for the current method. */
245 int code_length;
246
247 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
248 struct jcf_block *labeled_blocks;
249
250 /* The current stack size (stack pointer) in the current method. */
251 int code_SP;
252
253 /* The largest extent of stack size (stack pointer) in the current method. */
254 int code_SP_max;
255
256 /* Contains a mapping from local var slot number to localvar_info. */
257 struct buffer localvars;
258
259 /* The buffer allocated for bytecode for the current jcf_block. */
260 struct buffer bytecode;
261
262 /* Chain of exception handlers for the current method. */
263 struct jcf_handler *handlers;
264
265 /* Last element in handlers chain. */
266 struct jcf_handler *last_handler;
267
268 /* Number of exception handlers for the current method. */
269 int num_handlers;
270
271 /* Number of finalizers we are currently nested within. */
272 int num_finalizers;
273
274 /* If non-NULL, use this for the return value. */
275 tree return_value_decl;
276
277 /* Information about the current switch statement. */
278 struct jcf_switch_state *sw_state;
279 };
280
281 static void generate_bytecode_insns PARAMS ((tree, int, struct jcf_partial *));
282 static struct chunk * alloc_chunk PARAMS ((struct chunk *, unsigned char *,
283 int, struct obstack *));
284 static unsigned char * append_chunk PARAMS ((unsigned char *, int,
285 struct jcf_partial *));
286 static void append_chunk_copy PARAMS ((unsigned char *, int,
287 struct jcf_partial *));
288 static struct jcf_block * gen_jcf_label PARAMS ((struct jcf_partial *));
289 static void finish_jcf_block PARAMS ((struct jcf_partial *));
290 static void define_jcf_label PARAMS ((struct jcf_block *,
291 struct jcf_partial *));
292 static struct jcf_block * get_jcf_label_here PARAMS ((struct jcf_partial *));
293 static void put_linenumber PARAMS ((int, struct jcf_partial *));
294 static void localvar_alloc PARAMS ((tree, struct jcf_partial *));
295 static void localvar_free PARAMS ((tree, struct jcf_partial *));
296 static int get_access_flags PARAMS ((tree));
297 static void write_chunks PARAMS ((FILE *, struct chunk *));
298 static int adjust_typed_op PARAMS ((tree, int));
299 static void generate_bytecode_conditional PARAMS ((tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *));
302 static void generate_bytecode_return PARAMS ((tree, struct jcf_partial *));
303 static void perform_relocations PARAMS ((struct jcf_partial *));
304 static void init_jcf_state PARAMS ((struct jcf_partial *, struct obstack *));
305 static void init_jcf_method PARAMS ((struct jcf_partial *, tree));
306 static void release_jcf_state PARAMS ((struct jcf_partial *));
307 static struct chunk * generate_classfile PARAMS ((tree, struct jcf_partial *));
308 static struct jcf_handler *alloc_handler PARAMS ((struct jcf_block *,
309 struct jcf_block *,
310 struct jcf_partial *));
311 static void emit_iinc PARAMS ((tree, HOST_WIDE_INT, struct jcf_partial *));
312 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
313 struct jcf_partial *));
314 static void push_constant1 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
315 static void push_constant2 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
316 static void push_int_const PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
317 static int find_constant_wide PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
318 struct jcf_partial *));
319 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *));
321 static int find_constant_index PARAMS ((tree, struct jcf_partial *));
324 static void field_op PARAMS ((tree, int, struct jcf_partial *));
325 static void maybe_wide PARAMS ((int, int, struct jcf_partial *));
326 static void emit_dup PARAMS ((int, int, struct jcf_partial *));
327 static void emit_pop PARAMS ((int, struct jcf_partial *));
328 static void emit_load_or_store PARAMS ((tree, int, struct jcf_partial *));
329 static void emit_load PARAMS ((tree, struct jcf_partial *));
330 static void emit_store PARAMS ((tree, struct jcf_partial *));
331 static void emit_unop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
332 static void emit_binop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
333 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
334 struct jcf_partial *));
335 static void emit_switch_reloc PARAMS ((struct jcf_block *,
336 struct jcf_partial *));
337 static void emit_case_reloc PARAMS ((struct jcf_relocation *,
338 struct jcf_partial *));
339 static void emit_if PARAMS ((struct jcf_block *, int, int,
340 struct jcf_partial *));
341 static void emit_goto PARAMS ((struct jcf_block *, struct jcf_partial *));
342 static void emit_jsr PARAMS ((struct jcf_block *, struct jcf_partial *));
343 static void call_cleanups PARAMS ((struct jcf_block *, struct jcf_partial *));
344 static char *make_class_file_name PARAMS ((tree));
345 static unsigned char *append_synthetic_attribute PARAMS ((struct jcf_partial *));
346 static void append_innerclasses_attribute PARAMS ((struct jcf_partial *, tree));
347 static void append_innerclasses_attribute_entry PARAMS ((struct jcf_partial *, tree, tree));
348 static void append_gcj_attribute PARAMS ((struct jcf_partial *, tree));
349
350 /* Utility macros for appending (big-endian) data to a buffer.
351 We assume a local variable 'ptr' points into where we want to
352 write next, and we assume enough space has been allocated. */
353
354 #ifdef ENABLE_JC1_CHECKING
355 static int CHECK_PUT PARAMS ((void *, struct jcf_partial *, int));
356
357 static int
358 CHECK_PUT (ptr, state, i)
359 void *ptr;
360 struct jcf_partial *state;
361 int i;
362 {
363 if ((unsigned char *) ptr < state->chunk->data
364 || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
365 abort ();
366
367 return 0;
368 }
369 #else
370 #define CHECK_PUT(PTR, STATE, I) ((void)0)
371 #endif
372
373 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
374 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
375 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
376 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
377
378 /* There are some cases below where CHECK_PUT is guaranteed to fail.
379 Use the following macros in those specific cases. */
380 #define UNSAFE_PUT1(X) (*ptr++ = (X))
381 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
382 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
383 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
384
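/* Illustrative sketch (not part of the original source): with a chunk
   buffer and a local `ptr' into it, these macros write big-endian fields.
   A hypothetical 8-byte header would look like

       unsigned char *ptr = append_chunk (NULL, 8, state);
       PUT4 (0xCAFEBABE);        writes CA FE BA BE (the .class magic)
       PUT2 (minor_version);     high byte first, then low byte
       PUT2 (major_version);

   where minor_version/major_version are placeholder names.  CHECK_PUT
   (under ENABLE_JC1_CHECKING) aborts if a write would run past the chunk. */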
385 \f
386 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
387 Set the data and size fields to DATA and SIZE, respectively.
388 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
389
390 static struct chunk *
391 alloc_chunk (last, data, size, work)
392 struct chunk *last;
393 unsigned char *data;
394 int size;
395 struct obstack *work;
396 {
397 struct chunk *chunk = (struct chunk *)
398 obstack_alloc (work, sizeof(struct chunk));
399
400 if (data == NULL && size > 0)
401 data = obstack_alloc (work, size);
402
403 chunk->next = NULL;
404 chunk->data = data;
405 chunk->size = size;
406 if (last != NULL)
407 last->next = chunk;
408 return chunk;
409 }
410
411 #ifdef ENABLE_JC1_CHECKING
412 static int CHECK_OP PARAMS ((struct jcf_partial *));
413
414 static int
415 CHECK_OP (state)
416 struct jcf_partial *state;
417 {
418 if (state->bytecode.ptr > state->bytecode.limit)
419 abort ();
420
421 return 0;
422 }
423 #else
424 #define CHECK_OP(STATE) ((void) 0)
425 #endif
426
427 static unsigned char *
428 append_chunk (data, size, state)
429 unsigned char *data;
430 int size;
431 struct jcf_partial *state;
432 {
433 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
434 if (state->first == NULL)
435 state->first = state->chunk;
436 return state->chunk->data;
437 }
438
439 static void
440 append_chunk_copy (data, size, state)
441 unsigned char *data;
442 int size;
443 struct jcf_partial *state;
444 {
445 unsigned char *ptr = append_chunk (NULL, size, state);
446 memcpy (ptr, data, size);
447 }
448 \f
449 static struct jcf_block *
450 gen_jcf_label (state)
451 struct jcf_partial *state;
452 {
453 struct jcf_block *block = (struct jcf_block *)
454 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
455 block->next = NULL;
456 block->linenumber = -1;
457 block->pc = UNDEFINED_PC;
458 return block;
459 }
460
461 static void
462 finish_jcf_block (state)
463 struct jcf_partial *state;
464 {
465 struct jcf_block *block = state->last_block;
466 struct jcf_relocation *reloc;
467 int code_length = BUFFER_LENGTH (&state->bytecode);
468 int pc = state->code_length;
469 append_chunk_copy (state->bytecode.data, code_length, state);
470 BUFFER_RESET (&state->bytecode);
471 block->v.chunk = state->chunk;
472
473 /* Calculate code_length to the maximum value it can have. */
474 pc += block->v.chunk->size;
475 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
476 {
477 int kind = reloc->kind;
478 if (kind == SWITCH_ALIGN_RELOC)
479 pc += 3;
480 else if (kind > BLOCK_START_RELOC)
481 pc += 2; /* 2-byte offset may grow to 4-byte offset */
482 else if (kind < -1)
483 pc += 5; /* May need to add a goto_w. */
484 }
485 state->code_length = pc;
486 }
487
488 static void
489 define_jcf_label (label, state)
490 struct jcf_block *label;
491 struct jcf_partial *state;
492 {
493 if (state->last_block != NULL)
494 finish_jcf_block (state);
495 label->pc = state->code_length;
496 if (state->blocks == NULL)
497 state->blocks = label;
498 else
499 state->last_block->next = label;
500 state->last_block = label;
501 label->next = NULL;
502 label->u.relocations = NULL;
503 }
504
505 static struct jcf_block *
506 get_jcf_label_here (state)
507 struct jcf_partial *state;
508 {
509 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
510 return state->last_block;
511 else
512 {
513 struct jcf_block *label = gen_jcf_label (state);
514 define_jcf_label (label, state);
515 return label;
516 }
517 }
518
519 /* Note a line number entry for the current PC and given LINE. */
520
521 static void
522 put_linenumber (line, state)
523 int line;
524 struct jcf_partial *state;
525 {
526 struct jcf_block *label = get_jcf_label_here (state);
527 if (label->linenumber > 0)
528 {
529 label = gen_jcf_label (state);
530 define_jcf_label (label, state);
531 }
532 label->linenumber = line;
533 state->linenumber_count++;
534 }
535
536 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
537 in the range (START_LABEL, END_LABEL). */
538
539 static struct jcf_handler *
540 alloc_handler (start_label, end_label, state)
541 struct jcf_block *start_label;
542 struct jcf_block *end_label;
543 struct jcf_partial *state;
544 {
545 struct jcf_handler *handler = (struct jcf_handler *)
546 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
547 handler->start_label = start_label;
548 handler->end_label = end_label;
549 handler->handler_label = get_jcf_label_here (state);
550 if (state->handlers == NULL)
551 state->handlers = handler;
552 else
553 state->last_handler->next = handler;
554 state->last_handler = handler;
555 handler->next = NULL;
556 state->num_handlers++;
557 return handler;
558 }
559
560 \f
561 /* The index of the JVM local variable allocated for this DECL.
562 This is assigned when generating .class files;
563 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
564 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
565
566 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
567
568 struct localvar_info
569 {
570 struct localvar_info *next;
571
572 tree decl;
573 struct jcf_block *start_label;
574 struct jcf_block *end_label;
575 };
576
577 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
578 #define localvar_max \
579 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
580
581 static void
582 localvar_alloc (decl, state)
583 tree decl;
584 struct jcf_partial *state;
585 {
586 struct jcf_block *start_label = get_jcf_label_here (state);
587 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
588 int index;
589 register struct localvar_info *info;
590 register struct localvar_info **ptr = localvar_buffer;
591 register struct localvar_info **limit
592 = (struct localvar_info**) state->localvars.ptr;
593 for (index = 0; ptr < limit; index++, ptr++)
594 {
595 if (ptr[0] == NULL
596 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
597 break;
598 }
599 if (ptr == limit)
600 {
601 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
602 ptr = (struct localvar_info**) state->localvars.data + index;
603 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
604 }
605 info = (struct localvar_info *)
606 obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
607 ptr[0] = info;
608 if (wide)
609 ptr[1] = (struct localvar_info *)(~0);
610 DECL_LOCAL_INDEX (decl) = index;
611 info->decl = decl;
612 info->start_label = start_label;
613
614 if (debug_info_level > DINFO_LEVEL_TERSE
615 && DECL_NAME (decl) != NULL_TREE)
616 {
617 /* Generate debugging info. */
618 info->next = NULL;
619 if (state->last_lvar != NULL)
620 state->last_lvar->next = info;
621 else
622 state->first_lvar = info;
623 state->last_lvar = info;
624 state->lvar_count++;
625 }
626 }
627
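/* Illustrative example (not part of the original source): allocating an
   `int' and then a `long' from an empty slot table gives

       int  i  ->  slot 0
       long l  ->  slots 1 and 2   (TYPE_IS_WIDE; slot 2 is marked with the
                                    ~0 sentinel so it is never reused alone)

   localvar_free below clears the entries so the slots can be recycled.  */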
628 static void
629 localvar_free (decl, state)
630 tree decl;
631 struct jcf_partial *state;
632 {
633 struct jcf_block *end_label = get_jcf_label_here (state);
634 int index = DECL_LOCAL_INDEX (decl);
635 register struct localvar_info **ptr = &localvar_buffer [index];
636 register struct localvar_info *info = *ptr;
637 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
638
639 info->end_label = end_label;
640
641 if (info->decl != decl)
642 abort ();
643 ptr[0] = NULL;
644 if (wide)
645 {
646 if (ptr[1] != (struct localvar_info *)(~0))
647 abort ();
648 ptr[1] = NULL;
649 }
650 }
651
652 \f
653 #define STACK_TARGET 1
654 #define IGNORE_TARGET 2
655
656 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
657 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
658
659 static int
660 get_access_flags (decl)
661 tree decl;
662 {
663 int flags = 0;
664 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
665 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
666 flags |= ACC_PUBLIC;
667 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
668 flags |= ACC_FINAL;
669 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
670 {
671 if (TREE_PROTECTED (decl))
672 flags |= ACC_PROTECTED;
673 if (TREE_PRIVATE (decl))
674 flags |= ACC_PRIVATE;
675 }
676 else if (TREE_CODE (decl) == TYPE_DECL)
677 {
678 if (CLASS_SUPER (decl))
679 flags |= ACC_SUPER;
680 if (CLASS_ABSTRACT (decl))
681 flags |= ACC_ABSTRACT;
682 if (CLASS_INTERFACE (decl))
683 flags |= ACC_INTERFACE;
684 if (CLASS_STATIC (decl))
685 flags |= ACC_STATIC;
686 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
687 || LOCAL_CLASS_P (TREE_TYPE (decl)))
688 flags |= ACC_PRIVATE;
689 if (CLASS_STRICTFP (decl))
690 flags |= ACC_STRICT;
691 }
692 else
693 abort ();
694
695 if (TREE_CODE (decl) == FUNCTION_DECL)
696 {
697 if (METHOD_NATIVE (decl))
698 flags |= ACC_NATIVE;
699 if (METHOD_STATIC (decl))
700 flags |= ACC_STATIC;
701 if (METHOD_SYNCHRONIZED (decl))
702 flags |= ACC_SYNCHRONIZED;
703 if (METHOD_ABSTRACT (decl))
704 flags |= ACC_ABSTRACT;
705 if (METHOD_STRICTFP (decl))
706 flags |= ACC_STRICT;
707 }
708 if (isfield)
709 {
710 if (FIELD_STATIC (decl))
711 flags |= ACC_STATIC;
712 if (FIELD_VOLATILE (decl))
713 flags |= ACC_VOLATILE;
714 if (FIELD_TRANSIENT (decl))
715 flags |= ACC_TRANSIENT;
716 }
717 return flags;
718 }
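/* Illustrative example (not part of the original source): a field declared
   `public static final int X' accumulates
   ACC_PUBLIC | ACC_STATIC | ACC_FINAL, which is exactly the access_flags
   value written into the .class file for that field.  */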
719
720 /* Write the list of segments starting at CHUNKS to STREAM. */
721
722 static void
723 write_chunks (stream, chunks)
724 FILE* stream;
725 struct chunk *chunks;
726 {
727 for (; chunks != NULL; chunks = chunks->next)
728 fwrite (chunks->data, chunks->size, 1, stream);
729 }
730
731 /* Push a 1-word constant in the constant pool at the given INDEX.
732 (Caller is responsible for doing NOTE_PUSH.) */
733
734 static void
735 push_constant1 (index, state)
736 HOST_WIDE_INT index;
737 struct jcf_partial *state;
738 {
739 RESERVE (3);
740 if (index < 256)
741 {
742 OP1 (OPCODE_ldc);
743 OP1 (index);
744 }
745 else
746 {
747 OP1 (OPCODE_ldc_w);
748 OP2 (index);
749 }
750 }
751
752 /* Push a 2-word constant in the constant pool at the given INDEX.
753 (Caller is responsible for doing NOTE_PUSH.) */
754
755 static void
756 push_constant2 (index, state)
757 HOST_WIDE_INT index;
758 struct jcf_partial *state;
759 {
760 RESERVE (3);
761 OP1 (OPCODE_ldc2_w);
762 OP2 (index);
763 }
764
765 /* Push 32-bit integer constant on VM stack.
766 Caller is responsible for doing NOTE_PUSH. */
767
768 static void
769 push_int_const (i, state)
770 HOST_WIDE_INT i;
771 struct jcf_partial *state;
772 {
773 RESERVE(3);
774 if (i >= -1 && i <= 5)
775 OP1(OPCODE_iconst_0 + i);
776 else if (i >= -128 && i < 128)
777 {
778 OP1(OPCODE_bipush);
779 OP1(i);
780 }
781 else if (i >= -32768 && i < 32768)
782 {
783 OP1(OPCODE_sipush);
784 OP2(i);
785 }
786 else
787 {
788 i = find_constant1 (&state->cpool, CONSTANT_Integer,
789 (jword)(i & 0xFFFFFFFF));
790 push_constant1 (i, state);
791 }
792 }
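/* Illustrative examples (not part of the original source) of the encodings
   chosen above:

       i = 3       ->  iconst_3                    (1 byte)
       i = 100     ->  bipush 100                  (2 bytes)
       i = 1000    ->  sipush 1000                 (3 bytes)
       i = 100000  ->  ldc <CONSTANT_Integer>      (via push_constant1)  */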
793
794 static int
795 find_constant_wide (lo, hi, state)
796 HOST_WIDE_INT lo, hi;
797 struct jcf_partial *state;
798 {
799 HOST_WIDE_INT w1, w2;
800 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
801 return find_constant2 (&state->cpool, CONSTANT_Long,
802 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
803 }
804
805 /* Find or allocate a constant pool entry for the given VALUE.
806 Return the index in the constant pool. */
807
808 static int
809 find_constant_index (value, state)
810 tree value;
811 struct jcf_partial *state;
812 {
813 if (TREE_CODE (value) == INTEGER_CST)
814 {
815 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
816 return find_constant1 (&state->cpool, CONSTANT_Integer,
817 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
818 else
819 return find_constant_wide (TREE_INT_CST_LOW (value),
820 TREE_INT_CST_HIGH (value), state);
821 }
822 else if (TREE_CODE (value) == REAL_CST)
823 {
824 long words[2];
825 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
826 {
827 words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
828 return find_constant1 (&state->cpool, CONSTANT_Float,
829 (jword)words[0]);
830 }
831 else
832 {
833 etardouble (TREE_REAL_CST (value), words);
834 return find_constant2 (&state->cpool, CONSTANT_Double,
835 (jword)(words[1-FLOAT_WORDS_BIG_ENDIAN] &
836 0xFFFFFFFF),
837 (jword)(words[FLOAT_WORDS_BIG_ENDIAN] &
838 0xFFFFFFFF));
839 }
840 }
841 else if (TREE_CODE (value) == STRING_CST)
842 return find_string_constant (&state->cpool, value);
843
844 else
845 abort ();
846 }
847
848 /* Push 64-bit long constant on VM stack.
849 Caller is responsible for doing NOTE_PUSH. */
850
851 static void
852 push_long_const (lo, hi, state)
853 HOST_WIDE_INT lo, hi;
854 struct jcf_partial *state;
855 {
856 HOST_WIDE_INT highpart, dummy;
857 jint lowpart = WORD_TO_INT (lo);
858
859 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
860
861 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
862 {
863 RESERVE(1);
864 OP1(OPCODE_lconst_0 + lowpart);
865 }
866 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
867 || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
868 {
869 push_int_const (lowpart, state);
870 RESERVE (1);
871 OP1 (OPCODE_i2l);
872 }
873 else
874 push_constant2 (find_constant_wide (lo, hi, state), state);
875 }
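/* Illustrative examples (not part of the original source):

       0L or 1L      ->  lconst_0 / lconst_1
       1000L         ->  sipush 1000; i2l          (avoids a pool entry)
       0x123456789L  ->  ldc2_w <CONSTANT_Long>    (via push_constant2)  */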
876
877 static void
878 field_op (field, opcode, state)
879 tree field;
880 int opcode;
881 struct jcf_partial *state;
882 {
883 int index = find_fieldref_index (&state->cpool, field);
884 RESERVE (3);
885 OP1 (opcode);
886 OP2 (index);
887 }
888
889 /* Returns an integer in the range 0 (for 'int') through 7 (for 'short'),
890 with 4 for an object reference, which matches the pattern of how JVM
891 opcodes typically depend on the operand type. */
892
893 static int
894 adjust_typed_op (type, max)
895 tree type;
896 int max;
897 {
898 switch (TREE_CODE (type))
899 {
900 case POINTER_TYPE:
901 case RECORD_TYPE: return 4;
902 case BOOLEAN_TYPE:
903 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
904 case CHAR_TYPE:
905 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
906 case INTEGER_TYPE:
907 switch (TYPE_PRECISION (type))
908 {
909 case 8: return max < 5 ? 0 : 5;
910 case 16: return max < 7 ? 0 : 7;
911 case 32: return 0;
912 case 64: return 1;
913 }
914 break;
915 case REAL_TYPE:
916 switch (TYPE_PRECISION (type))
917 {
918 case 32: return 2;
919 case 64: return 3;
920 }
921 break;
922 default:
923 break;
924 }
925 abort ();
926 }
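/* Illustrative note (not part of the original source): the result is the
   offset added to a "typed" base opcode.  With base OPCODE_iload:

       int -> 0 (iload)    long -> 1 (lload)    float -> 2 (fload)
       double -> 3 (dload)   reference -> 4 (aload)

   and with MAX == 7, as for the array opcodes, byte/boolean -> 5,
   char -> 6, short -> 7 (e.g. OPCODE_iaload + adjust_typed_op (type, 7)). */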
927
928 static void
929 maybe_wide (opcode, index, state)
930 int opcode, index;
931 struct jcf_partial *state;
932 {
933 if (index >= 256)
934 {
935 RESERVE (4);
936 OP1 (OPCODE_wide);
937 OP1 (opcode);
938 OP2 (index);
939 }
940 else
941 {
942 RESERVE (2);
943 OP1 (opcode);
944 OP1 (index);
945 }
946 }
947
948 /* Compile code to duplicate with offset, where
949 SIZE is the size of the stack item to duplicate (1 or 2), and
950 OFFSET is where to insert the result (must be 0, 1, or 2).
951 (The new words get inserted at stack[SP-size-offset].) */
952
953 static void
954 emit_dup (size, offset, state)
955 int size, offset;
956 struct jcf_partial *state;
957 {
958 int kind;
959 if (size == 0)
960 return;
961 RESERVE(1);
962 if (offset == 0)
963 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
964 else if (offset == 1)
965 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
966 else if (offset == 2)
967 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
968 else
969 abort();
970 OP1 (kind);
971 NOTE_PUSH (size);
972 }
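/* Illustrative stack pictures (not part of the original source),
   shown as before -> after:

       emit_dup (1, 0)   dup      ..., v1      ->  ..., v1, v1
       emit_dup (1, 1)   dup_x1   ..., v2, v1  ->  ..., v1, v2, v1
       emit_dup (2, 0)   dup2     ..., w1, w2  ->  ..., w1, w2, w1, w2  */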
973
974 static void
975 emit_pop (size, state)
976 int size;
977 struct jcf_partial *state;
978 {
979 RESERVE (1);
980 OP1 (OPCODE_pop - 1 + size);
981 }
982
983 static void
984 emit_iinc (var, value, state)
985 tree var;
986 HOST_WIDE_INT value;
987 struct jcf_partial *state;
988 {
989 int slot = DECL_LOCAL_INDEX (var);
990
991 if (value < -128 || value > 127 || slot >= 256)
992 {
993 RESERVE (6);
994 OP1 (OPCODE_wide);
995 OP1 (OPCODE_iinc);
996 OP2 (slot);
997 OP2 (value);
998 }
999 else
1000 {
1001 RESERVE (3);
1002 OP1 (OPCODE_iinc);
1003 OP1 (slot);
1004 OP1 (value);
1005 }
1006 }
1007
1008 static void
1009 emit_load_or_store (var, opcode, state)
1010 tree var; /* Variable to load from or store into. */
1011 int opcode; /* Either OPCODE_iload or OPCODE_istore. */
1012 struct jcf_partial *state;
1013 {
1014 tree type = TREE_TYPE (var);
1015 int kind = adjust_typed_op (type, 4);
1016 int index = DECL_LOCAL_INDEX (var);
1017 if (index <= 3)
1018 {
1019 RESERVE (1);
1020 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
1021 }
1022 else
1023 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
1024 }
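/* Illustrative examples (not part of the original source) for an `int'
   local (kind 0) loaded with OPCODE_iload:

       slot 2    ->  iload_2          (opcode + 5 + 4*kind + index)
       slot 5    ->  iload 5          (short form via maybe_wide)
       slot 300  ->  wide iload 300   (index >= 256)  */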
1025
1026 static void
1027 emit_load (var, state)
1028 tree var;
1029 struct jcf_partial *state;
1030 {
1031 emit_load_or_store (var, OPCODE_iload, state);
1032 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1033 }
1034
1035 static void
1036 emit_store (var, state)
1037 tree var;
1038 struct jcf_partial *state;
1039 {
1040 emit_load_or_store (var, OPCODE_istore, state);
1041 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1042 }
1043
1044 static void
1045 emit_unop (opcode, type, state)
1046 enum java_opcode opcode;
1047 tree type ATTRIBUTE_UNUSED;
1048 struct jcf_partial *state;
1049 {
1050 RESERVE(1);
1051 OP1 (opcode);
1052 }
1053
1054 static void
1055 emit_binop (opcode, type, state)
1056 enum java_opcode opcode;
1057 tree type;
1058 struct jcf_partial *state;
1059 {
1060 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1061 RESERVE(1);
1062 OP1 (opcode);
1063 NOTE_POP (size);
1064 }
1065
1066 static void
1067 emit_reloc (value, kind, target, state)
1068 HOST_WIDE_INT value;
1069 int kind;
1070 struct jcf_block *target;
1071 struct jcf_partial *state;
1072 {
1073 struct jcf_relocation *reloc = (struct jcf_relocation *)
1074 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1075 struct jcf_block *block = state->last_block;
1076 reloc->next = block->u.relocations;
1077 block->u.relocations = reloc;
1078 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1079 reloc->label = target;
1080 reloc->kind = kind;
1081 if (kind == 0 || kind == BLOCK_START_RELOC)
1082 OP4 (value);
1083 else if (kind != SWITCH_ALIGN_RELOC)
1084 OP2 (value);
1085 }
1086
1087 static void
1088 emit_switch_reloc (label, state)
1089 struct jcf_block *label;
1090 struct jcf_partial *state;
1091 {
1092 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1093 }
1094
1095 /* Similar to emit_switch_reloc,
1096 but re-uses an existing case reloc. */
1097
1098 static void
1099 emit_case_reloc (reloc, state)
1100 struct jcf_relocation *reloc;
1101 struct jcf_partial *state;
1102 {
1103 struct jcf_block *block = state->last_block;
1104 reloc->next = block->u.relocations;
1105 block->u.relocations = reloc;
1106 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1107 reloc->kind = BLOCK_START_RELOC;
1108 OP4 (0);
1109 }
1110
1111 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset.
1112 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1113
1114 static void
1115 emit_if (target, opcode, inv_opcode, state)
1116 struct jcf_block *target;
1117 int opcode, inv_opcode;
1118 struct jcf_partial *state;
1119 {
1120 RESERVE(3);
1121 OP1 (opcode);
1122 /* Value is 1 byte from reloc back to start of instruction. */
1123 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1124 }
1125
1126 static void
1127 emit_goto (target, state)
1128 struct jcf_block *target;
1129 struct jcf_partial *state;
1130 {
1131 RESERVE(3);
1132 OP1 (OPCODE_goto);
1133 /* Value is 1 byte from reloc back to start of instruction. */
1134 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1135 }
1136
1137 static void
1138 emit_jsr (target, state)
1139 struct jcf_block *target;
1140 struct jcf_partial *state;
1141 {
1142 RESERVE(3);
1143 OP1 (OPCODE_jsr);
1144 /* Value is 1 byte from reloc back to start of instruction. */
1145 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1146 }
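/* Illustrative note (not part of the original source): each of the three
   emitters above writes a 2-byte placeholder operand and records a
   relocation; the value 1 says the operand starts 1 byte after the opcode.
   If the final displacement does not fit in 16 bits, a branch with a wide
   form (goto_w, jsr_w) can simply be widened, while a conditional branch
   can be rewritten using the recorded inverted opcode around a goto_w
   (this is the `kind < -1' case budgeted for in finish_jcf_block).  */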
1147
1148 /* Generate code to evaluate EXP. If the result is true,
1149 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1150 TRUE_BRANCH_FIRST is a code generation hint that the
1151 TRUE_LABEL may follow right after this. (The idea is that we
1152 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1153
1154 static void
1155 generate_bytecode_conditional (exp, true_label, false_label,
1156 true_branch_first, state)
1157 tree exp;
1158 struct jcf_block *true_label;
1159 struct jcf_block *false_label;
1160 int true_branch_first;
1161 struct jcf_partial *state;
1162 {
1163 tree exp0, exp1, type;
1164 int save_SP = state->code_SP;
1165 enum java_opcode op, negop;
1166 switch (TREE_CODE (exp))
1167 {
1168 case INTEGER_CST:
1169 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1170 break;
1171 case COND_EXPR:
1172 {
1173 struct jcf_block *then_label = gen_jcf_label (state);
1174 struct jcf_block *else_label = gen_jcf_label (state);
1175 int save_SP_before, save_SP_after;
1176 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1177 then_label, else_label, 1, state);
1178 define_jcf_label (then_label, state);
1179 save_SP_before = state->code_SP;
1180 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1181 true_label, false_label, 1, state);
1182 save_SP_after = state->code_SP;
1183 state->code_SP = save_SP_before;
1184 define_jcf_label (else_label, state);
1185 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1186 true_label, false_label,
1187 true_branch_first, state);
1188 if (state->code_SP != save_SP_after)
1189 abort ();
1190 }
1191 break;
1192 case TRUTH_NOT_EXPR:
1193 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1194 true_label, ! true_branch_first, state);
1195 break;
1196 case TRUTH_ANDIF_EXPR:
1197 {
1198 struct jcf_block *next_label = gen_jcf_label (state);
1199 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1200 next_label, false_label, 1, state);
1201 define_jcf_label (next_label, state);
1202 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1203 true_label, false_label, 1, state);
1204 }
1205 break;
1206 case TRUTH_ORIF_EXPR:
1207 {
1208 struct jcf_block *next_label = gen_jcf_label (state);
1209 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1210 true_label, next_label, 1, state);
1211 define_jcf_label (next_label, state);
1212 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1213 true_label, false_label, 1, state);
1214 }
1215 break;
1216 compare_1:
1217 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1218 set it to the corresponding 1-operand if<COND> instruction. */
1219 op = op - 6;
1220 /* FALLTHROUGH */
1221 compare_2:
1222 /* The opcodes with their inverses are allocated in pairs.
1223 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1224 negop = (op & 1) ? op + 1 : op - 1;
1225 compare_2_ptr:
1226 if (true_branch_first)
1227 {
1228 emit_if (false_label, negop, op, state);
1229 emit_goto (true_label, state);
1230 }
1231 else
1232 {
1233 emit_if (true_label, op, negop, state);
1234 emit_goto (false_label, state);
1235 }
1236 break;
1237 case EQ_EXPR:
1238 op = OPCODE_if_icmpeq;
1239 goto compare;
1240 case NE_EXPR:
1241 op = OPCODE_if_icmpne;
1242 goto compare;
1243 case GT_EXPR:
1244 op = OPCODE_if_icmpgt;
1245 goto compare;
1246 case LT_EXPR:
1247 op = OPCODE_if_icmplt;
1248 goto compare;
1249 case GE_EXPR:
1250 op = OPCODE_if_icmpge;
1251 goto compare;
1252 case LE_EXPR:
1253 op = OPCODE_if_icmple;
1254 goto compare;
1255 compare:
1256 exp0 = TREE_OPERAND (exp, 0);
1257 exp1 = TREE_OPERAND (exp, 1);
1258 type = TREE_TYPE (exp0);
1259 switch (TREE_CODE (type))
1260 {
1261 int opf;
1262 case POINTER_TYPE: case RECORD_TYPE:
1263 switch (TREE_CODE (exp))
1264 {
1265 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1266 case NE_EXPR: op = OPCODE_if_acmpne; break;
1267 default: abort();
1268 }
1269 if (integer_zerop (exp1) || integer_zerop (exp0))
1270 {
1271 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1272 STACK_TARGET, state);
1273 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1274 negop = (op & 1) ? op - 1 : op + 1;
1275 NOTE_POP (1);
1276 goto compare_2_ptr;
1277 }
1278 generate_bytecode_insns (exp0, STACK_TARGET, state);
1279 generate_bytecode_insns (exp1, STACK_TARGET, state);
1280 NOTE_POP (2);
1281 goto compare_2;
1282 case REAL_TYPE:
1283 generate_bytecode_insns (exp0, STACK_TARGET, state);
1284 generate_bytecode_insns (exp1, STACK_TARGET, state);
1285 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1286 opf = OPCODE_fcmpg;
1287 else
1288 opf = OPCODE_fcmpl;
1289 if (TYPE_PRECISION (type) > 32)
1290 {
1291 opf += 2;
1292 NOTE_POP (4);
1293 }
1294 else
1295 NOTE_POP (2);
1296 RESERVE (1);
1297 OP1 (opf);
1298 goto compare_1;
1299 case INTEGER_TYPE:
1300 if (TYPE_PRECISION (type) > 32)
1301 {
1302 generate_bytecode_insns (exp0, STACK_TARGET, state);
1303 generate_bytecode_insns (exp1, STACK_TARGET, state);
1304 NOTE_POP (4);
1305 RESERVE (1);
1306 OP1 (OPCODE_lcmp);
1307 goto compare_1;
1308 }
1309 /* FALLTHROUGH */
1310 default:
1311 if (integer_zerop (exp1))
1312 {
1313 generate_bytecode_insns (exp0, STACK_TARGET, state);
1314 NOTE_POP (1);
1315 goto compare_1;
1316 }
1317 if (integer_zerop (exp0))
1318 {
1319 switch (op)
1320 {
1321 case OPCODE_if_icmplt:
1322 case OPCODE_if_icmpge:
1323 op += 2;
1324 break;
1325 case OPCODE_if_icmpgt:
1326 case OPCODE_if_icmple:
1327 op -= 2;
1328 break;
1329 default:
1330 break;
1331 }
1332 generate_bytecode_insns (exp1, STACK_TARGET, state);
1333 NOTE_POP (1);
1334 goto compare_1;
1335 }
1336 generate_bytecode_insns (exp0, STACK_TARGET, state);
1337 generate_bytecode_insns (exp1, STACK_TARGET, state);
1338 NOTE_POP (2);
1339 goto compare_2;
1340 }
1341
1342 default:
1343 generate_bytecode_insns (exp, STACK_TARGET, state);
1344 NOTE_POP (1);
1345 if (true_branch_first)
1346 {
1347 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1348 emit_goto (true_label, state);
1349 }
1350 else
1351 {
1352 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1353 emit_goto (false_label, state);
1354 }
1355 break;
1356 }
1357 if (save_SP != state->code_SP)
1358 abort ();
1359 }
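/* Illustrative example (not part of the original source): for `a < b' on
   ints with TRUE_BRANCH_FIRST set, the code above emits roughly

       iload a ; iload b
       if_icmpge  false_label       (the inverted opcode)
       goto       true_label

   so that, when TRUE_LABEL immediately follows, the trailing goto is the
   one the comment before this function hopes to optimize away.  */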
1360
1361 /* Call pending cleanups, i.e. those for surrounding TRY_FINALLY_EXPRs,
1362 but only as far out as LIMIT (since we are about to jump to the
1363 label that is LIMIT). */
1364
1365 static void
1366 call_cleanups (limit, state)
1367 struct jcf_block *limit;
1368 struct jcf_partial *state;
1369 {
1370 struct jcf_block *block = state->labeled_blocks;
1371 for (; block != limit; block = block->next)
1372 {
1373 if (block->pc == PENDING_CLEANUP_PC)
1374 emit_jsr (block, state);
1375 }
1376 }
1377
1378 static void
1379 generate_bytecode_return (exp, state)
1380 tree exp;
1381 struct jcf_partial *state;
1382 {
1383 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1384 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1385 int op;
1386 again:
1387 if (exp != NULL)
1388 {
1389 switch (TREE_CODE (exp))
1390 {
1391 case COMPOUND_EXPR:
1392 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1393 state);
1394 exp = TREE_OPERAND (exp, 1);
1395 goto again;
1396 case COND_EXPR:
1397 {
1398 struct jcf_block *then_label = gen_jcf_label (state);
1399 struct jcf_block *else_label = gen_jcf_label (state);
1400 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1401 then_label, else_label, 1, state);
1402 define_jcf_label (then_label, state);
1403 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1404 define_jcf_label (else_label, state);
1405 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
1406 }
1407 return;
1408 default:
1409 generate_bytecode_insns (exp,
1410 returns_void ? IGNORE_TARGET
1411 : STACK_TARGET, state);
1412 }
1413 }
1414 if (returns_void)
1415 {
1416 op = OPCODE_return;
1417 call_cleanups (NULL, state);
1418 }
1419 else
1420 {
1421 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1422 if (state->num_finalizers > 0)
1423 {
1424 if (state->return_value_decl == NULL_TREE)
1425 {
1426 state->return_value_decl
1427 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1428 localvar_alloc (state->return_value_decl, state);
1429 }
1430 emit_store (state->return_value_decl, state);
1431 call_cleanups (NULL, state);
1432 emit_load (state->return_value_decl, state);
1433 /* If we call localvar_free (state->return_value_decl, state),
1434 then we risk the saved decl being erroneously re-used in the
1435 finalizer. Instead, we keep the state->return_value_decl
1436 allocated through the rest of the method. This is not
1437 the greatest solution, but it is at least simple and safe. */
1438 }
1439 }
1440 RESERVE (1);
1441 OP1 (op);
1442 }
1443
1444 /* Generate bytecode for sub-expression EXP of METHOD.
1445 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1446
1447 static void
1448 generate_bytecode_insns (exp, target, state)
1449 tree exp;
1450 int target;
1451 struct jcf_partial *state;
1452 {
1453 tree type, arg;
1454 enum java_opcode jopcode;
1455 int op;
1456 HOST_WIDE_INT value;
1457 int post_op;
1458 int size;
1459 int offset;
1460
1461 if (exp == NULL && target == IGNORE_TARGET)
1462 return;
1463
1464 type = TREE_TYPE (exp);
1465
1466 switch (TREE_CODE (exp))
1467 {
1468 case BLOCK:
1469 if (BLOCK_EXPR_BODY (exp))
1470 {
1471 tree local;
1472 tree body = BLOCK_EXPR_BODY (exp);
1473 for (local = BLOCK_EXPR_DECLS (exp); local; )
1474 {
1475 tree next = TREE_CHAIN (local);
1476 localvar_alloc (local, state);
1477 local = next;
1478 }
1479 /* Avoid deep recursion for long blocks. */
1480 while (TREE_CODE (body) == COMPOUND_EXPR)
1481 {
1482 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1483 body = TREE_OPERAND (body, 1);
1484 }
1485 generate_bytecode_insns (body, target, state);
1486 for (local = BLOCK_EXPR_DECLS (exp); local; )
1487 {
1488 tree next = TREE_CHAIN (local);
1489 localvar_free (local, state);
1490 local = next;
1491 }
1492 }
1493 break;
1494 case COMPOUND_EXPR:
1495 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1496 /* Normally the first operand to a COMPOUND_EXPR must complete
1497 normally. However, in the special case of a do-while
1498 statement this is not necessarily the case. */
1499 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1500 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1501 break;
1502 case EXPR_WITH_FILE_LOCATION:
1503 {
1504 const char *saved_input_filename = input_filename;
1505 tree body = EXPR_WFL_NODE (exp);
1506 int saved_lineno = lineno;
1507 if (body == empty_stmt_node)
1508 break;
1509 input_filename = EXPR_WFL_FILENAME (exp);
1510 lineno = EXPR_WFL_LINENO (exp);
1511 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1512 && debug_info_level > DINFO_LEVEL_NONE)
1513 put_linenumber (lineno, state);
1514 generate_bytecode_insns (body, target, state);
1515 input_filename = saved_input_filename;
1516 lineno = saved_lineno;
1517 }
1518 break;
1519 case INTEGER_CST:
1520 if (target == IGNORE_TARGET) ; /* do nothing */
1521 else if (TREE_CODE (type) == POINTER_TYPE)
1522 {
1523 if (! integer_zerop (exp))
1524 abort();
1525 RESERVE(1);
1526 OP1 (OPCODE_aconst_null);
1527 NOTE_PUSH (1);
1528 }
1529 else if (TYPE_PRECISION (type) <= 32)
1530 {
1531 push_int_const (TREE_INT_CST_LOW (exp), state);
1532 NOTE_PUSH (1);
1533 }
1534 else
1535 {
1536 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1537 state);
1538 NOTE_PUSH (2);
1539 }
1540 break;
1541 case REAL_CST:
1542 {
1543 int prec = TYPE_PRECISION (type) >> 5;
1544 RESERVE(1);
1545 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1546 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1547 else if (real_onep (exp))
1548 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1549 /* FIXME Should also use fconst_2 for 2.0f.
1550 Also, should use iconst_2/ldc followed by i2f/i2d
1551 for other float/double when the value is a small integer. */
1552 else
1553 {
1554 offset = find_constant_index (exp, state);
1555 if (prec == 1)
1556 push_constant1 (offset, state);
1557 else
1558 push_constant2 (offset, state);
1559 }
1560 NOTE_PUSH (prec);
1561 }
1562 break;
1563 case STRING_CST:
1564 push_constant1 (find_string_constant (&state->cpool, exp), state);
1565 NOTE_PUSH (1);
1566 break;
1567 case VAR_DECL:
1568 if (TREE_STATIC (exp))
1569 {
1570 field_op (exp, OPCODE_getstatic, state);
1571 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1572 break;
1573 }
1574 /* ... fall through ... */
1575 case PARM_DECL:
1576 emit_load (exp, state);
1577 break;
1578 case NON_LVALUE_EXPR:
1579 case INDIRECT_REF:
1580 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1581 break;
1582 case ARRAY_REF:
1583 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1584 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1585 if (target != IGNORE_TARGET)
1586 {
1587 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1588 RESERVE(1);
1589 OP1 (jopcode);
1590 if (! TYPE_IS_WIDE (type))
1591 NOTE_POP (1);
1592 }
1593 break;
1594 case COMPONENT_REF:
1595 {
1596 tree obj = TREE_OPERAND (exp, 0);
1597 tree field = TREE_OPERAND (exp, 1);
1598 int is_static = FIELD_STATIC (field);
1599 generate_bytecode_insns (obj,
1600 is_static ? IGNORE_TARGET : target, state);
1601 if (target != IGNORE_TARGET)
1602 {
1603 if (DECL_NAME (field) == length_identifier_node && !is_static
1604 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1605 {
1606 RESERVE (1);
1607 OP1 (OPCODE_arraylength);
1608 }
1609 else
1610 {
1611 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1612 state);
1613 if (! is_static)
1614 NOTE_POP (1);
1615 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1616 }
1617 }
1618 }
1619 break;
1620 case TRUTH_ANDIF_EXPR:
1621 case TRUTH_ORIF_EXPR:
1622 case EQ_EXPR:
1623 case NE_EXPR:
1624 case GT_EXPR:
1625 case LT_EXPR:
1626 case GE_EXPR:
1627 case LE_EXPR:
1628 {
1629 struct jcf_block *then_label = gen_jcf_label (state);
1630 struct jcf_block *else_label = gen_jcf_label (state);
1631 struct jcf_block *end_label = gen_jcf_label (state);
1632 generate_bytecode_conditional (exp,
1633 then_label, else_label, 1, state);
1634 define_jcf_label (then_label, state);
1635 push_int_const (1, state);
1636 emit_goto (end_label, state);
1637 define_jcf_label (else_label, state);
1638 push_int_const (0, state);
1639 define_jcf_label (end_label, state);
1640 NOTE_PUSH (1);
1641 }
1642 break;
1643 case COND_EXPR:
1644 {
1645 struct jcf_block *then_label = gen_jcf_label (state);
1646 struct jcf_block *else_label = gen_jcf_label (state);
1647 struct jcf_block *end_label = gen_jcf_label (state);
1648 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1649 then_label, else_label, 1, state);
1650 define_jcf_label (then_label, state);
1651 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1652 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1653 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1654 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1655 emit_goto (end_label, state);
1656 define_jcf_label (else_label, state);
1657 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1658 define_jcf_label (end_label, state);
1659 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1660 if (TREE_TYPE (exp) != void_type_node)
1661 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1662 }
1663 break;
1664 case CASE_EXPR:
1665 {
1666 struct jcf_switch_state *sw_state = state->sw_state;
1667 struct jcf_relocation *reloc = (struct jcf_relocation *)
1668 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1669 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1670 reloc->kind = 0;
1671 reloc->label = get_jcf_label_here (state);
1672 reloc->offset = case_value;
1673 reloc->next = sw_state->cases;
1674 sw_state->cases = reloc;
1675 if (sw_state->num_cases == 0)
1676 {
1677 sw_state->min_case = case_value;
1678 sw_state->max_case = case_value;
1679 }
1680 else
1681 {
1682 if (case_value < sw_state->min_case)
1683 sw_state->min_case = case_value;
1684 if (case_value > sw_state->max_case)
1685 sw_state->max_case = case_value;
1686 }
1687 sw_state->num_cases++;
1688 }
1689 break;
1690 case DEFAULT_EXPR:
1691 state->sw_state->default_label = get_jcf_label_here (state);
1692 break;
1693
1694 case SWITCH_EXPR:
1695 {
1696 /* The SWITCH_EXPR has three parts, generated in the following order:
1697 1. the switch_expression (the value used to select the correct case);
1698 2. the switch_body;
1699 3. the switch_instruction (the tableswitch/lookupswitch instruction).
1700 After code generation, we will re-order them in the order 1, 3, 2.
1701 This is to avoid any extra GOTOs. */
1702 struct jcf_switch_state sw_state;
1703 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1704 struct jcf_block *body_last; /* Last block of the switch_body. */
1705 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1706 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1707 struct jcf_block *body_block;
1708 int switch_length;
1709 sw_state.prev = state->sw_state;
1710 state->sw_state = &sw_state;
1711 sw_state.cases = NULL;
1712 sw_state.num_cases = 0;
1713 sw_state.default_label = NULL;
1714 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1715 expression_last = state->last_block;
1716 /* Force a new block here. */
1717 body_block = gen_jcf_label (state);
1718 define_jcf_label (body_block, state);
1719 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1720 body_last = state->last_block;
1721
1722 switch_instruction = gen_jcf_label (state);
1723 define_jcf_label (switch_instruction, state);
1724 if (sw_state.default_label == NULL)
1725 sw_state.default_label = gen_jcf_label (state);
1726
1727 if (sw_state.num_cases <= 1)
1728 {
1729 if (sw_state.num_cases == 0)
1730 {
1731 emit_pop (1, state);
1732 NOTE_POP (1);
1733 }
1734 else
1735 {
1736 push_int_const (sw_state.cases->offset, state);
1737 NOTE_PUSH (1);
1738 emit_if (sw_state.cases->label,
1739 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1740 }
1741 emit_goto (sw_state.default_label, state);
1742 }
1743 else
1744 {
1745 HOST_WIDE_INT i;
1746 /* Copy the chain of relocs into a sorted array. */
1747 struct jcf_relocation **relocs = (struct jcf_relocation **)
1748 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1749 /* The relocs array is a buffer with a gap.
1750 The assumption is that cases will normally come in "runs". */
1751 int gap_start = 0;
1752 int gap_end = sw_state.num_cases;
1753 struct jcf_relocation *reloc;
1754 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1755 {
1756 HOST_WIDE_INT case_value = reloc->offset;
1757 while (gap_end < sw_state.num_cases)
1758 {
1759 struct jcf_relocation *end = relocs[gap_end];
1760 if (case_value <= end->offset)
1761 break;
1762 relocs[gap_start++] = end;
1763 gap_end++;
1764 }
1765 while (gap_start > 0)
1766 {
1767 struct jcf_relocation *before = relocs[gap_start-1];
1768 if (case_value >= before->offset)
1769 break;
1770 relocs[--gap_end] = before;
1771 gap_start--;
1772 }
1773 relocs[gap_start++] = reloc;
1774 /* Note we don't check for duplicates. This is
1775 handled by the parser. */
1776 }
1777
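/* Illustrative note (not part of the original source): the density test
   below picks tableswitch when a full table costs no more than roughly
   twice the number of cases.  E.g. cases {0, 2, 3, 5}: 2*4 = 8 >= 5 - 0,
   so a 6-entry tableswitch is used with default entries in the holes;
   cases {1, 10, 1000}: 2*3 = 6 < 999, so lookupswitch is used.  */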
1778 if (2 * sw_state.num_cases
1779 >= sw_state.max_case - sw_state.min_case)
1780 { /* Use tableswitch. */
1781 int index = 0;
1782 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1783 OP1 (OPCODE_tableswitch);
1784 emit_reloc (RELOCATION_VALUE_0,
1785 SWITCH_ALIGN_RELOC, NULL, state);
1786 emit_switch_reloc (sw_state.default_label, state);
1787 OP4 (sw_state.min_case);
1788 OP4 (sw_state.max_case);
1789 for (i = sw_state.min_case; ; )
1790 {
1791 reloc = relocs[index];
1792 if (i == reloc->offset)
1793 {
1794 emit_case_reloc (reloc, state);
1795 if (i == sw_state.max_case)
1796 break;
1797 index++;
1798 }
1799 else
1800 emit_switch_reloc (sw_state.default_label, state);
1801 i++;
1802 }
1803 }
1804 else
1805 { /* Use lookupswitch. */
1806 RESERVE(9 + 8 * sw_state.num_cases);
1807 OP1 (OPCODE_lookupswitch);
1808 emit_reloc (RELOCATION_VALUE_0,
1809 SWITCH_ALIGN_RELOC, NULL, state);
1810 emit_switch_reloc (sw_state.default_label, state);
1811 OP4 (sw_state.num_cases);
1812 for (i = 0; i < sw_state.num_cases; i++)
1813 {
1814 struct jcf_relocation *reloc = relocs[i];
1815 OP4 (reloc->offset);
1816 emit_case_reloc (reloc, state);
1817 }
1818 }
1819 free (relocs);
1820 }
1821
1822 instruction_last = state->last_block;
1823 if (sw_state.default_label->pc < 0)
1824 define_jcf_label (sw_state.default_label, state);
1825 else /* Force a new block. */
1826 sw_state.default_label = get_jcf_label_here (state);
1827 /* Now re-arrange the blocks so the switch_instruction
1828 comes before the switch_body. */
1829 switch_length = state->code_length - switch_instruction->pc;
1830 switch_instruction->pc = body_block->pc;
1831 instruction_last->next = body_block;
1832 instruction_last->v.chunk->next = body_block->v.chunk;
1833 expression_last->next = switch_instruction;
1834 expression_last->v.chunk->next = switch_instruction->v.chunk;
1835 body_last->next = sw_state.default_label;
1836 body_last->v.chunk->next = NULL;
1837 state->chunk = body_last->v.chunk;
1838 for (; body_block != sw_state.default_label; body_block = body_block->next)
1839 body_block->pc += switch_length;
1840
1841 state->sw_state = sw_state.prev;
1842 break;
1843 }
1844
1845 case RETURN_EXPR:
1846 exp = TREE_OPERAND (exp, 0);
1847 if (exp == NULL_TREE)
1848 exp = empty_stmt_node;
1849 else if (TREE_CODE (exp) != MODIFY_EXPR)
1850 abort ();
1851 else
1852 exp = TREE_OPERAND (exp, 1);
1853 generate_bytecode_return (exp, state);
1854 break;
1855 case LABELED_BLOCK_EXPR:
1856 {
1857 struct jcf_block *end_label = gen_jcf_label (state);
1858 end_label->next = state->labeled_blocks;
1859 state->labeled_blocks = end_label;
1860 end_label->pc = PENDING_EXIT_PC;
1861 end_label->u.labeled_block = exp;
1862 if (LABELED_BLOCK_BODY (exp))
1863 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1864 if (state->labeled_blocks != end_label)
1865 abort();
1866 state->labeled_blocks = end_label->next;
1867 define_jcf_label (end_label, state);
1868 }
1869 break;
1870 case LOOP_EXPR:
1871 {
1872 tree body = TREE_OPERAND (exp, 0);
1873 #if 0
1874 if (TREE_CODE (body) == COMPOUND_EXPR
1875 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1876 {
1877 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1878 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1879 struct jcf_block *head_label;
1880 struct jcf_block *body_label;
1881 struct jcf_block *end_label = gen_jcf_label (state);
1882 struct jcf_block *exit_label = state->labeled_blocks;
1883 head_label = gen_jcf_label (state);
1884 emit_goto (head_label, state);
1885 body_label = get_jcf_label_here (state);
1886 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1887 define_jcf_label (head_label, state);
1888 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1889 end_label, body_label, 1, state);
1890 define_jcf_label (end_label, state);
1891 }
1892 else
1893 #endif
1894 {
1895 struct jcf_block *head_label = get_jcf_label_here (state);
1896 generate_bytecode_insns (body, IGNORE_TARGET, state);
1897 if (CAN_COMPLETE_NORMALLY (body))
1898 emit_goto (head_label, state);
1899 }
1900 }
1901 break;
1902 case EXIT_EXPR:
1903 {
1904 struct jcf_block *label = state->labeled_blocks;
1905 struct jcf_block *end_label = gen_jcf_label (state);
1906 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1907 label, end_label, 0, state);
1908 define_jcf_label (end_label, state);
1909 }
1910 break;
1911 case EXIT_BLOCK_EXPR:
1912 {
1913 struct jcf_block *label = state->labeled_blocks;
1914 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1915 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1916 label = label->next;
1917 call_cleanups (label, state);
1918 emit_goto (label, state);
1919 }
1920 break;
1921
1922 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1923 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1924 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1925 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1926 increment:
1927
1928 arg = TREE_OPERAND (exp, 1);
1929 exp = TREE_OPERAND (exp, 0);
1930 type = TREE_TYPE (exp);
1931 size = TYPE_IS_WIDE (type) ? 2 : 1;
1932 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1933 && ! TREE_STATIC (exp)
1934 && TREE_CODE (type) == INTEGER_TYPE
1935 && TYPE_PRECISION (type) == 32)
1936 {
1937 if (target != IGNORE_TARGET && post_op)
1938 emit_load (exp, state);
1939 emit_iinc (exp, value, state);
1940 if (target != IGNORE_TARGET && ! post_op)
1941 emit_load (exp, state);
1942 break;
1943 }
1944 if (TREE_CODE (exp) == COMPONENT_REF)
1945 {
1946 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1947 emit_dup (1, 0, state);
1948 /* Stack: ..., objectref, objectref. */
1949 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1950 NOTE_PUSH (size-1);
1951 /* Stack: ..., objectref, oldvalue. */
1952 offset = 1;
1953 }
1954 else if (TREE_CODE (exp) == ARRAY_REF)
1955 {
1956 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1957 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1958 emit_dup (2, 0, state);
1959 /* Stack: ..., array, index, array, index. */
1960 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1961 RESERVE(1);
1962 OP1 (jopcode);
1963 NOTE_POP (2-size);
1964 /* Stack: ..., array, index, oldvalue. */
1965 offset = 2;
1966 }
1967 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1968 {
1969 generate_bytecode_insns (exp, STACK_TARGET, state);
1970 /* Stack: ..., oldvalue. */
1971 offset = 0;
1972 }
1973 else
1974 abort ();
1975
1976 if (target != IGNORE_TARGET && post_op)
1977 emit_dup (size, offset, state);
1978 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1979 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1980 /* Stack, otherwise: ..., [result, ] oldvalue. */
1981 generate_bytecode_insns (arg, STACK_TARGET, state);
1982 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1983 + adjust_typed_op (type, 3),
1984 type, state);
1985 if (target != IGNORE_TARGET && ! post_op)
1986 emit_dup (size, offset, state);
1987 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1988 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1989 /* Stack, otherwise: ..., [result, ] newvalue. */
1990 goto finish_assignment;
1991
1992 case MODIFY_EXPR:
1993 {
1994 tree lhs = TREE_OPERAND (exp, 0);
1995 tree rhs = TREE_OPERAND (exp, 1);
1996 int offset = 0;
1997
1998 /* See if we can use the iinc instruction. */
1999 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
2000 && ! TREE_STATIC (lhs)
2001 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
2002 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
2003 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
2004 {
2005 tree arg0 = TREE_OPERAND (rhs, 0);
2006 tree arg1 = TREE_OPERAND (rhs, 1);
2007 HOST_WIDE_INT min_value = -32768;
2008 HOST_WIDE_INT max_value = 32767;
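/* The increment must fit in iinc's signed 16-bit immediate (the wide
   form).  For a MINUS_EXPR the constant is negated below, so the
   usable range shifts up by one.  */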
2009 if (TREE_CODE (rhs) == MINUS_EXPR)
2010 {
2011 min_value++;
2012 max_value++;
2013 }
2014 else if (arg1 == lhs)
2015 {
2016 arg0 = arg1;
2017 arg1 = TREE_OPERAND (rhs, 0);
2018 }
2019 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
2020 {
2021 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
2022 value = TREE_INT_CST_LOW (arg1);
2023 if ((hi_value == 0 && value <= max_value)
2024 || (hi_value == -1 && value >= min_value))
2025 {
2026 if (TREE_CODE (rhs) == MINUS_EXPR)
2027 value = -value;
2028 emit_iinc (lhs, value, state);
2029 if (target != IGNORE_TARGET)
2030 emit_load (lhs, state);
2031 break;
2032 }
2033 }
2034 }
2035
2036 if (TREE_CODE (lhs) == COMPONENT_REF)
2037 {
2038 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2039 STACK_TARGET, state);
2040 offset = 1;
2041 }
2042 else if (TREE_CODE (lhs) == ARRAY_REF)
2043 {
2044 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2045 STACK_TARGET, state);
2046 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2047 STACK_TARGET, state);
2048 offset = 2;
2049 }
2050 else
2051 offset = 0;
2052
2053 /* If the rhs is a binary expression and its left operand is
2054 the same node as the lhs, then we have an OP= expression. In
2055 this case we must do some special processing. */
2056 if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
2057 && lhs == TREE_OPERAND (rhs, 0))
2058 {
2059 if (TREE_CODE (lhs) == COMPONENT_REF)
2060 {
2061 tree field = TREE_OPERAND (lhs, 1);
2062 if (! FIELD_STATIC (field))
2063 {
2064 /* Duplicate the object reference so we can get
2065 the field. */
2066 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
2067 NOTE_POP (1);
2068 }
2069 field_op (field, (FIELD_STATIC (field)
2070 ? OPCODE_getstatic
2071 : OPCODE_getfield),
2072 state);
2073
2074 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2075 }
2076 else if (TREE_CODE (lhs) == VAR_DECL
2077 || TREE_CODE (lhs) == PARM_DECL)
2078 {
2079 if (FIELD_STATIC (lhs))
2080 {
2081 field_op (lhs, OPCODE_getstatic, state);
2082 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2083 }
2084 else
2085 emit_load (lhs, state);
2086 }
2087 else if (TREE_CODE (lhs) == ARRAY_REF)
2088 {
2089 /* Duplicate the array and index, which are on the
2090 stack, so that we can load the old value. */
2091 emit_dup (2, 0, state);
2092 NOTE_POP (2);
2093 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2094 RESERVE (1);
2095 OP1 (jopcode);
2096 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2097 }
2098 else
2099 abort ();
2100
2101 /* Build an rhs whose left operand is NULL_TREE; the binop code
2102 below handles that case, since the old value is already on the stack. */
2103 rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
2104 NULL_TREE, TREE_OPERAND (rhs, 1));
2105 }
2106
2107 generate_bytecode_insns (rhs, STACK_TARGET, state);
2108 if (target != IGNORE_TARGET)
2109 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2110 exp = lhs;
2111 }
2112 /* FALLTHROUGH */
2113
2114 finish_assignment:
2115 if (TREE_CODE (exp) == COMPONENT_REF)
2116 {
2117 tree field = TREE_OPERAND (exp, 1);
2118 if (! FIELD_STATIC (field))
2119 NOTE_POP (1);
2120 field_op (field,
2121 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2122 state);
2123
2124 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2125 }
2126 else if (TREE_CODE (exp) == VAR_DECL
2127 || TREE_CODE (exp) == PARM_DECL)
2128 {
2129 if (FIELD_STATIC (exp))
2130 {
2131 field_op (exp, OPCODE_putstatic, state);
2132 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2133 }
2134 else
2135 emit_store (exp, state);
2136 }
2137 else if (TREE_CODE (exp) == ARRAY_REF)
2138 {
2139 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2140 RESERVE (1);
2141 OP1 (jopcode);
2142 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2143 }
2144 else
2145 abort ();
2146 break;
2147 case PLUS_EXPR:
2148 jopcode = OPCODE_iadd;
2149 goto binop;
2150 case MINUS_EXPR:
2151 jopcode = OPCODE_isub;
2152 goto binop;
2153 case MULT_EXPR:
2154 jopcode = OPCODE_imul;
2155 goto binop;
2156 case TRUNC_DIV_EXPR:
2157 case RDIV_EXPR:
2158 jopcode = OPCODE_idiv;
2159 goto binop;
2160 case TRUNC_MOD_EXPR:
2161 jopcode = OPCODE_irem;
2162 goto binop;
2163 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2164 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2165 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2166 case TRUTH_AND_EXPR:
2167 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2168 case TRUTH_OR_EXPR:
2169 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2170 case TRUTH_XOR_EXPR:
2171 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2172 binop:
2173 {
2174 tree arg0 = TREE_OPERAND (exp, 0);
2175 tree arg1 = TREE_OPERAND (exp, 1);
2176 jopcode += adjust_typed_op (type, 3);
2177 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2178 {
2179 /* fold may (e.g) convert 2*x to x+x. */
2180 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2181 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2182 }
2183 else
2184 {
2185 /* ARG0 will be NULL_TREE if we're handling an `OP='
2186 expression. In this case the stack already holds the
2187 LHS. See the MODIFY_EXPR case. */
2188 if (arg0 != NULL_TREE)
2189 generate_bytecode_insns (arg0, target, state);
2190 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2191 arg1 = convert (int_type_node, arg1);
2192 generate_bytecode_insns (arg1, target, state);
2193 }
2194 /* For most binary operations, both operands and the result have the
2195 same type. Shift operations are different. Using arg1's type
2196 gets us the correct SP adjustment in all cases. */
2197 if (target == STACK_TARGET)
2198 emit_binop (jopcode, TREE_TYPE (arg1), state);
2199 break;
2200 }
2201 case TRUTH_NOT_EXPR:
2202 case BIT_NOT_EXPR:
2203 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2204 if (target == STACK_TARGET)
2205 {
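/* The JVM has no "not" instruction: BIT_NOT_EXPR is implemented by
   xoring with -1 and TRUTH_NOT_EXPR by xoring with 1, widening the
   constant to long first when the operand is 64 bits.  */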
2206 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2207 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2208 RESERVE (2);
2209 if (is_long)
2210 OP1 (OPCODE_i2l);
2211 NOTE_PUSH (1 + is_long);
2212 OP1 (OPCODE_ixor + is_long);
2213 NOTE_POP (1 + is_long);
2214 }
2215 break;
2216 case NEGATE_EXPR:
2217 jopcode = OPCODE_ineg;
2218 jopcode += adjust_typed_op (type, 3);
2219 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2220 if (target == STACK_TARGET)
2221 emit_unop (jopcode, type, state);
2222 break;
2223 case INSTANCEOF_EXPR:
2224 {
2225 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2226 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2227 RESERVE (3);
2228 OP1 (OPCODE_instanceof);
2229 OP2 (index);
2230 }
2231 break;
2232 case SAVE_EXPR:
2233 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2234 break;
2235 case CONVERT_EXPR:
2236 case NOP_EXPR:
2237 case FLOAT_EXPR:
2238 case FIX_TRUNC_EXPR:
2239 {
2240 tree src = TREE_OPERAND (exp, 0);
2241 tree src_type = TREE_TYPE (src);
2242 tree dst_type = TREE_TYPE (exp);
2243 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2244 if (target == IGNORE_TARGET || src_type == dst_type)
2245 break;
2246 if (TREE_CODE (dst_type) == POINTER_TYPE)
2247 {
2248 if (TREE_CODE (exp) == CONVERT_EXPR)
2249 {
2250 int index = find_class_constant (&state->cpool,
2251 TREE_TYPE (dst_type));
2252 RESERVE (3);
2253 OP1 (OPCODE_checkcast);
2254 OP2 (index);
2255 }
2256 }
2257 else /* Convert numeric types. */
2258 {
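/* The JVM numeric conversion opcodes are contiguous (i2f, i2d, ...
   l2f, l2d, ... f2i, f2l, f2d, d2i, d2l, d2f), so the right one can
   be computed by adding the source/destination widths to a base
   opcode.  */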
2259 int wide_src = TYPE_PRECISION (src_type) > 32;
2260 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2261 NOTE_POP (1 + wide_src);
2262 RESERVE (1);
2263 if (TREE_CODE (dst_type) == REAL_TYPE)
2264 {
2265 if (TREE_CODE (src_type) == REAL_TYPE)
2266 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2267 else if (TYPE_PRECISION (src_type) == 64)
2268 OP1 (OPCODE_l2f + wide_dst);
2269 else
2270 OP1 (OPCODE_i2f + wide_dst);
2271 }
2272 else /* Convert to integral type. */
2273 {
2274 if (TREE_CODE (src_type) == REAL_TYPE)
2275 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2276 else if (wide_dst)
2277 OP1 (OPCODE_i2l);
2278 else if (wide_src)
2279 OP1 (OPCODE_l2i);
2280 if (TYPE_PRECISION (dst_type) < 32)
2281 {
2282 RESERVE (1);
2283 /* Already converted to int, if needed. */
2284 if (TYPE_PRECISION (dst_type) <= 8)
2285 OP1 (OPCODE_i2b);
2286 else if (TREE_UNSIGNED (dst_type))
2287 OP1 (OPCODE_i2c);
2288 else
2289 OP1 (OPCODE_i2s);
2290 }
2291 }
2292 NOTE_PUSH (1 + wide_dst);
2293 }
2294 }
2295 break;
2296
2297 case TRY_EXPR:
2298 {
2299 tree try_clause = TREE_OPERAND (exp, 0);
2300 struct jcf_block *start_label = get_jcf_label_here (state);
2301 struct jcf_block *end_label; /* End of try clause. */
2302 struct jcf_block *finished_label = gen_jcf_label (state);
2303 tree clause = TREE_OPERAND (exp, 1);
2304 if (target != IGNORE_TARGET)
2305 abort ();
2306 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2307 end_label = get_jcf_label_here (state);
2308 if (end_label == start_label)
2309 break;
2310 if (CAN_COMPLETE_NORMALLY (try_clause))
2311 emit_goto (finished_label, state);
2312 while (clause != NULL_TREE)
2313 {
2314 tree catch_clause = TREE_OPERAND (clause, 0);
2315 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2316 struct jcf_handler *handler = alloc_handler (start_label,
2317 end_label, state);
2318 if (exception_decl == NULL_TREE)
2319 handler->type = NULL_TREE;
2320 else
2321 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2322 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2323 clause = TREE_CHAIN (clause);
2324 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2325 emit_goto (finished_label, state);
2326 }
2327 define_jcf_label (finished_label, state);
2328 }
2329 break;
2330
2331 case TRY_FINALLY_EXPR:
2332 {
2333 struct jcf_block *finished_label = NULL;
2334 struct jcf_block *finally_label, *start_label, *end_label;
2335 struct jcf_handler *handler;
2336 tree try_block = TREE_OPERAND (exp, 0);
2337 tree finally = TREE_OPERAND (exp, 1);
2338 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2339
2340 tree exception_type;
2341
2342 finally_label = gen_jcf_label (state);
2343 start_label = get_jcf_label_here (state);
2344 /* If the `finally' clause can complete normally, we emit it
2345 as a subroutine and let the other clauses call it via
2346 `jsr'. If it can't complete normally, then we simply emit
2347 `goto's directly to it. */
2348 if (CAN_COMPLETE_NORMALLY (finally))
2349 {
2350 finally_label->pc = PENDING_CLEANUP_PC;
2351 finally_label->next = state->labeled_blocks;
2352 state->labeled_blocks = finally_label;
2353 state->num_finalizers++;
2354 }
2355
2356 generate_bytecode_insns (try_block, target, state);
2357
2358 if (CAN_COMPLETE_NORMALLY (finally))
2359 {
2360 if (state->labeled_blocks != finally_label)
2361 abort();
2362 state->labeled_blocks = finally_label->next;
2363 }
2364 end_label = get_jcf_label_here (state);
2365
2366 if (end_label == start_label)
2367 {
2368 state->num_finalizers--;
2369 define_jcf_label (finally_label, state);
2370 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2371 break;
2372 }
2373
2374 if (CAN_COMPLETE_NORMALLY (finally))
2375 {
2376 return_link = build_decl (VAR_DECL, NULL_TREE,
2377 return_address_type_node);
2378 finished_label = gen_jcf_label (state);
2379 }
2380
2381 if (CAN_COMPLETE_NORMALLY (try_block))
2382 {
2383 if (CAN_COMPLETE_NORMALLY (finally))
2384 {
2385 emit_jsr (finally_label, state);
2386 emit_goto (finished_label, state);
2387 }
2388 else
2389 emit_goto (finally_label, state);
2390 }
2391
2392 /* Handle exceptions. */
2393
2394 exception_type = build_pointer_type (throwable_type_node);
2395 if (CAN_COMPLETE_NORMALLY (finally))
2396 {
2397 /* We're going to generate a subroutine, so we'll need to
2398 save and restore the exception around the `jsr'. */
2399 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2400 localvar_alloc (return_link, state);
2401 }
2402 handler = alloc_handler (start_label, end_label, state);
2403 handler->type = NULL_TREE;
2404 if (CAN_COMPLETE_NORMALLY (finally))
2405 {
2406 localvar_alloc (exception_decl, state);
2407 NOTE_PUSH (1);
2408 emit_store (exception_decl, state);
2409 emit_jsr (finally_label, state);
2410 emit_load (exception_decl, state);
2411 RESERVE (1);
2412 OP1 (OPCODE_athrow);
2413 NOTE_POP (1);
2414 }
2415 else
2416 {
2417 /* We're not generating a subroutine. In this case we can
2418 simply have the exception handler pop the exception and
2419 then fall through to the `finally' block. */
2420 NOTE_PUSH (1);
2421 emit_pop (1, state);
2422 NOTE_POP (1);
2423 }
2424
2425 /* The finally block. If we're generating a subroutine, first
2426 save return PC into return_link. Otherwise, just generate
2427 the code for the `finally' block. */
2428 define_jcf_label (finally_label, state);
2429 if (CAN_COMPLETE_NORMALLY (finally))
2430 {
2431 NOTE_PUSH (1);
2432 emit_store (return_link, state);
2433 }
2434
2435 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2436 if (CAN_COMPLETE_NORMALLY (finally))
2437 {
2438 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2439 localvar_free (exception_decl, state);
2440 localvar_free (return_link, state);
2441 define_jcf_label (finished_label, state);
2442 }
2443 }
2444 break;
2445 case THROW_EXPR:
2446 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2447 RESERVE (1);
2448 OP1 (OPCODE_athrow);
2449 break;
2450 case NEW_ARRAY_INIT:
2451 {
2452 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2453 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2454 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2455 HOST_WIDE_INT length = java_array_type_length (array_type);
2456 if (target == IGNORE_TARGET)
2457 {
2458 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2459 generate_bytecode_insns (TREE_VALUE (values), target, state);
2460 break;
2461 }
2462 push_int_const (length, state);
2463 NOTE_PUSH (1);
2464 RESERVE (3);
2465 if (JPRIMITIVE_TYPE_P (element_type))
2466 {
2467 int atype = encode_newarray_type (element_type);
2468 OP1 (OPCODE_newarray);
2469 OP1 (atype);
2470 }
2471 else
2472 {
2473 int index = find_class_constant (&state->cpool,
2474 TREE_TYPE (element_type));
2475 OP1 (OPCODE_anewarray);
2476 OP2 (index);
2477 }
2478 offset = 0;
2479 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2480 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2481 {
2482 int save_SP = state->code_SP;
2483 emit_dup (1, 0, state);
2484 push_int_const (offset, state);
2485 NOTE_PUSH (1);
2486 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2487 RESERVE (1);
2488 OP1 (jopcode);
2489 state->code_SP = save_SP;
2490 }
2491 }
2492 break;
2493 case JAVA_EXC_OBJ_EXPR:
2494 NOTE_PUSH (1); /* Pushed by exception system. */
2495 break;
2496 case NEW_CLASS_EXPR:
2497 {
2498 tree class = TREE_TYPE (TREE_TYPE (exp));
2499 int need_result = target != IGNORE_TARGET;
2500 int index = find_class_constant (&state->cpool, class);
2501 RESERVE (4);
2502 OP1 (OPCODE_new);
2503 OP2 (index);
2504 if (need_result)
2505 OP1 (OPCODE_dup);
2506 NOTE_PUSH (1 + need_result);
2507 }
2508 /* ... fall through ... */
2509 case CALL_EXPR:
2510 {
2511 tree f = TREE_OPERAND (exp, 0);
2512 tree x = TREE_OPERAND (exp, 1);
2513 int save_SP = state->code_SP;
2514 int nargs;
2515 if (TREE_CODE (f) == ADDR_EXPR)
2516 f = TREE_OPERAND (f, 0);
2517 if (f == soft_newarray_node)
2518 {
2519 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2520 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2521 STACK_TARGET, state);
2522 RESERVE (2);
2523 OP1 (OPCODE_newarray);
2524 OP1 (type_code);
2525 break;
2526 }
2527 else if (f == soft_multianewarray_node)
2528 {
2529 int ndims;
2530 int idim;
2531 int index = find_class_constant (&state->cpool,
2532 TREE_TYPE (TREE_TYPE (exp)));
2533 x = TREE_CHAIN (x); /* Skip class argument. */
2534 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2535 for (idim = ndims; --idim >= 0; )
2536 {
2537 x = TREE_CHAIN (x);
2538 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2539 }
2540 RESERVE (4);
2541 OP1 (OPCODE_multianewarray);
2542 OP2 (index);
2543 OP1 (ndims);
2544 break;
2545 }
2546 else if (f == soft_anewarray_node)
2547 {
2548 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2549 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2550 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2551 RESERVE (3);
2552 OP1 (OPCODE_anewarray);
2553 OP2 (index);
2554 break;
2555 }
2556 else if (f == soft_monitorenter_node
2557 || f == soft_monitorexit_node
2558 || f == throw_node)
2559 {
2560 if (f == soft_monitorenter_node)
2561 op = OPCODE_monitorenter;
2562 else if (f == soft_monitorexit_node)
2563 op = OPCODE_monitorexit;
2564 else
2565 op = OPCODE_athrow;
2566 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2567 RESERVE (1);
2568 OP1 (op);
2569 NOTE_POP (1);
2570 break;
2571 }
2572 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2573 {
2574 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2575 }
2576 nargs = state->code_SP - save_SP;
2577 state->code_SP = save_SP;
2578 if (f == soft_fmod_node)
2579 {
2580 RESERVE (1);
2581 OP1 (OPCODE_drem);
2582 NOTE_PUSH (2);
2583 break;
2584 }
2585 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2586 NOTE_POP (1); /* Pop implicit this. */
2587 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2588 {
2589 tree context = DECL_CONTEXT (f);
2590 int index, interface = 0;
2591 RESERVE (5);
2592 if (METHOD_STATIC (f))
2593 OP1 (OPCODE_invokestatic);
2594 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2595 || METHOD_PRIVATE (f))
2596 OP1 (OPCODE_invokespecial);
2597 else
2598 {
2599 if (CLASS_INTERFACE (TYPE_NAME (context)))
2600 {
2601 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2602 context = TREE_TYPE (TREE_TYPE (arg1));
2603 if (CLASS_INTERFACE (TYPE_NAME (context)))
2604 interface = 1;
2605 }
2606 if (interface)
2607 OP1 (OPCODE_invokeinterface);
2608 else
2609 OP1 (OPCODE_invokevirtual);
2610 }
2611 index = find_methodref_with_class_index (&state->cpool, f, context);
2612 OP2 (index);
2613 if (interface)
2614 {
2615 if (nargs <= 0)
2616 abort ();
2617
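/* invokeinterface carries an explicit count of argument words
   (including the `this' reference) followed by a byte that must be
   zero.  */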
2618 OP1 (nargs);
2619 OP1 (0);
2620 }
2621 f = TREE_TYPE (TREE_TYPE (f));
2622 if (TREE_CODE (f) != VOID_TYPE)
2623 {
2624 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2625 if (target == IGNORE_TARGET)
2626 emit_pop (size, state);
2627 else
2628 NOTE_PUSH (size);
2629 }
2630 break;
2631 }
2632 }
2633 /* fall through */
2634 notimpl:
2635 default:
2636 error("internal error in generate_bytecode_insns - tree code not implemented: %s",
2637 tree_code_name [(int) TREE_CODE (exp)]);
2638 }
2639 }
2640
2641 static void
2642 perform_relocations (state)
2643 struct jcf_partial *state;
2644 {
2645 struct jcf_block *block;
2646 struct jcf_relocation *reloc;
2647 int pc;
2648 int shrink;
2649
2650 /* Before we start, the pc field of each block is an upper bound on
2651 the block's start pc (it may be less, if previous blocks need less
2652 than their maximum).
2653
2654 The minimum size of each block is in the block's chunk->size. */
2655
2656 /* First, figure out the actual locations of each block. */
2657 pc = 0;
2658 shrink = 0;
2659 for (block = state->blocks; block != NULL; block = block->next)
2660 {
2661 int block_size = block->v.chunk->size;
2662
2663 block->pc = pc;
2664
2665 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2666 Assumes relocations are in reverse order. */
2667 reloc = block->u.relocations;
2668 while (reloc != NULL
2669 && reloc->kind == OPCODE_goto_w
2670 && reloc->label->pc == block->next->pc
2671 && reloc->offset + 2 == block_size)
2672 {
2673 reloc = reloc->next;
2674 block->u.relocations = reloc;
2675 block->v.chunk->size -= 3;
2676 block_size -= 3;
2677 shrink += 3;
2678 }
2679
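/* Size the branch relocations.  A kind greater than BLOCK_START_RELOC
   is a goto or jsr, which needs 2 extra bytes if it must be widened
   to goto_w/jsr_w; a kind less than -1 is a conditional branch, which
   needs 5 extra bytes to become the inverted test around a goto_w.
   A relocation whose 16-bit offset turns out to suffice has its kind
   set to -1.  */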
2680 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2681 {
2682 if (reloc->kind == SWITCH_ALIGN_RELOC)
2683 {
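/* The operands of tableswitch/lookupswitch must start on a 4-byte
   boundary relative to the start of the method's code, so account
   for the alignment padding here.  */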
2684 /* We assume this is the first relocation in this block,
2685 so we know its final pc. */
2686 int where = pc + reloc->offset;
2687 int pad = ((where + 3) & ~3) - where;
2688 block_size += pad;
2689 }
2690 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2691 {
2692 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2693 int expand = reloc->kind > 0 ? 2 : 5;
2694
2695 if (delta > 0)
2696 delta -= shrink;
2697 if (delta >= -32768 && delta <= 32767)
2698 {
2699 shrink += expand;
2700 reloc->kind = -1;
2701 }
2702 else
2703 block_size += expand;
2704 }
2705 }
2706 pc += block_size;
2707 }
2708
2709 for (block = state->blocks; block != NULL; block = block->next)
2710 {
2711 struct chunk *chunk = block->v.chunk;
2712 int old_size = chunk->size;
2713 int next_pc = block->next == NULL ? pc : block->next->pc;
2714 int new_size = next_pc - block->pc;
2715 unsigned char *new_ptr;
2716 unsigned char *old_buffer = chunk->data;
2717 unsigned char *old_ptr = old_buffer + old_size;
2718 if (new_size != old_size)
2719 {
2720 chunk->data = (unsigned char *)
2721 obstack_alloc (state->chunk_obstack, new_size);
2722 chunk->size = new_size;
2723 }
2724 new_ptr = chunk->data + new_size;
2725
2726 /* We do the relocations from back to front, because
2727 the relocations are in reverse order. */
2728 for (reloc = block->u.relocations; ; reloc = reloc->next)
2729 {
2730 /* new_ptr and old_ptr point into the old and new buffers,
2731 respectively. (If no relocations cause the buffer to
2732 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2733 The bytes at higher address have been copied and relocations
2734 handled; those at lower addresses remain to process. */
2735
2736 /* Lower old index of piece to be copied with no relocation.
2737 I.e. high index of the first piece that does need relocation. */
2738 int start = reloc == NULL ? 0
2739 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2740 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2741 ? reloc->offset + 4
2742 : reloc->offset + 2;
2743 int32 value;
2744 int new_offset;
2745 int n = (old_ptr - old_buffer) - start;
2746 new_ptr -= n;
2747 old_ptr -= n;
2748 if (n > 0)
2749 memcpy (new_ptr, old_ptr, n);
2750 if (old_ptr == old_buffer)
2751 break;
2752
2753 new_offset = new_ptr - chunk->data;
2754 new_offset -= (reloc->kind == -1 ? 2 : 4);
2755 if (reloc->kind == 0)
2756 {
2757 old_ptr -= 4;
2758 value = GET_u4 (old_ptr);
2759 }
2760 else if (reloc->kind == BLOCK_START_RELOC)
2761 {
2762 old_ptr -= 4;
2763 value = 0;
2764 new_offset = 0;
2765 }
2766 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2767 {
2768 int where = block->pc + reloc->offset;
2769 int pad = ((where + 3) & ~3) - where;
2770 while (--pad >= 0)
2771 *--new_ptr = 0;
2772 continue;
2773 }
2774 else
2775 {
2776 old_ptr -= 2;
2777 value = GET_u2 (old_ptr);
2778 }
2779 value += reloc->label->pc - (block->pc + new_offset);
2780 *--new_ptr = (unsigned char) value; value >>= 8;
2781 *--new_ptr = (unsigned char) value; value >>= 8;
2782 if (reloc->kind != -1)
2783 {
2784 *--new_ptr = (unsigned char) value; value >>= 8;
2785 *--new_ptr = (unsigned char) value;
2786 }
2787 if (reloc->kind > BLOCK_START_RELOC)
2788 {
2789 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2790 --old_ptr;
2791 *--new_ptr = reloc->kind;
2792 }
2793 else if (reloc->kind < -1)
2794 {
2795 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2796 --old_ptr;
2797 *--new_ptr = OPCODE_goto_w;
2798 *--new_ptr = 3;
2799 *--new_ptr = 0;
2800 *--new_ptr = - reloc->kind;
2801 }
2802 }
2803 if (new_ptr != chunk->data)
2804 abort ();
2805 }
2806 state->code_length = pc;
2807 }
2808
2809 static void
2810 init_jcf_state (state, work)
2811 struct jcf_partial *state;
2812 struct obstack *work;
2813 {
2814 state->chunk_obstack = work;
2815 state->first = state->chunk = NULL;
2816 CPOOL_INIT (&state->cpool);
2817 BUFFER_INIT (&state->localvars);
2818 BUFFER_INIT (&state->bytecode);
2819 }
2820
2821 static void
2822 init_jcf_method (state, method)
2823 struct jcf_partial *state;
2824 tree method;
2825 {
2826 state->current_method = method;
2827 state->blocks = state->last_block = NULL;
2828 state->linenumber_count = 0;
2829 state->first_lvar = state->last_lvar = NULL;
2830 state->lvar_count = 0;
2831 state->labeled_blocks = NULL;
2832 state->code_length = 0;
2833 BUFFER_RESET (&state->bytecode);
2834 BUFFER_RESET (&state->localvars);
2835 state->code_SP = 0;
2836 state->code_SP_max = 0;
2837 state->handlers = NULL;
2838 state->last_handler = NULL;
2839 state->num_handlers = 0;
2840 state->num_finalizers = 0;
2841 state->return_value_decl = NULL_TREE;
2842 }
2843
2844 static void
2845 release_jcf_state (state)
2846 struct jcf_partial *state;
2847 {
2848 CPOOL_FINISH (&state->cpool);
2849 obstack_free (state->chunk_obstack, state->first);
2850 }
2851
2852 /* Generate and return a list of chunks containing the class CLAS
2853 in the .class file representation. The list can be written to a
2854 .class file using write_chunks. Allocate chunks from obstack WORK. */
2855
2856 static struct chunk *
2857 generate_classfile (clas, state)
2858 tree clas;
2859 struct jcf_partial *state;
2860 {
2861 struct chunk *cpool_chunk;
2862 const char *source_file, *s;
2863 char *ptr;
2864 int i;
2865 char *fields_count_ptr;
2866 int fields_count = 0;
2867 char *methods_count_ptr;
2868 int methods_count = 0;
2869 static tree SourceFile_node = NULL_TREE;
2870 tree part;
2871 int total_supers
2872 = clas == object_type_node ? 0
2873 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2874
2875 ptr = append_chunk (NULL, 8, state);
2876 PUT4 (0xCafeBabe); /* Magic number */
2877 PUT2 (3); /* Minor version */
2878 PUT2 (45); /* Major version */
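/* 45.3 is the original class file format version, understood by
   JDK 1.0/1.1 virtual machines.  */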
2879
2880 append_chunk (NULL, 0, state);
2881 cpool_chunk = state->chunk;
2882
2883 /* Next allocate the chunk containing access_flags through fields_count. */
2884 if (clas == object_type_node)
2885 i = 10;
2886 else
2887 i = 8 + 2 * total_supers;
2888 ptr = append_chunk (NULL, i, state);
2889 i = get_access_flags (TYPE_NAME (clas));
2890 if (! (i & ACC_INTERFACE))
2891 i |= ACC_SUPER;
2892 PUT2 (i); /* access_flags */
2893 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2894 if (clas == object_type_node)
2895 {
2896 PUT2(0); /* super_class */
2897 PUT2(0); /* interfaces_count */
2898 }
2899 else
2900 {
2901 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2902 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2903 int j = find_class_constant (&state->cpool, base);
2904 PUT2 (j); /* super_class */
2905 PUT2 (total_supers - 1); /* interfaces_count */
2906 for (i = 1; i < total_supers; i++)
2907 {
2908 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2909 j = find_class_constant (&state->cpool, base);
2910 PUT2 (j);
2911 }
2912 }
2913 fields_count_ptr = ptr;
2914
2915 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2916 {
2917 int have_value, attr_count = 0;
2918 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2919 continue;
2920 ptr = append_chunk (NULL, 8, state);
2921 i = get_access_flags (part); PUT2 (i);
2922 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2923 i = find_utf8_constant (&state->cpool,
2924 build_java_signature (TREE_TYPE (part)));
2925 PUT2(i);
2926 have_value = DECL_INITIAL (part) != NULL_TREE
2927 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2928 && FIELD_FINAL (part)
2929 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2930 || TREE_TYPE (part) == string_ptr_type_node);
2931 if (have_value)
2932 attr_count++;
2933
2934 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
2935 attr_count++;
2936
2937 PUT2 (attr_count); /* attributes_count */
2938 if (have_value)
2939 {
2940 tree init = DECL_INITIAL (part);
2941 static tree ConstantValue_node = NULL_TREE;
2942 if (TREE_TYPE (part) != TREE_TYPE (init))
2943 fatal_error ("field initializer type mismatch");
2944 ptr = append_chunk (NULL, 8, state);
2945 if (ConstantValue_node == NULL_TREE)
2946 ConstantValue_node = get_identifier ("ConstantValue");
2947 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2948 PUT2 (i); /* attribute_name_index */
2949 PUT4 (2); /* attribute_length */
2950 i = find_constant_index (init, state); PUT2 (i);
2951 }
2952 /* Emit the "Synthetic" attribute for val$<x> and this$<n> fields. */
2953 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
2954 ptr = append_synthetic_attribute (state);
2955 fields_count++;
2956 }
2957 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2958
2959 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
2960 PUT2 (0);
2961
2962 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2963 {
2964 struct jcf_block *block;
2965 tree function_body = DECL_FUNCTION_BODY (part);
2966 tree body = function_body == NULL_TREE ? NULL_TREE
2967 : BLOCK_EXPR_BODY (function_body);
2968 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2969 : DECL_NAME (part);
2970 tree type = TREE_TYPE (part);
2971 tree save_function = current_function_decl;
2972 int synthetic_p = 0;
2973 current_function_decl = part;
2974 ptr = append_chunk (NULL, 8, state);
2975 i = get_access_flags (part); PUT2 (i);
2976 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2977 i = find_utf8_constant (&state->cpool, build_java_signature (type));
2978 PUT2 (i);
2979 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2980
2981 /* Make room for the Synthetic attribute (of zero length.) */
2982 if (DECL_FINIT_P (part)
2983 || DECL_INSTINIT_P (part)
2984 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
2985 || TYPE_DOT_CLASS (clas) == part)
2986 {
2987 i++;
2988 synthetic_p = 1;
2989 }
2990
2991 PUT2 (i); /* attributes_count */
2992
2993 if (synthetic_p)
2994 ptr = append_synthetic_attribute (state);
2995
2996 if (body != NULL_TREE)
2997 {
2998 int code_attributes_count = 0;
2999 static tree Code_node = NULL_TREE;
3000 tree t;
3001 char *attr_len_ptr;
3002 struct jcf_handler *handler;
3003 if (Code_node == NULL_TREE)
3004 Code_node = get_identifier ("Code");
3005 ptr = append_chunk (NULL, 14, state);
3006 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
3007 attr_len_ptr = ptr;
3008 init_jcf_method (state, part);
3009 get_jcf_label_here (state); /* Force a first block. */
3010 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3011 localvar_alloc (t, state);
3012 generate_bytecode_insns (body, IGNORE_TARGET, state);
3013 if (CAN_COMPLETE_NORMALLY (body))
3014 {
3015 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
3016 abort();
3017 RESERVE (1);
3018 OP1 (OPCODE_return);
3019 }
3020 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3021 localvar_free (t, state);
3022 if (state->return_value_decl != NULL_TREE)
3023 localvar_free (state->return_value_decl, state);
3024 finish_jcf_block (state);
3025 perform_relocations (state);
3026
3027 ptr = attr_len_ptr;
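/* Code attribute length: 8 bytes for max_stack, max_locals and
   code_length, then the bytecode itself, then 4 bytes for
   exception_table_length and attributes_count, plus 8 bytes per
   exception handler.  LineNumberTable and LocalVariableTable sizes
   are added below.  */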
3028 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
3029 if (state->linenumber_count > 0)
3030 {
3031 code_attributes_count++;
3032 i += 8 + 4 * state->linenumber_count;
3033 }
3034 if (state->lvar_count > 0)
3035 {
3036 code_attributes_count++;
3037 i += 8 + 10 * state->lvar_count;
3038 }
3039 UNSAFE_PUT4 (i); /* attribute_length */
3040 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3041 UNSAFE_PUT2 (localvar_max); /* max_locals */
3042 UNSAFE_PUT4 (state->code_length);
3043
3044 /* Emit the exception table. */
3045 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3046 PUT2 (state->num_handlers); /* exception_table_length */
3047 handler = state->handlers;
3048 for (; handler != NULL; handler = handler->next)
3049 {
3050 int type_index;
3051 PUT2 (handler->start_label->pc);
3052 PUT2 (handler->end_label->pc);
3053 PUT2 (handler->handler_label->pc);
3054 if (handler->type == NULL_TREE)
3055 type_index = 0;
3056 else
3057 type_index = find_class_constant (&state->cpool,
3058 handler->type);
3059 PUT2 (type_index);
3060 }
3061
3062 ptr = append_chunk (NULL, 2, state);
3063 PUT2 (code_attributes_count);
3064
3065 /* Write the LineNumberTable attribute. */
3066 if (state->linenumber_count > 0)
3067 {
3068 static tree LineNumberTable_node = NULL_TREE;
3069 ptr = append_chunk (NULL,
3070 8 + 4 * state->linenumber_count, state);
3071 if (LineNumberTable_node == NULL_TREE)
3072 LineNumberTable_node = get_identifier ("LineNumberTable");
3073 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3074 PUT2 (i); /* attribute_name_index */
3075 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3076 i = state->linenumber_count; PUT2 (i);
3077 for (block = state->blocks; block != NULL; block = block->next)
3078 {
3079 int line = block->linenumber;
3080 if (line > 0)
3081 {
3082 PUT2 (block->pc);
3083 PUT2 (line);
3084 }
3085 }
3086 }
3087
3088 /* Write the LocalVariableTable attribute. */
3089 if (state->lvar_count > 0)
3090 {
3091 static tree LocalVariableTable_node = NULL_TREE;
3092 struct localvar_info *lvar = state->first_lvar;
3093 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3094 if (LocalVariableTable_node == NULL_TREE)
3095 LocalVariableTable_node = get_identifier("LocalVariableTable");
3096 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3097 PUT2 (i); /* attribute_name_index */
3098 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3099 i = state->lvar_count; PUT2 (i);
3100 for ( ; lvar != NULL; lvar = lvar->next)
3101 {
3102 tree name = DECL_NAME (lvar->decl);
3103 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3104 i = lvar->start_label->pc; PUT2 (i);
3105 i = lvar->end_label->pc - i; PUT2 (i);
3106 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3107 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3108 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
3109 }
3110 }
3111 }
3112 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3113 {
3114 tree t = DECL_FUNCTION_THROWS (part);
3115 int throws_count = list_length (t);
3116 static tree Exceptions_node = NULL_TREE;
3117 if (Exceptions_node == NULL_TREE)
3118 Exceptions_node = get_identifier ("Exceptions");
3119 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3120 i = find_utf8_constant (&state->cpool, Exceptions_node);
3121 PUT2 (i); /* attribute_name_index */
3122 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3123 i = throws_count; PUT2 (i);
3124 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3125 {
3126 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3127 PUT2 (i);
3128 }
3129 }
3130 methods_count++;
3131 current_function_decl = save_function;
3132 }
3133 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
3134
3135 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3136 for (s = source_file; ; s++)
3137 {
3138 char ch = *s;
3139 if (ch == '\0')
3140 break;
3141 if (ch == '/' || ch == '\\')
3142 source_file = s+1;
3143 }
3144 ptr = append_chunk (NULL, 10, state);
3145
3146 i = 1; /* Source file always exists as an attribute */
3147 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3148 i++;
3149 if (clas == object_type_node)
3150 i++;
3151 PUT2 (i); /* attributes_count */
3152
3153 /* Generate the SourceFile attribute. */
3154 if (SourceFile_node == NULL_TREE)
3155 {
3156 SourceFile_node = get_identifier ("SourceFile");
3157 ggc_add_tree_root (&SourceFile_node, 1);
3158 }
3159
3160 i = find_utf8_constant (&state->cpool, SourceFile_node);
3161 PUT2 (i); /* attribute_name_index */
3162 PUT4 (2);
3163 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3164 PUT2 (i);
3165 append_gcj_attribute (state, clas);
3166 append_innerclasses_attribute (state, clas);
3167
3168 /* Now, finally, generate the contents of the constant pool chunk. */
3169 i = count_constant_pool_bytes (&state->cpool);
3170 ptr = obstack_alloc (state->chunk_obstack, i);
3171 cpool_chunk->data = ptr;
3172 cpool_chunk->size = i;
3173 write_constant_pool (&state->cpool, ptr, i);
3174 return state->first;
3175 }
3176
3177 static unsigned char *
3178 append_synthetic_attribute (state)
3179 struct jcf_partial *state;
3180 {
3181 static tree Synthetic_node = NULL_TREE;
3182 unsigned char *ptr = append_chunk (NULL, 6, state);
3183 int i;
3184
3185 if (Synthetic_node == NULL_TREE)
3186 {
3187 Synthetic_node = get_identifier ("Synthetic");
3188 ggc_add_tree_root (&Synthetic_node, 1);
3189 }
3190 i = find_utf8_constant (&state->cpool, Synthetic_node);
3191 PUT2 (i); /* Attribute string index */
3192 PUT4 (0); /* Attribute length */
3193
3194 return ptr;
3195 }
3196
3197 static void
3198 append_gcj_attribute (state, class)
3199 struct jcf_partial *state;
3200 tree class;
3201 {
3202 unsigned char *ptr;
3203 int i;
3204
3205 if (class != object_type_node)
3206 return;
3207
3208 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3209 i = find_utf8_constant (&state->cpool,
3210 get_identifier ("gnu.gcj.gcj-compiled"));
3211 PUT2 (i); /* Attribute string index */
3212 PUT4 (0); /* Attribute length */
3213 }
3214
3215 static void
3216 append_innerclasses_attribute (state, class)
3217 struct jcf_partial *state;
3218 tree class;
3219 {
3220 static tree InnerClasses_node = NULL_TREE;
3221 tree orig_decl = TYPE_NAME (class);
3222 tree current, decl;
3223 int length = 0, i;
3224 unsigned char *ptr, *length_marker, *number_marker;
3225
3226 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3227 return;
3228
3229 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3230
3231 if (InnerClasses_node == NULL_TREE)
3232 {
3233 InnerClasses_node = get_identifier ("InnerClasses");
3234 ggc_add_tree_root (&InnerClasses_node, 1);
3235 }
3236 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3237 PUT2 (i);
3238 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3239 number_marker = ptr; PUT2 (0); /* number of classes, to be later patched */
3240
3241 /* Generate the entries: all inner classes visible from the one we
3242 process: itself, up and down. */
3243 while (class && INNER_CLASS_TYPE_P (class))
3244 {
3245 const char *n;
3246
3247 decl = TYPE_NAME (class);
3248 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3249 IDENTIFIER_LENGTH (DECL_NAME (decl));
3250
3251 while (n[-1] != '$')
3252 n--;
3253 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3254 length++;
3255
3256 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3257 }
3258
3259 decl = orig_decl;
3260 for (current = DECL_INNER_CLASS_LIST (decl);
3261 current; current = TREE_CHAIN (current))
3262 {
3263 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3264 TREE_VALUE (current));
3265 length++;
3266 }
3267
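/* Patch the placeholders now that LENGTH is known: attribute_length
   is 2 bytes of number_of_classes plus 8 bytes per entry.  */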
3268 ptr = length_marker; PUT4 (8*length+2);
3269 ptr = number_marker; PUT2 (length);
3270 }
3271
3272 static void
3273 append_innerclasses_attribute_entry (state, decl, name)
3274 struct jcf_partial *state;
3275 tree decl, name;
3276 {
3277 int icii, icaf;
3278 int ocii = 0, ini = 0;
3279 unsigned char *ptr = append_chunk (NULL, 8, state);
3280
3281 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3282
3283 /* Sun's implementation seems to set ocii to 0 for inner
3284 classes (which aren't considered members of the class they're
3285 in). The specs say that if the class is anonymous,
3286 inner_name_index must be zero. */
3287 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3288 {
3289 ocii = find_class_constant (&state->cpool,
3290 TREE_TYPE (DECL_CONTEXT (decl)));
3291 ini = find_utf8_constant (&state->cpool, name);
3292 }
3293 icaf = get_access_flags (decl);
3294
3295 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
3296 }
3297
3298 static char *
3299 make_class_file_name (clas)
3300 tree clas;
3301 {
3302 const char *dname, *cname, *slash;
3303 char *r;
3304 struct stat sb;
3305
3306 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3307 "", '.', DIR_SEPARATOR,
3308 ".class"));
3309 if (jcf_write_base_directory == NULL)
3310 {
3311 /* Make sure we put the class file into the .java file's
3312 directory, and not into some subdirectory thereof. */
3313 char *t;
3314 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3315 slash = strrchr (dname, DIR_SEPARATOR);
3316 if (! slash)
3317 {
3318 dname = ".";
3319 slash = dname + 1;
3320 }
3321 t = strrchr (cname, DIR_SEPARATOR);
3322 if (t)
3323 cname = t + 1;
3324 }
3325 else
3326 {
3327 dname = jcf_write_base_directory;
3328 slash = dname + strlen (dname);
3329 }
3330
3331 r = xmalloc (slash - dname + strlen (cname) + 2);
3332 strncpy (r, dname, slash - dname);
3333 r[slash - dname] = DIR_SEPARATOR;
3334 strcpy (&r[slash - dname + 1], cname);
3335
3336 /* We try to make new directories when we need them. We only do
3337 this for directories which "might not" exist. For instance, we
3338 assume the `-d' directory exists, but we don't assume that any
3339 subdirectory below it exists. It might be worthwhile to keep
3340 track of which directories we've created to avoid gratuitous
3341 stat()s. */
3342 dname = r + (slash - dname) + 1;
3343 while (1)
3344 {
3345 char *s = strchr (dname, DIR_SEPARATOR);
3346 if (s == NULL)
3347 break;
3348 *s = '\0';
3349 if (stat (r, &sb) == -1
3350 /* Try to make it. */
3351 && mkdir (r, 0755) == -1)
3352 fatal_io_error ("can't create directory %s", r);
3353
3354 *s = DIR_SEPARATOR;
3355 /* Skip consecutive separators. */
3356 for (dname = s + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
3357 ;
3358 }
3359
3360 return r;
3361 }
3362
3363 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3364 The output .class file name is make_class_file_name(CLAS). */
3365
3366 void
3367 write_classfile (clas)
3368 tree clas;
3369 {
3370 struct obstack *work = &temporary_obstack;
3371 struct jcf_partial state[1];
3372 char *class_file_name = make_class_file_name (clas);
3373 struct chunk *chunks;
3374
3375 if (class_file_name != NULL)
3376 {
3377 FILE *stream = fopen (class_file_name, "wb");
3378 if (stream == NULL)
3379 fatal_io_error ("can't open %s for writing", class_file_name);
3380
3381 jcf_dependency_add_target (class_file_name);
3382 init_jcf_state (state, work);
3383 chunks = generate_classfile (clas, state);
3384 write_chunks (stream, chunks);
3385 if (fclose (stream))
3386 fatal_io_error ("error closing %s", class_file_name);
3387 free (class_file_name);
3388 }
3389 release_jcf_state (state);
3390 }
3391
3392 /* TODO:
3393 string concatenation
3394 synchronized statement
3395 */