]> gcc.gnu.org Git - gcc.git/blob - gcc/java/jcf-write.c
(RELOCATION_VALUE_1): Fixed integer value from 0 to 1.
[gcc.git] / gcc / java / jcf-write.c
1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
19
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "jcf.h"
27 #include "tree.h"
28 #include "java-tree.h"
29 #include "obstack.h"
30 #undef AND
31 #include "rtl.h"
32 #include "flags.h"
33 #include "java-opcodes.h"
34 #include "parse.h" /* for BLOCK_EXPR_BODY */
35 #include "buffer.h"
36 #include "toplev.h"
37
38 #ifndef DIR_SEPARATOR
39 #define DIR_SEPARATOR '/'
40 #endif
41
42 extern struct obstack temporary_obstack;
43
/* Base directory in which `.class' files should be written.
   NULL means to put the file into the same directory as the
   corresponding .java file.  (Deliberately not static: set from
   outside this file.) */
char *jcf_write_base_directory = NULL;
48
/* Make sure bytecode.data is big enough for at least N more bytes. */

#define RESERVE(N) \
  do { CHECK_OP(state); \
    if (state->bytecode.ptr + (N) > state->bytecode.limit) \
      buffer_grow (&state->bytecode, N); } while (0)

/* Add a 1-byte instruction/operand I to bytecode.data,
   assuming space has already been RESERVE'd. */

#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))

/* Like OP1, but I is a 2-byte big endian integer. */

#define OP2(I) \
  do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Like OP1, but I is a 4-byte big endian integer. */

#define OP4(I) \
  do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
       OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Macro to call each time we push I words on the JVM stack.
   Also tracks the high-water mark needed for the method's max_stack. */

#define NOTE_PUSH(I) \
  do { state->code_SP += (I); \
    if (state->code_SP > state->code_SP_max) \
      state->code_SP_max = state->code_SP; } while (0)

/* Macro to call each time we pop I words from the JVM stack.
   A negative stack depth means the generator is out of sync: abort. */

#define NOTE_POP(I) \
  do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
83
/* A chunk or segment of a .class file.  Chunks form a singly-linked
   list; the finished file is produced by writing them out in order
   (see write_chunks). */

struct chunk
{
  /* The next segment of this .class file. */
  struct chunk *next;

  /* The actual data in this segment to be written to the .class file. */
  unsigned char *data;

  /* The size of the segment to be written to the .class file. */
  int size;
};
97
/* Special negative pc values marking labels that are not yet defined. */
#define PENDING_CLEANUP_PC (-3)
#define PENDING_EXIT_PC (-2)
#define UNDEFINED_PC (-1)

/* Each "block" represents a label plus the bytecode instructions following.
   There may be branches out of the block, but no incoming jumps, except
   to the beginning of the block.

   If (pc < 0), the jcf_block is not an actual block (i.e. it has no
   associated code yet), but it is an undefined label.
*/

struct jcf_block
{
  /* For blocks that are defined, the next block (in pc order).
     For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
     or a cleanup expression (from a WITH_CLEANUP_EXPR),
     this is the next (outer) such end label, in a stack headed by
     labeled_blocks in jcf_partial. */
  struct jcf_block *next;

  /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
     pc is PENDING_EXIT_PC.
     In the not-yet-defined end label for pending cleanup subroutine,
     pc is PENDING_CLEANUP_PC.
     For other not-yet-defined labels, pc is UNDEFINED_PC.

     If the label has been defined:
     Until perform_relocations is finished, this is the maximum possible
     value of the bytecode offset at the beginning of this block.
     After perform_relocations, it is the actual offset (pc). */
  int pc;

  /* Source line number recorded for this block by put_linenumber,
     or -1 if none. */
  int linenumber;

  /* After finish_jcf_block is called, the actual instructions
     contained in this block.  Before then it is NULL, and the
     instructions are in state->bytecode. */
  union {
    struct chunk *chunk;

    /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
       covered by the cleanup. */
    struct jcf_block *start_label;
  } v;

  union {
    /* Set of relocations (in reverse offset order) for this block. */
    struct jcf_relocation *relocations;

    /* If this block is that of the not-yet-defined end label of
       a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
       If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
    tree labeled_block;
  } u;
};
153
/* A "relocation" type for the 0-3 bytes of padding at the start
   of a tableswitch or a lookupswitch. */
#define SWITCH_ALIGN_RELOC 4

/* A relocation type for the labels in a tableswitch or a lookupswitch;
   these are relative to the start of the instruction, but (due to
   the 0-3 bytes of padding), we don't know the offset before relocation. */
#define BLOCK_START_RELOC 1

struct jcf_relocation
{
  /* Next relocation for the current jcf_block. */
  struct jcf_relocation *next;

  /* The (byte) offset within the current block that needs to be relocated. */
  HOST_WIDE_INT offset;

  /* 0 if offset is a 4-byte relative offset.
     4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
     for proper alignment in tableswitch/lookupswitch instructions.
     1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
     to the start of the containing block.
     -1 if offset is a 2-byte relative offset.
     < -1 if offset is the address of an instruction with a 2-byte offset
     that does not have a corresponding 4-byte offset version, in which
     case the absolute value of kind is the inverted opcode.
     > 4 if offset is the address of an instruction (such as jsr) with a
     2-byte offset that does have a corresponding 4-byte offset version,
     in which case kind is the opcode of the 4-byte version (such as jsr_w). */
  int kind;

  /* The label the relocation wants to actually transfer to. */
  struct jcf_block *label;
};

/* Zero- and one-valued operands, in the type emit_reloc expects for
   its VALUE argument. */
#define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
#define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
191
/* State for a single catch clause. */

struct jcf_handler
{
  /* Next handler in the current method's chain. */
  struct jcf_handler *next;

  /* The code range (START_LABEL, END_LABEL) this handler protects,
     and the label at which the handler code itself begins. */
  struct jcf_block *start_label;
  struct jcf_block *end_label;
  struct jcf_block *handler_label;

  /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
  tree type;
};
205
/* State for the current switch statement. */

struct jcf_switch_state
{
  /* The enclosing switch's state, if this switch is nested. */
  struct jcf_switch_state *prev;
  struct jcf_block *default_label;

  /* Relocations for the case labels, plus the count and value range
     of the cases seen so far. */
  struct jcf_relocation *cases;
  int num_cases;
  HOST_WIDE_INT min_case, max_case;
};
217
/* This structure is used to contain the various pieces that will
   become a .class file. */

struct jcf_partial
{
  /* Head of the chunk list for the class file, and the chunk
     currently being appended to. */
  struct chunk *first;
  struct chunk *chunk;
  struct obstack *chunk_obstack;
  tree current_method;

  /* List of basic blocks for the current method. */
  struct jcf_block *blocks;
  struct jcf_block *last_block;

  /* Chain of local-variable debug records for the current method. */
  struct localvar_info *first_lvar;
  struct localvar_info *last_lvar;
  int lvar_count;

  /* The constant pool being built for this class. */
  CPool cpool;

  int linenumber_count;

  /* Until perform_relocations, this is an upper bound on the number
     of bytes (so far) in the instructions for the current method. */
  int code_length;

  /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
  struct jcf_block *labeled_blocks;

  /* The current stack size (stack pointer) in the current method. */
  int code_SP;

  /* The largest extent of stack size (stack pointer) in the current method. */
  int code_SP_max;

  /* Contains a mapping from local var slot number to localvar_info. */
  struct buffer localvars;

  /* The buffer allocated for bytecode for the current jcf_block. */
  struct buffer bytecode;

  /* Chain of exception handlers for the current method. */
  struct jcf_handler *handlers;

  /* Last element in handlers chain. */
  struct jcf_handler *last_handler;

  /* Number of exception handlers for the current method. */
  int num_handlers;

  /* Number of finalizers we are currently nested within. */
  int num_finalizers;

  /* If non-NULL, use this for the return value. */
  tree return_value_decl;

  /* Information about the current switch statement. */
  struct jcf_switch_state *sw_state;
};
277
278 static void generate_bytecode_insns PROTO ((tree, int, struct jcf_partial *));
279 static struct chunk * alloc_chunk PROTO ((struct chunk *, unsigned char *,
280 int, struct obstack *));
281 static unsigned char * append_chunk PROTO ((unsigned char *, int,
282 struct jcf_partial *));
283 static void append_chunk_copy PROTO ((unsigned char *, int,
284 struct jcf_partial *));
285 static struct jcf_block * gen_jcf_label PROTO ((struct jcf_partial *));
286 static void finish_jcf_block PROTO ((struct jcf_partial *));
287 static void define_jcf_label PROTO ((struct jcf_block *,
288 struct jcf_partial *));
289 static struct jcf_block * get_jcf_label_here PROTO ((struct jcf_partial *));
290 static void put_linenumber PROTO ((int, struct jcf_partial *));
291 static void localvar_alloc PROTO ((tree, struct jcf_partial *));
292 static void localvar_free PROTO ((tree, struct jcf_partial *));
293 static int get_access_flags PROTO ((tree));
294 static void write_chunks PROTO ((FILE *, struct chunk *));
295 static int adjust_typed_op PROTO ((tree, int));
296 static void generate_bytecode_conditional PROTO ((tree, struct jcf_block *,
297 struct jcf_block *, int,
298 struct jcf_partial *));
299 static void generate_bytecode_return PROTO ((tree, struct jcf_partial *));
300 static void perform_relocations PROTO ((struct jcf_partial *));
301 static void init_jcf_state PROTO ((struct jcf_partial *, struct obstack *));
302 static void init_jcf_method PROTO ((struct jcf_partial *, tree));
303 static void release_jcf_state PROTO ((struct jcf_partial *));
304 static struct chunk * generate_classfile PROTO ((tree, struct jcf_partial *));
305 static struct jcf_handler *alloc_handler PROTO ((struct jcf_block *,
306 struct jcf_block *,
307 struct jcf_partial *));
308 static void emit_iinc PROTO ((tree, HOST_WIDE_INT, struct jcf_partial *));
309 static void emit_reloc PROTO ((HOST_WIDE_INT, int, struct jcf_block *,
310 struct jcf_partial *));
311 static void push_constant1 PROTO ((HOST_WIDE_INT, struct jcf_partial *));
312 static void push_constant2 PROTO ((HOST_WIDE_INT, struct jcf_partial *));
313 static void push_int_const PROTO ((HOST_WIDE_INT, struct jcf_partial *));
314 static int find_constant_wide PROTO ((HOST_WIDE_INT, HOST_WIDE_INT,
315 struct jcf_partial *));
316 static void push_long_const PROTO ((HOST_WIDE_INT, HOST_WIDE_INT,
317 struct jcf_partial *));
318 static int find_constant_index PROTO ((tree, struct jcf_partial *));
319 static void push_long_const PROTO ((HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *));
321 static void field_op PROTO ((tree, int, struct jcf_partial *));
322 static void maybe_wide PROTO ((int, int, struct jcf_partial *));
323 static void emit_dup PROTO ((int, int, struct jcf_partial *));
324 static void emit_pop PROTO ((int, struct jcf_partial *));
325 static void emit_iinc PROTO ((tree, int, struct jcf_partial *));
326 static void emit_load_or_store PROTO ((tree, int, struct jcf_partial *));
327 static void emit_load PROTO ((tree, struct jcf_partial *));
328 static void emit_store PROTO ((tree, struct jcf_partial *));
329 static void emit_unop PROTO ((enum java_opcode, tree, struct jcf_partial *));
330 static void emit_binop PROTO ((enum java_opcode, tree, struct jcf_partial *));
331 static void emit_reloc PROTO ((HOST_WIDE_INT, int, struct jcf_block *,
332 struct jcf_partial *));
333 static void emit_switch_reloc PROTO ((struct jcf_block *,
334 struct jcf_partial *));
335 static void emit_case_reloc PROTO ((struct jcf_relocation *,
336 struct jcf_partial *));
337 static void emit_if PROTO ((struct jcf_block *, int, int,
338 struct jcf_partial *));
339 static void emit_goto PROTO ((struct jcf_block *, struct jcf_partial *));
340 static void emit_jsr PROTO ((struct jcf_block *, struct jcf_partial *));
341 static void call_cleanups PROTO ((struct jcf_block *, struct jcf_partial *));
342 static char *make_class_file_name PROTO ((tree));
343
/* Utility macros for appending (big-endian) data to a buffer.
   We assume a local variable 'ptr' points into where we want to
   write next, and we assume enough space has been allocated. */

#ifdef ENABLE_CHECKING
int
CHECK_PUT(ptr, state, i)
     void *ptr;
     struct jcf_partial *state;
     int i;
{
  /* Verify that writing I bytes at PTR stays inside the current chunk. */
  if (ptr < state->chunk->data
      || (char*)ptr + i > state->chunk->data + state->chunk->size)
    fatal ("internal error - CHECK_PUT failed");
  return 0;
}
#else
#define CHECK_PUT(PTR, STATE, I) ((void)0)
#endif

#define PUT1(X)  (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
#define PUT2(X)  (PUT1((X) >> 8), PUT1((X) & 0xFF))
#define PUT4(X)  (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
#define PUTN(P, N)  (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))

/* There are some cases below where CHECK_PUT is guaranteed to fail.
   Use the following macros in those specific cases. */
#define UNSAFE_PUT1(X)  (*ptr++ = (X))
#define UNSAFE_PUT2(X)  (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
#define UNSAFE_PUT4(X)  (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
#define UNSAFE_PUTN(P, N)  (memcpy(ptr, P, N), ptr += (N))
375
376 \f
377 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
378 Set the data and size fields to DATA and SIZE, respectively.
379 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
380
381 static struct chunk *
382 alloc_chunk (last, data, size, work)
383 struct chunk *last;
384 unsigned char *data;
385 int size;
386 struct obstack *work;
387 {
388 struct chunk *chunk = (struct chunk *)
389 obstack_alloc (work, sizeof(struct chunk));
390
391 if (data == NULL && size > 0)
392 data = obstack_alloc (work, size);
393
394 chunk->next = NULL;
395 chunk->data = data;
396 chunk->size = size;
397 if (last != NULL)
398 last->next = chunk;
399 return chunk;
400 }
401
#ifdef ENABLE_CHECKING
int
CHECK_OP(struct jcf_partial *state)
{
  /* The bytecode write pointer must never pass the reserved limit;
     OP1 callers are required to RESERVE space first. */
  if (state->bytecode.ptr > state->bytecode.limit)
    {
      fatal("internal error - CHECK_OP failed");
    }
  return 0;
}
#else
#define CHECK_OP(STATE) ((void)0)
#endif
415
416 static unsigned char *
417 append_chunk (data, size, state)
418 unsigned char *data;
419 int size;
420 struct jcf_partial *state;
421 {
422 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
423 if (state->first == NULL)
424 state->first = state->chunk;
425 return state->chunk->data;
426 }
427
/* Reserve SIZE bytes in a fresh chunk, then fill them from DATA. */

static void
append_chunk_copy (data, size, state)
     unsigned char *data;
     int size;
     struct jcf_partial *state;
{
  unsigned char *dst = append_chunk (NULL, size, state);
  memcpy (dst, data, size);
}
437 \f
438 static struct jcf_block *
439 gen_jcf_label (state)
440 struct jcf_partial *state;
441 {
442 struct jcf_block *block = (struct jcf_block *)
443 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
444 block->next = NULL;
445 block->linenumber = -1;
446 block->pc = UNDEFINED_PC;
447 return block;
448 }
449
/* Close out the current block: copy its bytecode into a chunk and
   raise state->code_length to an upper bound on the method size
   (each relocation may expand the code when it is resolved). */

static void
finish_jcf_block (state)
     struct jcf_partial *state;
{
  struct jcf_block *block = state->last_block;
  struct jcf_relocation *reloc;
  int code_length = BUFFER_LENGTH (&state->bytecode);
  int pc = state->code_length;
  append_chunk_copy (state->bytecode.data, code_length, state);
  BUFFER_RESET (&state->bytecode);
  block->v.chunk = state->chunk;

  /* Calculate code_length to the maximum value it can have. */
  pc += block->v.chunk->size;
  for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
    {
      int kind = reloc->kind;
      if (kind == SWITCH_ALIGN_RELOC)
	pc += 3;  /* Padding may turn out to be 0-3 bytes; assume the worst. */
      else if (kind > BLOCK_START_RELOC)
	pc += 2; /* 2-byte offset may grow to 4-byte offset */
      else if (kind < -1)
	pc += 5; /* May need to add a goto_w. */
    }
  state->code_length = pc;
}
476
477 static void
478 define_jcf_label (label, state)
479 struct jcf_block *label;
480 struct jcf_partial *state;
481 {
482 if (state->last_block != NULL)
483 finish_jcf_block (state);
484 label->pc = state->code_length;
485 if (state->blocks == NULL)
486 state->blocks = label;
487 else
488 state->last_block->next = label;
489 state->last_block = label;
490 label->next = NULL;
491 label->u.relocations = NULL;
492 }
493
494 static struct jcf_block *
495 get_jcf_label_here (state)
496 struct jcf_partial *state;
497 {
498 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
499 return state->last_block;
500 else
501 {
502 struct jcf_block *label = gen_jcf_label (state);
503 define_jcf_label (label, state);
504 return label;
505 }
506 }
507
508 /* Note a line number entry for the current PC and given LINE. */
509
510 static void
511 put_linenumber (line, state)
512 int line;
513 struct jcf_partial *state;
514 {
515 struct jcf_block *label = get_jcf_label_here (state);
516 if (label->linenumber > 0)
517 {
518 label = gen_jcf_label (state);
519 define_jcf_label (label, state);
520 }
521 label->linenumber = line;
522 state->linenumber_count++;
523 }
524
525 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
526 in the range (START_LABEL, END_LABEL). */
527
528 static struct jcf_handler *
529 alloc_handler (start_label, end_label, state)
530 struct jcf_block *start_label;
531 struct jcf_block *end_label;
532 struct jcf_partial *state;
533 {
534 struct jcf_handler *handler = (struct jcf_handler *)
535 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
536 handler->start_label = start_label;
537 handler->end_label = end_label;
538 handler->handler_label = get_jcf_label_here (state);
539 if (state->handlers == NULL)
540 state->handlers = handler;
541 else
542 state->last_handler->next = handler;
543 state->last_handler = handler;
544 handler->next = NULL;
545 state->num_handlers++;
546 return handler;
547 }
548
549 \f
/* The index of the jvm local variable allocated for this DECL.
   This is assigned when generating .class files;
   contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
   (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */

#define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)

struct localvar_info
{
  struct localvar_info *next;

  tree decl;
  /* The code range over which this variable occupies its slot. */
  struct jcf_block *start_label;
  struct jcf_block *end_label;
};

/* The current method's slot number -> localvar_info map, viewed as a
   flat array, and the number of slots currently in use. */
#define localvar_buffer ((struct localvar_info**) state->localvars.data)
#define localvar_max \
((struct localvar_info**) state->localvars.ptr - localvar_buffer)
569
/* Allocate a local-variable slot (two consecutive slots if DECL is
   wide) for DECL, recording the slot in DECL_LOCAL_INDEX and, when
   generating debug info, appending DECL to the state's lvar list. */

static void
localvar_alloc (decl, state)
     tree decl;
     struct jcf_partial *state;
{
  struct jcf_block *start_label = get_jcf_label_here (state);
  int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
  int index;
  register struct localvar_info *info;
  register struct localvar_info **ptr = localvar_buffer;
  register struct localvar_info **limit
    = (struct localvar_info**) state->localvars.ptr;
  /* Scan for the first free slot (first free pair of slots if wide). */
  for (index = 0; ptr < limit; index++, ptr++)
    {
      if (ptr[0] == NULL
	  && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
	break;
    }
  if (ptr == limit)
    {
      /* No free slot found; grow the map and take the new space. */
      buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
      ptr = (struct localvar_info**) state->localvars.data + index;
      state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
    }
  info = (struct localvar_info *)
    obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
  ptr[0] = info;
  /* A wide variable occupies two slots; mark the second as taken. */
  if (wide)
    ptr[1] = (struct localvar_info *)(~0);
  DECL_LOCAL_INDEX (decl) = index;
  info->decl = decl;
  info->start_label = start_label;

  if (debug_info_level > DINFO_LEVEL_TERSE
      && DECL_NAME (decl) != NULL_TREE)
    {
      /* Generate debugging info. */
      info->next = NULL;
      if (state->last_lvar != NULL)
	state->last_lvar->next = info;
      else
	state->first_lvar = info;
      state->last_lvar = info;
      state->lvar_count++;
    }
}
616
617 static void
618 localvar_free (decl, state)
619 tree decl;
620 struct jcf_partial *state;
621 {
622 struct jcf_block *end_label = get_jcf_label_here (state);
623 int index = DECL_LOCAL_INDEX (decl);
624 register struct localvar_info **ptr = &localvar_buffer [index];
625 register struct localvar_info *info = *ptr;
626 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
627
628 info->end_label = end_label;
629
630 if (info->decl != decl)
631 abort ();
632 ptr[0] = NULL;
633 if (wide)
634 {
635 if (ptr[1] != (struct localvar_info *)(~0))
636 abort ();
637 ptr[1] = NULL;
638 }
639 }
640
641 \f
/* Codes for the `target' of bytecode generation: leave the value on
   the JVM stack, or discard it. */
#define STACK_TARGET 1
#define IGNORE_TARGET 2

/* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
   a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */

static int
get_access_flags (decl)
     tree decl;
{
  int flags = 0;
  int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
  if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
    flags |= ACC_PUBLIC;
  if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
    flags |= ACC_FINAL;
  if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (TREE_PROTECTED (decl))
	flags |= ACC_PROTECTED;
      if (TREE_PRIVATE (decl))
	flags |= ACC_PRIVATE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Class-only flags. */
      if (CLASS_SUPER (decl))
	flags |= ACC_SUPER;
      if (CLASS_ABSTRACT (decl))
	flags |= ACC_ABSTRACT;
      if (CLASS_INTERFACE (decl))
	flags |= ACC_INTERFACE;
    }
  else
    fatal ("internal error - bad argument to get_access_flags");
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Method-only flags. */
      if (METHOD_NATIVE (decl))
	flags |= ACC_NATIVE;
      if (METHOD_STATIC (decl))
	flags |= ACC_STATIC;
      if (METHOD_SYNCHRONIZED (decl))
	flags |= ACC_SYNCHRONIZED;
      if (METHOD_ABSTRACT (decl))
	flags |= ACC_ABSTRACT;
    }
  if (isfield)
    {
      /* Field-only flags. */
      if (FIELD_STATIC (decl))
	flags |= ACC_STATIC;
      if (FIELD_VOLATILE (decl))
	flags |= ACC_VOLATILE;
      if (FIELD_TRANSIENT (decl))
	flags |= ACC_TRANSIENT;
    }
  return flags;
}
698
699 /* Write the list of segments starting at CHUNKS to STREAM. */
700
701 static void
702 write_chunks (stream, chunks)
703 FILE* stream;
704 struct chunk *chunks;
705 {
706 for (; chunks != NULL; chunks = chunks->next)
707 fwrite (chunks->data, chunks->size, 1, stream);
708 }
709
710 /* Push a 1-word constant in the constant pool at the given INDEX.
711 (Caller is responsible for doing NOTE_PUSH.) */
712
713 static void
714 push_constant1 (index, state)
715 HOST_WIDE_INT index;
716 struct jcf_partial *state;
717 {
718 RESERVE (3);
719 if (index < 256)
720 {
721 OP1 (OPCODE_ldc);
722 OP1 (index);
723 }
724 else
725 {
726 OP1 (OPCODE_ldc_w);
727 OP2 (index);
728 }
729 }
730
/* Push a 2-word constant in the constant pool at the given INDEX.
   (Caller is responsible for doing NOTE_PUSH.) */

static void
push_constant2 (index, state)
     HOST_WIDE_INT index;
     struct jcf_partial *state;
{
  /* ldc2_w always takes a 2-byte index: 3 bytes in total. */
  RESERVE (3);
  OP1 (OPCODE_ldc2_w);
  OP2 (index);
}
743
744 /* Push 32-bit integer constant on VM stack.
745 Caller is responsible for doing NOTE_PUSH. */
746
747 static void
748 push_int_const (i, state)
749 HOST_WIDE_INT i;
750 struct jcf_partial *state;
751 {
752 RESERVE(3);
753 if (i >= -1 && i <= 5)
754 OP1(OPCODE_iconst_0 + i);
755 else if (i >= -128 && i < 128)
756 {
757 OP1(OPCODE_bipush);
758 OP1(i);
759 }
760 else if (i >= -32768 && i < 32768)
761 {
762 OP1(OPCODE_sipush);
763 OP2(i);
764 }
765 else
766 {
767 i = find_constant1 (&state->cpool, CONSTANT_Integer,
768 (jword)(i & 0xFFFFFFFF));
769 push_constant1 (i, state);
770 }
771 }
772
/* Return the constant-pool index of a CONSTANT_Long entry for the
   64-bit value whose low and high halves are LO and HI. */

static int
find_constant_wide (lo, hi, state)
     HOST_WIDE_INT lo, hi;
     struct jcf_partial *state;
{
  HOST_WIDE_INT w1, w2;
  /* Shift by -32 to move the high 32 bits of the value down into w1. */
  lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
  return find_constant2 (&state->cpool, CONSTANT_Long,
			 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
}
783
/* Find or allocate a constant pool entry for the given VALUE
   (an INTEGER_CST, REAL_CST, or STRING_CST).
   Return the index in the constant pool. */

static int
find_constant_index (value, state)
     tree value;
     struct jcf_partial *state;
{
  if (TREE_CODE (value) == INTEGER_CST)
    {
      if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
	return find_constant1 (&state->cpool, CONSTANT_Integer,
			       (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
      else
	return find_constant_wide (TREE_INT_CST_LOW (value),
				   TREE_INT_CST_HIGH (value), state);
    }
  else if (TREE_CODE (value) == REAL_CST)
    {
      long words[2];
      if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
	{
	  /* Single precision: one 32-bit word. */
	  words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
	  return find_constant1 (&state->cpool, CONSTANT_Float,
				 (jword)words[0]);
	}
      else
	{
	  /* Double precision: two 32-bit words, ordered for the class
	     file via the FLOAT_WORDS_BIG_ENDIAN indexing. */
	  etardouble (TREE_REAL_CST (value), words);
	  return find_constant2 (&state->cpool, CONSTANT_Double,
				 (jword)(words[1-FLOAT_WORDS_BIG_ENDIAN] &
					 0xFFFFFFFF),
				 (jword)(words[FLOAT_WORDS_BIG_ENDIAN] &
					 0xFFFFFFFF));
	}
    }
  else if (TREE_CODE (value) == STRING_CST)
    {
      return find_string_constant (&state->cpool, value);
    }
  else
    fatal ("find_constant_index - bad type");
  /* fatal does not return. */
}
827
828 /* Push 64-bit long constant on VM stack.
829 Caller is responsible for doing NOTE_PUSH. */
830
831 static void
832 push_long_const (lo, hi, state)
833 HOST_WIDE_INT lo, hi;
834 struct jcf_partial *state;
835 {
836 if (hi == 0 && lo >= 0 && lo <= 1)
837 {
838 RESERVE(1);
839 OP1(OPCODE_lconst_0 + lo);
840 }
841 else if ((hi == 0 && lo < 32768) || (hi == -1 && lo >= -32768))
842 {
843 push_int_const (lo, state);
844 RESERVE (1);
845 OP1 (OPCODE_i2l);
846 }
847 else
848 push_constant2 (find_constant_wide (lo, hi, state), state);
849 }
850
851 static void
852 field_op (field, opcode, state)
853 tree field;
854 int opcode;
855 struct jcf_partial *state;
856 {
857 int index = find_fieldref_index (&state->cpool, field);
858 RESERVE (3);
859 OP1 (opcode);
860 OP2 (index);
861 }
862
/* Returns an integer in the range 0 (for 'int') through 4 (for object
   reference) to 7 (for 'short') which matches the pattern of how JVM
   opcodes typically depend on the operand type.
   MAX limits which variants are available: when the computed code
   would exceed MAX, fall back to the 'int' variant (0). */

static int
adjust_typed_op (type, max)
     tree type;
     int max;
{
  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case RECORD_TYPE:   return 4;
    case BOOLEAN_TYPE:
      return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
    case CHAR_TYPE:
      return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
    case INTEGER_TYPE:
      switch (TYPE_PRECISION (type))
	{
	case 8:       return max < 5 ? 0 : 5;
	case 16:      return max < 7 ? 0 : 7;
	case 32:      return 0;
	case 64:      return 1;
	}
      break;
    case REAL_TYPE:
      switch (TYPE_PRECISION (type))
	{
	case 32:      return 2;
	case 64:      return 3;
	}
      break;
    default:
      break;
    }
  /* No typed-opcode mapping exists for this type. */
  abort ();
}
901
902 static void
903 maybe_wide (opcode, index, state)
904 int opcode, index;
905 struct jcf_partial *state;
906 {
907 if (index >= 256)
908 {
909 RESERVE (4);
910 OP1 (OPCODE_wide);
911 OP1 (opcode);
912 OP2 (index);
913 }
914 else
915 {
916 RESERVE (2);
917 OP1 (opcode);
918 OP1 (index);
919 }
920 }
921
922 /* Compile code to duplicate with offset, where
923 SIZE is the size of the stack item to duplicate (1 or 2), abd
924 OFFSET is where to insert the result (must be 0, 1, or 2).
925 (The new words get inserted at stack[SP-size-offset].) */
926
927 static void
928 emit_dup (size, offset, state)
929 int size, offset;
930 struct jcf_partial *state;
931 {
932 int kind;
933 if (size == 0)
934 return;
935 RESERVE(1);
936 if (offset == 0)
937 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
938 else if (offset == 1)
939 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
940 else if (offset == 2)
941 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
942 else
943 abort();
944 OP1 (kind);
945 NOTE_PUSH (size);
946 }
947
948 static void
949 emit_pop (size, state)
950 int size;
951 struct jcf_partial *state;
952 {
953 RESERVE (1);
954 OP1 (OPCODE_pop - 1 + size);
955 }
956
957 static void
958 emit_iinc (var, value, state)
959 tree var;
960 HOST_WIDE_INT value;
961 struct jcf_partial *state;
962 {
963 int slot = DECL_LOCAL_INDEX (var);
964
965 if (value < -128 || value > 127 || slot >= 256)
966 {
967 RESERVE (6);
968 OP1 (OPCODE_wide);
969 OP1 (OPCODE_iinc);
970 OP2 (slot);
971 OP2 (value);
972 }
973 else
974 {
975 RESERVE (3);
976 OP1 (OPCODE_iinc);
977 OP1 (slot);
978 OP1 (value);
979 }
980 }
981
/* Emit a load or store (per OPCODE) of local variable VAR, preferring
   the compact 1-byte form when the slot index allows it. */

static void
emit_load_or_store (var, opcode, state)
     tree var;    /* Variable to load from or store into. */
     int opcode;  /* Either OPCODE_iload or OPCODE_istore. */
     struct jcf_partial *state;
{
  tree type = TREE_TYPE (var);
  int kind = adjust_typed_op (type, 4);
  int index = DECL_LOCAL_INDEX (var);
  if (index <= 3)
    {
      /* The four lowest slots have dedicated single-byte opcodes. */
      RESERVE (1);
      OP1 (opcode + 5 + 4 * kind + index);    /* [ilfda]{load,store}_[0123] */
    }
  else
    maybe_wide (opcode + kind, index, state);  /* [ilfda]{load,store} */
}
999
1000 static void
1001 emit_load (var, state)
1002 tree var;
1003 struct jcf_partial *state;
1004 {
1005 emit_load_or_store (var, OPCODE_iload, state);
1006 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1007 }
1008
1009 static void
1010 emit_store (var, state)
1011 tree var;
1012 struct jcf_partial *state;
1013 {
1014 emit_load_or_store (var, OPCODE_istore, state);
1015 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1016 }
1017
1018 static void
1019 emit_unop (opcode, type, state)
1020 enum java_opcode opcode;
1021 tree type ATTRIBUTE_UNUSED;
1022 struct jcf_partial *state;
1023 {
1024 RESERVE(1);
1025 OP1 (opcode);
1026 }
1027
1028 static void
1029 emit_binop (opcode, type, state)
1030 enum java_opcode opcode;
1031 tree type;
1032 struct jcf_partial *state;
1033 {
1034 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1035 RESERVE(1);
1036 OP1 (opcode);
1037 NOTE_POP (size);
1038 }
1039
/* Record a relocation of the given KIND at the current bytecode
   position, transferring to TARGET, and emit the placeholder operand
   VALUE (4 bytes, 2 bytes, or none, depending on KIND — see struct
   jcf_relocation for the encoding). */

static void
emit_reloc (value, kind, target, state)
     HOST_WIDE_INT value;
     int kind;
     struct jcf_block *target;
     struct jcf_partial *state;
{
  struct jcf_relocation *reloc = (struct jcf_relocation *)
    obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
  struct jcf_block *block = state->last_block;
  /* Push onto the block's relocation list (kept in reverse offset order). */
  reloc->next = block->u.relocations;
  block->u.relocations = reloc;
  reloc->offset = BUFFER_LENGTH (&state->bytecode);
  reloc->label = target;
  reloc->kind = kind;
  if (kind == 0 || kind == BLOCK_START_RELOC)
    OP4 (value);
  else if (kind != SWITCH_ALIGN_RELOC)
    OP2 (value);
  /* SWITCH_ALIGN_RELOC emits no operand here; padding is added later. */
}
1060
1061 static void
1062 emit_switch_reloc (label, state)
1063 struct jcf_block *label;
1064 struct jcf_partial *state;
1065 {
1066 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1067 }
1068
1069 /* Similar to emit_switch_reloc,
1070 but re-uses an existing case reloc. */
1071
1072 static void
1073 emit_case_reloc (reloc, state)
1074 struct jcf_relocation *reloc;
1075 struct jcf_partial *state;
1076 {
1077 struct jcf_block *block = state->last_block;
1078 reloc->next = block->u.relocations;
1079 block->u.relocations = reloc;
1080 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1081 reloc->kind = BLOCK_START_RELOC;
1082 OP4 (0);
1083 }
1084
1085 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1086 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1087
1088 static void
1089 emit_if (target, opcode, inv_opcode, state)
1090 struct jcf_block *target;
1091 int opcode, inv_opcode;
1092 struct jcf_partial *state;
1093 {
1094 OP1 (opcode);
1095 /* value is 1 byte from reloc back to start of instruction. */
1096 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1097 }
1098
1099 static void
1100 emit_goto (target, state)
1101 struct jcf_block *target;
1102 struct jcf_partial *state;
1103 {
1104 OP1 (OPCODE_goto);
1105 /* Value is 1 byte from reloc back to start of instruction. */
1106 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1107 }
1108
1109 static void
1110 emit_jsr (target, state)
1111 struct jcf_block *target;
1112 struct jcf_partial *state;
1113 {
1114 OP1 (OPCODE_jsr);
1115 /* Value is 1 byte from reloc back to start of instruction. */
1116 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1117 }
1118
1119 /* Generate code to evaluate EXP. If the result is true,
1120 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1121 TRUE_BRANCH_FIRST is a code geneation hint that the
1122 TRUE_LABEL may follow right after this. (The idea is that we
1123 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1124
1125 static void
1126 generate_bytecode_conditional (exp, true_label, false_label,
1127 true_branch_first, state)
1128 tree exp;
1129 struct jcf_block *true_label;
1130 struct jcf_block *false_label;
1131 int true_branch_first;
1132 struct jcf_partial *state;
1133 {
1134 tree exp0, exp1, type;
1135 int save_SP = state->code_SP;
1136 enum java_opcode op, negop;
1137 switch (TREE_CODE (exp))
1138 {
1139 case INTEGER_CST:
1140 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1141 break;
1142 case COND_EXPR:
1143 {
1144 struct jcf_block *then_label = gen_jcf_label (state);
1145 struct jcf_block *else_label = gen_jcf_label (state);
1146 int save_SP_before, save_SP_after;
1147 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1148 then_label, else_label, 1, state);
1149 define_jcf_label (then_label, state);
1150 save_SP_before = state->code_SP;
1151 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1152 true_label, false_label, 1, state);
1153 save_SP_after = state->code_SP;
1154 state->code_SP = save_SP_before;
1155 define_jcf_label (else_label, state);
1156 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1157 true_label, false_label,
1158 true_branch_first, state);
1159 if (state->code_SP != save_SP_after)
1160 fatal ("internal error non-matching SP");
1161 }
1162 break;
1163 case TRUTH_NOT_EXPR:
1164 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label, true_label,
1165 ! true_branch_first, state);
1166 break;
1167 case TRUTH_ANDIF_EXPR:
1168 {
1169 struct jcf_block *next_label = gen_jcf_label (state);
1170 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1171 next_label, false_label, 1, state);
1172 define_jcf_label (next_label, state);
1173 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1174 true_label, false_label, 1, state);
1175 }
1176 break;
1177 case TRUTH_ORIF_EXPR:
1178 {
1179 struct jcf_block *next_label = gen_jcf_label (state);
1180 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1181 true_label, next_label, 1, state);
1182 define_jcf_label (next_label, state);
1183 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1184 true_label, false_label, 1, state);
1185 }
1186 break;
1187 compare_1:
1188 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1189 set it to the corresponding 1-operand if<COND> instructions. */
1190 op = op - 6;
1191 /* FALLTHROUGH */
1192 compare_2:
1193 /* The opcodes with their inverses are allocated in pairs.
1194 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1195 negop = (op & 1) ? op + 1 : op - 1;
1196 compare_2_ptr:
1197 if (true_branch_first)
1198 {
1199 emit_if (false_label, negop, op, state);
1200 emit_goto (true_label, state);
1201 }
1202 else
1203 {
1204 emit_if (true_label, op, negop, state);
1205 emit_goto (false_label, state);
1206 }
1207 break;
1208 case EQ_EXPR:
1209 op = OPCODE_if_icmpeq;
1210 goto compare;
1211 case NE_EXPR:
1212 op = OPCODE_if_icmpne;
1213 goto compare;
1214 case GT_EXPR:
1215 op = OPCODE_if_icmpgt;
1216 goto compare;
1217 case LT_EXPR:
1218 op = OPCODE_if_icmplt;
1219 goto compare;
1220 case GE_EXPR:
1221 op = OPCODE_if_icmpge;
1222 goto compare;
1223 case LE_EXPR:
1224 op = OPCODE_if_icmple;
1225 goto compare;
1226 compare:
1227 exp0 = TREE_OPERAND (exp, 0);
1228 exp1 = TREE_OPERAND (exp, 1);
1229 type = TREE_TYPE (exp0);
1230 switch (TREE_CODE (type))
1231 {
1232 int opf;
1233 case POINTER_TYPE: case RECORD_TYPE:
1234 switch (TREE_CODE (exp))
1235 {
1236 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1237 case NE_EXPR: op = OPCODE_if_acmpne; break;
1238 default: abort();
1239 }
1240 if (integer_zerop (exp1) || integer_zerop (exp0))
1241 {
1242 generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp0,
1243 STACK_TARGET, state);
1244 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1245 negop = (op & 1) ? op - 1 : op + 1;
1246 NOTE_POP (1);
1247 goto compare_2_ptr;
1248 }
1249 generate_bytecode_insns (exp0, STACK_TARGET, state);
1250 generate_bytecode_insns (exp1, STACK_TARGET, state);
1251 NOTE_POP (2);
1252 goto compare_2;
1253 case REAL_TYPE:
1254 generate_bytecode_insns (exp0, STACK_TARGET, state);
1255 generate_bytecode_insns (exp1, STACK_TARGET, state);
1256 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1257 opf = OPCODE_fcmpg;
1258 else
1259 opf = OPCODE_fcmpl;
1260 if (TYPE_PRECISION (type) > 32)
1261 {
1262 opf += 2;
1263 NOTE_POP (4);
1264 }
1265 else
1266 NOTE_POP (2);
1267 RESERVE (1);
1268 OP1 (opf);
1269 goto compare_1;
1270 case INTEGER_TYPE:
1271 if (TYPE_PRECISION (type) > 32)
1272 {
1273 generate_bytecode_insns (exp0, STACK_TARGET, state);
1274 generate_bytecode_insns (exp1, STACK_TARGET, state);
1275 NOTE_POP (4);
1276 RESERVE (1);
1277 OP1 (OPCODE_lcmp);
1278 goto compare_1;
1279 }
1280 /* FALLTHOUGH */
1281 default:
1282 if (integer_zerop (exp1))
1283 {
1284 generate_bytecode_insns (exp0, STACK_TARGET, state);
1285 NOTE_POP (1);
1286 goto compare_1;
1287 }
1288 if (integer_zerop (exp0))
1289 {
1290 switch (op)
1291 {
1292 case OPCODE_if_icmplt:
1293 case OPCODE_if_icmpge:
1294 op += 2;
1295 break;
1296 case OPCODE_if_icmpgt:
1297 case OPCODE_if_icmple:
1298 op -= 2;
1299 break;
1300 default:
1301 break;
1302 }
1303 generate_bytecode_insns (exp1, STACK_TARGET, state);
1304 NOTE_POP (1);
1305 goto compare_1;
1306 }
1307 generate_bytecode_insns (exp0, STACK_TARGET, state);
1308 generate_bytecode_insns (exp1, STACK_TARGET, state);
1309 NOTE_POP (2);
1310 goto compare_2;
1311 }
1312
1313 default:
1314 generate_bytecode_insns (exp, STACK_TARGET, state);
1315 NOTE_POP (1);
1316 if (true_branch_first)
1317 {
1318 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1319 emit_goto (true_label, state);
1320 }
1321 else
1322 {
1323 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1324 emit_goto (false_label, state);
1325 }
1326 break;
1327 }
1328 if (save_SP != state->code_SP)
1329 fatal ("internal error - SP mismatch");
1330 }
1331
1332 /* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs
1333 but only as far out as LIMIT (since we are about to jump to the
1334 emit label that is LIMIT). */
1335
1336 static void
1337 call_cleanups (limit, state)
1338 struct jcf_block *limit;
1339 struct jcf_partial *state;
1340 {
1341 struct jcf_block *block = state->labeled_blocks;
1342 for (; block != limit; block = block->next)
1343 {
1344 if (block->pc == PENDING_CLEANUP_PC)
1345 emit_jsr (block, state);
1346 }
1347 }
1348
/* Generate bytecode to return the value of EXP (or NULL for a void
   return) from the current method, running any pending cleanup
   subroutines first.  The return opcode is chosen from the method's
   declared return type. */

static void
generate_bytecode_return (exp, state)
     tree exp;
     struct jcf_partial *state;
{
  tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
  int returns_void = TREE_CODE (return_type) == VOID_TYPE;
  int op;
 again:
  if (exp != NULL)
    {
      switch (TREE_CODE (exp))
	{
	case COMPOUND_EXPR:
	  /* Evaluate the first half for effect, then loop (rather than
	     recurse) on the second half. */
	  generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
				   state);
	  exp = TREE_OPERAND (exp, 1);
	  goto again;
	case COND_EXPR:
	  /* Emit a separate return sequence in each arm; neither arm
	     falls through, so no join label is needed. */
	  {
	    struct jcf_block *then_label = gen_jcf_label (state);
	    struct jcf_block *else_label = gen_jcf_label (state);
	    generate_bytecode_conditional (TREE_OPERAND (exp, 0),
					   then_label, else_label, 1, state);
	    define_jcf_label (then_label, state);
	    generate_bytecode_return (TREE_OPERAND (exp, 1), state);
	    define_jcf_label (else_label, state);
	    generate_bytecode_return (TREE_OPERAND (exp, 2), state);
	  }
	  return;
	default:
	  generate_bytecode_insns (exp,
				   returns_void ? IGNORE_TARGET
				   : STACK_TARGET, state);
	}
    }
  if (returns_void)
    {
      op = OPCODE_return;
      call_cleanups (NULL_PTR, state);
    }
  else
    {
      op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
      if (state->num_finalizers > 0)
	{
	  /* Finalizer subroutines may clobber the stack, so stash the
	     return value in a dedicated local across the cleanups. */
	  if (state->return_value_decl == NULL_TREE)
	    {
	      state->return_value_decl
		= build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
	      localvar_alloc (state->return_value_decl, state);
	    }
	  emit_store (state->return_value_decl, state);
	  call_cleanups (NULL_PTR, state);
	  emit_load (state->return_value_decl, state);
	  /* If we call localvar_free (state->return_value_decl, state),
	     then we risk the save decl erroneously re-used in the
	     finalizer.  Instead, we keep the state->return_value_decl
	     allocated through the rest of the method.  This is not
	     the greatest solution, but it is at least simple and safe. */
	}
    }
  RESERVE (1);
  OP1 (op);
}
1414
1415 /* Generate bytecode for sub-expression EXP of METHOD.
1416 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1417
1418 static void
1419 generate_bytecode_insns (exp, target, state)
1420 tree exp;
1421 int target;
1422 struct jcf_partial *state;
1423 {
1424 tree type;
1425 enum java_opcode jopcode;
1426 int op;
1427 HOST_WIDE_INT value;
1428 int post_op;
1429 int size;
1430 int offset;
1431
1432 if (exp == NULL && target == IGNORE_TARGET)
1433 return;
1434
1435 type = TREE_TYPE (exp);
1436
1437 switch (TREE_CODE (exp))
1438 {
1439 case BLOCK:
1440 if (BLOCK_EXPR_BODY (exp))
1441 {
1442 tree local;
1443 tree body = BLOCK_EXPR_BODY (exp);
1444 for (local = BLOCK_EXPR_DECLS (exp); local; )
1445 {
1446 tree next = TREE_CHAIN (local);
1447 localvar_alloc (local, state);
1448 local = next;
1449 }
1450 /* Avoid deep recursion for long blocks. */
1451 while (TREE_CODE (body) == COMPOUND_EXPR)
1452 {
1453 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1454 body = TREE_OPERAND (body, 1);
1455 }
1456 generate_bytecode_insns (body, target, state);
1457 for (local = BLOCK_EXPR_DECLS (exp); local; )
1458 {
1459 tree next = TREE_CHAIN (local);
1460 localvar_free (local, state);
1461 local = next;
1462 }
1463 }
1464 break;
1465 case COMPOUND_EXPR:
1466 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1467 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1468 break;
1469 case EXPR_WITH_FILE_LOCATION:
1470 {
1471 char *saved_input_filename = input_filename;
1472 tree body = EXPR_WFL_NODE (exp);
1473 int saved_lineno = lineno;
1474 if (body == empty_stmt_node)
1475 break;
1476 input_filename = EXPR_WFL_FILENAME (exp);
1477 lineno = EXPR_WFL_LINENO (exp);
1478 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1479 && debug_info_level > DINFO_LEVEL_NONE)
1480 put_linenumber (lineno, state);
1481 generate_bytecode_insns (body, target, state);
1482 input_filename = saved_input_filename;
1483 lineno = saved_lineno;
1484 }
1485 break;
1486 case INTEGER_CST:
1487 if (target == IGNORE_TARGET) ; /* do nothing */
1488 else if (TREE_CODE (type) == POINTER_TYPE)
1489 {
1490 if (! integer_zerop (exp))
1491 abort();
1492 RESERVE(1);
1493 OP1 (OPCODE_aconst_null);
1494 NOTE_PUSH (1);
1495 }
1496 else if (TYPE_PRECISION (type) <= 32)
1497 {
1498 push_int_const (TREE_INT_CST_LOW (exp), state);
1499 NOTE_PUSH (1);
1500 }
1501 else
1502 {
1503 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1504 state);
1505 NOTE_PUSH (2);
1506 }
1507 break;
1508 case REAL_CST:
1509 {
1510 int prec = TYPE_PRECISION (type) >> 5;
1511 RESERVE(1);
1512 if (real_zerop (exp))
1513 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1514 else if (real_onep (exp))
1515 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1516 /* FIXME Should also use fconst_2 for 2.0f.
1517 Also, should use iconst_2/ldc followed by i2f/i2d
1518 for other float/double when the value is a small integer. */
1519 else
1520 {
1521 offset = find_constant_index (exp, state);
1522 if (prec == 1)
1523 push_constant1 (offset, state);
1524 else
1525 push_constant2 (offset, state);
1526 }
1527 NOTE_PUSH (prec);
1528 }
1529 break;
1530 case STRING_CST:
1531 push_constant1 (find_string_constant (&state->cpool, exp), state);
1532 NOTE_PUSH (1);
1533 break;
1534 case VAR_DECL:
1535 if (TREE_STATIC (exp))
1536 {
1537 field_op (exp, OPCODE_getstatic, state);
1538 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1539 break;
1540 }
1541 /* ... fall through ... */
1542 case PARM_DECL:
1543 emit_load (exp, state);
1544 break;
1545 case NON_LVALUE_EXPR:
1546 case INDIRECT_REF:
1547 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1548 break;
1549 case ARRAY_REF:
1550 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1551 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1552 if (target != IGNORE_TARGET)
1553 {
1554 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1555 RESERVE(1);
1556 OP1 (jopcode);
1557 if (! TYPE_IS_WIDE (type))
1558 NOTE_POP (1);
1559 }
1560 break;
1561 case COMPONENT_REF:
1562 {
1563 tree obj = TREE_OPERAND (exp, 0);
1564 tree field = TREE_OPERAND (exp, 1);
1565 int is_static = FIELD_STATIC (field);
1566 generate_bytecode_insns (obj,
1567 is_static ? IGNORE_TARGET : target, state);
1568 if (target != IGNORE_TARGET)
1569 {
1570 if (DECL_NAME (field) == length_identifier_node && !is_static
1571 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1572 {
1573 RESERVE (1);
1574 OP1 (OPCODE_arraylength);
1575 }
1576 else
1577 {
1578 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1579 state);
1580 if (! is_static)
1581 NOTE_POP (1);
1582 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1583 }
1584 }
1585 }
1586 break;
1587 case TRUTH_ANDIF_EXPR:
1588 case TRUTH_ORIF_EXPR:
1589 case EQ_EXPR:
1590 case NE_EXPR:
1591 case GT_EXPR:
1592 case LT_EXPR:
1593 case GE_EXPR:
1594 case LE_EXPR:
1595 {
1596 struct jcf_block *then_label = gen_jcf_label (state);
1597 struct jcf_block *else_label = gen_jcf_label (state);
1598 struct jcf_block *end_label = gen_jcf_label (state);
1599 generate_bytecode_conditional (exp,
1600 then_label, else_label, 1, state);
1601 define_jcf_label (then_label, state);
1602 push_int_const (1, state);
1603 emit_goto (end_label, state);
1604 define_jcf_label (else_label, state);
1605 push_int_const (0, state);
1606 define_jcf_label (end_label, state);
1607 NOTE_PUSH (1);
1608 }
1609 break;
1610 case COND_EXPR:
1611 {
1612 struct jcf_block *then_label = gen_jcf_label (state);
1613 struct jcf_block *else_label = gen_jcf_label (state);
1614 struct jcf_block *end_label = gen_jcf_label (state);
1615 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1616 then_label, else_label, 1, state);
1617 define_jcf_label (then_label, state);
1618 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1619 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1620 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1621 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1622 emit_goto (end_label, state);
1623 define_jcf_label (else_label, state);
1624 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1625 define_jcf_label (end_label, state);
1626 }
1627 break;
1628 case CASE_EXPR:
1629 {
1630 struct jcf_switch_state *sw_state = state->sw_state;
1631 struct jcf_relocation *reloc = (struct jcf_relocation *)
1632 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1633 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1634 reloc->kind = 0;
1635 reloc->label = get_jcf_label_here (state);
1636 reloc->offset = case_value;
1637 reloc->next = sw_state->cases;
1638 sw_state->cases = reloc;
1639 if (sw_state->num_cases == 0)
1640 {
1641 sw_state->min_case = case_value;
1642 sw_state->max_case = case_value;
1643 }
1644 else
1645 {
1646 if (case_value < sw_state->min_case)
1647 sw_state->min_case = case_value;
1648 if (case_value > sw_state->max_case)
1649 sw_state->max_case = case_value;
1650 }
1651 sw_state->num_cases++;
1652 }
1653 break;
1654 case DEFAULT_EXPR:
1655 state->sw_state->default_label = get_jcf_label_here (state);
1656 break;
1657
1658 case SWITCH_EXPR:
1659 {
1660 /* The SWITCH_EXPR has three parts, generated in the following order:
1661 1. the switch_expression (the value used to select the correct case);
1662 2. the switch_body;
1663 3. the switch_instruction (the tableswitch/loopupswitch instruction.).
1664 After code generation, we will re-order then in the order 1, 3, 2.
1665 This is to avoid an extra GOTOs. */
1666 struct jcf_switch_state sw_state;
1667 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1668 struct jcf_block *body_last; /* Last block of the switch_body. */
1669 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1670 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1671 struct jcf_block *body_block;
1672 int switch_length;
1673 sw_state.prev = state->sw_state;
1674 state->sw_state = &sw_state;
1675 sw_state.cases = NULL;
1676 sw_state.num_cases = 0;
1677 sw_state.default_label = NULL;
1678 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1679 expression_last = state->last_block;
1680 body_block = get_jcf_label_here (state); /* Force a new block here. */
1681 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1682 body_last = state->last_block;
1683
1684 switch_instruction = gen_jcf_label (state);
1685 define_jcf_label (switch_instruction, state);
1686 if (sw_state.default_label == NULL)
1687 sw_state.default_label = gen_jcf_label (state);
1688
1689 if (sw_state.num_cases <= 1)
1690 {
1691 if (sw_state.num_cases == 0)
1692 {
1693 emit_pop (1, state);
1694 NOTE_POP (1);
1695 }
1696 else
1697 {
1698 push_int_const (sw_state.cases->offset, state);
1699 emit_if (sw_state.cases->label,
1700 OPCODE_ifeq, OPCODE_ifne, state);
1701 }
1702 emit_goto (sw_state.default_label, state);
1703 }
1704 else
1705 {
1706 HOST_WIDE_INT i;
1707 /* Copy the chain of relocs into a sorted array. */
1708 struct jcf_relocation **relocs = (struct jcf_relocation **)
1709 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1710 /* The relocs arrays is a buffer with a gap.
1711 The assumption is that cases will normally come in "runs". */
1712 int gap_start = 0;
1713 int gap_end = sw_state.num_cases;
1714 struct jcf_relocation *reloc;
1715 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1716 {
1717 HOST_WIDE_INT case_value = reloc->offset;
1718 while (gap_end < sw_state.num_cases)
1719 {
1720 struct jcf_relocation *end = relocs[gap_end];
1721 if (case_value <= end->offset)
1722 break;
1723 relocs[gap_start++] = end;
1724 gap_end++;
1725 }
1726 while (gap_start > 0)
1727 {
1728 struct jcf_relocation *before = relocs[gap_start-1];
1729 if (case_value >= before->offset)
1730 break;
1731 relocs[--gap_end] = before;
1732 gap_start--;
1733 }
1734 relocs[gap_start++] = reloc;
1735 /* Note we don't check for duplicates. FIXME! */
1736 }
1737
1738 if (2 * sw_state.num_cases
1739 >= sw_state.max_case - sw_state.min_case)
1740 { /* Use tableswitch. */
1741 int index = 0;
1742 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1743 OP1 (OPCODE_tableswitch);
1744 emit_reloc (RELOCATION_VALUE_0,
1745 SWITCH_ALIGN_RELOC, NULL, state);
1746 emit_switch_reloc (sw_state.default_label, state);
1747 OP4 (sw_state.min_case);
1748 OP4 (sw_state.max_case);
1749 for (i = sw_state.min_case; ; )
1750 {
1751 reloc = relocs[index];
1752 if (i == reloc->offset)
1753 {
1754 emit_case_reloc (reloc, state);
1755 if (i == sw_state.max_case)
1756 break;
1757 index++;
1758 }
1759 else
1760 emit_switch_reloc (sw_state.default_label, state);
1761 i++;
1762 }
1763 }
1764 else
1765 { /* Use lookupswitch. */
1766 RESERVE(9 + 8 * sw_state.num_cases);
1767 OP1 (OPCODE_lookupswitch);
1768 emit_reloc (RELOCATION_VALUE_0,
1769 SWITCH_ALIGN_RELOC, NULL, state);
1770 emit_switch_reloc (sw_state.default_label, state);
1771 OP4 (sw_state.num_cases);
1772 for (i = 0; i < sw_state.num_cases; i++)
1773 {
1774 struct jcf_relocation *reloc = relocs[i];
1775 OP4 (reloc->offset);
1776 emit_case_reloc (reloc, state);
1777 }
1778 }
1779 free (relocs);
1780 }
1781
1782 instruction_last = state->last_block;
1783 if (sw_state.default_label->pc < 0)
1784 define_jcf_label (sw_state.default_label, state);
1785 else /* Force a new block. */
1786 sw_state.default_label = get_jcf_label_here (state);
1787 /* Now re-arrange the blocks so the switch_instruction
1788 comes before the switch_body. */
1789 switch_length = state->code_length - switch_instruction->pc;
1790 switch_instruction->pc = body_block->pc;
1791 instruction_last->next = body_block;
1792 instruction_last->v.chunk->next = body_block->v.chunk;
1793 expression_last->next = switch_instruction;
1794 expression_last->v.chunk->next = switch_instruction->v.chunk;
1795 body_last->next = sw_state.default_label;
1796 body_last->v.chunk->next = NULL;
1797 state->chunk = body_last->v.chunk;
1798 for (; body_block != sw_state.default_label; body_block = body_block->next)
1799 body_block->pc += switch_length;
1800
1801 state->sw_state = sw_state.prev;
1802 break;
1803 }
1804
1805 case RETURN_EXPR:
1806 exp = TREE_OPERAND (exp, 0);
1807 if (exp == NULL_TREE)
1808 exp = empty_stmt_node;
1809 else if (TREE_CODE (exp) != MODIFY_EXPR)
1810 abort ();
1811 else
1812 exp = TREE_OPERAND (exp, 1);
1813 generate_bytecode_return (exp, state);
1814 break;
1815 case LABELED_BLOCK_EXPR:
1816 {
1817 struct jcf_block *end_label = gen_jcf_label (state);
1818 end_label->next = state->labeled_blocks;
1819 state->labeled_blocks = end_label;
1820 end_label->pc = PENDING_EXIT_PC;
1821 end_label->u.labeled_block = exp;
1822 if (LABELED_BLOCK_BODY (exp))
1823 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1824 if (state->labeled_blocks != end_label)
1825 abort();
1826 state->labeled_blocks = end_label->next;
1827 define_jcf_label (end_label, state);
1828 }
1829 break;
1830 case LOOP_EXPR:
1831 {
1832 tree body = TREE_OPERAND (exp, 0);
1833 #if 0
1834 if (TREE_CODE (body) == COMPOUND_EXPR
1835 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1836 {
1837 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1838 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1839 struct jcf_block *head_label;
1840 struct jcf_block *body_label;
1841 struct jcf_block *end_label = gen_jcf_label (state);
1842 struct jcf_block *exit_label = state->labeled_blocks;
1843 head_label = gen_jcf_label (state);
1844 emit_goto (head_label, state);
1845 body_label = get_jcf_label_here (state);
1846 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1847 define_jcf_label (head_label, state);
1848 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1849 end_label, body_label, 1, state);
1850 define_jcf_label (end_label, state);
1851 }
1852 else
1853 #endif
1854 {
1855 struct jcf_block *head_label = get_jcf_label_here (state);
1856 generate_bytecode_insns (body, IGNORE_TARGET, state);
1857 emit_goto (head_label, state);
1858 }
1859 }
1860 break;
1861 case EXIT_EXPR:
1862 {
1863 struct jcf_block *label = state->labeled_blocks;
1864 struct jcf_block *end_label = gen_jcf_label (state);
1865 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1866 label, end_label, 0, state);
1867 define_jcf_label (end_label, state);
1868 }
1869 break;
1870 case EXIT_BLOCK_EXPR:
1871 {
1872 struct jcf_block *label = state->labeled_blocks;
1873 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1874 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1875 label = label->next;
1876 call_cleanups (label, state);
1877 emit_goto (label, state);
1878 }
1879 break;
1880
1881 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1882 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1883 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1884 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1885 increment:
1886
1887 exp = TREE_OPERAND (exp, 0);
1888 type = TREE_TYPE (exp);
1889 size = TYPE_IS_WIDE (type) ? 2 : 1;
1890 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1891 && ! TREE_STATIC (exp)
1892 && TREE_CODE (type) == INTEGER_TYPE
1893 && TYPE_PRECISION (type) == 32)
1894 {
1895 if (target != IGNORE_TARGET && post_op)
1896 emit_load (exp, state);
1897 emit_iinc (exp, value, state);
1898 if (target != IGNORE_TARGET && ! post_op)
1899 emit_load (exp, state);
1900 break;
1901 }
1902 if (TREE_CODE (exp) == COMPONENT_REF)
1903 {
1904 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1905 emit_dup (1, 0, state);
1906 /* Stack: ..., objectref, objectref. */
1907 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1908 NOTE_PUSH (size-1);
1909 /* Stack: ..., objectref, oldvalue. */
1910 offset = 1;
1911 }
1912 else if (TREE_CODE (exp) == ARRAY_REF)
1913 {
1914 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1915 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1916 emit_dup (2, 0, state);
1917 /* Stack: ..., array, index, array, index. */
1918 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1919 RESERVE(1);
1920 OP1 (jopcode);
1921 NOTE_POP (2-size);
1922 /* Stack: ..., array, index, oldvalue. */
1923 offset = 2;
1924 }
1925 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1926 {
1927 generate_bytecode_insns (exp, STACK_TARGET, state);
1928 /* Stack: ..., oldvalue. */
1929 offset = 0;
1930 }
1931 else
1932 abort ();
1933
1934 if (target != IGNORE_TARGET && post_op)
1935 emit_dup (size, offset, state);
1936 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1937 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1938 /* Stack, otherwise: ..., [result, ] oldvalue. */
1939 if (size == 1)
1940 push_int_const (value, state);
1941 else
1942 push_long_const (value, (HOST_WIDE_INT)(value >= 0 ? 0 : -1), state);
1943 NOTE_PUSH (size);
1944 emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state);
1945 if (target != IGNORE_TARGET && ! post_op)
1946 emit_dup (size, offset, state);
1947 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1948 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1949 /* Stack, otherwise: ..., [result, ] newvalue. */
1950 goto finish_assignment;
1951
1952 case MODIFY_EXPR:
1953 {
1954 tree lhs = TREE_OPERAND (exp, 0);
1955 tree rhs = TREE_OPERAND (exp, 1);
1956 int offset = 0;
1957
1958 /* See if we can use the iinc instruction. */
1959 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1960 && ! TREE_STATIC (lhs)
1961 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1962 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1963 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1964 {
1965 tree arg0 = TREE_OPERAND (rhs, 0);
1966 tree arg1 = TREE_OPERAND (rhs, 1);
1967 HOST_WIDE_INT min_value = -32768;
1968 HOST_WIDE_INT max_value = 32767;
1969 if (TREE_CODE (rhs) == MINUS_EXPR)
1970 {
1971 min_value++;
1972 max_value++;
1973 }
1974 else if (arg1 == lhs)
1975 {
1976 arg0 = arg1;
1977 arg1 = TREE_OPERAND (rhs, 0);
1978 }
1979 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1980 {
1981 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1982 value = TREE_INT_CST_LOW (arg1);
1983 if ((hi_value == 0 && value <= max_value)
1984 || (hi_value == -1 && value >= min_value))
1985 {
1986 if (TREE_CODE (rhs) == MINUS_EXPR)
1987 value = -value;
1988 emit_iinc (lhs, value, state);
1989 break;
1990 }
1991 }
1992 }
1993
1994 if (TREE_CODE (lhs) == COMPONENT_REF)
1995 {
1996 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
1997 STACK_TARGET, state);
1998 offset = 1;
1999 }
2000 else if (TREE_CODE (lhs) == ARRAY_REF)
2001 {
2002 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2003 STACK_TARGET, state);
2004 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2005 STACK_TARGET, state);
2006 offset = 2;
2007 }
2008 else
2009 offset = 0;
2010 generate_bytecode_insns (rhs, STACK_TARGET, state);
2011 if (target != IGNORE_TARGET)
2012 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2013 exp = lhs;
2014 }
2015 /* FALLTHOUGH */
2016
2017 finish_assignment:
2018 if (TREE_CODE (exp) == COMPONENT_REF)
2019 {
2020 tree field = TREE_OPERAND (exp, 1);
2021 if (! FIELD_STATIC (field))
2022 NOTE_POP (1);
2023 field_op (field,
2024 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2025 state);
2026
2027 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2028 }
2029 else if (TREE_CODE (exp) == VAR_DECL
2030 || TREE_CODE (exp) == PARM_DECL)
2031 {
2032 if (FIELD_STATIC (exp))
2033 {
2034 field_op (exp, OPCODE_putstatic, state);
2035 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2036 }
2037 else
2038 emit_store (exp, state);
2039 }
2040 else if (TREE_CODE (exp) == ARRAY_REF)
2041 {
2042 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2043 RESERVE(1);
2044 OP1 (jopcode);
2045 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2046 }
2047 else
2048 fatal ("internal error (bad lhs to MODIFY_EXPR)");
2049 break;
2050 case PLUS_EXPR:
2051 jopcode = OPCODE_iadd;
2052 goto binop;
2053 case MINUS_EXPR:
2054 jopcode = OPCODE_isub;
2055 goto binop;
2056 case MULT_EXPR:
2057 jopcode = OPCODE_imul;
2058 goto binop;
2059 case TRUNC_DIV_EXPR:
2060 case RDIV_EXPR:
2061 jopcode = OPCODE_idiv;
2062 goto binop;
2063 case TRUNC_MOD_EXPR:
2064 jopcode = OPCODE_irem;
2065 goto binop;
2066 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2067 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2068 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2069 case TRUTH_AND_EXPR:
2070 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2071 case TRUTH_OR_EXPR:
2072 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2073 case TRUTH_XOR_EXPR:
2074 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2075 binop:
2076 {
2077 tree arg0 = TREE_OPERAND (exp, 0);
2078 tree arg1 = TREE_OPERAND (exp, 1);
2079 jopcode += adjust_typed_op (type, 3);
2080 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2081 {
2082 /* fold may (e.g) convert 2*x to x+x. */
2083 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2084 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2085 }
2086 else
2087 {
2088 generate_bytecode_insns (arg0, target, state);
2089 generate_bytecode_insns (arg1, target, state);
2090 }
2091 /* For most binary operations, both operands and the result have the
2092 same type. Shift operations are different. Using arg1's type
gets us the correct SP adjustment in all cases. */
2094 if (target == STACK_TARGET)
2095 emit_binop (jopcode, TREE_TYPE (arg1), state);
2096 break;
2097 }
2098 case TRUTH_NOT_EXPR:
2099 case BIT_NOT_EXPR:
2100 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2101 if (target == STACK_TARGET)
2102 {
2103 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2104 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2105 RESERVE (2);
2106 if (is_long)
2107 OP1 (OPCODE_i2l);
2108 NOTE_PUSH (1 + is_long);
2109 OP1 (OPCODE_ixor + is_long);
2110 NOTE_POP (1 + is_long);
2111 }
2112 break;
2113 case NEGATE_EXPR:
2114 jopcode = OPCODE_ineg;
2115 jopcode += adjust_typed_op (type, 3);
2116 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2117 if (target == STACK_TARGET)
2118 emit_unop (jopcode, type, state);
2119 break;
2120 case INSTANCEOF_EXPR:
2121 {
2122 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2123 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2124 RESERVE (3);
2125 OP1 (OPCODE_instanceof);
2126 OP2 (index);
2127 }
2128 break;
2129 case CONVERT_EXPR:
2130 case NOP_EXPR:
2131 case FLOAT_EXPR:
2132 case FIX_TRUNC_EXPR:
2133 {
2134 tree src = TREE_OPERAND (exp, 0);
2135 tree src_type = TREE_TYPE (src);
2136 tree dst_type = TREE_TYPE (exp);
2137 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2138 if (target == IGNORE_TARGET || src_type == dst_type)
2139 break;
2140 if (TREE_CODE (dst_type) == POINTER_TYPE)
2141 {
2142 if (TREE_CODE (exp) == CONVERT_EXPR)
2143 {
2144 int index = find_class_constant (&state->cpool, TREE_TYPE (dst_type));
2145 RESERVE (3);
2146 OP1 (OPCODE_checkcast);
2147 OP2 (index);
2148 }
2149 }
2150 else /* Convert numeric types. */
2151 {
2152 int wide_src = TYPE_PRECISION (src_type) > 32;
2153 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2154 NOTE_POP (1 + wide_src);
2155 RESERVE (1);
2156 if (TREE_CODE (dst_type) == REAL_TYPE)
2157 {
2158 if (TREE_CODE (src_type) == REAL_TYPE)
2159 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2160 else if (TYPE_PRECISION (src_type) == 64)
2161 OP1 (OPCODE_l2f + wide_dst);
2162 else
2163 OP1 (OPCODE_i2f + wide_dst);
2164 }
2165 else /* Convert to integral type. */
2166 {
2167 if (TREE_CODE (src_type) == REAL_TYPE)
2168 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2169 else if (wide_dst)
2170 OP1 (OPCODE_i2l);
2171 else if (wide_src)
2172 OP1 (OPCODE_l2i);
2173 if (TYPE_PRECISION (dst_type) < 32)
2174 {
2175 RESERVE (1);
2176 /* Already converted to int, if needed. */
2177 if (TYPE_PRECISION (dst_type) <= 8)
2178 OP1 (OPCODE_i2b);
2179 else if (TREE_UNSIGNED (dst_type))
2180 OP1 (OPCODE_i2c);
2181 else
2182 OP1 (OPCODE_i2s);
2183 }
2184 }
2185 NOTE_PUSH (1 + wide_dst);
2186 }
2187 }
2188 break;
2189
2190 case CLEANUP_POINT_EXPR:
2191 {
2192 struct jcf_block *save_labeled_blocks = state->labeled_blocks;
2193 int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0));
2194 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2195 if (target != IGNORE_TARGET)
2196 abort ();
2197 while (state->labeled_blocks != save_labeled_blocks)
2198 {
2199 struct jcf_block *finished_label = NULL;
2200 tree return_link;
2201 tree exception_type = build_pointer_type (throwable_type_node);
2202 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
2203 exception_type);
2204 struct jcf_block *end_label = get_jcf_label_here (state);
2205 struct jcf_block *label = state->labeled_blocks;
2206 struct jcf_handler *handler;
2207 tree cleanup = label->u.labeled_block;
2208 state->labeled_blocks = label->next;
2209 state->num_finalizers--;
2210 if (can_complete)
2211 {
2212 finished_label = gen_jcf_label (state);
2213 emit_jsr (label, state);
2214 emit_goto (finished_label, state);
2215 if (! CAN_COMPLETE_NORMALLY (cleanup))
2216 can_complete = 0;
2217 }
2218 handler = alloc_handler (label->v.start_label, end_label, state);
2219 handler->type = NULL_TREE;
2220 localvar_alloc (exception_decl, state);
2221 NOTE_PUSH (1);
2222 emit_store (exception_decl, state);
2223 emit_jsr (label, state);
2224 emit_load (exception_decl, state);
2225 RESERVE (1);
2226 OP1 (OPCODE_athrow);
2227 NOTE_POP (1);
2228
2229 /* The finally block. */
2230 return_link = build_decl (VAR_DECL, NULL_TREE,
2231 return_address_type_node);
2232 define_jcf_label (label, state);
2233 NOTE_PUSH (1);
2234 localvar_alloc (return_link, state);
2235 emit_store (return_link, state);
2236 generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
2237 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2238 localvar_free (return_link, state);
2239 localvar_free (exception_decl, state);
2240 if (finished_label != NULL)
2241 define_jcf_label (finished_label, state);
2242 }
2243 }
2244 break;
2245
2246 case WITH_CLEANUP_EXPR:
2247 {
2248 struct jcf_block *label;
2249 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2250 label = gen_jcf_label (state);
2251 label->pc = PENDING_CLEANUP_PC;
2252 label->next = state->labeled_blocks;
2253 state->labeled_blocks = label;
2254 state->num_finalizers++;
2255 label->u.labeled_block = TREE_OPERAND (exp, 2);
2256 label->v.start_label = get_jcf_label_here (state);
2257 if (target != IGNORE_TARGET)
2258 abort ();
2259 }
2260 break;
2261
2262 case TRY_EXPR:
2263 {
2264 tree try_clause = TREE_OPERAND (exp, 0);
2265 struct jcf_block *start_label = get_jcf_label_here (state);
2266 struct jcf_block *end_label; /* End of try clause. */
2267 struct jcf_block *finished_label = gen_jcf_label (state);
2268 tree clause = TREE_OPERAND (exp, 1);
2269 if (target != IGNORE_TARGET)
2270 abort ();
2271 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2272 end_label = get_jcf_label_here (state);
2273 if (CAN_COMPLETE_NORMALLY (try_clause))
2274 emit_goto (finished_label, state);
2275 while (clause != NULL_TREE)
2276 {
2277 tree catch_clause = TREE_OPERAND (clause, 0);
2278 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2279 struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
2280 if (exception_decl == NULL_TREE)
2281 handler->type = NULL_TREE;
2282 else
2283 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2284 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2285 clause = TREE_CHAIN (clause);
2286 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2287 emit_goto (finished_label, state);
2288 }
2289 define_jcf_label (finished_label, state);
2290 }
2291 break;
2292 case TRY_FINALLY_EXPR:
2293 {
2294 tree try_block = TREE_OPERAND (exp, 0);
2295 tree finally = TREE_OPERAND (exp, 1);
2296 struct jcf_block *finished_label = gen_jcf_label (state);
2297 struct jcf_block *finally_label = gen_jcf_label (state);
2298 struct jcf_block *start_label = get_jcf_label_here (state);
2299 tree return_link = build_decl (VAR_DECL, NULL_TREE,
2300 return_address_type_node);
2301 tree exception_type = build_pointer_type (throwable_type_node);
2302 tree exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2303 struct jcf_handler *handler;
2304
2305 finally_label->pc = PENDING_CLEANUP_PC;
2306 finally_label->next = state->labeled_blocks;
2307 state->labeled_blocks = finally_label;
2308 state->num_finalizers++;
2309
2310 generate_bytecode_insns (try_block, target, state);
2311 if (state->labeled_blocks != finally_label)
2312 abort();
2313 state->labeled_blocks = finally_label->next;
2314 emit_jsr (finally_label, state);
2315 if (CAN_COMPLETE_NORMALLY (try_block))
2316 emit_goto (finished_label, state);
2317
2318 /* Handle exceptions. */
2319 localvar_alloc (return_link, state);
2320 handler = alloc_handler (start_label, NULL_PTR, state);
2321 handler->end_label = handler->handler_label;
2322 handler->type = NULL_TREE;
2323 localvar_alloc (exception_decl, state);
2324 NOTE_PUSH (1);
2325 emit_store (exception_decl, state);
2326 emit_jsr (finally_label, state);
2327 emit_load (exception_decl, state);
2328 RESERVE (1);
2329 OP1 (OPCODE_athrow);
2330 NOTE_POP (1);
2331 localvar_free (exception_decl, state);
2332
2333 /* The finally block. First save return PC into return_link. */
2334 define_jcf_label (finally_label, state);
2335 NOTE_PUSH (1);
2336 emit_store (return_link, state);
2337
2338 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2339 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2340 localvar_free (return_link, state);
2341 define_jcf_label (finished_label, state);
2342 }
2343 break;
2344 case THROW_EXPR:
2345 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2346 RESERVE (1);
2347 OP1 (OPCODE_athrow);
2348 break;
2349 case NEW_ARRAY_INIT:
2350 {
2351 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2352 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2353 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2354 HOST_WIDE_INT length = java_array_type_length (array_type);
2355 if (target == IGNORE_TARGET)
2356 {
2357 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2358 generate_bytecode_insns (TREE_VALUE (values), target, state);
2359 break;
2360 }
2361 push_int_const (length, state);
2362 NOTE_PUSH (1);
2363 RESERVE (3);
2364 if (JPRIMITIVE_TYPE_P (element_type))
2365 {
2366 int atype = encode_newarray_type (element_type);
2367 OP1 (OPCODE_newarray);
2368 OP1 (atype);
2369 }
2370 else
2371 {
2372 int index = find_class_constant (&state->cpool,
2373 TREE_TYPE (element_type));
2374 OP1 (OPCODE_anewarray);
2375 OP2 (index);
2376 }
2377 offset = 0;
2378 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2379 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2380 {
2381 int save_SP = state->code_SP;
2382 emit_dup (1, 0, state);
2383 push_int_const (offset, state);
2384 NOTE_PUSH (1);
2385 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2386 RESERVE (1);
2387 OP1 (jopcode);
2388 state->code_SP = save_SP;
2389 }
2390 }
2391 break;
2392 case NEW_CLASS_EXPR:
2393 {
2394 tree class = TREE_TYPE (TREE_TYPE (exp));
2395 int need_result = target != IGNORE_TARGET;
2396 int index = find_class_constant (&state->cpool, class);
2397 RESERVE (4);
2398 OP1 (OPCODE_new);
2399 OP2 (index);
2400 if (need_result)
2401 OP1 (OPCODE_dup);
2402 NOTE_PUSH (1 + need_result);
2403 }
/* ... fall through ... */
2405 case CALL_EXPR:
2406 {
2407 tree f = TREE_OPERAND (exp, 0);
2408 tree x = TREE_OPERAND (exp, 1);
2409 int save_SP = state->code_SP;
2410 int nargs;
2411 if (TREE_CODE (f) == ADDR_EXPR)
2412 f = TREE_OPERAND (f, 0);
2413 if (f == soft_newarray_node)
2414 {
2415 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2416 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2417 STACK_TARGET, state);
2418 RESERVE (2);
2419 OP1 (OPCODE_newarray);
2420 OP1 (type_code);
2421 break;
2422 }
2423 else if (f == soft_multianewarray_node)
2424 {
2425 int ndims;
2426 int idim;
2427 int index = find_class_constant (&state->cpool,
2428 TREE_TYPE (TREE_TYPE (exp)));
2429 x = TREE_CHAIN (x); /* Skip class argument. */
2430 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2431 for (idim = ndims; --idim >= 0; )
2432 {
2433 x = TREE_CHAIN (x);
2434 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2435 }
2436 RESERVE (4);
2437 OP1 (OPCODE_multianewarray);
2438 OP2 (index);
2439 OP1 (ndims);
2440 break;
2441 }
2442 else if (f == soft_anewarray_node)
2443 {
2444 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2445 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2446 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2447 RESERVE (3);
2448 OP1 (OPCODE_anewarray);
2449 OP2 (index);
2450 break;
2451 }
2452 else if (f == soft_monitorenter_node
2453 || f == soft_monitorexit_node
2454 || f == throw_node)
2455 {
2456 if (f == soft_monitorenter_node)
2457 op = OPCODE_monitorenter;
2458 else if (f == soft_monitorexit_node)
2459 op = OPCODE_monitorexit;
2460 else
2461 op = OPCODE_athrow;
2462 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2463 RESERVE (1);
2464 OP1 (op);
2465 NOTE_POP (1);
2466 break;
2467 }
2468 else if (exp == soft_exceptioninfo_call_node)
2469 {
2470 NOTE_PUSH (1); /* Pushed by exception system. */
2471 break;
2472 }
2473 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2474 {
2475 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2476 }
2477 nargs = state->code_SP - save_SP;
2478 state->code_SP = save_SP;
2479 if (f == soft_fmod_node)
2480 {
2481 RESERVE (1);
2482 OP1 (OPCODE_drem);
2483 NOTE_PUSH (2);
2484 break;
2485 }
2486 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2487 NOTE_POP (1); /* Pop implicit this. */
2488 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2489 {
2490 int index = find_methodref_index (&state->cpool, f);
2491 int interface = 0;
2492 RESERVE (5);
2493 if (METHOD_STATIC (f))
2494 OP1 (OPCODE_invokestatic);
2495 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2496 || METHOD_PRIVATE (f))
2497 OP1 (OPCODE_invokespecial);
2498 else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f))))
2499 {
2500 OP1 (OPCODE_invokeinterface);
2501 interface = 1;
2502 }
2503 else
2504 OP1 (OPCODE_invokevirtual);
2505 OP2 (index);
2506 f = TREE_TYPE (TREE_TYPE (f));
2507 if (TREE_CODE (f) != VOID_TYPE)
2508 {
2509 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2510 if (target == IGNORE_TARGET)
2511 emit_pop (size, state);
2512 else
2513 NOTE_PUSH (size);
2514 }
2515 if (interface)
2516 {
2517 OP1 (nargs);
2518 OP1 (0);
2519 }
2520 break;
2521 }
2522 }
2523 /* fall through */
2524 notimpl:
2525 default:
2526 error("internal error - tree code not implemented: %s",
2527 tree_code_name [(int) TREE_CODE (exp)]);
2528 }
2529 }
2530
2698
2699 static void
2700 init_jcf_state (state, work)
2701 struct jcf_partial *state;
2702 struct obstack *work;
2703 {
2704 state->chunk_obstack = work;
2705 state->first = state->chunk = NULL;
2706 CPOOL_INIT (&state->cpool);
2707 BUFFER_INIT (&state->localvars);
2708 BUFFER_INIT (&state->bytecode);
2709 }
2710
2711 static void
2712 init_jcf_method (state, method)
2713 struct jcf_partial *state;
2714 tree method;
2715 {
2716 state->current_method = method;
2717 state->blocks = state->last_block = NULL;
2718 state->linenumber_count = 0;
2719 state->first_lvar = state->last_lvar = NULL;
2720 state->lvar_count = 0;
2721 state->labeled_blocks = NULL;
2722 state->code_length = 0;
2723 BUFFER_RESET (&state->bytecode);
2724 BUFFER_RESET (&state->localvars);
2725 state->code_SP = 0;
2726 state->code_SP_max = 0;
2727 state->handlers = NULL;
2728 state->last_handler = NULL;
2729 state->num_handlers = 0;
2730 state->num_finalizers = 0;
2731 state->return_value_decl = NULL_TREE;
2732 }
2733
/* Release the resources accumulated in STATE.  Must only be called on
   a STATE that has been through init_jcf_state. */

static void
release_jcf_state (state)
     struct jcf_partial *state;
{
  /* NOTE(review): CPOOL_FINISH presumably tears down constant-pool
     storage (see jcf.h) — it must run before the obstack is freed. */
  CPOOL_FINISH (&state->cpool);
  /* Freeing the first chunk releases every later allocation on the
     obstack as well. */
  obstack_free (state->chunk_obstack, state->first);
}
2741
/* Generate and return a list of chunks containing the class CLAS
   in the .class file representation.  The list can be written to a
   .class file using write_chunks.  Allocate chunks from obstack WORK.
   Counts and lengths that are only known after their contents have
   been generated (fields_count, methods_count, the Code attribute
   header) are backpatched with the UNSAFE_PUT* macros. */

static struct chunk *
generate_classfile (clas, state)
     tree clas;
     struct jcf_partial *state;
{
  struct chunk *cpool_chunk;
  char *source_file;
  char *ptr;
  int i;
  char *fields_count_ptr;
  int fields_count = 0;
  char *methods_count_ptr;
  int methods_count = 0;
  static tree SourceFile_node = NULL_TREE;
  tree part;
  /* Basetype 0 is the superclass;  the rest are implemented
     interfaces.  java.lang.Object has neither. */
  int total_supers
    = clas == object_type_node ? 0
    : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));

  /* Class file header:  magic number, then version 45.3. */
  ptr = append_chunk (NULL, 8, state);
  PUT4 (0xCafeBabe);  /* Magic number */
  PUT2 (3);  /* Minor version */
  PUT2 (45);  /* Major version */

  /* Reserve an empty chunk for the constant pool;  its contents are
     only known at the very end and are filled in below. */
  append_chunk (NULL, 0, state);
  cpool_chunk = state->chunk;

  /* Next allocate the chunk containing access_flags through fields_count. */
  if (clas == object_type_node)
    i = 10;
  else
    i = 8 + 2 * total_supers;
  ptr = append_chunk (NULL, i, state);
  i = get_access_flags (TYPE_NAME (clas));
  if (! (i & ACC_INTERFACE))
    i |= ACC_SUPER;
  PUT2 (i); /* access_flags */
  i = find_class_constant (&state->cpool, clas);  PUT2 (i);  /* this_class */
  if (clas == object_type_node)
    {
      PUT2(0);  /* super_class: none */
      PUT2(0);  /* interfaces_count */
    }
  else
    {
      tree basetypes = TYPE_BINFO_BASETYPES (clas);
      tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
      int j = find_class_constant (&state->cpool, base);
      PUT2 (j);  /* super_class */
      PUT2 (total_supers - 1);  /* interfaces_count */
      for (i = 1; i < total_supers; i++)
	{
	  base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
	  j = find_class_constant (&state->cpool, base);
	  PUT2 (j);
	}
    }
  /* The final two bytes of this chunk hold fields_count;  backpatched
     after the field loop below has counted them. */
  fields_count_ptr = ptr;

  for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
    {
      int have_value;
      /* Skip compiler-generated fields. */
      if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
	continue;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, DECL_NAME (part));  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (TREE_TYPE (part)));
      PUT2(i);
      /* Only initialized static fields get a ConstantValue attribute. */
      have_value = DECL_INITIAL (part) != NULL_TREE && FIELD_STATIC (part);
      PUT2 (have_value);  /* attributes_count */
      if (have_value)
	{
	  tree init = DECL_INITIAL (part);
	  static tree ConstantValue_node = NULL_TREE;
	  ptr = append_chunk (NULL, 8, state);
	  if (ConstantValue_node == NULL_TREE)
	    ConstantValue_node = get_identifier ("ConstantValue");
	  i = find_utf8_constant (&state->cpool, ConstantValue_node);
	  PUT2 (i);  /* attribute_name_index */
	  PUT4 (2);  /* attribute_length */
	  i = find_constant_index (init, state);  PUT2 (i);
	}
      fields_count++;
    }
  ptr = fields_count_ptr;  UNSAFE_PUT2 (fields_count);

  /* methods_count is likewise backpatched after the method loop. */
  ptr = methods_count_ptr = append_chunk (NULL, 2, state);
  PUT2 (0);

  for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
    {
      struct jcf_block *block;
      tree function_body = DECL_FUNCTION_BODY (part);
      tree body = function_body == NULL_TREE ? NULL_TREE
	: BLOCK_EXPR_BODY (function_body);
      /* Constructors are emitted under the special name <init>. */
      tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
	: DECL_NAME (part);
      tree type = TREE_TYPE (part);
      tree save_function = current_function_decl;
      current_function_decl = part;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (type));
      PUT2 (i);
      /* One Code attribute if there is a body;  one Exceptions
	 attribute if there is a throws list. */
      i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
      PUT2 (i);   /* attributes_count */
      if (body != NULL_TREE)
	{
	  int code_attributes_count = 0;
	  static tree Code_node = NULL_TREE;
	  tree t;
	  char *attr_len_ptr;
	  struct jcf_handler *handler;
	  if (Code_node == NULL_TREE)
	    Code_node = get_identifier ("Code");
	  ptr = append_chunk (NULL, 14, state);
	  i = find_utf8_constant (&state->cpool, Code_node);  PUT2 (i);
	  /* The remaining 12 bytes (attribute_length, max_stack,
	     max_locals, code_length) are backpatched below, once the
	     bytecode has been generated and relocated. */
	  attr_len_ptr = ptr;
	  init_jcf_method (state, part);
	  get_jcf_label_here (state);  /* Force a first block. */
	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
	    localvar_alloc (t, state);
	  generate_bytecode_insns (body, IGNORE_TARGET, state);
	  if (CAN_COMPLETE_NORMALLY (body))
	    {
	      /* Falling off the end is only legal for void methods. */
	      if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
		abort();
	      RESERVE (1);
	      OP1 (OPCODE_return);
	    }
	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
	    localvar_free (t, state);
	  if (state->return_value_decl != NULL_TREE)
	    localvar_free (state->return_value_decl, state);
	  finish_jcf_block (state);
	  perform_relocations (state);

	  ptr = attr_len_ptr;
	  /* Code attribute body:  8 fixed header bytes, the code
	     itself, 4 bytes for exception_table_length plus the later
	     attributes_count, and 8 bytes per handler. */
	  i = 8 + state->code_length + 4 + 8 * state->num_handlers;
	  if (state->linenumber_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 4 * state->linenumber_count;
	    }
	  if (state->lvar_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 10 * state->lvar_count;
	    }
	  UNSAFE_PUT4 (i); /* attribute_length */
	  UNSAFE_PUT2 (state->code_SP_max);  /* max_stack */
	  UNSAFE_PUT2 (localvar_max);  /* max_locals */
	  UNSAFE_PUT4 (state->code_length);

	  /* Emit the exception table. */
	  ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
	  PUT2 (state->num_handlers);  /* exception_table_length */
	  handler = state->handlers;
	  for (; handler != NULL;  handler = handler->next)
	    {
	      int type_index;
	      PUT2 (handler->start_label->pc);
	      PUT2 (handler->end_label->pc);
	      PUT2 (handler->handler_label->pc);
	      /* A catch_type of 0 means "catch anything" (finally). */
	      if (handler->type == NULL_TREE)
		type_index = 0;
	      else
		type_index = find_class_constant (&state->cpool,
						  handler->type);
	      PUT2 (type_index);
	    }

	  ptr = append_chunk (NULL, 2, state);
	  PUT2 (code_attributes_count);

	  /* Write the LineNumberTable attribute. */
	  if (state->linenumber_count > 0)
	    {
	      static tree LineNumberTable_node = NULL_TREE;
	      ptr = append_chunk (NULL, 8 + 4 * state->linenumber_count, state);
	      if (LineNumberTable_node == NULL_TREE)
		LineNumberTable_node = get_identifier ("LineNumberTable");
	      i = find_utf8_constant (&state->cpool, LineNumberTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2+4*state->linenumber_count;  PUT4(i); /* attribute_length */
	      i = state->linenumber_count;  PUT2 (i);
	      for (block = state->blocks;  block != NULL;  block = block->next)
		{
		  int line = block->linenumber;
		  if (line > 0)
		    {
		      PUT2 (block->pc);
		      PUT2 (line);
		    }
		}
	    }

	  /* Write the LocalVariableTable attribute. */
	  if (state->lvar_count > 0)
	    {
	      static tree LocalVariableTable_node = NULL_TREE;
	      struct localvar_info *lvar = state->first_lvar;
	      ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
	      if (LocalVariableTable_node == NULL_TREE)
		LocalVariableTable_node = get_identifier("LocalVariableTable");
	      i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2 + 10 * state->lvar_count;  PUT4 (i);  /* attribute_length */
	      i = state->lvar_count;  PUT2 (i);
	      for ( ; lvar != NULL;  lvar = lvar->next)
		{
		  tree name = DECL_NAME (lvar->decl);
		  tree sig = build_java_signature (TREE_TYPE (lvar->decl));
		  i = lvar->start_label->pc;  PUT2 (i);   /* start_pc */
		  i = lvar->end_label->pc - i;  PUT2 (i); /* length */
		  i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
		  i = find_utf8_constant (&state->cpool, sig);  PUT2 (i);
		  i = DECL_LOCAL_INDEX (lvar->decl);  PUT2 (i);
		}
	    }
	}
      if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
	{
	  tree t = DECL_FUNCTION_THROWS (part);
	  int throws_count = list_length (t);
	  static tree Exceptions_node = NULL_TREE;
	  if (Exceptions_node == NULL_TREE)
	    Exceptions_node = get_identifier ("Exceptions");
	  ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
	  i = find_utf8_constant (&state->cpool, Exceptions_node);
	  PUT2 (i);  /* attribute_name_index */
	  i = 2 + 2 * throws_count;  PUT4(i); /* attribute_length */
	  i = throws_count;  PUT2 (i);
	  for (; t != NULL_TREE;  t = TREE_CHAIN (t))
	    {
	      i = find_class_constant (&state->cpool, TREE_VALUE (t));
	      PUT2 (i);
	    }
	}
      methods_count++;
      current_function_decl = save_function;
    }
  ptr = methods_count_ptr;  UNSAFE_PUT2 (methods_count);

  /* Strip any directory prefix from the source file name;  both '/'
     and '\\' are treated as separators. */
  source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
  for (ptr = source_file; ; ptr++)
    {
      char ch = *ptr;
      if (ch == '\0')
	break;
      if (ch == '/' || ch == '\\')
	source_file = ptr+1;
    }
  ptr = append_chunk (NULL, 10, state);
  PUT2 (1);  /* attributes_count */

  /* Generate the SourceFile attribute. */
  if (SourceFile_node == NULL_TREE)
    SourceFile_node = get_identifier ("SourceFile");
  i = find_utf8_constant (&state->cpool, SourceFile_node);
  PUT2 (i);  /* attribute_name_index */
  PUT4 (2);  /* attribute_length */
  i = find_utf8_constant (&state->cpool, get_identifier (source_file));
  PUT2 (i);

  /* Now finally generate the contents of the constant pool chunk. */
  i = count_constant_pool_bytes (&state->cpool);
  ptr = obstack_alloc (state->chunk_obstack, i);
  cpool_chunk->data = ptr;
  cpool_chunk->size = i;
  write_constant_pool (&state->cpool, ptr, i);
  return state->first;
}
3021
/* Construct the file name of the .class file for class CLAS, creating
   any needed subdirectories along the way.  If jcf_write_base_directory
   is set the name is relative to it;  otherwise the file is placed
   next to the corresponding .java source file.  Returns a malloc'd
   string the caller must free. */

static char *
make_class_file_name (clas)
     tree clas;
{
  const char *dname, *slash;
  char *cname, *r;
  struct stat sb;

  /* cname is the qualified class name with '.' replaced by
     DIR_SEPARATOR and ".class" appended. */
  cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
						"", '.', DIR_SEPARATOR,
						".class"));
  if (jcf_write_base_directory == NULL)
    {
      /* Make sure we put the class file into the .java file's
	 directory, and not into some subdirectory thereof. */
      char *t;
      dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
      slash = strrchr (dname, DIR_SEPARATOR);
      if (! slash)
	{
	  /* Source file has no directory component:  use ".". */
	  dname = ".";
	  slash = dname + 1;
	}
      /* Drop the package directories from cname, keeping only the
	 plain file name. */
      t = strrchr (cname, DIR_SEPARATOR);
      if (t)
	cname = t + 1;
    }
  else
    {
      dname = jcf_write_base_directory;
      slash = dname + strlen (dname);
    }

  /* Build r = <dname prefix> DIR_SEPARATOR cname '\0'. */
  r = xmalloc (slash - dname + strlen (cname) + 2);
  strncpy (r, dname, slash - dname);
  r[slash - dname] = DIR_SEPARATOR;
  strcpy (&r[slash - dname + 1], cname);

  /* We try to make new directories when we need them.  We only do
     this for directories which "might not" exist.  For instance, we
     assume the `-d' directory exists, but we don't assume that any
     subdirectory below it exists.  It might be worthwhile to keep
     track of which directories we've created to avoid gratuitous
     stat()s. */
  /* Skip past the prefix we assume exists, so only subdirectories
     below it are created. */
  dname = r + (slash - dname) + 1;
  while (1)
    {
      cname = strchr (dname, DIR_SEPARATOR);
      if (cname == NULL)
	break;
      /* Temporarily truncate r at this separator to stat/mkdir the
	 intermediate directory. */
      *cname = '\0';
      if (stat (r, &sb) == -1)
	{
	  /* Try to make it. */
	  if (mkdir (r, 0755) == -1)
	    {
	      /* NOTE(review): fatal presumably does not return, making
		 the free/return below unreachable — confirm. */
	      fatal ("failed to create directory `%s'", r);
	      free (r);
	      return NULL;
	    }
	}
      *cname = DIR_SEPARATOR;
      /* Skip consecutive separators. */
      for (dname = cname + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
	;
    }

  return r;
}
3091
3092 /* Write out the contens of a class (RECORD_TYPE) CLAS, as a .class file.
3093 The output .class file name is make_class_file_name(CLAS). */
3094
3095 void
3096 write_classfile (clas)
3097 tree clas;
3098 {
3099 struct obstack *work = &temporary_obstack;
3100 struct jcf_partial state[1];
3101 char *class_file_name = make_class_file_name (clas);
3102 struct chunk *chunks;
3103
3104 if (class_file_name != NULL)
3105 {
3106 FILE* stream = fopen (class_file_name, "wb");
3107 if (stream == NULL)
3108 fatal ("failed to open `%s' for writing", class_file_name);
3109 jcf_dependency_add_target (class_file_name);
3110 init_jcf_state (state, work);
3111 chunks = generate_classfile (clas, state);
3112 write_chunks (stream, chunks);
3113 if (fclose (stream))
3114 fatal ("failed to close after writing `%s'", class_file_name);
3115 free (class_file_name);
3116 }
3117 release_jcf_state (state);
3118 }
3119
3120 /* TODO:
3121 string concatenation
3122 synchronized statement
3123 */
This page took 0.180628 seconds and 6 git commands to generate.