/* Write out a Java(TM) class file.
   Copyright (C) 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.

Java and all Java-based marks are trademarks or registered trademarks
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc.  */
#include "java-tree.h"
#include "java-opcodes.h"
#include "parse.h" /* for BLOCK_EXPR_BODY */

#define DIR_SEPARATOR '/'
extern struct obstack temporary_obstack;

/* Base directory in which `.class' files should be written.
   NULL means to put the file into the same directory as the
   corresponding .java file.  */
char *jcf_write_base_directory = NULL;
/* Make sure bytecode.data is big enough for at least N more bytes.  */

#define RESERVE(N) \
  do { CHECK_OP(state); \
    if (state->bytecode.ptr + (N) > state->bytecode.limit) \
      buffer_grow (&state->bytecode, N); } while (0)

/* Add a 1-byte instruction/operand I to bytecode.data,
   assuming space has already been RESERVE'd.  */

#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))

/* Like OP1, but I is a 2-byte big endian integer.  */

#define OP2(I) \
  do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Like OP1, but I is a 4-byte big endian integer.  */

#define OP4(I) \
  do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
       OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Macro to call each time we push I words on the JVM stack.  */

#define NOTE_PUSH(I) \
  do { state->code_SP += (I); \
    if (state->code_SP > state->code_SP_max) \
      state->code_SP_max = state->code_SP; } while (0)

/* Macro to call each time we pop I words from the JVM stack.  */

#define NOTE_POP(I) \
  do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
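
/* Note: code_SP tracks the current operand-stack depth (in words) as
   instructions are emitted, and code_SP_max records its high-water mark;
   that maximum is what supplies the max_stack value when the method's
   Code attribute is written out.  */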
/* A chunk or segment of a .class file.  */
struct chunk
{
  /* The next segment of this .class file.  */
  struct chunk *next;
  /* The actual data in this segment to be written to the .class file.  */
  unsigned char *data;
  /* The size of the segment to be written to the .class file.  */
  int size;
};

#define PENDING_CLEANUP_PC (-3)
#define PENDING_EXIT_PC (-2)
#define UNDEFINED_PC (-1)
/* Each "block" represents a label plus the bytecode instructions following.
   There may be branches out of the block, but no incoming jumps, except
   to the beginning of the block.

   If (pc < 0), the jcf_block is not an actual block (i.e. it has no
   associated code yet), but it is an undefined label.  */

struct jcf_block
{
  /* For blocks that are defined, the next block (in pc order).
     For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
     or a cleanup expression (from a WITH_CLEANUP_EXPR),
     this is the next (outer) such end label, in a stack headed by
     labeled_blocks in jcf_partial.  */
  struct jcf_block *next;

  /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR,
     pc is PENDING_EXIT_PC.
     In the not-yet-defined end label for a pending cleanup subroutine,
     pc is PENDING_CLEANUP_PC.
     For other not-yet-defined labels, pc is UNDEFINED_PC.

     If the label has been defined:
     Until perform_relocations is finished, this is the maximum possible
     value of the bytecode offset at the beginning of this block.
     After perform_relocations, it is the actual offset (pc).  */
  int pc;

  int linenumber;

  union {
    /* After finish_jcf_block is called, the actual instructions
       contained in this block.  Before that, this is NULL, and the
       instructions are in state->bytecode.  */
    struct chunk *chunk;

    /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
       covered by the cleanup.  */
    struct jcf_block *start_label;
  } v;

  union {
    /* Set of relocations (in reverse offset order) for this block.  */
    struct jcf_relocation *relocations;

    /* If this block is that of the not-yet-defined end label of
       a LABELED_BLOCK_EXPR, labeled_block is that LABELED_BLOCK_EXPR.
       If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run.  */
    tree labeled_block;
  } u;
};
/* A "relocation" type for the 0-3 bytes of padding at the start
   of a tableswitch or a lookupswitch.  */
#define SWITCH_ALIGN_RELOC 4

/* A relocation type for the labels in a tableswitch or a lookupswitch;
   these are relative to the start of the instruction, but (due to
   the 0-3 bytes of padding), we don't know the offset before relocation.  */
#define BLOCK_START_RELOC 1
struct jcf_relocation
{
  /* Next relocation for the current jcf_block.  */
  struct jcf_relocation *next;

  /* The (byte) offset within the current block that needs to be relocated.  */
  HOST_WIDE_INT offset;

  /* 0 if offset is a 4-byte relative offset.
     4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
     for proper alignment in tableswitch/lookupswitch instructions.
     1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
     to the start of the containing block.
     -1 if offset is a 2-byte relative offset.
     < -1 if offset is the address of an instruction with a 2-byte offset
     that does not have a corresponding 4-byte offset version, in which
     case the absolute value of kind is the inverted opcode.
     > 4 if offset is the address of an instruction (such as jsr) with a
     2-byte offset that does have a corresponding 4-byte offset version,
     in which case kind is the opcode of the 4-byte version (such as jsr_w).  */
  int kind;

  /* The label the relocation wants to actually transfer to.  */
  struct jcf_block *label;
};
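
/* Note: as examples of the encoding above, emit_goto below records a
   relocation with kind == OPCODE_goto_w (a 2-byte goto that may have to
   grow into goto_w), emit_jsr uses kind == OPCODE_jsr_w, and emit_if
   uses kind == -inv_opcode, since the conditional branches have no
   4-byte form and may instead have to be rewritten as an inverted
   branch around a goto_w.  */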
/* State for a single catch clause.  */

struct jcf_handler
{
  struct jcf_handler *next;

  struct jcf_block *start_label;
  struct jcf_block *end_label;
  struct jcf_block *handler_label;

  /* The sub-class of Throwable handled, or NULL_TREE (for finally).  */
  tree type;
};
/* State for the current switch statement.  */

struct jcf_switch_state
{
  struct jcf_switch_state *prev;
  struct jcf_block *default_label;

  struct jcf_relocation *cases;
  int num_cases;
  HOST_WIDE_INT min_case, max_case;
};
/* This structure is used to contain the various pieces that will
   become a .class file.  */

  struct obstack *chunk_obstack;

  /* List of basic blocks for the current method.  */
  struct jcf_block *blocks;
  struct jcf_block *last_block;

  struct localvar_info *first_lvar;
  struct localvar_info *last_lvar;

  int linenumber_count;

  /* Until perform_relocations, this is an upper bound on the number
     of bytes (so far) in the instructions for the current method.  */
  int code_length;

  /* Stack of undefined ending labels for LABELED_BLOCK_EXPR.  */
  struct jcf_block *labeled_blocks;

  /* The current stack size (stack pointer) in the current method.  */
  int code_SP;

  /* The largest extent of stack size (stack pointer) in the current method.  */
  int code_SP_max;

  /* Contains a mapping from local var slot number to localvar_info.  */
  struct buffer localvars;

  /* The buffer allocated for bytecode for the current jcf_block.  */
  struct buffer bytecode;

  /* Chain of exception handlers for the current method.  */
  struct jcf_handler *handlers;

  /* Last element in handlers chain.  */
  struct jcf_handler *last_handler;

  /* Number of exception handlers for the current method.  */
  int num_handlers;

  /* Number of finalizers we are currently nested within.  */
  int num_finalizers;

  /* If non-NULL, use this for the return value.  */
  tree return_value_decl;

  /* Information about the current switch statement.  */
  struct jcf_switch_state *sw_state;
static void generate_bytecode_insns PROTO ((tree, int, struct jcf_partial *));
/* Utility macros for appending (big-endian) data to a buffer.
   We assume a local variable 'ptr' points into where we want to
   write next, and we assume enough space has been allocated.  */

#ifdef ENABLE_CHECKING
CHECK_PUT(ptr, state, i)
     void *ptr;
     struct jcf_partial *state;
     int i;
{
  if (ptr < state->chunk->data
      || (char*)ptr + i > state->chunk->data + state->chunk->size)
    fatal ("internal error - CHECK_PUT failed");
}
#else
#define CHECK_PUT(PTR, STATE, I) ((void)0)
#endif

#define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
#define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
#define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
#define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
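
/* Note: PUT2 and PUT4 emit their argument most-significant byte first,
   e.g. PUT2 (0x1234) stores 0x12 then 0x34, matching the big-endian
   layout used throughout a .class file.  */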
302 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
303 Set the data and size fields to DATA and SIZE, respectively.
304 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
307 alloc_chunk (last
, data
, size
, work
)
311 struct obstack
*work
;
313 struct chunk
*chunk
= (struct chunk
*)
314 obstack_alloc (work
, sizeof(struct chunk
));
316 if (data
== NULL
&& size
> 0)
317 data
= obstack_alloc (work
, size
);
327 #ifdef ENABLE_CHECKING
329 CHECK_OP(struct jcf_partial
*state
)
331 if (state
->bytecode
.ptr
> state
->bytecode
.limit
)
333 fatal("internal error - CHECK_OP failed");
338 #define CHECK_OP(STATE) ((void)0)
342 append_chunk (data
, size
, state
)
345 struct jcf_partial
*state
;
347 state
->chunk
= alloc_chunk (state
->chunk
, data
, size
, state
->chunk_obstack
);
348 if (state
->first
== NULL
)
349 state
->first
= state
->chunk
;
350 return state
->chunk
->data
;
354 append_chunk_copy (data
, size
, state
)
357 struct jcf_partial
*state
;
359 unsigned char *ptr
= append_chunk (NULL
, size
, state
);
360 memcpy (ptr
, data
, size
);
364 gen_jcf_label (state
)
365 struct jcf_partial
*state
;
367 struct jcf_block
*block
= (struct jcf_block
*)
368 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_block
));
370 block
->linenumber
= -1;
371 block
->pc
= UNDEFINED_PC
;
376 finish_jcf_block (state
)
377 struct jcf_partial
*state
;
379 struct jcf_block
*block
= state
->last_block
;
380 struct jcf_relocation
*reloc
;
381 int code_length
= BUFFER_LENGTH (&state
->bytecode
);
382 int pc
= state
->code_length
;
383 append_chunk_copy (state
->bytecode
.data
, code_length
, state
);
384 BUFFER_RESET (&state
->bytecode
);
385 block
->v
.chunk
= state
->chunk
;
387 /* Calculate code_length to the maximum value it can have. */
388 pc
+= block
->v
.chunk
->size
;
389 for (reloc
= block
->u
.relocations
; reloc
!= NULL
; reloc
= reloc
->next
)
391 int kind
= reloc
->kind
;
392 if (kind
== SWITCH_ALIGN_RELOC
)
394 else if (kind
> BLOCK_START_RELOC
)
395 pc
+= 2; /* 2-byte offset may grow to 4-byte offset */
397 pc
+= 5; /* May need to add a goto_w. */
399 state
->code_length
= pc
;
403 define_jcf_label (label
, state
)
404 struct jcf_block
*label
;
405 struct jcf_partial
*state
;
407 if (state
->last_block
!= NULL
)
408 finish_jcf_block (state
);
409 label
->pc
= state
->code_length
;
410 if (state
->blocks
== NULL
)
411 state
->blocks
= label
;
413 state
->last_block
->next
= label
;
414 state
->last_block
= label
;
416 label
->u
.relocations
= NULL
;
420 get_jcf_label_here (state
)
421 struct jcf_partial
*state
;
423 if (state
->last_block
!= NULL
&& BUFFER_LENGTH (&state
->bytecode
) == 0)
424 return state
->last_block
;
427 struct jcf_block
*label
= gen_jcf_label (state
);
428 define_jcf_label (label
, state
);
433 /* Note a line number entry for the current PC and given LINE. */
436 put_linenumber (line
, state
)
438 struct jcf_partial
*state
;
440 struct jcf_block
*label
= get_jcf_label_here (state
);
441 if (label
->linenumber
> 0)
443 label
= gen_jcf_label (state
);
444 define_jcf_label (label
, state
);
446 label
->linenumber
= line
;
447 state
->linenumber_count
++;
450 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
451 in the range (START_LABEL, END_LABEL). */
453 static struct jcf_handler
*
454 alloc_handler (start_label
, end_label
, state
)
455 struct jcf_block
*start_label
;
456 struct jcf_block
*end_label
;
457 struct jcf_partial
*state
;
459 struct jcf_handler
*handler
= (struct jcf_handler
*)
460 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_handler
));
461 handler
->start_label
= start_label
;
462 handler
->end_label
= end_label
;
463 handler
->handler_label
= get_jcf_label_here (state
);
464 if (state
->handlers
== NULL
)
465 state
->handlers
= handler
;
467 state
->last_handler
->next
= handler
;
468 state
->last_handler
= handler
;
469 handler
->next
= NULL
;
470 state
->num_handlers
++;
/* The index of the JVM local variable allocated for this DECL.
   This is assigned when generating .class files;
   contrast DECL_LOCAL_SLOT_NUMBER, which is set when *reading* a .class file.
   (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.)  */
480 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
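
/* Note: the slot number is thus stashed in the decl's DECL_ALIGN field,
   so no DECL_LANG_SPECIFIC structure has to be allocated just to
   remember the index.  */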
484 struct localvar_info
*next
;
487 struct jcf_block
*start_label
;
488 struct jcf_block
*end_label
;
491 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
492 #define localvar_max \
493 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
496 localvar_alloc (decl
, state
)
498 struct jcf_partial
*state
;
500 struct jcf_block
*start_label
= get_jcf_label_here (state
);
501 int wide
= TYPE_IS_WIDE (TREE_TYPE (decl
));
503 register struct localvar_info
*info
;
504 register struct localvar_info
**ptr
= localvar_buffer
;
505 register struct localvar_info
**limit
506 = (struct localvar_info
**) state
->localvars
.ptr
;
507 for (index
= 0; ptr
< limit
; index
++, ptr
++)
510 && (! wide
|| ((ptr
+1) < limit
&& ptr
[1] == NULL
)))
515 buffer_grow (&state
->localvars
, 2 * sizeof (struct localvar_info
*));
516 ptr
= (struct localvar_info
**) state
->localvars
.data
+ index
;
517 state
->localvars
.ptr
= (unsigned char *) (ptr
+ 1 + wide
);
519 info
= (struct localvar_info
*)
520 obstack_alloc (state
->chunk_obstack
, sizeof (struct localvar_info
));
523 ptr
[1] = (struct localvar_info
*)(~0);
524 DECL_LOCAL_INDEX (decl
) = index
;
526 info
->start_label
= start_label
;
528 if (DECL_NAME (decl
) != NULL_TREE
)
530 /* Generate debugging info. */
532 if (state
->last_lvar
!= NULL
)
533 state
->last_lvar
->next
= info
;
535 state
->first_lvar
= info
;
536 state
->last_lvar
= info
;
542 localvar_free (decl
, state
)
544 struct jcf_partial
*state
;
546 struct jcf_block
*end_label
= get_jcf_label_here (state
);
547 int index
= DECL_LOCAL_INDEX (decl
);
548 register struct localvar_info
**ptr
= &localvar_buffer
[index
];
549 register struct localvar_info
*info
= *ptr
;
550 int wide
= TYPE_IS_WIDE (TREE_TYPE (decl
));
552 info
->end_label
= end_label
;
554 if (info
->decl
!= decl
)
559 if (ptr
[1] != (struct localvar_info
*)(~0))
566 #define STACK_TARGET 1
567 #define IGNORE_TARGET 2
569 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
570 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
573 get_access_flags (decl
)
577 int isfield
= TREE_CODE (decl
) == FIELD_DECL
|| TREE_CODE (decl
) == VAR_DECL
;
578 if (CLASS_PUBLIC (decl
)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
580 if (CLASS_FINAL (decl
)) /* same as FIELD_FINAL and METHOD_FINAL */
582 if (isfield
|| TREE_CODE (decl
) == FUNCTION_DECL
)
584 if (TREE_PROTECTED (decl
))
585 flags
|= ACC_PROTECTED
;
586 if (TREE_PRIVATE (decl
))
587 flags
|= ACC_PRIVATE
;
589 else if (TREE_CODE (decl
) == TYPE_DECL
)
591 if (CLASS_SUPER (decl
))
593 if (CLASS_ABSTRACT (decl
))
594 flags
|= ACC_ABSTRACT
;
595 if (CLASS_INTERFACE (decl
))
596 flags
|= ACC_INTERFACE
;
599 fatal ("internal error - bad argument to get_access_flags");
600 if (TREE_CODE (decl
) == FUNCTION_DECL
)
602 if (METHOD_NATIVE (decl
))
604 if (METHOD_STATIC (decl
))
606 if (METHOD_SYNCHRONIZED (decl
))
607 flags
|= ACC_SYNCHRONIZED
;
608 if (METHOD_ABSTRACT (decl
))
609 flags
|= ACC_ABSTRACT
;
613 if (FIELD_STATIC (decl
))
615 if (FIELD_VOLATILE (decl
))
616 flags
|= ACC_VOLATILE
;
617 if (FIELD_TRANSIENT (decl
))
618 flags
|= ACC_TRANSIENT
;
623 /* Write the list of segments starting at CHUNKS to STREAM. */
626 write_chunks (stream
, chunks
)
628 struct chunk
*chunks
;
630 for (; chunks
!= NULL
; chunks
= chunks
->next
)
631 fwrite (chunks
->data
, chunks
->size
, 1, stream
);
634 /* Push a 1-word constant in the constant pool at the given INDEX.
635 (Caller is responsible for doing NOTE_PUSH.) */
638 push_constant1 (index
, state
)
640 struct jcf_partial
*state
;
655 /* Push a 2-word constant in the constant pool at the given INDEX.
656 (Caller is responsible for doing NOTE_PUSH.) */
659 push_constant2 (index
, state
)
661 struct jcf_partial
*state
;
668 /* Push 32-bit integer constant on VM stack.
669 Caller is responsible for doing NOTE_PUSH. */
672 push_int_const (i
, state
)
674 struct jcf_partial
*state
;
677 if (i
>= -1 && i
<= 5)
678 OP1(OPCODE_iconst_0
+ i
);
679 else if (i
>= -128 && i
< 128)
684 else if (i
>= -32768 && i
< 32768)
691 i
= find_constant1 (&state
->cpool
, CONSTANT_Integer
, i
& 0xFFFFFFFF);
692 push_constant1 (i
, state
);
697 find_constant_wide (lo
, hi
, state
)
698 HOST_WIDE_INT lo
, hi
;
699 struct jcf_partial
*state
;
701 HOST_WIDE_INT w1
, w2
;
702 lshift_double (lo
, hi
, -32, 64, &w1
, &w2
, 1);
703 return find_constant2 (&state
->cpool
, CONSTANT_Long
,
704 w1
& 0xFFFFFFFF, lo
& 0xFFFFFFFF);
707 /* Find or allocate a constant pool entry for the given VALUE.
708 Return the index in the constant pool. */
711 find_constant_index (value
, state
)
713 struct jcf_partial
*state
;
715 if (TREE_CODE (value
) == INTEGER_CST
)
717 if (TYPE_PRECISION (TREE_TYPE (value
)) <= 32)
718 return find_constant1 (&state
->cpool
, CONSTANT_Integer
,
719 TREE_INT_CST_LOW (value
) & 0xFFFFFFFF);
721 return find_constant_wide (TREE_INT_CST_LOW (value
),
722 TREE_INT_CST_HIGH (value
), state
);
724 else if (TREE_CODE (value
) == REAL_CST
)
727 if (TYPE_PRECISION (TREE_TYPE (value
)) == 32)
729 words
[0] = etarsingle (TREE_REAL_CST (value
)) & 0xFFFFFFFF;
730 return find_constant1 (&state
->cpool
, CONSTANT_Float
, words
[0]);
734 etardouble (TREE_REAL_CST (value
), words
);
735 return find_constant2 (&state
->cpool
, CONSTANT_Double
,
736 words
[1-FLOAT_WORDS_BIG_ENDIAN
] & 0xFFFFFFFF,
737 words
[FLOAT_WORDS_BIG_ENDIAN
] & 0xFFFFFFFF);
740 else if (TREE_CODE (value
) == STRING_CST
)
742 return find_string_constant (&state
->cpool
, value
);
745 fatal ("find_constant_index - bad type");
748 /* Push 64-bit long constant on VM stack.
749 Caller is responsible for doing NOTE_PUSH. */
752 push_long_const (lo
, hi
, state
)
753 HOST_WIDE_INT lo
, hi
;
754 struct jcf_partial
*state
;
756 if (hi
== 0 && lo
>= 0 && lo
<= 1)
759 OP1(OPCODE_lconst_0
+ lo
);
761 else if ((hi
== 0 && lo
< 32768) || (hi
== -1 && lo
>= -32768))
763 push_int_const (lo
, state
);
768 push_constant2 (find_constant_wide (lo
, hi
, state
), state
);
772 field_op (field
, opcode
, state
)
775 struct jcf_partial
*state
;
777 int index
= find_fieldref_index (&state
->cpool
, field
);
783 /* Returns an integer in the range 0 (for 'int') through 4 (for object
784 reference) to 7 (for 'short') which matches the pattern of how JVM
785 opcodes typically depend on the operand type. */
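
/* Note: the value returned by adjust_typed_op is an offset into a family
   of typed JVM opcodes, which are numbered consecutively: iaload, laload,
   faload, daload, aaload, baload, caload, saload (0..7), and likewise
   iload..aload (0..4) and iadd..dadd (0..3); callers therefore pass MAX
   as 7, 4 or 3 depending on which family they are indexing.  */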
788 adjust_typed_op (type
, max
)
792 switch (TREE_CODE (type
))
795 case RECORD_TYPE
: return 4;
797 return TYPE_PRECISION (type
) == 32 || max
< 5 ? 0 : 5;
799 return TYPE_PRECISION (type
) == 32 || max
< 6 ? 0 : 6;
801 switch (TYPE_PRECISION (type
))
803 case 8: return max
< 5 ? 0 : 5;
804 case 16: return max
< 7 ? 0 : 7;
810 switch (TYPE_PRECISION (type
))
823 maybe_wide (opcode
, index
, state
)
825 struct jcf_partial
*state
;
/* Compile code to duplicate with offset, where
   SIZE is the size of the stack item to duplicate (1 or 2), and
   OFFSET is where to insert the result (must be 0, 1, or 2).
   (The new words get inserted at stack[SP-size-offset].)  */
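
/* Note: the six dup variants cover exactly these combinations; for
   example emit_dup (1, 1, state) selects dup_x1, which turns the stack
   ..., a, b into ..., b, a, b.  */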
848 emit_dup (size
, offset
, state
)
850 struct jcf_partial
*state
;
857 kind
= size
== 1 ? OPCODE_dup
: OPCODE_dup2
;
858 else if (offset
== 1)
859 kind
= size
== 1 ? OPCODE_dup_x1
: OPCODE_dup2_x1
;
860 else if (offset
== 2)
861 kind
= size
== 1 ? OPCODE_dup_x2
: OPCODE_dup2_x2
;
869 emit_pop (size
, state
)
871 struct jcf_partial
*state
;
874 OP1 (OPCODE_pop
- 1 + size
);
878 emit_iinc (var
, value
, state
)
881 struct jcf_partial
*state
;
883 int slot
= DECL_LOCAL_INDEX (var
);
885 if (value
< -128 || value
> 127 || slot
>= 256)
903 emit_load_or_store (var
, opcode
, state
)
904 tree var
; /* Variable to load from or store into. */
905 int opcode
; /* Either OPCODE_iload or OPCODE_istore. */
906 struct jcf_partial
*state
;
908 tree type
= TREE_TYPE (var
);
909 int kind
= adjust_typed_op (type
, 4);
910 int index
= DECL_LOCAL_INDEX (var
);
914 OP1 (opcode
+ 5 + 4 * kind
+ index
); /* [ilfda]{load,store}_[0123] */
917 maybe_wide (opcode
+ kind
, index
, state
); /* [ilfda]{load,store} */
921 emit_load (var
, state
)
923 struct jcf_partial
*state
;
925 emit_load_or_store (var
, OPCODE_iload
, state
);
926 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var
)) ? 2 : 1);
930 emit_store (var
, state
)
932 struct jcf_partial
*state
;
934 emit_load_or_store (var
, OPCODE_istore
, state
);
935 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var
)) ? 2 : 1);
939 emit_unop (opcode
, type
, state
)
940 enum java_opcode opcode
;
942 struct jcf_partial
*state
;
949 emit_binop (opcode
, type
, state
)
950 enum java_opcode opcode
;
952 struct jcf_partial
*state
;
954 int size
= TYPE_IS_WIDE (type
) ? 2 : 1;
961 emit_reloc (value
, kind
, target
, state
)
964 struct jcf_block
*target
;
965 struct jcf_partial
*state
;
967 struct jcf_relocation
*reloc
= (struct jcf_relocation
*)
968 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_relocation
));
969 struct jcf_block
*block
= state
->last_block
;
970 reloc
->next
= block
->u
.relocations
;
971 block
->u
.relocations
= reloc
;
972 reloc
->offset
= BUFFER_LENGTH (&state
->bytecode
);
973 reloc
->label
= target
;
975 if (kind
== 0 || kind
== BLOCK_START_RELOC
)
977 else if (kind
!= SWITCH_ALIGN_RELOC
)
982 emit_switch_reloc (label
, state
)
983 struct jcf_block
*label
;
984 struct jcf_partial
*state
;
986 emit_reloc (0, BLOCK_START_RELOC
, label
, state
);
989 /* Similar to emit_switch_reloc,
990 but re-uses an existing case reloc. */
993 emit_case_reloc (reloc
, state
)
994 struct jcf_relocation
*reloc
;
995 struct jcf_partial
*state
;
997 struct jcf_block
*block
= state
->last_block
;
998 reloc
->next
= block
->u
.relocations
;
999 block
->u
.relocations
= reloc
;
1000 reloc
->offset
= BUFFER_LENGTH (&state
->bytecode
);
1001 reloc
->kind
= BLOCK_START_RELOC
;
1005 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1006 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1009 emit_if (target
, opcode
, inv_opcode
, state
)
1010 struct jcf_block
*target
;
1011 int opcode
, inv_opcode
;
1012 struct jcf_partial
*state
;
/* Value is 1 byte from reloc back to start of instruction.  */
1016 emit_reloc (1, - inv_opcode
, target
, state
);
1020 emit_goto (target
, state
)
1021 struct jcf_block
*target
;
1022 struct jcf_partial
*state
;
/* Value is 1 byte from reloc back to start of instruction.  */
1026 emit_reloc (1, OPCODE_goto_w
, target
, state
);
1030 emit_jsr (target
, state
)
1031 struct jcf_block
*target
;
1032 struct jcf_partial
*state
;
/* Value is 1 byte from reloc back to start of instruction.  */
1036 emit_reloc (1, OPCODE_jsr_w
, target
, state
);
1039 /* Generate code to evaluate EXP. If the result is true,
1040 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
TRUE_BRANCH_FIRST is a code generation hint that the
1042 TRUE_LABEL may follow right after this. (The idea is that we
1043 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1046 generate_bytecode_conditional (exp
, true_label
, false_label
,
1047 true_branch_first
, state
)
1049 struct jcf_block
*true_label
;
1050 struct jcf_block
*false_label
;
1051 int true_branch_first
;
1052 struct jcf_partial
*state
;
1054 tree exp0
, exp1
, type
;
1055 int save_SP
= state
->code_SP
;
1056 enum java_opcode op
, negop
;
1057 switch (TREE_CODE (exp
))
1060 emit_goto (integer_zerop (exp
) ? false_label
: true_label
, state
);
1064 struct jcf_block
*then_label
= gen_jcf_label (state
);
1065 struct jcf_block
*else_label
= gen_jcf_label (state
);
1066 int save_SP_before
, save_SP_after
;
1067 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1068 then_label
, else_label
, 1, state
);
1069 define_jcf_label (then_label
, state
);
1070 save_SP_before
= state
->code_SP
;
1071 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1072 true_label
, false_label
, 1, state
);
1073 save_SP_after
= state
->code_SP
;
1074 state
->code_SP
= save_SP_before
;
1075 define_jcf_label (else_label
, state
);
1076 generate_bytecode_conditional (TREE_OPERAND (exp
, 2),
1077 true_label
, false_label
,
1078 true_branch_first
, state
);
1079 if (state
->code_SP
!= save_SP_after
)
1080 fatal ("internal error non-matching SP");
1083 case TRUTH_NOT_EXPR
:
1084 generate_bytecode_conditional (TREE_OPERAND (exp
, 0), false_label
, true_label
,
1085 ! true_branch_first
, state
);
1087 case TRUTH_ANDIF_EXPR
:
1089 struct jcf_block
*next_label
= gen_jcf_label (state
);
1090 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1091 next_label
, false_label
, 1, state
);
1092 define_jcf_label (next_label
, state
);
1093 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1094 true_label
, false_label
, 1, state
);
1097 case TRUTH_ORIF_EXPR
:
1099 struct jcf_block
*next_label
= gen_jcf_label (state
);
1100 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1101 true_label
, next_label
, 1, state
);
1102 define_jcf_label (next_label
, state
);
1103 generate_bytecode_conditional (TREE_OPERAND (exp
, 1),
1104 true_label
, false_label
, 1, state
);
1108 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1109 set it to the corresponding 1-operand if<COND> instructions. */
1113 /* The opcodes with their inverses are allocated in pairs.
1114 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1115 negop
= (op
& 1) ? op
+ 1 : op
- 1;
1117 if (true_branch_first
)
1119 emit_if (false_label
, negop
, op
, state
);
1120 emit_goto (true_label
, state
);
1124 emit_if (true_label
, op
, negop
, state
);
1125 emit_goto (false_label
, state
);
1129 op
= OPCODE_if_icmpeq
;
1132 op
= OPCODE_if_icmpne
;
1135 op
= OPCODE_if_icmpgt
;
1138 op
= OPCODE_if_icmplt
;
1141 op
= OPCODE_if_icmpge
;
1144 op
= OPCODE_if_icmple
;
1147 exp0
= TREE_OPERAND (exp
, 0);
1148 exp1
= TREE_OPERAND (exp
, 1);
1149 type
= TREE_TYPE (exp0
);
1150 switch (TREE_CODE (type
))
1153 case POINTER_TYPE
: case RECORD_TYPE
:
1154 switch (TREE_CODE (exp
))
1156 case EQ_EXPR
: op
= OPCODE_if_acmpeq
; break;
1157 case NE_EXPR
: op
= OPCODE_if_acmpne
; break;
1160 if (integer_zerop (exp1
) || integer_zerop (exp0
))
generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp1,
1163 STACK_TARGET
, state
);
1164 op
= op
+ (OPCODE_ifnull
- OPCODE_if_acmpeq
);
1165 negop
= (op
& 1) ? op
- 1 : op
+ 1;
1169 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1170 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1174 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1175 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1176 if (op
== OPCODE_if_icmplt
|| op
== OPCODE_if_icmple
)
1180 if (TYPE_PRECISION (type
) > 32)
1191 if (TYPE_PRECISION (type
) > 32)
1193 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1194 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1202 if (integer_zerop (exp1
))
1204 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1208 if (integer_zerop (exp0
))
1212 case OPCODE_if_icmplt
:
1213 case OPCODE_if_icmpge
:
1216 case OPCODE_if_icmpgt
:
1217 case OPCODE_if_icmple
:
1223 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1227 generate_bytecode_insns (exp0
, STACK_TARGET
, state
);
1228 generate_bytecode_insns (exp1
, STACK_TARGET
, state
);
1234 generate_bytecode_insns (exp
, STACK_TARGET
, state
);
1236 if (true_branch_first
)
1238 emit_if (false_label
, OPCODE_ifeq
, OPCODE_ifne
, state
);
1239 emit_goto (true_label
, state
);
1243 emit_if (true_label
, OPCODE_ifne
, OPCODE_ifeq
, state
);
1244 emit_goto (false_label
, state
);
1248 if (save_SP
!= state
->code_SP
)
1249 fatal ("internal error - SP mismatch");
/* Call pending cleanups, i.e. those for surrounding CLEANUP_POINT_EXPRs,
   but only as far out as LIMIT (since we are about to jump to the
   exit label that is LIMIT).  */
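
/* Note: each pending cleanup sits on state->labeled_blocks as a label
   whose pc is PENDING_CLEANUP_PC; the loop below walks that stack from
   the innermost entry out to LIMIT and emits a jsr to each cleanup
   subroutine on the way.  */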
1257 call_cleanups (limit
, state
)
1258 struct jcf_block
*limit
;
1259 struct jcf_partial
*state
;
1261 struct jcf_block
*block
= state
->labeled_blocks
;
1262 for (; block
!= limit
; block
= block
->next
)
1264 if (block
->pc
== PENDING_CLEANUP_PC
)
1265 emit_jsr (block
, state
);
1269 /* Generate bytecode for sub-expression EXP of METHOD.
1270 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1273 generate_bytecode_insns (exp
, target
, state
)
1276 struct jcf_partial
*state
;
1279 enum java_opcode jopcode
;
1281 HOST_WIDE_INT value
;
1286 if (exp
== NULL
&& target
== IGNORE_TARGET
)
1289 type
= TREE_TYPE (exp
);
1291 switch (TREE_CODE (exp
))
1294 if (BLOCK_EXPR_BODY (exp
))
1297 tree body
= BLOCK_EXPR_BODY (exp
);
1298 for (local
= BLOCK_EXPR_DECLS (exp
); local
; )
1300 tree next
= TREE_CHAIN (local
);
1301 localvar_alloc (local
, state
);
1304 /* Avoid deep recursion for long blocks. */
1305 while (TREE_CODE (body
) == COMPOUND_EXPR
)
1307 generate_bytecode_insns (TREE_OPERAND (body
, 0), target
, state
);
1308 body
= TREE_OPERAND (body
, 1);
1310 generate_bytecode_insns (body
, target
, state
);
1311 for (local
= BLOCK_EXPR_DECLS (exp
); local
; )
1313 tree next
= TREE_CHAIN (local
);
1314 localvar_free (local
, state
);
1320 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
, state
);
1321 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1323 case EXPR_WITH_FILE_LOCATION
:
1325 char *saved_input_filename
= input_filename
;
1326 tree body
= EXPR_WFL_NODE (exp
);
1327 int saved_lineno
= lineno
;
1328 if (body
== empty_stmt_node
)
1330 input_filename
= EXPR_WFL_FILENAME (exp
);
1331 lineno
= EXPR_WFL_LINENO (exp
);
1332 if (EXPR_WFL_EMIT_LINE_NOTE (exp
) && lineno
> 0)
1333 put_linenumber (lineno
, state
);
1334 generate_bytecode_insns (body
, target
, state
);
1335 input_filename
= saved_input_filename
;
1336 lineno
= saved_lineno
;
1340 if (target
== IGNORE_TARGET
) ; /* do nothing */
1341 else if (TREE_CODE (type
) == POINTER_TYPE
)
1343 if (! integer_zerop (exp
))
1346 OP1 (OPCODE_aconst_null
);
1349 else if (TYPE_PRECISION (type
) <= 32)
1351 push_int_const (TREE_INT_CST_LOW (exp
), state
);
1356 push_long_const (TREE_INT_CST_LOW (exp
), TREE_INT_CST_HIGH (exp
),
1362 offset
= find_constant_index (exp
, state
);
1363 switch (TYPE_PRECISION (type
))
1366 push_constant1 (offset
, state
);
1370 push_constant2 (offset
, state
);
1378 push_constant1 (find_string_constant (&state
->cpool
, exp
), state
);
1382 if (TREE_STATIC (exp
))
1384 field_op (exp
, OPCODE_getstatic
, state
);
1385 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
1388 /* ... fall through ... */
1390 emit_load (exp
, state
);
1392 case NON_LVALUE_EXPR
:
1394 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1397 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1398 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1399 if (target
!= IGNORE_TARGET
)
1401 jopcode
= OPCODE_iaload
+ adjust_typed_op (type
, 7);
1404 if (! TYPE_IS_WIDE (type
))
1410 tree obj
= TREE_OPERAND (exp
, 0);
1411 tree field
= TREE_OPERAND (exp
, 1);
1412 int is_static
= FIELD_STATIC (field
);
1413 generate_bytecode_insns (obj
,
1414 is_static
? IGNORE_TARGET
: target
, state
);
1415 if (target
!= IGNORE_TARGET
)
1417 if (DECL_NAME (field
) == length_identifier_node
&& !is_static
1418 && TYPE_ARRAY_P (TREE_TYPE (obj
)))
1421 OP1 (OPCODE_arraylength
);
1425 field_op (field
, is_static
? OPCODE_getstatic
: OPCODE_getfield
,
1429 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
1434 case TRUTH_ANDIF_EXPR
:
1435 case TRUTH_ORIF_EXPR
:
1443 struct jcf_block
*then_label
= gen_jcf_label (state
);
1444 struct jcf_block
*else_label
= gen_jcf_label (state
);
1445 struct jcf_block
*end_label
= gen_jcf_label (state
);
1446 generate_bytecode_conditional (exp
,
1447 then_label
, else_label
, 1, state
);
1448 define_jcf_label (then_label
, state
);
1449 push_int_const (1, state
);
1450 emit_goto (end_label
, state
);
1451 define_jcf_label (else_label
, state
);
1452 push_int_const (0, state
);
1453 define_jcf_label (end_label
, state
);
1459 struct jcf_block
*then_label
= gen_jcf_label (state
);
1460 struct jcf_block
*else_label
= gen_jcf_label (state
);
1461 struct jcf_block
*end_label
= gen_jcf_label (state
);
1462 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1463 then_label
, else_label
, 1, state
);
1464 define_jcf_label (then_label
, state
);
1465 generate_bytecode_insns (TREE_OPERAND (exp
, 1), target
, state
);
1466 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp
, 1))
1467 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1468 || TREE_CODE (TREE_TYPE (exp
)) != VOID_TYPE
)
1469 emit_goto (end_label
, state
);
1470 define_jcf_label (else_label
, state
);
1471 generate_bytecode_insns (TREE_OPERAND (exp
, 2), target
, state
);
1472 define_jcf_label (end_label
, state
);
1477 struct jcf_switch_state
*sw_state
= state
->sw_state
;
1478 struct jcf_relocation
*reloc
= (struct jcf_relocation
*)
1479 obstack_alloc (state
->chunk_obstack
, sizeof (struct jcf_relocation
));
1480 HOST_WIDE_INT case_value
= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0));
1482 reloc
->label
= get_jcf_label_here (state
);
1483 reloc
->offset
= case_value
;
1484 reloc
->next
= sw_state
->cases
;
1485 sw_state
->cases
= reloc
;
1486 if (sw_state
->num_cases
== 0)
1488 sw_state
->min_case
= case_value
;
1489 sw_state
->max_case
= case_value
;
1493 if (case_value
< sw_state
->min_case
)
1494 sw_state
->min_case
= case_value
;
1495 if (case_value
> sw_state
->max_case
)
1496 sw_state
->max_case
= case_value
;
1498 sw_state
->num_cases
++;
1502 state
->sw_state
->default_label
= get_jcf_label_here (state
);
/* The SWITCH_EXPR has three parts, generated in the following order:
   1. the switch_expression (the value used to select the correct case);
   2. the switch_body;
   3. the switch_instruction (the tableswitch/lookupswitch instruction).
   After code generation, we will re-order them in the order 1, 3, 2.
   This is to avoid extra GOTOs.  */
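/* Note: concretely, once the switch_instruction has been emitted, the
   re-ordering code below re-links the block and chunk chains so that the
   physical order becomes expression, switch_instruction, body,
   default_label, and then bumps the pc of the body blocks by the length
   of the switch_instruction.  */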
1513 struct jcf_switch_state sw_state
;
1514 struct jcf_block
*expression_last
; /* Last block of the switch_expression. */
1515 struct jcf_block
*body_last
; /* Last block of the switch_body. */
1516 struct jcf_block
*switch_instruction
; /* First block of switch_instruction. */
1517 struct jcf_block
*instruction_last
; /* Last block of the switch_instruction. */
1518 struct jcf_block
*body_block
;
1520 sw_state
.prev
= state
->sw_state
;
1521 state
->sw_state
= &sw_state
;
1522 sw_state
.cases
= NULL
;
1523 sw_state
.num_cases
= 0;
1524 sw_state
.default_label
= NULL
;
1525 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1526 expression_last
= state
->last_block
;
1527 body_block
= get_jcf_label_here (state
); /* Force a new block here. */
1528 generate_bytecode_insns (TREE_OPERAND (exp
, 1), IGNORE_TARGET
, state
);
1529 body_last
= state
->last_block
;
1531 switch_instruction
= gen_jcf_label (state
);
1532 define_jcf_label (switch_instruction
, state
);
1533 if (sw_state
.default_label
== NULL
)
1534 sw_state
.default_label
= gen_jcf_label (state
);
1536 if (sw_state
.num_cases
<= 1)
1538 if (sw_state
.num_cases
== 0)
1540 emit_pop (1, state
);
1545 push_int_const (sw_state
.cases
->offset
, state
);
1546 emit_if (sw_state
.cases
->label
,
1547 OPCODE_ifeq
, OPCODE_ifne
, state
);
1549 emit_goto (sw_state
.default_label
, state
);
1554 /* Copy the chain of relocs into a sorted array. */
1555 struct jcf_relocation
**relocs
= (struct jcf_relocation
**)
1556 xmalloc (sw_state
.num_cases
* sizeof (struct jcf_relocation
*));
/* The relocs array is used as a buffer with a gap.
   The assumption is that cases will normally come in "runs".  */
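/* Note: relocs[0 .. gap_start) and relocs[gap_end .. num_cases) hold the
   already-placed cases below and above the gap; each incoming case value
   shifts entries across the gap until it can be dropped into the gap in
   sorted position.  This insertion sort is cheap when successive case
   values are already nearly sorted.  */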
1560 int gap_end
= sw_state
.num_cases
;
1561 struct jcf_relocation
*reloc
;
1562 for (reloc
= sw_state
.cases
; reloc
!= NULL
; reloc
= reloc
->next
)
1564 HOST_WIDE_INT case_value
= reloc
->offset
;
1565 while (gap_end
< sw_state
.num_cases
)
1567 struct jcf_relocation
*end
= relocs
[gap_end
];
1568 if (case_value
<= end
->offset
)
1570 relocs
[gap_start
++] = end
;
1573 while (gap_start
> 0)
1575 struct jcf_relocation
*before
= relocs
[gap_start
-1];
1576 if (case_value
>= before
->offset
)
1578 relocs
[--gap_end
] = before
;
1581 relocs
[gap_start
++] = reloc
;
1582 /* Note we don't check for duplicates. FIXME! */
1585 if (2 * sw_state
.num_cases
1586 >= sw_state
.max_case
- sw_state
.min_case
)
1587 { /* Use tableswitch. */
1589 RESERVE (13 + 4 * (sw_state
.max_case
- sw_state
.min_case
+ 1));
1590 OP1 (OPCODE_tableswitch
);
1591 emit_reloc (0, SWITCH_ALIGN_RELOC
, NULL
, state
);
1592 emit_switch_reloc (sw_state
.default_label
, state
);
1593 OP4 (sw_state
.min_case
);
1594 OP4 (sw_state
.max_case
);
1595 for (i
= sw_state
.min_case
; ; )
1597 reloc
= relocs
[index
];
1598 if (i
== reloc
->offset
)
1600 emit_case_reloc (reloc
, state
);
1601 if (i
== sw_state
.max_case
)
1606 emit_switch_reloc (sw_state
.default_label
, state
);
1611 { /* Use lookupswitch. */
1612 RESERVE(9 + 8 * sw_state
.num_cases
);
1613 OP1 (OPCODE_lookupswitch
);
1614 emit_reloc (0, SWITCH_ALIGN_RELOC
, NULL
, state
);
1615 emit_switch_reloc (sw_state
.default_label
, state
);
1616 OP4 (sw_state
.num_cases
);
1617 for (i
= 0; i
< sw_state
.num_cases
; i
++)
1619 struct jcf_relocation
*reloc
= relocs
[i
];
1620 OP4 (reloc
->offset
);
1621 emit_case_reloc (reloc
, state
);
1627 instruction_last
= state
->last_block
;
1628 if (sw_state
.default_label
->pc
< 0)
1629 define_jcf_label (sw_state
.default_label
, state
);
1630 else /* Force a new block. */
1631 sw_state
.default_label
= get_jcf_label_here (state
);
1632 /* Now re-arrange the blocks so the switch_instruction
1633 comes before the switch_body. */
1634 switch_length
= state
->code_length
- switch_instruction
->pc
;
1635 switch_instruction
->pc
= body_block
->pc
;
1636 instruction_last
->next
= body_block
;
1637 instruction_last
->v
.chunk
->next
= body_block
->v
.chunk
;
1638 expression_last
->next
= switch_instruction
;
1639 expression_last
->v
.chunk
->next
= switch_instruction
->v
.chunk
;
1640 body_last
->next
= sw_state
.default_label
;
1641 body_last
->v
.chunk
->next
= NULL
;
1642 state
->chunk
= body_last
->v
.chunk
;
1643 for (; body_block
!= sw_state
.default_label
; body_block
= body_block
->next
)
1644 body_block
->pc
+= switch_length
;
1646 state
->sw_state
= sw_state
.prev
;
1651 if (!TREE_OPERAND (exp
, 0))
1654 call_cleanups (NULL_TREE
, state
);
1658 exp
= TREE_OPERAND (exp
, 0);
1659 if (TREE_CODE (exp
) != MODIFY_EXPR
)
1661 exp
= TREE_OPERAND (exp
, 1);
1662 op
= OPCODE_ireturn
+ adjust_typed_op (TREE_TYPE (exp
), 4);
1663 generate_bytecode_insns (exp
, STACK_TARGET
, state
);
1664 if (state
->num_finalizers
> 0)
1666 if (state
->return_value_decl
== NULL_TREE
)
1668 state
->return_value_decl
1669 = build_decl (VAR_DECL
, NULL_TREE
, TREE_TYPE (exp
));
1670 localvar_alloc (state
->return_value_decl
, state
);
1672 emit_store (state
->return_value_decl
, state
);
1673 call_cleanups (NULL_TREE
, state
);
1674 emit_load (state
->return_value_decl
, state
);
/* If we call localvar_free (state->return_value_decl, state),
   then we risk the saved decl being erroneously re-used in the
   finalizer.  Instead, we keep the state->return_value_decl
   allocated through the rest of the method.  This is not
   the greatest solution, but it is at least simple and safe.  */
1685 case LABELED_BLOCK_EXPR
:
1687 struct jcf_block
*end_label
= gen_jcf_label (state
);
1688 end_label
->next
= state
->labeled_blocks
;
1689 state
->labeled_blocks
= end_label
;
1690 end_label
->pc
= PENDING_EXIT_PC
;
1691 end_label
->u
.labeled_block
= exp
;
1692 if (LABELED_BLOCK_BODY (exp
))
1693 generate_bytecode_insns (LABELED_BLOCK_BODY (exp
), target
, state
);
1694 if (state
->labeled_blocks
!= end_label
)
1696 state
->labeled_blocks
= end_label
->next
;
1697 define_jcf_label (end_label
, state
);
1702 tree body
= TREE_OPERAND (exp
, 0);
1704 if (TREE_CODE (body
) == COMPOUND_EXPR
1705 && TREE_CODE (TREE_OPERAND (body
, 0)) == EXIT_EXPR
)
1707 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1708 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1709 struct jcf_block
*head_label
;
1710 struct jcf_block
*body_label
;
1711 struct jcf_block
*end_label
= gen_jcf_label (state
);
1712 struct jcf_block
*exit_label
= state
->labeled_blocks
;
1713 head_label
= gen_jcf_label (state
);
1714 emit_goto (head_label
, state
);
1715 body_label
= get_jcf_label_here (state
);
1716 generate_bytecode_insns (TREE_OPERAND (body
, 1), target
, state
);
1717 define_jcf_label (head_label
, state
);
1718 generate_bytecode_conditional (TREE_OPERAND (body
, 0),
1719 end_label
, body_label
, 1, state
);
1720 define_jcf_label (end_label
, state
);
1725 struct jcf_block
*head_label
= get_jcf_label_here (state
);
1726 generate_bytecode_insns (body
, IGNORE_TARGET
, state
);
1727 emit_goto (head_label
, state
);
1733 struct jcf_block
*label
= state
->labeled_blocks
;
1734 struct jcf_block
*end_label
= gen_jcf_label (state
);
1735 generate_bytecode_conditional (TREE_OPERAND (exp
, 0),
1736 label
, end_label
, 0, state
);
1737 define_jcf_label (end_label
, state
);
1740 case EXIT_BLOCK_EXPR
:
1742 struct jcf_block
*label
= state
->labeled_blocks
;
1743 if (TREE_OPERAND (exp
, 1) != NULL
) goto notimpl
;
1744 while (label
->u
.labeled_block
!= TREE_OPERAND (exp
, 0))
1745 label
= label
->next
;
1746 call_cleanups (label
, state
);
1747 emit_goto (label
, state
);
1751 case PREDECREMENT_EXPR
: value
= -1; post_op
= 0; goto increment
;
1752 case PREINCREMENT_EXPR
: value
= 1; post_op
= 0; goto increment
;
1753 case POSTDECREMENT_EXPR
: value
= -1; post_op
= 1; goto increment
;
1754 case POSTINCREMENT_EXPR
: value
= 1; post_op
= 1; goto increment
;
1757 exp
= TREE_OPERAND (exp
, 0);
1758 type
= TREE_TYPE (exp
);
1759 size
= TYPE_IS_WIDE (type
) ? 2 : 1;
1760 if ((TREE_CODE (exp
) == VAR_DECL
|| TREE_CODE (exp
) == PARM_DECL
)
1761 && ! TREE_STATIC (exp
)
1762 && TREE_CODE (type
) == INTEGER_TYPE
1763 && TYPE_PRECISION (type
) == 32)
1765 if (target
!= IGNORE_TARGET
&& post_op
)
1766 emit_load (exp
, state
);
1767 emit_iinc (exp
, value
, state
);
1768 if (target
!= IGNORE_TARGET
&& ! post_op
)
1769 emit_load (exp
, state
);
1772 if (TREE_CODE (exp
) == COMPONENT_REF
)
1774 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1775 emit_dup (1, 0, state
);
1776 /* Stack: ..., objectref, objectref. */
1777 field_op (TREE_OPERAND (exp
, 1), OPCODE_getfield
, state
);
1779 /* Stack: ..., objectref, oldvalue. */
1782 else if (TREE_CODE (exp
) == ARRAY_REF
)
1784 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
1785 generate_bytecode_insns (TREE_OPERAND (exp
, 1), STACK_TARGET
, state
);
1786 emit_dup (2, 0, state
);
1787 /* Stack: ..., array, index, array, index. */
1788 jopcode
= OPCODE_iaload
+ adjust_typed_op (TREE_TYPE (exp
), 7);
1792 /* Stack: ..., array, index, oldvalue. */
1795 else if (TREE_CODE (exp
) == VAR_DECL
|| TREE_CODE (exp
) == PARM_DECL
)
1797 generate_bytecode_insns (exp
, STACK_TARGET
, state
);
1798 /* Stack: ..., oldvalue. */
1804 if (target
!= IGNORE_TARGET
&& post_op
)
1805 emit_dup (size
, offset
, state
);
1806 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1807 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1808 /* Stack, otherwise: ..., [result, ] oldvalue. */
1810 push_int_const (value
, state
);
1812 push_long_const (value
, value
>= 0 ? 0 : -1, state
);
1814 emit_binop (OPCODE_iadd
+ adjust_typed_op (type
, 3), type
, state
);
1815 if (target
!= IGNORE_TARGET
&& ! post_op
)
1816 emit_dup (size
, offset
, state
);
1817 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1818 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1819 /* Stack, otherwise: ..., [result, ] newvalue. */
1820 goto finish_assignment
;
1824 tree lhs
= TREE_OPERAND (exp
, 0);
1825 tree rhs
= TREE_OPERAND (exp
, 1);
1828 /* See if we can use the iinc instruction. */
1829 if ((TREE_CODE (lhs
) == VAR_DECL
|| TREE_CODE (lhs
) == PARM_DECL
)
1830 && ! TREE_STATIC (lhs
)
1831 && TREE_CODE (TREE_TYPE (lhs
)) == INTEGER_TYPE
1832 && TYPE_PRECISION (TREE_TYPE (lhs
)) == 32
1833 && (TREE_CODE (rhs
) == PLUS_EXPR
|| TREE_CODE (rhs
) == MINUS_EXPR
))
1835 tree arg0
= TREE_OPERAND (rhs
, 0);
1836 tree arg1
= TREE_OPERAND (rhs
, 1);
1837 HOST_WIDE_INT min_value
= -32768;
1838 HOST_WIDE_INT max_value
= 32767;
1839 if (TREE_CODE (rhs
) == MINUS_EXPR
)
1844 else if (arg1
== lhs
)
1847 arg1
= TREE_OPERAND (rhs
, 0);
1849 if (lhs
== arg0
&& TREE_CODE (arg1
) == INTEGER_CST
)
1851 HOST_WIDE_INT hi_value
= TREE_INT_CST_HIGH (arg1
);
1852 value
= TREE_INT_CST_LOW (arg1
);
1853 if ((hi_value
== 0 && value
<= max_value
)
1854 || (hi_value
== -1 && value
>= min_value
))
1856 if (TREE_CODE (rhs
) == MINUS_EXPR
)
1858 emit_iinc (lhs
, value
, state
);
1864 if (TREE_CODE (lhs
) == COMPONENT_REF
)
1866 generate_bytecode_insns (TREE_OPERAND (lhs
, 0),
1867 STACK_TARGET
, state
);
1870 else if (TREE_CODE (lhs
) == ARRAY_REF
)
1872 generate_bytecode_insns (TREE_OPERAND(lhs
, 0),
1873 STACK_TARGET
, state
);
1874 generate_bytecode_insns (TREE_OPERAND(lhs
, 1),
1875 STACK_TARGET
, state
);
1880 generate_bytecode_insns (rhs
, STACK_TARGET
, state
);
1881 if (target
!= IGNORE_TARGET
)
1882 emit_dup (TYPE_IS_WIDE (type
) ? 2 : 1 , offset
, state
);
1888 if (TREE_CODE (exp
) == COMPONENT_REF
)
1890 tree field
= TREE_OPERAND (exp
, 1);
1891 if (! FIELD_STATIC (field
))
1894 FIELD_STATIC (field
) ? OPCODE_putstatic
: OPCODE_putfield
,
1897 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field
)) ? 2 : 1);
1899 else if (TREE_CODE (exp
) == VAR_DECL
1900 || TREE_CODE (exp
) == PARM_DECL
)
1902 if (FIELD_STATIC (exp
))
1904 field_op (exp
, OPCODE_putstatic
, state
);
1905 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 2 : 1);
1908 emit_store (exp
, state
);
1910 else if (TREE_CODE (exp
) == ARRAY_REF
)
1912 jopcode
= OPCODE_iastore
+ adjust_typed_op (TREE_TYPE (exp
), 7);
1915 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp
)) ? 4 : 3);
1918 fatal ("internal error (bad lhs to MODIFY_EXPR)");
1921 jopcode
= OPCODE_iadd
;
1924 jopcode
= OPCODE_isub
;
1927 jopcode
= OPCODE_imul
;
1929 case TRUNC_DIV_EXPR
:
1931 jopcode
= OPCODE_idiv
;
1933 case TRUNC_MOD_EXPR
:
1934 jopcode
= OPCODE_irem
;
1936 case LSHIFT_EXPR
: jopcode
= OPCODE_ishl
; goto binop
;
1937 case RSHIFT_EXPR
: jopcode
= OPCODE_ishr
; goto binop
;
1938 case URSHIFT_EXPR
: jopcode
= OPCODE_iushr
; goto binop
;
1939 case TRUTH_AND_EXPR
:
1940 case BIT_AND_EXPR
: jopcode
= OPCODE_iand
; goto binop
;
1942 case BIT_IOR_EXPR
: jopcode
= OPCODE_ior
; goto binop
;
1943 case TRUTH_XOR_EXPR
:
1944 case BIT_XOR_EXPR
: jopcode
= OPCODE_ixor
; goto binop
;
1947 tree arg0
= TREE_OPERAND (exp
, 0);
1948 tree arg1
= TREE_OPERAND (exp
, 1);
1949 jopcode
+= adjust_typed_op (type
, 3);
1950 if (arg0
== arg1
&& TREE_CODE (arg0
) == SAVE_EXPR
)
/* fold may (e.g.) convert 2*x to x+x.  */
1953 generate_bytecode_insns (TREE_OPERAND (arg0
, 0), target
, state
);
1954 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0
)) > 32 ? 2 : 1, 0, state
);
1958 generate_bytecode_insns (arg0
, target
, state
);
1959 generate_bytecode_insns (arg1
, target
, state
);
1961 /* For most binary operations, both operands and the result have the
1962 same type. Shift operations are different. Using arg1's type
gets us the correct SP adjustment in all cases.  */
1964 if (target
== STACK_TARGET
)
1965 emit_binop (jopcode
, TREE_TYPE (arg1
), state
);
1968 case TRUTH_NOT_EXPR
:
1970 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1971 if (target
== STACK_TARGET
)
1973 int is_long
= TYPE_PRECISION (TREE_TYPE (exp
)) > 32;
1974 push_int_const (TREE_CODE (exp
) == BIT_NOT_EXPR
? -1 : 1, state
);
1978 NOTE_PUSH (1 + is_long
);
1979 OP1 (OPCODE_ixor
+ is_long
);
1980 NOTE_POP (1 + is_long
);
1984 jopcode
= OPCODE_ineg
;
1985 jopcode
+= adjust_typed_op (type
, 3);
1986 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1987 if (target
== STACK_TARGET
)
1988 emit_unop (jopcode
, type
, state
);
1990 case INSTANCEOF_EXPR
:
1992 int index
= find_class_constant (&state
->cpool
, TREE_OPERAND (exp
, 1));
1993 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
1995 OP1 (OPCODE_instanceof
);
2002 case FIX_TRUNC_EXPR
:
2004 tree src
= TREE_OPERAND (exp
, 0);
2005 tree src_type
= TREE_TYPE (src
);
2006 tree dst_type
= TREE_TYPE (exp
);
2007 generate_bytecode_insns (TREE_OPERAND (exp
, 0), target
, state
);
2008 if (target
== IGNORE_TARGET
|| src_type
== dst_type
)
2010 if (TREE_CODE (dst_type
) == POINTER_TYPE
)
2012 if (TREE_CODE (exp
) == CONVERT_EXPR
)
2014 int index
= find_class_constant (&state
->cpool
, TREE_TYPE (dst_type
));
2016 OP1 (OPCODE_checkcast
);
2020 else /* Convert numeric types. */
2022 int wide_src
= TYPE_PRECISION (src_type
) > 32;
2023 int wide_dst
= TYPE_PRECISION (dst_type
) > 32;
2024 NOTE_POP (1 + wide_src
);
2026 if (TREE_CODE (dst_type
) == REAL_TYPE
)
2028 if (TREE_CODE (src_type
) == REAL_TYPE
)
2029 OP1 (wide_dst
? OPCODE_f2d
: OPCODE_d2f
);
2030 else if (TYPE_PRECISION (src_type
) == 64)
2031 OP1 (OPCODE_l2f
+ wide_dst
);
2033 OP1 (OPCODE_i2f
+ wide_dst
);
2035 else /* Convert to integral type. */
2037 if (TREE_CODE (src_type
) == REAL_TYPE
)
2038 OP1 (OPCODE_f2i
+ wide_dst
+ 3 * wide_src
);
2043 if (TYPE_PRECISION (dst_type
) < 32)
2046 /* Already converted to int, if needed. */
2047 if (TYPE_PRECISION (dst_type
) <= 8)
2049 else if (TREE_UNSIGNED (dst_type
))
2055 NOTE_PUSH (1 + wide_dst
);
2060 case CLEANUP_POINT_EXPR
:
2062 struct jcf_block
*save_labeled_blocks
= state
->labeled_blocks
;
2063 int can_complete
= CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp
, 0));
2064 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
, state
);
2065 if (target
!= IGNORE_TARGET
)
2067 while (state
->labeled_blocks
!= save_labeled_blocks
)
2069 struct jcf_block
*finished_label
= NULL
;
2071 tree exception_type
= build_pointer_type (throwable_type_node
);
2072 tree exception_decl
= build_decl (VAR_DECL
, NULL_TREE
,
2074 struct jcf_block
*end_label
= get_jcf_label_here (state
);
2075 struct jcf_block
*label
= state
->labeled_blocks
;
2076 struct jcf_handler
*handler
;
2077 tree cleanup
= label
->u
.labeled_block
;
2078 state
->labeled_blocks
= label
->next
;
2079 state
->num_finalizers
--;
2082 finished_label
= gen_jcf_label (state
);
2083 emit_jsr (label
, state
);
2084 emit_goto (finished_label
, state
);
2085 if (! CAN_COMPLETE_NORMALLY (cleanup
))
2088 handler
= alloc_handler (label
->v
.start_label
, end_label
, state
);
2089 handler
->type
= NULL_TREE
;
2090 localvar_alloc (exception_decl
, state
);
2092 emit_store (exception_decl
, state
);
2093 emit_jsr (label
, state
);
2094 emit_load (exception_decl
, state
);
2096 OP1 (OPCODE_athrow
);
2099 /* The finally block. */
2100 return_link
= build_decl (VAR_DECL
, NULL_TREE
,
2101 return_address_type_node
);
2102 define_jcf_label (label
, state
);
2104 localvar_alloc (return_link
, state
);
2105 emit_store (return_link
, state
);
2106 generate_bytecode_insns (cleanup
, IGNORE_TARGET
, state
);
2107 maybe_wide (OPCODE_ret
, DECL_LOCAL_INDEX (return_link
), state
);
2108 localvar_free (return_link
, state
);
2109 localvar_free (exception_decl
, state
);
2110 if (finished_label
!= NULL
)
2111 define_jcf_label (finished_label
, state
);
2116 case WITH_CLEANUP_EXPR
:
2118 struct jcf_block
*label
;
2119 generate_bytecode_insns (TREE_OPERAND (exp
, 0), IGNORE_TARGET
, state
);
2120 label
= gen_jcf_label (state
);
2121 label
->pc
= PENDING_CLEANUP_PC
;
2122 label
->next
= state
->labeled_blocks
;
2123 state
->labeled_blocks
= label
;
2124 state
->num_finalizers
++;
2125 label
->u
.labeled_block
= TREE_OPERAND (exp
, 2);
2126 label
->v
.start_label
= get_jcf_label_here (state
);
2127 if (target
!= IGNORE_TARGET
)
2134 tree try_clause
= TREE_OPERAND (exp
, 0);
2135 tree finally
= TREE_OPERAND (exp
, 2);
2136 struct jcf_block
*start_label
= get_jcf_label_here (state
);
2137 struct jcf_block
*end_label
; /* End of try clause. */
2138 struct jcf_block
*finally_label
; /* Finally subroutine. */
2139 struct jcf_block
*finished_label
= gen_jcf_label (state
);
2140 tree clause
= TREE_OPERAND (exp
, 1);
2143 finally
= FINALLY_EXPR_BLOCK (finally
);
2144 finally_label
= gen_jcf_label (state
);
2146 if (target
!= IGNORE_TARGET
)
2148 generate_bytecode_insns (try_clause
, IGNORE_TARGET
, state
);
2149 end_label
= get_jcf_label_here (state
);
2150 if (CAN_COMPLETE_NORMALLY (try_clause
))
2151 emit_goto (finished_label
, state
);
2152 for ( ; clause
!= NULL_TREE
; clause
= TREE_CHAIN (clause
))
2154 tree catch_clause
= TREE_OPERAND (clause
, 0);
2155 tree exception_decl
= BLOCK_EXPR_DECLS (catch_clause
);
2156 struct jcf_handler
*handler
= alloc_handler (start_label
, end_label
, state
);
2157 if (exception_decl
== NULL_TREE
)
2158 handler
->type
= NULL_TREE
;
2160 handler
->type
= TREE_TYPE (TREE_TYPE (exception_decl
));
2161 generate_bytecode_insns (catch_clause
, IGNORE_TARGET
, state
);
2162 if (CAN_COMPLETE_NORMALLY (catch_clause
))
2163 emit_goto (finished_label
, state
);
2168 tree exception_type
= build_pointer_type (throwable_type_node
);
2169 tree exception_decl
= build_decl (VAR_DECL
, NULL_TREE
,
2171 struct jcf_handler
*handler
2172 = alloc_handler (start_label
, NULL_TREE
, state
);
2173 handler
->end_label
= handler
->handler_label
;
2174 handler
->type
= NULL_TREE
;
2175 localvar_alloc (exception_decl
, state
);
2177 emit_store (exception_decl
, state
);
2178 emit_jsr (finally_label
, state
);
2179 emit_load (exception_decl
, state
);
2181 OP1 (OPCODE_athrow
);
2183 localvar_free (exception_decl
, state
);
2185 /* The finally block. */
2186 return_link
= build_decl (VAR_DECL
, NULL_TREE
,
2187 return_address_type_node
);
2188 define_jcf_label (finally_label
, state
);
2190 localvar_alloc (return_link
, state
);
2191 emit_store (return_link
, state
);
2192 generate_bytecode_insns (finally
, IGNORE_TARGET
, state
);
2193 maybe_wide (OPCODE_ret
, DECL_LOCAL_INDEX (return_link
), state
);
2194 localvar_free (return_link
, state
);
2196 define_jcf_label (finished_label
, state
);
2198 emit_jsr (finally_label
, state
);
2202 generate_bytecode_insns (TREE_OPERAND (exp
, 0), STACK_TARGET
, state
);
2204 OP1 (OPCODE_athrow
);
2206 case NEW_ARRAY_INIT
:
2208 tree values
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
2209 tree array_type
= TREE_TYPE (TREE_TYPE (exp
));
2210 tree element_type
= TYPE_ARRAY_ELEMENT (array_type
);
2211 HOST_WIDE_INT length
= java_array_type_length (array_type
);
2212 if (target
== IGNORE_TARGET
)
2214 for ( ; values
!= NULL_TREE
; values
= TREE_CHAIN (values
))
2215 generate_bytecode_insns (TREE_VALUE (values
), target
, state
);
2218 push_int_const (length
, state
);
2221 if (JPRIMITIVE_TYPE_P (element_type
))
2223 int atype
= encode_newarray_type (element_type
);
2224 OP1 (OPCODE_newarray
);
2229 int index
= find_class_constant (&state
->cpool
,
2230 TREE_TYPE (element_type
));
2231 OP1 (OPCODE_anewarray
);
2235 jopcode
= OPCODE_iastore
+ adjust_typed_op (element_type
, 7);
2236 for ( ; values
!= NULL_TREE
; values
= TREE_CHAIN (values
), offset
++)
2238 int save_SP
= state
->code_SP
;
2239 emit_dup (1, 0, state
);
2240 push_int_const (offset
, state
);
2242 generate_bytecode_insns (TREE_VALUE (values
), STACK_TARGET
, state
);
2245 state
->code_SP
= save_SP
;
2249 case NEW_CLASS_EXPR
:
2251 tree
class = TREE_TYPE (TREE_TYPE (exp
));
2252 int need_result
= target
!= IGNORE_TARGET
;
2253 int index
= find_class_constant (&state
->cpool
, class);
2259 NOTE_PUSH (1 + need_result
);
/* ... fall through ... */
2264 tree f
= TREE_OPERAND (exp
, 0);
2265 tree x
= TREE_OPERAND (exp
, 1);
2266 int save_SP
= state
->code_SP
;
2268 if (TREE_CODE (f
) == ADDR_EXPR
)
2269 f
= TREE_OPERAND (f
, 0);
2270 if (f
== soft_newarray_node
)
2272 int type_code
= TREE_INT_CST_LOW (TREE_VALUE (x
));
2273 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x
)),
2274 STACK_TARGET
, state
);
2276 OP1 (OPCODE_newarray
);
2280 else if (f
== soft_multianewarray_node
)
2284 int index
= find_class_constant (&state
->cpool
,
2285 TREE_TYPE (TREE_TYPE (exp
)));
2286 x
= TREE_CHAIN (x
); /* Skip class argument. */
2287 ndims
= TREE_INT_CST_LOW (TREE_VALUE (x
));
2288 for (idim
= ndims
; --idim
>= 0; )
2291 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2294 OP1 (OPCODE_multianewarray
);
2299 else if (f
== soft_anewarray_node
)
2301 tree cl
= TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp
)));
2302 int index
= find_class_constant (&state
->cpool
, TREE_TYPE (cl
));
2303 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2305 OP1 (OPCODE_anewarray
);
2309 else if (f
== soft_monitorenter_node
2310 || f
== soft_monitorexit_node
2313 if (f
== soft_monitorenter_node
)
2314 op
= OPCODE_monitorenter
;
2315 else if (f
== soft_monitorexit_node
)
2316 op
= OPCODE_monitorexit
;
2319 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2325 else if (exp
== soft_exceptioninfo_call_node
)
2327 NOTE_PUSH (1); /* Pushed by exception system. */
2330 for ( ; x
!= NULL_TREE
; x
= TREE_CHAIN (x
))
2332 generate_bytecode_insns (TREE_VALUE (x
), STACK_TARGET
, state
);
2334 nargs
= state
->code_SP
- save_SP
;
2335 state
->code_SP
= save_SP
;
2336 if (TREE_CODE (exp
) == NEW_CLASS_EXPR
)
2337 NOTE_POP (1); /* Pop implicit this. */
2338 if (TREE_CODE (f
) == FUNCTION_DECL
&& DECL_CONTEXT (f
) != NULL_TREE
)
2340 int index
= find_methodref_index (&state
->cpool
, f
);
2343 if (METHOD_STATIC (f
))
2344 OP1 (OPCODE_invokestatic
);
2345 else if (DECL_CONSTRUCTOR_P (f
) || CALL_USING_SUPER (exp
)
2346 || METHOD_PRIVATE (f
))
2347 OP1 (OPCODE_invokespecial
);
2348 else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f
))))
2350 OP1 (OPCODE_invokeinterface
);
2354 OP1 (OPCODE_invokevirtual
);
2356 f
= TREE_TYPE (TREE_TYPE (f
));
2357 if (TREE_CODE (f
) != VOID_TYPE
)
2359 int size
= TYPE_IS_WIDE (f
) ? 2 : 1;
2360 if (target
== IGNORE_TARGET
)
2361 emit_pop (size
, state
);
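        /* Summary of the opcode selection above: static methods use
           invokestatic; constructors, super calls and private methods use
           invokespecial; methods declared in an interface use
           invokeinterface; everything else uses invokevirtual.  A non-void
           result is popped again if the caller ignores it
           (IGNORE_TARGET).  */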
2376 error("internal error - tree code not implemented: %s",
2377 tree_code_name
[(int) TREE_CODE (exp
)]);
perform_relocations (state)
     struct jcf_partial *state;

  struct jcf_block *block;
  struct jcf_relocation *reloc;

  /* Before we start, the pc field of each block is an upper bound on
     the block's start pc (it may be less, if previous blocks need less
     than their maximum).

     The minimum size of each block is in the block's chunk->size. */

  /* First, figure out the actual locations of each block. */
  for (block = state->blocks;  block != NULL;  block = block->next)
      int block_size = block->v.chunk->size;

      /* Optimize GOTO L; L: by getting rid of the redundant goto.
         Assumes relocations are in reverse order. */
      reloc = block->u.relocations;
      while (reloc != NULL
             && reloc->kind == OPCODE_goto_w
             && reloc->label->pc == block->next->pc
             && reloc->offset + 2 == block_size)
          reloc = reloc->next;
          block->u.relocations = reloc;
          block->v.chunk->size -= 3;
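          /* The dropped jump is three bytes here: a one-byte goto opcode
             plus the two-byte offset placeholder recorded by the
             relocation, which is why the chunk shrinks by exactly 3.  */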
      for (reloc = block->u.relocations;  reloc != NULL;  reloc = reloc->next)
          if (reloc->kind == SWITCH_ALIGN_RELOC)
              /* We assume this is the first relocation in this block,
                 so we know its final pc. */
              int where = pc + reloc->offset;
              int pad = ((where + 3) & ~3) - where;

          else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
              int delta = reloc->label->pc - (pc + reloc->offset - 1);
              int expand = reloc->kind > 0 ? 2 : 5;
              if (delta >= -32768 && delta <= 32767)

              block_size += expand;
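          /* When the signed 16-bit displacement cannot reach the target, a
             goto or jsr grows by 2 bytes (goto_w/jsr_w take a 32-bit
             offset), while a conditional branch grows by 5, since it is
             rewritten as the inverted condition jumping over a goto_w;
             hence EXPAND is 2 for kind > 0 and 5 otherwise.  */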
  for (block = state->blocks;  block != NULL;  block = block->next)
      struct chunk *chunk = block->v.chunk;
      int old_size = chunk->size;
      int next_pc = block->next == NULL ? pc : block->next->pc;
      int new_size = next_pc - block->pc;
      unsigned char *new_ptr;
      unsigned char *old_buffer = chunk->data;
      unsigned char *old_ptr = old_buffer + old_size;
      if (new_size != old_size)
          chunk->data = (unsigned char *)
            obstack_alloc (state->chunk_obstack, new_size);
          chunk->size = new_size;
      new_ptr = chunk->data + new_size;

      /* We do the relocations from back to front, because
         the relocations are in reverse order. */
      for (reloc = block->u.relocations; ; reloc = reloc->next)
          /* new_ptr and old_ptr point into the old and new buffers,
             respectively.  (If no relocations cause the buffer to
             grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
             The bytes at higher addresses have been copied and relocations
             handled; those at lower addresses remain to process. */

          /* Lower old index of piece to be copied with no relocation.
             I.e. high index of the first piece that does need relocation. */
          int start = reloc == NULL ? 0
            : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
            : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
            : reloc->offset + 2;

          int n = (old_ptr - old_buffer) - start;
          memcpy (new_ptr, old_ptr, n);
          if (old_ptr == old_buffer)

          new_offset = new_ptr - chunk->data;
          new_offset -= (reloc->kind == -1 ? 2 : 4);
          if (reloc->kind == 0)
              value = GET_u4 (old_ptr);
          else if (reloc->kind == BLOCK_START_RELOC)

          else if (reloc->kind == SWITCH_ALIGN_RELOC)
              int where = block->pc + reloc->offset;
              int pad = ((where + 3) & ~3) - where;

              value = GET_u2 (old_ptr);
          value += reloc->label->pc - (block->pc + new_offset);
          *--new_ptr = (unsigned char) value;  value >>= 8;
          *--new_ptr = (unsigned char) value;  value >>= 8;
          if (reloc->kind != -1)
              *--new_ptr = (unsigned char) value;  value >>= 8;
              *--new_ptr = (unsigned char) value;
          if (reloc->kind > BLOCK_START_RELOC)
              /* Convert: OP TARGET to: OP_w TARGET;  (OP is goto or jsr). */
              *--new_ptr = reloc->kind;
          else if (reloc->kind < -1)
              /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
              *--new_ptr = OPCODE_goto_w;
              *--new_ptr = - reloc->kind;
      if (new_ptr != chunk->data)
        fatal ("internal error - perform_relocations");

  state->code_length = pc;
init_jcf_state (state, work)
     struct jcf_partial *state;
     struct obstack *work;

  state->chunk_obstack = work;
  state->first = state->chunk = NULL;
  CPOOL_INIT (&state->cpool);
  BUFFER_INIT (&state->localvars);
  BUFFER_INIT (&state->bytecode);

init_jcf_method (state, method)
     struct jcf_partial *state;

  state->current_method = method;
  state->blocks = state->last_block = NULL;
  state->linenumber_count = 0;
  state->first_lvar = state->last_lvar = NULL;
  state->lvar_count = 0;
  state->labeled_blocks = NULL;
  state->code_length = 0;
  BUFFER_RESET (&state->bytecode);
  BUFFER_RESET (&state->localvars);
  state->code_SP_max = 0;
  state->handlers = NULL;
  state->last_handler = NULL;
  state->num_handlers = 0;
  state->num_finalizers = 0;
  state->return_value_decl = NULL_TREE;

release_jcf_state (state)
     struct jcf_partial *state;

  CPOOL_FINISH (&state->cpool);
  obstack_free (state->chunk_obstack, state->first);
/* Generate and return a list of chunks containing the class CLAS
   in the .class file representation.  The list can be written to a
   .class file using write_chunks.  Allocate chunks from obstack WORK. */

generate_classfile (clas, state)
     struct jcf_partial *state;

  struct chunk *cpool_chunk;
  char *fields_count_ptr;
  int fields_count = 0;
  char *methods_count_ptr;
  int methods_count = 0;
  static tree SourceFile_node = NULL_TREE;
  int total_supers
    = clas == object_type_node ? 0
    : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));

  ptr = append_chunk (NULL, 8, state);
  PUT4 (0xCafeBabe);  /* Magic number */
  PUT2 (3);           /* Minor version */
  PUT2 (45);          /* Major version */
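  /* For orientation: a class file consists, in order, of magic,
     minor_version, major_version, the constant pool, access_flags,
     this_class, super_class, the interfaces, the fields, the methods and
     the class attributes.  The chunks appended below follow that order;
     version 45.3 is the JDK 1.0/1.1 class file version.  */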
  append_chunk (NULL, 0, state);
  cpool_chunk = state->chunk;

  /* Next allocate the chunk containing access_flags through fields_count. */
  if (clas == object_type_node)

  i = 8 + 2 * total_supers;
  ptr = append_chunk (NULL, i, state);
  i = get_access_flags (TYPE_NAME (clas)) | ACC_SUPER;
  PUT2 (i);  /* access_flags */
  i = find_class_constant (&state->cpool, clas);  PUT2 (i);  /* this_class */
  if (clas == object_type_node)
      PUT2 (0);  /* super_class */
      PUT2 (0);  /* interfaces_count */

      tree basetypes = TYPE_BINFO_BASETYPES (clas);
      tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
      int j = find_class_constant (&state->cpool, base);
      PUT2 (j);  /* super_class */
      PUT2 (total_supers - 1);  /* interfaces_count */
      for (i = 1;  i < total_supers;  i++)
          base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
          j = find_class_constant (&state->cpool, base);
  fields_count_ptr = ptr;

  for (part = TYPE_FIELDS (clas);  part;  part = TREE_CHAIN (part))
      if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))

      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, DECL_NAME (part));  PUT2 (i);
      i = find_utf8_constant (&state->cpool,
                              build_java_signature (TREE_TYPE (part)));
      have_value = DECL_INITIAL (part) != NULL_TREE && FIELD_STATIC (part);
      PUT2 (have_value);  /* attributes_count */

          tree init = DECL_INITIAL (part);
          static tree ConstantValue_node = NULL_TREE;
          ptr = append_chunk (NULL, 8, state);
          if (ConstantValue_node == NULL_TREE)
            ConstantValue_node = get_identifier ("ConstantValue");
          i = find_utf8_constant (&state->cpool, ConstantValue_node);
          PUT2 (i);  /* attribute_name_index */
          PUT4 (2);  /* attribute_length */
          i = find_constant_index (init, state);  PUT2 (i);
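          /* A ConstantValue attribute is attribute_name_index (u2),
             attribute_length (u4, always 2) and constantvalue_index (u2):
             eight bytes, matching the chunk reserved just above.  */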
  ptr = fields_count_ptr;  PUT2 (fields_count);

  ptr = methods_count_ptr = append_chunk (NULL, 2, state);

  for (part = TYPE_METHODS (clas);  part;  part = TREE_CHAIN (part))
      struct jcf_block *block;
      tree function_body = DECL_FUNCTION_BODY (part);
      tree body = function_body == NULL_TREE ? NULL_TREE
        : BLOCK_EXPR_BODY (function_body);
      tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
        : DECL_NAME (part);
      tree type = TREE_TYPE (part);
      tree save_function = current_function_decl;
      current_function_decl = part;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (type));
      PUT2 (body != NULL_TREE ? 1 : 0);  /* attributes_count */
      if (body != NULL_TREE)
          int code_attributes_count = 0;
          static tree Code_node = NULL_TREE;
          struct jcf_handler *handler;
          if (Code_node == NULL_TREE)
            Code_node = get_identifier ("Code");
          ptr = append_chunk (NULL, 14, state);
          i = find_utf8_constant (&state->cpool, Code_node);  PUT2 (i);
          init_jcf_method (state, part);
          get_jcf_label_here (state);  /* Force a first block. */
          for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
            localvar_alloc (t, state);
          generate_bytecode_insns (body, IGNORE_TARGET, state);
          if (CAN_COMPLETE_NORMALLY (body))
              if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)

              OP1 (OPCODE_return);
          for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
            localvar_free (t, state);
          if (state->return_value_decl != NULL_TREE)
            localvar_free (state->return_value_decl, state);
          finish_jcf_block (state);
          perform_relocations (state);

          i = 8 + state->code_length + 4 + 8 * state->num_handlers;
          if (state->linenumber_count > 0)
              code_attributes_count++;
              i += 8 + 4 * state->linenumber_count;
          if (state->lvar_count > 0)
              code_attributes_count++;
              i += 8 + 10 * state->lvar_count;
          PUT4 (i);  /* attribute_length */
          PUT2 (state->code_SP_max);  /* max_stack */
          PUT2 (localvar_max);  /* max_locals */
          PUT4 (state->code_length);
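          /* The attribute_length accumulated in `i' above is everything
             after the six-byte attribute header: max_stack (2) +
             max_locals (2) + code_length (4) = 8, the bytecode itself,
             exception_table_length (2) + attributes_count (2) = 4 plus
             eight bytes per handler, plus the optional LineNumberTable and
             LocalVariableTable attributes added below.  */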
          /* Emit the exception table. */
          ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
          PUT2 (state->num_handlers);  /* exception_table_length */
          handler = state->handlers;
          for ( ;  handler != NULL;  handler = handler->next)
              PUT2 (handler->start_label->pc);
              PUT2 (handler->end_label->pc);
              PUT2 (handler->handler_label->pc);
              if (handler->type == NULL_TREE)
                type_index = 0;
              else
                type_index = find_class_constant (&state->cpool,
= append_chunk (NULL
, 2, state
);
2768 PUT2 (code_attributes_count
);
2770 /* Write the LineNumberTable attribute. */
2771 if (state
->linenumber_count
> 0)
2773 static tree LineNumberTable_node
= NULL_TREE
;
2774 ptr
= append_chunk (NULL
, 8 + 4 * state
->linenumber_count
, state
);
2775 if (LineNumberTable_node
== NULL_TREE
)
2776 LineNumberTable_node
= get_identifier ("LineNumberTable");
2777 i
= find_utf8_constant (&state
->cpool
, LineNumberTable_node
);
2778 PUT2 (i
); /* attribute_name_index */
2779 i
= 2+4*state
->linenumber_count
; PUT4(i
); /* attribute_length */
2780 i
= state
->linenumber_count
; PUT2 (i
);
2781 for (block
= state
->blocks
; block
!= NULL
; block
= block
->next
)
2783 int line
= block
->linenumber
;
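                  /* Each LineNumberTable entry is a (start_pc, line_number)
                     pair of u2 values, i.e. four bytes per recorded line,
                     matching the 8 + 4 * linenumber_count chunk reserved
                     above.  */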
          /* Write the LocalVariableTable attribute. */
          if (state->lvar_count > 0)
              static tree LocalVariableTable_node = NULL_TREE;
              struct localvar_info *lvar = state->first_lvar;
              ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
              if (LocalVariableTable_node == NULL_TREE)
                LocalVariableTable_node = get_identifier ("LocalVariableTable");
              i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
              PUT2 (i);  /* attribute_name_index */
              i = 2 + 10 * state->lvar_count;  PUT4 (i);  /* attribute_length */
              i = state->lvar_count;  PUT2 (i);
              for ( ;  lvar != NULL;  lvar = lvar->next)
                  tree name = DECL_NAME (lvar->decl);
                  tree sig = build_java_signature (TREE_TYPE (lvar->decl));
                  i = lvar->start_label->pc;          PUT2 (i);
                  i = lvar->end_label->pc - i;        PUT2 (i);
                  i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
                  i = find_utf8_constant (&state->cpool, sig);   PUT2 (i);
                  i = DECL_LOCAL_INDEX (lvar->decl);  PUT2 (i);
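                  /* Each LocalVariableTable entry is five u2 values --
                     start_pc, length, name_index, descriptor_index and the
                     local slot index -- i.e. ten bytes, matching the
                     8 + 10 * lvar_count chunk reserved above.  */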
      current_function_decl = save_function;

  ptr = methods_count_ptr;  PUT2 (methods_count);

  source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
  for (ptr = source_file; ; ptr++)
      if (ch == '/' || ch == '\\')
        source_file = ptr + 1;

  ptr = append_chunk (NULL, 10, state);
  PUT2 (1);  /* attributes_count */

  /* Generate the SourceFile attribute. */
  if (SourceFile_node == NULL_TREE)
    SourceFile_node = get_identifier ("SourceFile");
  i = find_utf8_constant (&state->cpool, SourceFile_node);
  PUT2 (i);  /* attribute_name_index */
  i = find_utf8_constant (&state->cpool, get_identifier (source_file));

  /* Now, finally, generate the contents of the constant pool chunk. */
  i = count_constant_pool_bytes (&state->cpool);
  ptr = obstack_alloc (state->chunk_obstack, i);
  cpool_chunk->data = ptr;
  cpool_chunk->size = i;
  write_constant_pool (&state->cpool, ptr, i);
  return state->first;
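/* Note that the constant pool chunk is appended with size 0 near the top of
   generate_classfile, so it occupies the right position in the output, but
   its contents can only be produced here at the end, once every later chunk
   has interned the constants it refers to.  */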
make_class_file_name (clas)

  char *cname, *dname, *slash, *r;

  cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
                                                "", '.', DIR_SEPARATOR,
  if (jcf_write_base_directory == NULL)
      /* Make sure we put the class file into the .java file's
         directory, and not into some subdirectory thereof. */
      dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
      slash = strrchr (dname, DIR_SEPARATOR);
      t = strrchr (cname, DIR_SEPARATOR);

      dname = jcf_write_base_directory;
      slash = dname + strlen (dname);

  r = xmalloc (slash - dname + strlen (cname) + 2);
  strncpy (r, dname, slash - dname);
  r[slash - dname] = DIR_SEPARATOR;
  strcpy (&r[slash - dname + 1], cname);
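  /* As a hypothetical example: compiling class foo.bar.Baz with
     `-d /tmp/classes' should produce R == "/tmp/classes/foo/bar/Baz.class",
     since identifier_subst rewrites each `.' in the class name as
     DIR_SEPARATOR before the base directory is prepended.  */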
  /* We try to make new directories when we need them.  We only do
     this for directories which "might not" exist.  For instance, we
     assume the `-d' directory exists, but we don't assume that any
     subdirectory below it exists.  It might be worthwhile to keep
     track of which directories we've created to avoid gratuitous
     checks.  */
  dname = r + (slash - dname) + 1;
      cname = strchr (dname, DIR_SEPARATOR);
      if (stat (r, &sb) == -1)
          /* Try to make it. */
          if (mkdir (r, 0755) == -1)
              fatal ("failed to create directory `%s'", r);
      *cname = DIR_SEPARATOR;
      /* Skip consecutive separators. */
      for (dname = cname + 1;  *dname && *dname == DIR_SEPARATOR;  ++dname)
/* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
   The output .class file name is make_class_file_name(CLAS). */

write_classfile (clas)

  struct obstack *work = &temporary_obstack;
  struct jcf_partial state[1];
  char *class_file_name = make_class_file_name (clas);
  struct chunk *chunks;

  if (class_file_name != NULL)
      FILE *stream = fopen (class_file_name, "wb");
      if (stream == NULL)
        fatal ("failed to open `%s' for writing", class_file_name);
      jcf_dependency_add_target (class_file_name);
      init_jcf_state (state, work);
      chunks = generate_classfile (clas, state);
      write_chunks (stream, chunks);
      if (fclose (stream))
        fatal ("failed to close after writing `%s'", class_file_name);
      free (class_file_name);
  release_jcf_state (state);

/* TODO:
   string concatenation
   synchronized statement
   */