]> gcc.gnu.org Git - gcc.git/blob - gcc/java/jcf-write.c
expr.c (process_jvm_instruction): Do load_type_state after JSR.
[gcc.git] / gcc / java / jcf-write.c
1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
19
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
23
24 #include "config.h"
25 #include "system.h"
26 #include <string.h>
27 #include "tree.h"
28 #include "java-tree.h"
29 #include "jcf.h"
30 #include "obstack.h"
31 #undef AND
32 #include "rtl.h"
33 #include "java-opcodes.h"
34 #include "parse.h" /* for BLOCK_EXPR_BODY */
35 #include "buffer.h"
36
37 #include <sys/stat.h>
38
39 #ifndef DIR_SEPARATOR
40 #define DIR_SEPARATOR '/'
41 #endif
42
/* Obstack used for temporary allocations (defined elsewhere in the compiler). */
extern struct obstack temporary_obstack;

/* Base directory in which `.class' files should be written.
   NULL means to put the file into the same directory as the
   corresponding .java file. */
char *jcf_write_base_directory = NULL;
49
/* Make sure bytecode.data is big enough for at least N more bytes.
   Assumes a local `state' (struct jcf_partial *) is in scope.
   Must be called before using OP1/OP2/OP4. */

#define RESERVE(N) \
  do { CHECK_OP(state); \
    if (state->bytecode.ptr + (N) > state->bytecode.limit) \
      buffer_grow (&state->bytecode, N); } while (0)

/* Add a 1-byte instruction/operand I to bytecode.data,
   assuming space has already been RESERVE'd. */

#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))

/* Like OP1, but I is a 2-byte big endian integer. */

#define OP2(I) \
  do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Like OP1, but I is a 4-byte big endian integer. */

#define OP4(I) \
  do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
       OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Macro to call each time we push I words on the JVM stack.
   Also tracks the high-water mark in code_SP_max. */

#define NOTE_PUSH(I) \
  do { state->code_SP += (I); \
    if (state->code_SP > state->code_SP_max) \
      state->code_SP_max = state->code_SP; } while (0)

/* Macro to call each time we pop I words from the JVM stack.
   Aborts on simulated stack underflow (indicates a generator bug). */

#define NOTE_POP(I) \
  do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
84
/* A chunk or segment of a .class file.  Chunks form a singly-linked
   list which is written out in order by write_chunks. */

struct chunk
{
  /* The next segment of this .class file. */
  struct chunk *next;

  /* The actual data in this segment to be written to the .class file. */
  unsigned char *data;

  /* The size of the segment to be written to the .class file. */
  int size;
};
98
/* Sentinel values stored in jcf_block.pc for labels that have not yet
   been defined (see the comments on the pc field below). */
#define PENDING_CLEANUP_PC (-3)
#define PENDING_EXIT_PC (-2)
#define UNDEFINED_PC (-1)

/* Each "block" represents a label plus the bytecode instructions following.
   There may be branches out of the block, but no incoming jumps, except
   to the beginning of the block.

   If (pc < 0), the jcf_block is not an actual block (i.e. it has no
   associated code yet), but it is an undefined label.
*/

struct jcf_block
{
  /* For blocks that are defined, the next block (in pc order).
     For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
     or a cleanup expression (from a WITH_CLEANUP_EXPR),
     this is the next (outer) such end label, in a stack headed by
     labeled_blocks in jcf_partial. */
  struct jcf_block *next;

  /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR,
     pc is PENDING_EXIT_PC.
     In the not-yet-defined end label for a pending cleanup subroutine,
     pc is PENDING_CLEANUP_PC.
     For other not-yet-defined labels, pc is UNDEFINED_PC.

     If the label has been defined:
     Until perform_relocations is finished, this is the maximum possible
     value of the bytecode offset at the beginning of this block.
     After perform_relocations, it is the actual offset (pc). */
  int pc;

  /* Source line number recorded for this block, or -1 if none. */
  int linenumber;

  /* After finish_jcf_block is called, the actual instructions
     contained in this block.  Before then it is NULL, and the
     instructions are in state->bytecode. */
  union {
    struct chunk *chunk;

    /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
       covered by the cleanup. */
    struct jcf_block *start_label;
  } v;

  union {
    /* Set of relocations (in reverse offset order) for this block. */
    struct jcf_relocation *relocations;

    /* If this block is that of the not-yet-defined end label of
       a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
       If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
    tree labeled_block;
  } u;
};
154
/* A "relocation" type for the 0-3 bytes of padding at the start
   of a tableswitch or a lookupswitch. */
#define SWITCH_ALIGN_RELOC 4

/* A relocation type for the labels in a tableswitch or a lookupswitch;
   these are relative to the start of the instruction, but (due to
   the 0-3 bytes of padding), we don't know the offset before relocation. */
#define BLOCK_START_RELOC 1

/* One pending patch to a block's bytecode, resolved once final block
   offsets are known (see perform_relocations, mentioned above). */
struct jcf_relocation
{
  /* Next relocation for the current jcf_block. */
  struct jcf_relocation *next;

  /* The (byte) offset within the current block that needs to be relocated. */
  HOST_WIDE_INT offset;

  /* 0 if offset is a 4-byte relative offset.
     4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
     for proper alignment in tableswitch/lookupswitch instructions.
     1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
     to the start of the containing block.
     -1 if offset is a 2-byte relative offset.
     < -1 if offset is the address of an instruction with a 2-byte offset
     that does not have a corresponding 4-byte offset version, in which
     case the absolute value of kind is the inverted opcode.
     > 4 if offset is the address of an instruction (such as jsr) with a
     2-byte offset that does have a corresponding 4-byte offset version,
     in which case kind is the opcode of the 4-byte version (such as jsr_w). */
  int kind;

  /* The label the relocation wants to actually transfer to. */
  struct jcf_block *label;
};
189
/* State for a single catch clause: one entry of the method's
   exception table.  Chained via `next' (see alloc_handler). */

struct jcf_handler
{
  struct jcf_handler *next;

  /* The code range protected by this handler, and the handler's entry. */
  struct jcf_block *start_label;
  struct jcf_block *end_label;
  struct jcf_block *handler_label;

  /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
  tree type;
};
203
/* State for the current switch statement.  Kept as a stack (via `prev')
   to handle nested switches. */

struct jcf_switch_state
{
  struct jcf_switch_state *prev;
  struct jcf_block *default_label;

  /* Chain of case relocations seen so far, with running statistics
     used to choose tableswitch vs lookupswitch. */
  struct jcf_relocation *cases;
  int num_cases;
  HOST_WIDE_INT min_case, max_case;
};
215
/* This structure is used to contain the various pieces that will
   become a .class file. */

struct jcf_partial
{
  /* Head of the chunk list for the class file, and the chunk
     currently being appended to. */
  struct chunk *first;
  struct chunk *chunk;
  struct obstack *chunk_obstack;
  tree current_method;

  /* List of basic blocks for the current method. */
  struct jcf_block *blocks;
  struct jcf_block *last_block;

  /* Chain of local-variable records (for debugging info) and its length. */
  struct localvar_info *first_lvar;
  struct localvar_info *last_lvar;
  int lvar_count;

  /* Constant pool being accumulated for this class. */
  CPool cpool;

  /* Number of line-number entries recorded for the current method. */
  int linenumber_count;

  /* Until perform_relocations, this is an upper bound on the number
     of bytes (so far) in the instructions for the current method. */
  int code_length;

  /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
  struct jcf_block *labeled_blocks;

  /* The current stack size (stack pointer) in the current method. */
  int code_SP;

  /* The largest extent of stack size (stack pointer) in the current method. */
  int code_SP_max;

  /* Contains a mapping from local var slot number to localvar_info. */
  struct buffer localvars;

  /* The buffer allocated for bytecode for the current jcf_block. */
  struct buffer bytecode;

  /* Chain of exception handlers for the current method. */
  struct jcf_handler *handlers;

  /* Last element in handlers chain. */
  struct jcf_handler *last_handler;

  /* Number of exception handlers for the current method. */
  int num_handlers;

  /* Number of finalizers we are currently nested within. */
  int num_finalizers;

  /* If non-NULL, use this for the return value. */
  tree return_value_decl;

  /* Information about the current switch statement. */
  struct jcf_switch_state *sw_state;
};
275
/* Forward declaration: the main bytecode generator, defined later in this file. */
static void generate_bytecode_insns PROTO ((tree, int, struct jcf_partial *));
277
/* Utility macros for appending (big-endian) data to a buffer.
   We assume a local variable 'ptr' points into where we want to
   write next, and we assume enough space has been allocated. */

#ifdef ENABLE_CHECKING
/* Verify that writing I bytes at PTR stays inside STATE's current chunk;
   calls fatal() otherwise.  Returns 0 (the value is unused). */
int
CHECK_PUT(ptr, state, i)
     void *ptr;
     struct jcf_partial *state;
     int i;
{
  if (ptr < state->chunk->data
      || (char*)ptr + i > state->chunk->data + state->chunk->size)
    fatal ("internal error - CHECK_PUT failed");
  return 0;
}
#else
#define CHECK_PUT(PTR, STATE, I) ((void)0)
#endif

/* Append a single byte X at `ptr', advancing it. */
#define PUT1(X)  (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
/* Append X as a 2-byte big-endian value. */
#define PUT2(X)  (PUT1((X) >> 8), PUT1((X) & 0xFF))
/* Append X as a 4-byte big-endian value. */
#define PUT4(X)  (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
/* Append N raw bytes copied from P. */
#define PUTN(P, N)  (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
302
303 \f
304 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
305 Set the data and size fields to DATA and SIZE, respectively.
306 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
307
308 struct chunk *
309 alloc_chunk (last, data, size, work)
310 struct chunk *last;
311 unsigned char *data;
312 int size;
313 struct obstack *work;
314 {
315 struct chunk *chunk = (struct chunk *)
316 obstack_alloc (work, sizeof(struct chunk));
317
318 if (data == NULL && size > 0)
319 data = obstack_alloc (work, size);
320
321 chunk->next = NULL;
322 chunk->data = data;
323 chunk->size = size;
324 if (last != NULL)
325 last->next = chunk;
326 return chunk;
327 }
328
#ifdef ENABLE_CHECKING
/* Verify that the bytecode write pointer has not run past the buffer
   limit; calls fatal() otherwise.  Returns 0 (the value is unused). */
int
CHECK_OP(struct jcf_partial *state)
{
  if (state->bytecode.ptr > state->bytecode.limit)
    {
      fatal("internal error - CHECK_OP failed");
    }
  return 0;
}
#else
#define CHECK_OP(STATE) ((void)0)
#endif
342
343 unsigned char *
344 append_chunk (data, size, state)
345 unsigned char *data;
346 int size;
347 struct jcf_partial *state;
348 {
349 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
350 if (state->first == NULL)
351 state->first = state->chunk;
352 return state->chunk->data;
353 }
354
/* Append a chunk containing a private copy of the SIZE bytes at DATA. */

void
append_chunk_copy (data, size, state)
     unsigned char *data;
     int size;
     struct jcf_partial *state;
{
  unsigned char *dest = append_chunk (NULL, size, state);
  memcpy (dest, data, size);
}
364 \f
365 struct jcf_block *
366 gen_jcf_label (state)
367 struct jcf_partial *state;
368 {
369 struct jcf_block *block = (struct jcf_block *)
370 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
371 block->next = NULL;
372 block->linenumber = -1;
373 block->pc = UNDEFINED_PC;
374 return block;
375 }
376
/* Close out the current block: move the bytecode accumulated in
   state->bytecode into a new chunk attached to the block, and bump
   state->code_length by the block's worst-case size (its code plus
   the maximum growth each relocation could add). */

void
finish_jcf_block (state)
     struct jcf_partial *state;
{
  struct jcf_block *block = state->last_block;
  struct jcf_relocation *reloc;
  int code_length = BUFFER_LENGTH (&state->bytecode);
  int pc = state->code_length;
  /* Copy the block's instructions out of the shared buffer and reset it
     for the next block. */
  append_chunk_copy (state->bytecode.data, code_length, state);
  BUFFER_RESET (&state->bytecode);
  block->v.chunk = state->chunk;

  /* Calculate code_length to the maximum value it can have. */
  pc += block->v.chunk->size;
  for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
    {
      int kind = reloc->kind;
      if (kind == SWITCH_ALIGN_RELOC)
        pc += 3;
      else if (kind > BLOCK_START_RELOC)
        pc += 2; /* 2-byte offset may grow to 4-byte offset */
      else if (kind < -1)
        pc += 5; /* May need to add a goto_w. */
    }
  state->code_length = pc;
}
403
404 void
405 define_jcf_label (label, state)
406 struct jcf_block *label;
407 struct jcf_partial *state;
408 {
409 if (state->last_block != NULL)
410 finish_jcf_block (state);
411 label->pc = state->code_length;
412 if (state->blocks == NULL)
413 state->blocks = label;
414 else
415 state->last_block->next = label;
416 state->last_block = label;
417 label->next = NULL;
418 label->u.relocations = NULL;
419 }
420
421 struct jcf_block *
422 get_jcf_label_here (state)
423 struct jcf_partial *state;
424 {
425 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
426 return state->last_block;
427 else
428 {
429 struct jcf_block *label = gen_jcf_label (state);
430 define_jcf_label (label, state);
431 return label;
432 }
433 }
434
435 /* Note a line number entry for the current PC and given LINE. */
436
437 void
438 put_linenumber (line, state)
439 int line;
440 struct jcf_partial *state;
441 {
442 struct jcf_block *label = get_jcf_label_here (state);
443 if (label->linenumber > 0)
444 {
445 label = gen_jcf_label (state);
446 define_jcf_label (label, state);
447 }
448 label->linenumber = line;
449 state->linenumber_count++;
450 }
451
452 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
453 in the range (START_LABEL, END_LABEL). */
454
455 static struct jcf_handler *
456 alloc_handler (start_label, end_label, state)
457 struct jcf_block *start_label;
458 struct jcf_block *end_label;
459 struct jcf_partial *state;
460 {
461 struct jcf_handler *handler = (struct jcf_handler *)
462 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
463 handler->start_label = start_label;
464 handler->end_label = end_label;
465 handler->handler_label = get_jcf_label_here (state);
466 if (state->handlers == NULL)
467 state->handlers = handler;
468 else
469 state->last_handler->next = handler;
470 state->last_handler = handler;
471 handler->next = NULL;
472 state->num_handlers++;
473 return handler;
474 }
475
476 \f
/* The index of jvm local variable allocated for this DECL.
   This is assigned when generating .class files;
   contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
   (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */

#define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)

/* Record of one local variable's slot assignment and the block range
   over which it is in scope (used for debugging info). */
struct localvar_info
{
  struct localvar_info *next;

  tree decl;
  struct jcf_block *start_label;
  struct jcf_block *end_label;
};

/* The current method's map from slot number to localvar_info,
   stored in state->localvars (assumes a local `state' in scope). */
#define localvar_buffer ((struct localvar_info**) state->localvars.data)
#define localvar_max \
  ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
496
/* Allocate a JVM local-variable slot (two consecutive slots for wide
   types) for DECL, using a first-fit search over the slot map, and
   record its starting scope at the current position.  The slot index
   is stored via DECL_LOCAL_INDEX. */

void
localvar_alloc (decl, state)
     tree decl;
     struct jcf_partial *state;
{
  struct jcf_block *start_label = get_jcf_label_here (state);
  int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
  int index;
  register struct localvar_info *info;
  register struct localvar_info **ptr = localvar_buffer;
  register struct localvar_info **limit
    = (struct localvar_info**) state->localvars.ptr;
  /* First-fit: find a free slot (a free pair for wide types). */
  for (index = 0; ptr < limit; index++, ptr++)
    {
      if (ptr[0] == NULL
          && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
        break;
    }
  if (ptr == limit)
    {
      /* No free slot: grow the slot map and use the new space. */
      buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
      ptr = (struct localvar_info**) state->localvars.data + index;
      state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
    }
  info = (struct localvar_info *)
    obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
  ptr[0] = info;
  if (wide)
    /* Mark the second slot of a 2-word local with ~0 so it is neither
       free nor a real localvar_info (checked again in localvar_free). */
    ptr[1] = (struct localvar_info *)(~0);
  DECL_LOCAL_INDEX (decl) = index;
  info->decl = decl;
  info->start_label = start_label;

  if (DECL_NAME (decl) != NULL_TREE)
    {
      /* Generate debugging info. */
      info->next = NULL;
      if (state->last_lvar != NULL)
        state->last_lvar->next = info;
      else
        state->first_lvar = info;
      state->last_lvar = info;
      state->lvar_count++;
    }
}
542
543 int
544 localvar_free (decl, state)
545 tree decl;
546 struct jcf_partial *state;
547 {
548 struct jcf_block *end_label = get_jcf_label_here (state);
549 int index = DECL_LOCAL_INDEX (decl);
550 register struct localvar_info **ptr = &localvar_buffer [index];
551 register struct localvar_info *info = *ptr;
552 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
553 int i;
554
555 info->end_label = end_label;
556
557 if (info->decl != decl)
558 abort ();
559 ptr[0] = NULL;
560 if (wide)
561 {
562 if (ptr[1] != (struct localvar_info *)(~0))
563 abort ();
564 ptr[1] = NULL;
565 }
566 }
567
568 \f
/* Codes passed as the `target' argument of generate_bytecode_insns:
   STACK_TARGET requests the expression's value on the JVM stack;
   IGNORE_TARGET presumably means evaluate for side effects only --
   confirm against generate_bytecode_insns. */
#define STACK_TARGET 1
#define IGNORE_TARGET 2
571
572 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
573 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
574
575 int
576 get_access_flags (decl)
577 tree decl;
578 {
579 int flags = 0;
580 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
581 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
582 flags |= ACC_PUBLIC;
583 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
584 flags |= ACC_FINAL;
585 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
586 {
587 if (TREE_PROTECTED (decl))
588 flags |= ACC_PROTECTED;
589 if (TREE_PRIVATE (decl))
590 flags |= ACC_PRIVATE;
591 }
592 else if (TREE_CODE (decl) == TYPE_DECL)
593 {
594 if (CLASS_SUPER (decl))
595 flags |= ACC_SUPER;
596 if (CLASS_ABSTRACT (decl))
597 flags |= ACC_ABSTRACT;
598 if (CLASS_INTERFACE (decl))
599 flags |= ACC_INTERFACE;
600 }
601 else
602 fatal ("internal error - bad argument to get_access_flags");
603 if (TREE_CODE (decl) == FUNCTION_DECL)
604 {
605 if (METHOD_NATIVE (decl))
606 flags |= ACC_NATIVE;
607 if (METHOD_STATIC (decl))
608 flags |= ACC_STATIC;
609 if (METHOD_SYNCHRONIZED (decl))
610 flags |= ACC_SYNCHRONIZED;
611 if (METHOD_ABSTRACT (decl))
612 flags |= ACC_ABSTRACT;
613 }
614 if (isfield)
615 {
616 if (FIELD_STATIC (decl))
617 flags |= ACC_STATIC;
618 if (FIELD_VOLATILE (decl))
619 flags |= ACC_VOLATILE;
620 if (FIELD_TRANSIENT (decl))
621 flags |= ACC_TRANSIENT;
622 }
623 return flags;
624 }
625
626 /* Write the list of segments starting at CHUNKS to STREAM. */
627
628 void
629 write_chunks (stream, chunks)
630 FILE* stream;
631 struct chunk *chunks;
632 {
633 for (; chunks != NULL; chunks = chunks->next)
634 fwrite (chunks->data, chunks->size, 1, stream);
635 }
636
637 /* Push a 1-word constant in the constant pool at the given INDEX.
638 (Caller is responsible for doing NOTE_PUSH.) */
639
640 static void
641 push_constant1 (index, state)
642 int index;
643 struct jcf_partial *state;
644 {
645 RESERVE (3);
646 if (index < 256)
647 {
648 OP1 (OPCODE_ldc);
649 OP1 (index);
650 }
651 else
652 {
653 OP1 (OPCODE_ldc_w);
654 OP2 (index);
655 }
656 }
657
658 /* Push a 2-word constant in the constant pool at the given INDEX.
659 (Caller is responsible for doing NOTE_PUSH.) */
660
661 static void
662 push_constant2 (index, state)
663 int index;
664 struct jcf_partial *state;
665 {
666 RESERVE (3);
667 OP1 (OPCODE_ldc2_w);
668 OP2 (index);
669 }
670
671 /* Push 32-bit integer constant on VM stack.
672 Caller is responsible for doing NOTE_PUSH. */
673
674 static void
675 push_int_const (i, state)
676 HOST_WIDE_INT i;
677 struct jcf_partial *state;
678 {
679 RESERVE(3);
680 if (i >= -1 && i <= 5)
681 OP1(OPCODE_iconst_0 + i);
682 else if (i >= -128 && i < 128)
683 {
684 OP1(OPCODE_bipush);
685 OP1(i);
686 }
687 else if (i >= -32768 && i < 32768)
688 {
689 OP1(OPCODE_sipush);
690 OP2(i);
691 }
692 else
693 {
694 i = find_constant1 (&state->cpool, CONSTANT_Integer, i & 0xFFFFFFFF);
695 push_constant1 (i, state);
696 }
697 }
698
699 static int
700 find_constant_wide (lo, hi, state)
701 HOST_WIDE_INT lo, hi;
702 struct jcf_partial *state;
703 {
704 HOST_WIDE_INT w1, w2;
705 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
706 return find_constant2 (&state->cpool, CONSTANT_Long,
707 w1 & 0xFFFFFFFF, lo & 0xFFFFFFFF);
708 }
709
710 /* Find or allocate a constant pool entry for the given VALUE.
711 Return the index in the constant pool. */
712
713 static int
714 find_constant_index (value, state)
715 tree value;
716 struct jcf_partial *state;
717 {
718 if (TREE_CODE (value) == INTEGER_CST)
719 {
720 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
721 return find_constant1 (&state->cpool, CONSTANT_Integer,
722 TREE_INT_CST_LOW (value) & 0xFFFFFFFF);
723 else
724 return find_constant_wide (TREE_INT_CST_LOW (value),
725 TREE_INT_CST_HIGH (value), state);
726 }
727 else if (TREE_CODE (value) == REAL_CST)
728 {
729 long words[2];
730 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
731 {
732 words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
733 return find_constant1 (&state->cpool, CONSTANT_Float, words[0]);
734 }
735 else
736 {
737 etardouble (TREE_REAL_CST (value), words);
738 return find_constant2 (&state->cpool, CONSTANT_Double,
739 words[1-FLOAT_WORDS_BIG_ENDIAN] & 0xFFFFFFFF,
740 words[FLOAT_WORDS_BIG_ENDIAN] & 0xFFFFFFFF);
741 }
742 }
743 else if (TREE_CODE (value) == STRING_CST)
744 {
745 return find_string_constant (&state->cpool, value);
746 }
747 else
748 fatal ("find_constant_index - bad type");
749 }
750
751 /* Push 64-bit long constant on VM stack.
752 Caller is responsible for doing NOTE_PUSH. */
753
754 static void
755 push_long_const (lo, hi, state)
756 HOST_WIDE_INT lo, hi;
757 struct jcf_partial *state;
758 {
759 if (hi == 0 && lo >= 0 && lo <= 1)
760 {
761 RESERVE(1);
762 OP1(OPCODE_lconst_0 + lo);
763 }
764 else if ((hi == 0 && lo < 32768) || (hi == -1 && lo >= -32768))
765 {
766 push_int_const (lo, state);
767 RESERVE (1);
768 OP1 (OPCODE_i2l);
769 }
770 else
771 push_constant2 (find_constant_wide (lo, hi, state), state);
772 }
773
774 static void
775 field_op (field, opcode, state)
776 tree field;
777 int opcode;
778 struct jcf_partial *state;
779 {
780 int index = find_fieldref_index (&state->cpool, field);
781 RESERVE (3);
782 OP1 (opcode);
783 OP2 (index);
784 }
785
786 /* Returns an integer in the range 0 (for 'int') through 4 (for object
787 reference) to 7 (for 'short') which matches the pattern of how JVM
788 opcodes typically depend on the operand type. */
789
790 int
791 adjust_typed_op (type, max)
792 tree type;
793 int max;
794 {
795 switch (TREE_CODE (type))
796 {
797 case POINTER_TYPE:
798 case RECORD_TYPE: return 4;
799 case BOOLEAN_TYPE:
800 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
801 case CHAR_TYPE:
802 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
803 case INTEGER_TYPE:
804 switch (TYPE_PRECISION (type))
805 {
806 case 8: return max < 5 ? 0 : 5;
807 case 16: return max < 7 ? 0 : 7;
808 case 32: return 0;
809 case 64: return 1;
810 }
811 break;
812 case REAL_TYPE:
813 switch (TYPE_PRECISION (type))
814 {
815 case 32: return 2;
816 case 64: return 3;
817 }
818 break;
819 default:
820 break;
821 }
822 abort ();
823 }
824
825 static void
826 maybe_wide (opcode, index, state)
827 int opcode, index;
828 struct jcf_partial *state;
829 {
830 if (index >= 256)
831 {
832 RESERVE (4);
833 OP1 (OPCODE_wide);
834 OP1 (opcode);
835 OP2 (index);
836 }
837 else
838 {
839 RESERVE (2);
840 OP1 (opcode);
841 OP1 (index);
842 }
843 }
844
845 /* Compile code to duplicate with offset, where
846 SIZE is the size of the stack item to duplicate (1 or 2), abd
847 OFFSET is where to insert the result (must be 0, 1, or 2).
848 (The new words get inserted at stack[SP-size-offset].) */
849
850 static void
851 emit_dup (size, offset, state)
852 int size, offset;
853 struct jcf_partial *state;
854 {
855 int kind;
856 if (size == 0)
857 return;
858 RESERVE(1);
859 if (offset == 0)
860 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
861 else if (offset == 1)
862 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
863 else if (offset == 2)
864 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
865 else
866 abort();
867 OP1 (kind);
868 NOTE_PUSH (size);
869 }
870
871 static void
872 emit_pop (size, state)
873 int size;
874 struct jcf_partial *state;
875 {
876 RESERVE (1);
877 OP1 (OPCODE_pop - 1 + size);
878 }
879
880 static void
881 emit_iinc (var, value, state)
882 tree var;
883 int value;
884 struct jcf_partial *state;
885 {
886 int slot = DECL_LOCAL_INDEX (var);
887
888 if (value < -128 || value > 127 || slot >= 256)
889 {
890 RESERVE (6);
891 OP1 (OPCODE_wide);
892 OP1 (OPCODE_iinc);
893 OP2 (slot);
894 OP2 (value);
895 }
896 else
897 {
898 RESERVE (3);
899 OP1 (OPCODE_iinc);
900 OP1 (slot);
901 OP1 (value);
902 }
903 }
904
905 static void
906 emit_load_or_store (var, opcode, state)
907 tree var; /* Variable to load from or store into. */
908 int opcode; /* Either OPCODE_iload or OPCODE_istore. */
909 struct jcf_partial *state;
910 {
911 tree type = TREE_TYPE (var);
912 int kind = adjust_typed_op (type, 4);
913 int index = DECL_LOCAL_INDEX (var);
914 if (index <= 3)
915 {
916 RESERVE (1);
917 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
918 }
919 else
920 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
921 }
922
923 static void
924 emit_load (var, state)
925 tree var;
926 struct jcf_partial *state;
927 {
928 emit_load_or_store (var, OPCODE_iload, state);
929 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
930 }
931
932 static void
933 emit_store (var, state)
934 tree var;
935 struct jcf_partial *state;
936 {
937 emit_load_or_store (var, OPCODE_istore, state);
938 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
939 }
940
941 static void
942 emit_unop (opcode, type, state)
943 enum java_opcode opcode;
944 tree type;
945 struct jcf_partial *state;
946 {
947 RESERVE(1);
948 OP1 (opcode);
949 }
950
951 static void
952 emit_binop (opcode, type, state)
953 enum java_opcode opcode;
954 tree type;
955 struct jcf_partial *state;
956 {
957 int size = TYPE_IS_WIDE (type) ? 2 : 1;
958 RESERVE(1);
959 OP1 (opcode);
960 NOTE_POP (size);
961 }
962
963 static void
964 emit_reloc (value, kind, target, state)
965 HOST_WIDE_INT value;
966 int kind;
967 struct jcf_block *target;
968 struct jcf_partial *state;
969 {
970 struct jcf_relocation *reloc = (struct jcf_relocation *)
971 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
972 struct jcf_block *block = state->last_block;
973 reloc->next = block->u.relocations;
974 block->u.relocations = reloc;
975 reloc->offset = BUFFER_LENGTH (&state->bytecode);
976 reloc->label = target;
977 reloc->kind = kind;
978 if (kind == 0 || kind == BLOCK_START_RELOC)
979 OP4 (value);
980 else if (kind != SWITCH_ALIGN_RELOC)
981 OP2 (value);
982 }
983
984 static void
985 emit_switch_reloc (label, state)
986 struct jcf_block *label;
987 struct jcf_partial *state;
988 {
989 emit_reloc (0, BLOCK_START_RELOC, label, state);
990 }
991
992 /* Similar to emit_switch_reloc,
993 but re-uses an existing case reloc. */
994
995 static void
996 emit_case_reloc (reloc, state)
997 struct jcf_relocation *reloc;
998 struct jcf_partial *state;
999 {
1000 struct jcf_block *block = state->last_block;
1001 reloc->next = block->u.relocations;
1002 block->u.relocations = reloc;
1003 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1004 reloc->kind = BLOCK_START_RELOC;
1005 OP4 (0);
1006 }
1007
/* Emit a conditional jump to TARGET with a 2-byte relative jump offset.
   The opcode is OPCODE, the inverted opcode is INV_OPCODE (used if the
   branch must later be rewritten with a wide goto -- hence the negative
   relocation kind).  Fixed: the original wrote via OP1/OP2 without
   first calling RESERVE, violating OP1's documented precondition that
   space has already been RESERVE'd; also replaced a C++-style `//'
   comment, which is not valid in C89. */

static void
emit_if (target, opcode, inv_opcode, state)
     struct jcf_block *target;
     int opcode, inv_opcode;
     struct jcf_partial *state;
{
  RESERVE (3);  /* 1 opcode byte + 2 placeholder offset bytes. */
  OP1 (opcode);
  /* value is 1 byte from reloc back to start of instruction. */
  emit_reloc (1, - inv_opcode, target, state);
}
1021
1022 static void
1023 emit_goto (target, state)
1024 struct jcf_block *target;
1025 struct jcf_partial *state;
1026 {
1027 OP1 (OPCODE_goto);
1028 // Value is 1 byte from reloc back to start of instruction.
1029 emit_reloc (1, OPCODE_goto_w, target, state);
1030 }
1031
1032 static void
1033 emit_jsr (target, state)
1034 struct jcf_block *target;
1035 struct jcf_partial *state;
1036 {
1037 OP1 (OPCODE_jsr);
1038 // Value is 1 byte from reloc back to start of instruction.
1039 emit_reloc (1, OPCODE_jsr_w, target, state);
1040 }
1041
1042 /* Generate code to evaluate EXP. If the result is true,
1043 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1044 TRUE_BRANCH_FIRST is a code geneation hint that the
1045 TRUE_LABEL may follow right after this. (The idea is that we
1046 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1047
1048 void
1049 generate_bytecode_conditional (exp, true_label, false_label,
1050 true_branch_first, state)
1051 tree exp;
1052 struct jcf_block *true_label;
1053 struct jcf_block *false_label;
1054 int true_branch_first;
1055 struct jcf_partial *state;
1056 {
1057 int kind;
1058 tree exp0, exp1, type;
1059 int save_SP = state->code_SP;
1060 enum java_opcode op, negop;
1061 switch (TREE_CODE (exp))
1062 {
1063 case INTEGER_CST:
1064 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1065 break;
1066 case COND_EXPR:
1067 {
1068 struct jcf_block *then_label = gen_jcf_label (state);
1069 struct jcf_block *else_label = gen_jcf_label (state);
1070 int save_SP_before, save_SP_after;
1071 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1072 then_label, else_label, 1, state);
1073 define_jcf_label (then_label, state);
1074 save_SP_before = state->code_SP;
1075 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1076 true_label, false_label, 1, state);
1077 save_SP_after = state->code_SP;
1078 state->code_SP = save_SP_before;
1079 define_jcf_label (else_label, state);
1080 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1081 true_label, false_label,
1082 true_branch_first, state);
1083 if (state->code_SP != save_SP_after)
1084 fatal ("internal error non-matching SP");
1085 }
1086 break;
1087 case TRUTH_NOT_EXPR:
1088 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label, true_label,
1089 ! true_branch_first, state);
1090 break;
1091 case TRUTH_ANDIF_EXPR:
1092 {
1093 struct jcf_block *next_label = gen_jcf_label (state);
1094 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1095 next_label, false_label, 1, state);
1096 define_jcf_label (next_label, state);
1097 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1098 true_label, false_label, 1, state);
1099 }
1100 break;
1101 case TRUTH_ORIF_EXPR:
1102 {
1103 struct jcf_block *next_label = gen_jcf_label (state);
1104 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1105 true_label, next_label, 1, state);
1106 define_jcf_label (next_label, state);
1107 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1108 true_label, false_label, 1, state);
1109 }
1110 break;
1111 compare_1:
1112 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1113 set it to the corresponding 1-operand if<COND> instructions. */
1114 op = op - 6;
1115 /* FALLTHROUGH */
1116 compare_2:
1117 /* The opcodes with their inverses are allocated in pairs.
1118 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1119 negop = (op & 1) ? op + 1 : op - 1;
1120 compare_2_ptr:
1121 if (true_branch_first)
1122 {
1123 emit_if (false_label, negop, op, state);
1124 emit_goto (true_label, state);
1125 }
1126 else
1127 {
1128 emit_if (true_label, op, negop, state);
1129 emit_goto (false_label, state);
1130 }
1131 break;
1132 case EQ_EXPR:
1133 op = OPCODE_if_icmpeq;
1134 goto compare;
1135 case NE_EXPR:
1136 op = OPCODE_if_icmpne;
1137 goto compare;
1138 case GT_EXPR:
1139 op = OPCODE_if_icmpgt;
1140 goto compare;
1141 case LT_EXPR:
1142 op = OPCODE_if_icmplt;
1143 goto compare;
1144 case GE_EXPR:
1145 op = OPCODE_if_icmpge;
1146 goto compare;
1147 case LE_EXPR:
1148 op = OPCODE_if_icmple;
1149 goto compare;
1150 compare:
1151 exp0 = TREE_OPERAND (exp, 0);
1152 exp1 = TREE_OPERAND (exp, 1);
1153 type = TREE_TYPE (exp0);
1154 switch (TREE_CODE (type))
1155 {
1156 int opf;
1157 case POINTER_TYPE: case RECORD_TYPE:
1158 switch (TREE_CODE (exp))
1159 {
1160 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1161 case NE_EXPR: op = OPCODE_if_acmpne; break;
1162 default: abort();
1163 }
1164 if (integer_zerop (exp1) || integer_zerop (exp0))
1165 {
1166 generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp0,
1167 STACK_TARGET, state);
1168 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1169 negop = (op & 1) ? op - 1 : op + 1;
1170 NOTE_POP (1);
1171 goto compare_2_ptr;
1172 }
1173 generate_bytecode_insns (exp0, STACK_TARGET, state);
1174 generate_bytecode_insns (exp1, STACK_TARGET, state);
1175 NOTE_POP (2);
1176 goto compare_2;
1177 case REAL_TYPE:
1178 generate_bytecode_insns (exp0, STACK_TARGET, state);
1179 generate_bytecode_insns (exp1, STACK_TARGET, state);
1180 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1181 opf = OPCODE_fcmpg;
1182 else
1183 opf = OPCODE_fcmpl;
1184 if (TYPE_PRECISION (type) > 32)
1185 {
1186 opf += 2;
1187 NOTE_POP (4);
1188 }
1189 else
1190 NOTE_POP (2);
1191 RESERVE (1);
1192 OP1 (opf);
1193 goto compare_1;
1194 case INTEGER_TYPE:
1195 if (TYPE_PRECISION (type) > 32)
1196 {
1197 generate_bytecode_insns (exp0, STACK_TARGET, state);
1198 generate_bytecode_insns (exp1, STACK_TARGET, state);
1199 NOTE_POP (4);
1200 RESERVE (1);
1201 OP1 (OPCODE_lcmp);
1202 goto compare_1;
1203 }
	      /* FALLTHROUGH */
1205 default:
1206 if (integer_zerop (exp1))
1207 {
1208 generate_bytecode_insns (exp0, STACK_TARGET, state);
1209 NOTE_POP (1);
1210 goto compare_1;
1211 }
1212 if (integer_zerop (exp0))
1213 {
1214 switch (op)
1215 {
1216 case OPCODE_if_icmplt:
1217 case OPCODE_if_icmpge:
1218 op += 2;
1219 break;
1220 case OPCODE_if_icmpgt:
1221 case OPCODE_if_icmple:
1222 op -= 2;
1223 break;
1224 default:
1225 break;
1226 }
1227 generate_bytecode_insns (exp1, STACK_TARGET, state);
1228 NOTE_POP (1);
1229 goto compare_1;
1230 }
1231 generate_bytecode_insns (exp0, STACK_TARGET, state);
1232 generate_bytecode_insns (exp1, STACK_TARGET, state);
1233 NOTE_POP (2);
1234 goto compare_2;
1235 }
1236
1237 default:
1238 generate_bytecode_insns (exp, STACK_TARGET, state);
1239 NOTE_POP (1);
1240 if (true_branch_first)
1241 {
1242 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1243 emit_goto (true_label, state);
1244 }
1245 else
1246 {
1247 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1248 emit_goto (false_label, state);
1249 }
1250 break;
1251 }
1252 if (save_SP != state->code_SP)
1253 fatal ("internal error - SP mismatch");
1254 }
1255
1256 /* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs
1257 but only as far out as LIMIT (since we are about to jump to the
1258 emit label that is LIMIT). */
1259
1260 static void
1261 call_cleanups (limit, state)
1262 struct jcf_block *limit;
1263 struct jcf_partial *state;
1264 {
1265 struct jcf_block *block = state->labeled_blocks;
1266 for (; block != limit; block = block->next)
1267 {
1268 if (block->pc == PENDING_CLEANUP_PC)
1269 emit_jsr (block, state);
1270 }
1271 }
1272
1273 /* Generate bytecode for sub-expression EXP of METHOD.
1274 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1275
1276 static void
1277 generate_bytecode_insns (exp, target, state)
1278 tree exp;
1279 int target;
1280 struct jcf_partial *state;
1281 {
1282 tree type;
1283 enum java_opcode jopcode;
1284 int op;
1285 HOST_WIDE_INT value;
1286 int post_op;
1287 int size;
1288 int offset;
1289
1290 if (exp == NULL && target == IGNORE_TARGET)
1291 return;
1292
1293 type = TREE_TYPE (exp);
1294
1295 switch (TREE_CODE (exp))
1296 {
1297 case BLOCK:
1298 if (BLOCK_EXPR_BODY (exp))
1299 {
1300 tree local;
1301 tree body = BLOCK_EXPR_BODY (exp);
1302 for (local = BLOCK_EXPR_DECLS (exp); local; )
1303 {
1304 tree next = TREE_CHAIN (local);
1305 localvar_alloc (local, state);
1306 local = next;
1307 }
1308 /* Avoid deep recursion for long blocks. */
1309 while (TREE_CODE (body) == COMPOUND_EXPR)
1310 {
1311 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1312 body = TREE_OPERAND (body, 1);
1313 }
1314 generate_bytecode_insns (body, target, state);
1315 for (local = BLOCK_EXPR_DECLS (exp); local; )
1316 {
1317 tree next = TREE_CHAIN (local);
1318 localvar_free (local, state);
1319 local = next;
1320 }
1321 }
1322 break;
1323 case COMPOUND_EXPR:
1324 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1325 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1326 break;
1327 case EXPR_WITH_FILE_LOCATION:
1328 {
1329 char *saved_input_filename = input_filename;
1330 tree body = EXPR_WFL_NODE (exp);
1331 int saved_lineno = lineno;
1332 if (body == empty_stmt_node)
1333 break;
1334 input_filename = EXPR_WFL_FILENAME (exp);
1335 lineno = EXPR_WFL_LINENO (exp);
1336 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0)
1337 put_linenumber (lineno, state);
1338 generate_bytecode_insns (body, target, state);
1339 input_filename = saved_input_filename;
1340 lineno = saved_lineno;
1341 }
1342 break;
1343 case INTEGER_CST:
1344 if (target == IGNORE_TARGET) ; /* do nothing */
1345 else if (TREE_CODE (type) == POINTER_TYPE)
1346 {
1347 if (! integer_zerop (exp))
1348 abort();
1349 RESERVE(1);
1350 OP1 (OPCODE_aconst_null);
1351 NOTE_PUSH (1);
1352 }
1353 else if (TYPE_PRECISION (type) <= 32)
1354 {
1355 push_int_const (TREE_INT_CST_LOW (exp), state);
1356 NOTE_PUSH (1);
1357 }
1358 else
1359 {
1360 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1361 state);
1362 NOTE_PUSH (2);
1363 }
1364 break;
1365 case REAL_CST:
1366 offset = find_constant_index (exp, state);
1367 switch (TYPE_PRECISION (type))
1368 {
1369 case 32:
1370 push_constant1 (offset, state);
1371 NOTE_PUSH (1);
1372 break;
1373 case 64:
1374 push_constant2 (offset, state);
1375 NOTE_PUSH (2);
1376 break;
1377 default:
1378 abort ();
1379 }
1380 break;
1381 case STRING_CST:
1382 push_constant1 (find_string_constant (&state->cpool, exp), state);
1383 NOTE_PUSH (1);
1384 break;
1385 case VAR_DECL:
1386 if (TREE_STATIC (exp))
1387 {
1388 field_op (exp, OPCODE_getstatic, state);
1389 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1390 break;
1391 }
1392 /* ... fall through ... */
1393 case PARM_DECL:
1394 emit_load (exp, state);
1395 break;
1396 case NON_LVALUE_EXPR:
1397 case INDIRECT_REF:
1398 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1399 break;
1400 case ARRAY_REF:
1401 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1402 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1403 if (target != IGNORE_TARGET)
1404 {
1405 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1406 RESERVE(1);
1407 OP1 (jopcode);
1408 if (! TYPE_IS_WIDE (type))
1409 NOTE_POP (1);
1410 }
1411 break;
1412 case COMPONENT_REF:
1413 {
1414 tree obj = TREE_OPERAND (exp, 0);
1415 tree field = TREE_OPERAND (exp, 1);
1416 int is_static = FIELD_STATIC (field);
1417 generate_bytecode_insns (obj,
1418 is_static ? IGNORE_TARGET : target, state);
1419 if (target != IGNORE_TARGET)
1420 {
1421 if (DECL_NAME (field) == length_identifier_node && !is_static
1422 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1423 {
1424 RESERVE (1);
1425 OP1 (OPCODE_arraylength);
1426 }
1427 else
1428 {
1429 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1430 state);
1431 if (! is_static)
1432 NOTE_POP (1);
1433 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1434 }
1435 }
1436 }
1437 break;
1438 case TRUTH_ANDIF_EXPR:
1439 case TRUTH_ORIF_EXPR:
1440 case EQ_EXPR:
1441 case NE_EXPR:
1442 case GT_EXPR:
1443 case LT_EXPR:
1444 case GE_EXPR:
1445 case LE_EXPR:
1446 {
1447 struct jcf_block *then_label = gen_jcf_label (state);
1448 struct jcf_block *else_label = gen_jcf_label (state);
1449 struct jcf_block *end_label = gen_jcf_label (state);
1450 generate_bytecode_conditional (exp,
1451 then_label, else_label, 1, state);
1452 define_jcf_label (then_label, state);
1453 push_int_const (1, state);
1454 emit_goto (end_label, state);
1455 define_jcf_label (else_label, state);
1456 push_int_const (0, state);
1457 define_jcf_label (end_label, state);
1458 NOTE_PUSH (1);
1459 }
1460 break;
1461 case COND_EXPR:
1462 {
1463 struct jcf_block *then_label = gen_jcf_label (state);
1464 struct jcf_block *else_label = gen_jcf_label (state);
1465 struct jcf_block *end_label = gen_jcf_label (state);
1466 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1467 then_label, else_label, 1, state);
1468 define_jcf_label (then_label, state);
1469 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1470 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1471 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1472 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1473 emit_goto (end_label, state);
1474 define_jcf_label (else_label, state);
1475 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1476 define_jcf_label (end_label, state);
1477 }
1478 break;
1479 case CASE_EXPR:
1480 {
1481 struct jcf_switch_state *sw_state = state->sw_state;
1482 struct jcf_relocation *reloc = (struct jcf_relocation *)
1483 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1484 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1485 reloc->kind = 0;
1486 reloc->label = get_jcf_label_here (state);
1487 reloc->offset = case_value;
1488 reloc->next = sw_state->cases;
1489 sw_state->cases = reloc;
1490 if (sw_state->num_cases == 0)
1491 {
1492 sw_state->min_case = case_value;
1493 sw_state->max_case = case_value;
1494 }
1495 else
1496 {
1497 if (case_value < sw_state->min_case)
1498 sw_state->min_case = case_value;
1499 if (case_value > sw_state->max_case)
1500 sw_state->max_case = case_value;
1501 }
1502 sw_state->num_cases++;
1503 }
1504 break;
1505 case DEFAULT_EXPR:
1506 state->sw_state->default_label = get_jcf_label_here (state);
1507 break;
1508
1509 case SWITCH_EXPR:
1510 {
1511 /* The SWITCH_EXPR has three parts, generated in the following order:
1512 1. the switch_expression (the value used to select the correct case);
1513 2. the switch_body;
         3. the switch_instruction (the tableswitch/lookupswitch instruction.).
1515 After code generation, we will re-order then in the order 1, 3, 2.
1516 This is to avoid an extra GOTOs. */
1517 struct jcf_switch_state sw_state;
1518 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1519 struct jcf_block *body_last; /* Last block of the switch_body. */
1520 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1521 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1522 struct jcf_block *body_block;
1523 int switch_length;
1524 sw_state.prev = state->sw_state;
1525 state->sw_state = &sw_state;
1526 sw_state.cases = NULL;
1527 sw_state.num_cases = 0;
1528 sw_state.default_label = NULL;
1529 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1530 expression_last = state->last_block;
1531 body_block = get_jcf_label_here (state); /* Force a new block here. */
1532 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1533 body_last = state->last_block;
1534
1535 switch_instruction = gen_jcf_label (state);
1536 define_jcf_label (switch_instruction, state);
1537 if (sw_state.default_label == NULL)
1538 sw_state.default_label = gen_jcf_label (state);
1539
1540 if (sw_state.num_cases <= 1)
1541 {
1542 if (sw_state.num_cases == 0)
1543 {
1544 emit_pop (1, state);
1545 NOTE_POP (1);
1546 }
1547 else
1548 {
1549 push_int_const (sw_state.cases->offset, state);
1550 emit_if (sw_state.cases->label,
1551 OPCODE_ifeq, OPCODE_ifne, state);
1552 }
1553 emit_goto (sw_state.default_label, state);
1554 }
1555 else
1556 {
1557 HOST_WIDE_INT i;
1558 /* Copy the chain of relocs into a sorted array. */
1559 struct jcf_relocation **relocs = (struct jcf_relocation **)
1560 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1561 /* The relocs arrays is a buffer with a gap.
1562 The assumption is that cases will normally come in "runs". */
1563 int gap_start = 0;
1564 int gap_end = sw_state.num_cases;
1565 struct jcf_relocation *reloc;
1566 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1567 {
1568 HOST_WIDE_INT case_value = reloc->offset;
1569 while (gap_end < sw_state.num_cases)
1570 {
1571 struct jcf_relocation *end = relocs[gap_end];
1572 if (case_value <= end->offset)
1573 break;
1574 relocs[gap_start++] = end;
1575 gap_end++;
1576 }
1577 while (gap_start > 0)
1578 {
1579 struct jcf_relocation *before = relocs[gap_start-1];
1580 if (case_value >= before->offset)
1581 break;
1582 relocs[--gap_end] = before;
1583 gap_start--;
1584 }
1585 relocs[gap_start++] = reloc;
1586 /* Note we don't check for duplicates. FIXME! */
1587 }
1588
1589 if (2 * sw_state.num_cases
1590 >= sw_state.max_case - sw_state.min_case)
1591 { /* Use tableswitch. */
1592 int index = 0;
1593 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1594 OP1 (OPCODE_tableswitch);
1595 emit_reloc (0, SWITCH_ALIGN_RELOC, NULL, state);
1596 emit_switch_reloc (sw_state.default_label, state);
1597 OP4 (sw_state.min_case);
1598 OP4 (sw_state.max_case);
1599 for (i = sw_state.min_case; ; )
1600 {
1601 reloc = relocs[index];
1602 if (i == reloc->offset)
1603 {
1604 emit_case_reloc (reloc, state);
1605 if (i == sw_state.max_case)
1606 break;
1607 index++;
1608 }
1609 else
1610 emit_switch_reloc (sw_state.default_label, state);
1611 i++;
1612 }
1613 }
1614 else
1615 { /* Use lookupswitch. */
1616 RESERVE(9 + 8 * sw_state.num_cases);
1617 OP1 (OPCODE_lookupswitch);
1618 emit_reloc (0, SWITCH_ALIGN_RELOC, NULL, state);
1619 emit_switch_reloc (sw_state.default_label, state);
1620 OP4 (sw_state.num_cases);
1621 for (i = 0; i < sw_state.num_cases; i++)
1622 {
1623 struct jcf_relocation *reloc = relocs[i];
1624 OP4 (reloc->offset);
1625 emit_case_reloc (reloc, state);
1626 }
1627 }
1628 free (relocs);
1629 }
1630
1631 instruction_last = state->last_block;
1632 if (sw_state.default_label->pc < 0)
1633 define_jcf_label (sw_state.default_label, state);
1634 else /* Force a new block. */
1635 sw_state.default_label = get_jcf_label_here (state);
1636 /* Now re-arrange the blocks so the switch_instruction
1637 comes before the switch_body. */
1638 switch_length = state->code_length - switch_instruction->pc;
1639 switch_instruction->pc = body_block->pc;
1640 instruction_last->next = body_block;
1641 instruction_last->v.chunk->next = body_block->v.chunk;
1642 expression_last->next = switch_instruction;
1643 expression_last->v.chunk->next = switch_instruction->v.chunk;
1644 body_last->next = sw_state.default_label;
1645 body_last->v.chunk->next = NULL;
1646 state->chunk = body_last->v.chunk;
1647 for (; body_block != sw_state.default_label; body_block = body_block->next)
1648 body_block->pc += switch_length;
1649
1650 state->sw_state = sw_state.prev;
1651 break;
1652 }
1653
1654 case RETURN_EXPR:
1655 if (!TREE_OPERAND (exp, 0))
1656 {
1657 op = OPCODE_return;
1658 call_cleanups (NULL_TREE, state);
1659 }
1660 else
1661 {
1662 exp = TREE_OPERAND (exp, 0);
1663 if (TREE_CODE (exp) != MODIFY_EXPR)
1664 abort ();
1665 exp = TREE_OPERAND (exp, 1);
1666 op = OPCODE_ireturn + adjust_typed_op (TREE_TYPE (exp), 4);
1667 generate_bytecode_insns (exp, STACK_TARGET, state);
1668 if (state->num_finalizers > 0)
1669 {
1670 if (state->return_value_decl == NULL_TREE)
1671 {
1672 state->return_value_decl
1673 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1674 localvar_alloc (state->return_value_decl, state);
1675 }
1676 emit_store (state->return_value_decl, state);
1677 call_cleanups (NULL_TREE, state);
1678 emit_load (state->return_value_decl, state);
1679 /* If we call localvar_free (state->return_value_decl, state),
1680 then we risk the save decl erroneously re-used in the
1681 finalizer. Instead, we keep the state->return_value_decl
1682 allocated through the rest of the method. This is not
1683 the greatest solution, but it is at least simple and safe. */
1684 }
1685 }
1686 RESERVE (1);
1687 OP1 (op);
1688 break;
1689 case LABELED_BLOCK_EXPR:
1690 {
1691 struct jcf_block *end_label = gen_jcf_label (state);
1692 end_label->next = state->labeled_blocks;
1693 state->labeled_blocks = end_label;
1694 end_label->pc = PENDING_EXIT_PC;
1695 end_label->u.labeled_block = exp;
1696 if (LABELED_BLOCK_BODY (exp))
1697 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1698 if (state->labeled_blocks != end_label)
1699 abort();
1700 state->labeled_blocks = end_label->next;
1701 define_jcf_label (end_label, state);
1702 }
1703 break;
1704 case LOOP_EXPR:
1705 {
1706 tree body = TREE_OPERAND (exp, 0);
1707 #if 0
1708 if (TREE_CODE (body) == COMPOUND_EXPR
1709 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1710 {
1711 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1712 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1713 struct jcf_block *head_label;
1714 struct jcf_block *body_label;
1715 struct jcf_block *end_label = gen_jcf_label (state);
1716 struct jcf_block *exit_label = state->labeled_blocks;
1717 head_label = gen_jcf_label (state);
1718 emit_goto (head_label, state);
1719 body_label = get_jcf_label_here (state);
1720 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1721 define_jcf_label (head_label, state);
1722 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1723 end_label, body_label, 1, state);
1724 define_jcf_label (end_label, state);
1725 }
1726 else
1727 #endif
1728 {
1729 struct jcf_block *head_label = get_jcf_label_here (state);
1730 generate_bytecode_insns (body, IGNORE_TARGET, state);
1731 emit_goto (head_label, state);
1732 }
1733 }
1734 break;
1735 case EXIT_EXPR:
1736 {
1737 struct jcf_block *label = state->labeled_blocks;
1738 struct jcf_block *end_label = gen_jcf_label (state);
1739 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1740 label, end_label, 0, state);
1741 define_jcf_label (end_label, state);
1742 }
1743 break;
1744 case EXIT_BLOCK_EXPR:
1745 {
1746 struct jcf_block *label = state->labeled_blocks;
1747 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1748 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1749 label = label->next;
1750 call_cleanups (label, state);
1751 emit_goto (label, state);
1752 }
1753 break;
1754
1755 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1756 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1757 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1758 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1759 increment:
1760
1761 exp = TREE_OPERAND (exp, 0);
1762 type = TREE_TYPE (exp);
1763 size = TYPE_IS_WIDE (type) ? 2 : 1;
1764 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1765 && ! TREE_STATIC (exp)
1766 && TREE_CODE (type) == INTEGER_TYPE
1767 && TYPE_PRECISION (type) == 32)
1768 {
1769 if (target != IGNORE_TARGET && post_op)
1770 emit_load (exp, state);
1771 emit_iinc (exp, value, state);
1772 if (target != IGNORE_TARGET && ! post_op)
1773 emit_load (exp, state);
1774 break;
1775 }
1776 if (TREE_CODE (exp) == COMPONENT_REF)
1777 {
1778 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1779 emit_dup (1, 0, state);
1780 /* Stack: ..., objectref, objectref. */
1781 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1782 NOTE_PUSH (size-1);
1783 /* Stack: ..., objectref, oldvalue. */
1784 offset = 1;
1785 }
1786 else if (TREE_CODE (exp) == ARRAY_REF)
1787 {
1788 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1789 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1790 emit_dup (2, 0, state);
1791 /* Stack: ..., array, index, array, index. */
1792 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1793 RESERVE(1);
1794 OP1 (jopcode);
1795 NOTE_POP (2-size);
1796 /* Stack: ..., array, index, oldvalue. */
1797 offset = 2;
1798 }
1799 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1800 {
1801 generate_bytecode_insns (exp, STACK_TARGET, state);
1802 /* Stack: ..., oldvalue. */
1803 offset = 0;
1804 }
1805 else
1806 abort ();
1807
1808 if (target != IGNORE_TARGET && post_op)
1809 emit_dup (size, offset, state);
1810 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1811 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1812 /* Stack, otherwise: ..., [result, ] oldvalue. */
1813 if (size == 1)
1814 push_int_const (value, state);
1815 else
1816 push_long_const (value, value >= 0 ? 0 : -1, state);
1817 NOTE_PUSH (size);
1818 emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state);
1819 if (target != IGNORE_TARGET && ! post_op)
1820 emit_dup (size, offset, state);
1821 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1822 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1823 /* Stack, otherwise: ..., [result, ] newvalue. */
1824 goto finish_assignment;
1825
1826 case MODIFY_EXPR:
1827 {
1828 tree lhs = TREE_OPERAND (exp, 0);
1829 tree rhs = TREE_OPERAND (exp, 1);
1830 int offset = 0;
1831
1832 /* See if we can use the iinc instruction. */
1833 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1834 && ! TREE_STATIC (lhs)
1835 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1836 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1837 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1838 {
1839 tree arg0 = TREE_OPERAND (rhs, 0);
1840 tree arg1 = TREE_OPERAND (rhs, 1);
1841 HOST_WIDE_INT min_value = -32768;
1842 HOST_WIDE_INT max_value = 32767;
1843 if (TREE_CODE (rhs) == MINUS_EXPR)
1844 {
1845 min_value++;
1846 max_value++;
1847 }
1848 else if (arg1 == lhs)
1849 {
1850 arg0 = arg1;
1851 arg1 = TREE_OPERAND (rhs, 0);
1852 }
1853 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1854 {
1855 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1856 value = TREE_INT_CST_LOW (arg1);
1857 if ((hi_value == 0 && value <= max_value)
1858 || (hi_value == -1 && value >= min_value))
1859 {
1860 if (TREE_CODE (rhs) == MINUS_EXPR)
1861 value = -value;
1862 emit_iinc (lhs, value, state);
1863 break;
1864 }
1865 }
1866 }
1867
1868 if (TREE_CODE (lhs) == COMPONENT_REF)
1869 {
1870 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
1871 STACK_TARGET, state);
1872 offset = 1;
1873 }
1874 else if (TREE_CODE (lhs) == ARRAY_REF)
1875 {
1876 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
1877 STACK_TARGET, state);
1878 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
1879 STACK_TARGET, state);
1880 offset = 2;
1881 }
1882 else
1883 offset = 0;
1884 generate_bytecode_insns (rhs, STACK_TARGET, state);
1885 if (target != IGNORE_TARGET)
1886 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
1887 exp = lhs;
1888 }
      /* FALLTHROUGH */
1890
1891 finish_assignment:
1892 if (TREE_CODE (exp) == COMPONENT_REF)
1893 {
1894 tree field = TREE_OPERAND (exp, 1);
1895 if (! FIELD_STATIC (field))
1896 NOTE_POP (1);
1897 field_op (field,
1898 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
1899 state);
1900
1901 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1902 }
1903 else if (TREE_CODE (exp) == VAR_DECL
1904 || TREE_CODE (exp) == PARM_DECL)
1905 {
1906 if (FIELD_STATIC (exp))
1907 {
1908 field_op (exp, OPCODE_putstatic, state);
1909 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1910 }
1911 else
1912 emit_store (exp, state);
1913 }
1914 else if (TREE_CODE (exp) == ARRAY_REF)
1915 {
1916 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
1917 RESERVE(1);
1918 OP1 (jopcode);
1919 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
1920 }
1921 else
1922 fatal ("internal error (bad lhs to MODIFY_EXPR)");
1923 break;
1924 case PLUS_EXPR:
1925 jopcode = OPCODE_iadd;
1926 goto binop;
1927 case MINUS_EXPR:
1928 jopcode = OPCODE_isub;
1929 goto binop;
1930 case MULT_EXPR:
1931 jopcode = OPCODE_imul;
1932 goto binop;
1933 case TRUNC_DIV_EXPR:
1934 case RDIV_EXPR:
1935 jopcode = OPCODE_idiv;
1936 goto binop;
1937 case TRUNC_MOD_EXPR:
1938 jopcode = OPCODE_irem;
1939 goto binop;
1940 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
1941 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
1942 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
1943 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
1944 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
1945 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
1946 binop:
1947 {
1948 tree arg0 = TREE_OPERAND (exp, 0);
1949 tree arg1 = TREE_OPERAND (exp, 1);
1950 jopcode += adjust_typed_op (type, 3);
1951 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
1952 {
1953 /* fold may (e.g) convert 2*x to x+x. */
1954 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
1955 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
1956 }
1957 else
1958 {
1959 generate_bytecode_insns (arg0, target, state);
1960 generate_bytecode_insns (arg1, target, state);
1961 }
1962 /* For most binary operations, both operands and the result have the
1963 same type. Shift operations are different. Using arg1's type
	   gets us the correct SP adjustment in all cases. */
1965 if (target == STACK_TARGET)
1966 emit_binop (jopcode, TREE_TYPE (arg1), state);
1967 break;
1968 }
1969 case TRUTH_NOT_EXPR:
1970 case BIT_NOT_EXPR:
1971 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1972 if (target == STACK_TARGET)
1973 {
1974 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
1975 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
1976 RESERVE (2);
1977 if (is_long)
1978 OP1 (OPCODE_i2l);
1979 NOTE_PUSH (1 + is_long);
1980 OP1 (OPCODE_ixor + is_long);
1981 NOTE_POP (1 + is_long);
1982 }
1983 break;
1984 case NEGATE_EXPR:
1985 jopcode = OPCODE_ineg;
1986 jopcode += adjust_typed_op (type, 3);
1987 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1988 if (target == STACK_TARGET)
1989 emit_unop (jopcode, type, state);
1990 break;
1991 case INSTANCEOF_EXPR:
1992 {
1993 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
1994 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1995 RESERVE (3);
1996 OP1 (OPCODE_instanceof);
1997 OP2 (index);
1998 }
1999 break;
2000 case CONVERT_EXPR:
2001 case NOP_EXPR:
2002 case FLOAT_EXPR:
2003 case FIX_TRUNC_EXPR:
2004 {
2005 tree src = TREE_OPERAND (exp, 0);
2006 tree src_type = TREE_TYPE (src);
2007 tree dst_type = TREE_TYPE (exp);
2008 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2009 if (target == IGNORE_TARGET || src_type == dst_type)
2010 break;
2011 if (TREE_CODE (dst_type) == POINTER_TYPE)
2012 {
2013 if (TREE_CODE (exp) == CONVERT_EXPR)
2014 {
2015 int index = find_class_constant (&state->cpool, TREE_TYPE (dst_type));
2016 RESERVE (3);
2017 OP1 (OPCODE_checkcast);
2018 OP2 (index);
2019 }
2020 }
2021 else /* Convert numeric types. */
2022 {
2023 int wide_src = TYPE_PRECISION (src_type) > 32;
2024 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2025 NOTE_POP (1 + wide_src);
2026 RESERVE (1);
2027 if (TREE_CODE (dst_type) == REAL_TYPE)
2028 {
2029 if (TREE_CODE (src_type) == REAL_TYPE)
2030 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2031 else if (TYPE_PRECISION (src_type) == 64)
2032 OP1 (OPCODE_l2f + wide_dst);
2033 else
2034 OP1 (OPCODE_i2f + wide_dst);
2035 }
2036 else /* Convert to integral type. */
2037 {
2038 if (TREE_CODE (src_type) == REAL_TYPE)
2039 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2040 else if (wide_dst)
2041 OP1 (OPCODE_i2l);
2042 else if (wide_src)
2043 OP1 (OPCODE_l2i);
2044 if (TYPE_PRECISION (dst_type) < 32)
2045 {
2046 RESERVE (1);
2047 /* Already converted to int, if needed. */
2048 if (TYPE_PRECISION (dst_type) <= 8)
2049 OP1 (OPCODE_i2b);
2050 else if (TREE_UNSIGNED (dst_type))
2051 OP1 (OPCODE_i2c);
2052 else
2053 OP1 (OPCODE_i2s);
2054 }
2055 }
2056 NOTE_PUSH (1 + wide_dst);
2057 }
2058 }
2059 break;
2060
2061 case CLEANUP_POINT_EXPR:
2062 {
2063 struct jcf_block *save_labeled_blocks = state->labeled_blocks;
2064 int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0));
2065 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2066 if (target != IGNORE_TARGET)
2067 abort ();
2068 while (state->labeled_blocks != save_labeled_blocks)
2069 {
2070 struct jcf_block *finished_label = NULL;
2071 tree return_link;
2072 tree exception_type = build_pointer_type (throwable_type_node);
2073 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
2074 exception_type);
2075 struct jcf_block *end_label = get_jcf_label_here (state);
2076 struct jcf_block *label = state->labeled_blocks;
2077 struct jcf_handler *handler;
2078 tree cleanup = label->u.labeled_block;
2079 state->labeled_blocks = label->next;
2080 state->num_finalizers--;
2081 if (can_complete)
2082 {
2083 finished_label = gen_jcf_label (state);
2084 emit_jsr (label, state);
2085 emit_goto (finished_label, state);
2086 if (! CAN_COMPLETE_NORMALLY (cleanup))
2087 can_complete = 0;
2088 }
2089 handler = alloc_handler (label->v.start_label, end_label, state);
2090 handler->type = NULL_TREE;
2091 localvar_alloc (exception_decl, state);
2092 NOTE_PUSH (1);
2093 emit_store (exception_decl, state);
2094 emit_jsr (label, state);
2095 emit_load (exception_decl, state);
2096 RESERVE (1);
2097 OP1 (OPCODE_athrow);
2098 NOTE_POP (1);
2099
2100 /* The finally block. */
2101 return_link = build_decl (VAR_DECL, NULL_TREE,
2102 return_address_type_node);
2103 define_jcf_label (label, state);
2104 NOTE_PUSH (1);
2105 localvar_alloc (return_link, state);
2106 emit_store (return_link, state);
2107 generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
2108 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2109 localvar_free (return_link, state);
2110 localvar_free (exception_decl, state);
2111 if (finished_label != NULL)
2112 define_jcf_label (finished_label, state);
2113 }
2114 }
2115 break;
2116
2117 case WITH_CLEANUP_EXPR:
2118 {
2119 struct jcf_block *label;
2120 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2121 label = gen_jcf_label (state);
2122 label->pc = PENDING_CLEANUP_PC;
2123 label->next = state->labeled_blocks;
2124 state->labeled_blocks = label;
2125 state->num_finalizers++;
2126 label->u.labeled_block = TREE_OPERAND (exp, 2);
2127 label->v.start_label = get_jcf_label_here (state);
2128 if (target != IGNORE_TARGET)
2129 abort ();
2130 }
2131 break;
2132
2133 case TRY_EXPR:
2134 {
2135 tree try_clause = TREE_OPERAND (exp, 0);
2136 tree finally = TREE_OPERAND (exp, 2);
2137 struct jcf_block *start_label = get_jcf_label_here (state);
2138 struct jcf_block *end_label; /* End of try clause. */
2139 struct jcf_block *finally_label; /* Finally subroutine. */
2140 struct jcf_block *finished_label = gen_jcf_label (state);
2141 tree clause = TREE_OPERAND (exp, 1);
2142 if (finally)
2143 {
2144 finally = FINALLY_EXPR_BLOCK (finally);
2145 finally_label = gen_jcf_label (state);
2146 }
2147 if (target != IGNORE_TARGET)
2148 abort ();
2149 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2150 end_label = get_jcf_label_here (state);
2151 if (CAN_COMPLETE_NORMALLY (try_clause))
2152 emit_goto (finished_label, state);
2153 for ( ; clause != NULL_TREE; clause = TREE_CHAIN (clause))
2154 {
2155 tree catch_clause = TREE_OPERAND (clause, 0);
2156 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2157 struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
2158 if (exception_decl == NULL_TREE)
2159 handler->type = NULL_TREE;
2160 else
2161 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2162 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2163 if (CAN_COMPLETE_NORMALLY (catch_clause))
2164 emit_goto (finished_label, state);
2165 }
2166 if (finally)
2167 {
2168 tree return_link;
2169 tree exception_type = build_pointer_type (throwable_type_node);
2170 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
2171 exception_type);
2172 struct jcf_handler *handler
2173 = alloc_handler (start_label, NULL_TREE, state);
2174 handler->end_label = handler->handler_label;
2175 handler->type = NULL_TREE;
2176 localvar_alloc (exception_decl, state);
2177 NOTE_PUSH (1);
2178 emit_store (exception_decl, state);
2179 emit_jsr (finally_label, state);
2180 emit_load (exception_decl, state);
2181 RESERVE (1);
2182 OP1 (OPCODE_athrow);
2183 NOTE_POP (1);
2184 localvar_free (exception_decl, state);
2185
2186 /* The finally block. */
2187 return_link = build_decl (VAR_DECL, NULL_TREE,
2188 return_address_type_node);
2189 define_jcf_label (finally_label, state);
2190 NOTE_PUSH (1);
2191 localvar_alloc (return_link, state);
2192 emit_store (return_link, state);
2193 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2194 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2195 localvar_free (return_link, state);
2196 }
2197 define_jcf_label (finished_label, state);
2198 if (finally)
2199 emit_jsr (finally_label, state);
2200 }
2201 break;
2202 case THROW_EXPR:
2203 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2204 RESERVE (1);
2205 OP1 (OPCODE_athrow);
2206 break;
2207 case NEW_ARRAY_INIT:
2208 {
2209 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2210 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2211 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2212 HOST_WIDE_INT length = java_array_type_length (array_type);
2213 if (target == IGNORE_TARGET)
2214 {
2215 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2216 generate_bytecode_insns (TREE_VALUE (values), target, state);
2217 break;
2218 }
2219 push_int_const (length, state);
2220 NOTE_PUSH (1);
2221 RESERVE (3);
2222 if (JPRIMITIVE_TYPE_P (element_type))
2223 {
2224 int atype = encode_newarray_type (element_type);
2225 OP1 (OPCODE_newarray);
2226 OP1 (atype);
2227 }
2228 else
2229 {
2230 int index = find_class_constant (&state->cpool,
2231 TREE_TYPE (element_type));
2232 OP1 (OPCODE_anewarray);
2233 OP2 (index);
2234 }
2235 offset = 0;
2236 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2237 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2238 {
2239 int save_SP = state->code_SP;
2240 emit_dup (1, 0, state);
2241 push_int_const (offset, state);
2242 NOTE_PUSH (1);
2243 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2244 RESERVE (1);
2245 OP1 (jopcode);
2246 state->code_SP = save_SP;
2247 }
2248 }
2249 break;
2250 case NEW_CLASS_EXPR:
2251 {
2252 tree class = TREE_TYPE (TREE_TYPE (exp));
2253 int need_result = target != IGNORE_TARGET;
2254 int index = find_class_constant (&state->cpool, class);
2255 RESERVE (4);
2256 OP1 (OPCODE_new);
2257 OP2 (index);
2258 if (need_result)
2259 OP1 (OPCODE_dup);
2260 NOTE_PUSH (1 + need_result);
2261 }
2262 /* ... fall though ... */
2263 case CALL_EXPR:
2264 {
2265 tree f = TREE_OPERAND (exp, 0);
2266 tree x = TREE_OPERAND (exp, 1);
2267 int save_SP = state->code_SP;
2268 int nargs;
2269 if (TREE_CODE (f) == ADDR_EXPR)
2270 f = TREE_OPERAND (f, 0);
2271 if (f == soft_newarray_node)
2272 {
2273 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2274 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2275 STACK_TARGET, state);
2276 RESERVE (2);
2277 OP1 (OPCODE_newarray);
2278 OP1 (type_code);
2279 break;
2280 }
2281 else if (f == soft_multianewarray_node)
2282 {
2283 int ndims;
2284 int idim;
2285 int index = find_class_constant (&state->cpool,
2286 TREE_TYPE (TREE_TYPE (exp)));
2287 x = TREE_CHAIN (x); /* Skip class argument. */
2288 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2289 for (idim = ndims; --idim >= 0; )
2290 {
2291 x = TREE_CHAIN (x);
2292 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2293 }
2294 RESERVE (4);
2295 OP1 (OPCODE_multianewarray);
2296 OP2 (index);
2297 OP1 (ndims);
2298 break;
2299 }
2300 else if (f == soft_anewarray_node)
2301 {
2302 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2303 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2304 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2305 RESERVE (3);
2306 OP1 (OPCODE_anewarray);
2307 OP2 (index);
2308 break;
2309 }
2310 else if (f == soft_monitorenter_node
2311 || f == soft_monitorexit_node
2312 || f == throw_node)
2313 {
2314 if (f == soft_monitorenter_node)
2315 op = OPCODE_monitorenter;
2316 else if (f == soft_monitorexit_node)
2317 op = OPCODE_monitorexit;
2318 else
2319 op = OPCODE_athrow;
2320 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2321 RESERVE (1);
2322 OP1 (op);
2323 NOTE_POP (1);
2324 break;
2325 }
2326 else if (exp == soft_exceptioninfo_call_node)
2327 {
2328 NOTE_PUSH (1); /* Pushed by exception system. */
2329 break;
2330 }
2331 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2332 {
2333 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2334 }
2335 nargs = state->code_SP - save_SP;
2336 state->code_SP = save_SP;
2337 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2338 NOTE_POP (1); /* Pop implicit this. */
2339 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2340 {
2341 int index = find_methodref_index (&state->cpool, f);
2342 int interface = 0;
2343 RESERVE (5);
2344 if (METHOD_STATIC (f))
2345 OP1 (OPCODE_invokestatic);
2346 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2347 || METHOD_PRIVATE (f))
2348 OP1 (OPCODE_invokespecial);
2349 else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f))))
2350 {
2351 OP1 (OPCODE_invokeinterface);
2352 interface = 1;
2353 }
2354 else
2355 OP1 (OPCODE_invokevirtual);
2356 OP2 (index);
2357 f = TREE_TYPE (TREE_TYPE (f));
2358 if (TREE_CODE (f) != VOID_TYPE)
2359 {
2360 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2361 if (target == IGNORE_TARGET)
2362 emit_pop (size, state);
2363 else
2364 NOTE_PUSH (size);
2365 }
2366 if (interface)
2367 {
2368 OP1 (nargs);
2369 OP1 (0);
2370 }
2371 break;
2372 }
2373 }
2374 /* fall through */
2375 notimpl:
2376 default:
2377 error("internal error - tree code not implemented: %s",
2378 tree_code_name [(int) TREE_CODE (exp)]);
2379 }
2380 }
2381
/* Resolve the branch relocations recorded for each basic block,
   assign final pc values to all blocks, and rewrite each block's
   bytecode chunk into its final form.  On return, STATE->code_length
   holds the total size of the method's code.  */

void
perform_relocations (state)
     struct jcf_partial *state;
{
  struct jcf_block *block;
  struct jcf_relocation *reloc;
  int pc;
  int shrink;

  /* Before we start, the pc field of each block is an upper bound on
     the block's start pc (it may be less, if previous blocks need less
     than their maximum).

     The minimum size of each block is in the block's chunk->size. */

  /* First, figure out the actual locations of each block. */
  pc = 0;
  shrink = 0;  /* Total bytes saved so far by shrinking branches.  */
  for (block = state->blocks; block != NULL; block = block->next)
    {
      int block_size = block->v.chunk->size;

      block->pc = pc;

      /* Optimize GOTO L; L: by getting rid of the redundant goto.
	 Assumes relocations are in reverse order.
	 NOTE(review): this dereferences block->next without a NULL
	 check; presumably the final block never ends with a goto to
	 its (nonexistent) successor -- confirm.  */
      reloc = block->u.relocations;
      while (reloc != NULL
	     && reloc->kind == OPCODE_goto_w
	     && reloc->label->pc == block->next->pc
	     && reloc->offset + 2 == block_size)
	{
	  reloc = reloc->next;
	  block->u.relocations = reloc;
	  block->v.chunk->size -= 3;
	  block_size -= 3;
	  shrink += 3;
	}

      for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
	{
	  if (reloc->kind == SWITCH_ALIGN_RELOC)
	    {
	      /* We assume this is the first relocation in this block,
		 so we know its final pc. */
	      int where = pc + reloc->offset;
	      int pad = ((where + 3) & ~3) - where; /* Pad to 4-byte boundary. */
	      block_size += pad;
	    }
	  else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
	    {
	      /* A 2-byte branch offset that may need widening.  DELTA
		 is a conservative estimate of the final displacement.  */
	      int delta = reloc->label->pc - (pc + reloc->offset - 1);
	      int expand = reloc->kind > 0 ? 2 : 5;

	      if (delta > 0)
		delta -= shrink;
	      if (delta >= -32768 && delta <= 32767)
		{
		  /* The short form fits; mark this relocation as a
		     plain 2-byte offset (kind -1).  */
		  shrink += expand;
		  reloc->kind = -1;
		}
	      else
		block_size += expand;
	    }
	}
      pc += block_size;
    }

  /* Second pass: rewrite each chunk at its final size, patching in
     branch offsets and expanding/aligning instructions as we go.  */
  for (block = state->blocks; block != NULL; block = block->next)
    {
      struct chunk *chunk = block->v.chunk;
      int old_size = chunk->size;
      int next_pc = block->next == NULL ? pc : block->next->pc;
      int new_size = next_pc - block->pc;
      unsigned char *new_ptr;
      unsigned char *old_buffer = chunk->data;
      unsigned char *old_ptr = old_buffer + old_size;
      if (new_size != old_size)
	{
	  chunk->data = (unsigned char *)
	    obstack_alloc (state->chunk_obstack, new_size);
	  chunk->size = new_size;
	}
      new_ptr = chunk->data + new_size;

      /* We do the relocations from back to front, because
	 the relocations are in reverse order. */
      for (reloc = block->u.relocations; ; reloc = reloc->next)
	{
	  /* new_ptr and old_ptr point into the old and new buffers,
	     respectively.  (If no relocations cause the buffer to
	     grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
	     The bytes at higher address have been copied and relocations
	     handled; those at lower addresses remain to process. */

	  /* Lower old index of piece to be copied with no relocation.
	     I.e. high index of the first piece that does need relocation. */
	  int start = reloc == NULL ? 0
	    : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
	    : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
	    ? reloc->offset + 4
	    : reloc->offset + 2;
	  int32 value;
	  int new_offset;
	  int n = (old_ptr - old_buffer) - start;
	  new_ptr -= n;
	  old_ptr -= n;
	  if (n > 0)
	    memcpy (new_ptr, old_ptr, n);
	  if (old_ptr == old_buffer)
	    break;

	  new_offset = new_ptr - chunk->data;
	  new_offset -= (reloc->kind == -1 ? 2 : 4);
	  if (reloc->kind == 0)
	    {
	      /* A 4-byte offset already present in the code.  */
	      old_ptr -= 4;
	      value = GET_u4 (old_ptr);
	    }
	  else if (reloc->kind == BLOCK_START_RELOC)
	    {
	      /* Offset relative to the start of this block.  */
	      old_ptr -= 4;
	      value = 0;
	      new_offset = 0;
	    }
	  else if (reloc->kind == SWITCH_ALIGN_RELOC)
	    {
	      /* Emit zero padding so the following switch data ends
		 up 4-byte aligned.  */
	      int where = block->pc + reloc->offset;
	      int pad = ((where + 3) & ~3) - where;
	      while (--pad >= 0)
		*--new_ptr = 0;
	      continue;
	    }
	  else
	    {
	      /* A 2-byte branch offset.  */
	      old_ptr -= 2;
	      value = GET_u2 (old_ptr);
	    }
	  /* Re-bias the offset against the final pc and write it back
	     big-endian: 2 bytes for kind -1, 4 bytes otherwise.  */
	  value += reloc->label->pc - (block->pc + new_offset);
	  *--new_ptr = (unsigned char) value;  value >>= 8;
	  *--new_ptr = (unsigned char) value;  value >>= 8;
	  if (reloc->kind != -1)
	    {
	      *--new_ptr = (unsigned char) value;  value >>= 8;
	      *--new_ptr = (unsigned char) value;
	    }
	  if (reloc->kind > BLOCK_START_RELOC)
	    {
	      /* Convert: OP TARGET to: OP_w TARGET;  (OP is goto or jsr). */
	      --old_ptr;
	      *--new_ptr = reloc->kind;
	    }
	  else if (reloc->kind < -1)
	    {
	      /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
	      --old_ptr;
	      *--new_ptr = OPCODE_goto_w;
	      *--new_ptr = 3;
	      *--new_ptr = 0;
	      *--new_ptr = - reloc->kind;
	    }
	}
      if (new_ptr != chunk->data)
	fatal ("internal error - perform_relocations");
    }
  state->code_length = pc;
}
2549
2550 void
2551 init_jcf_state (state, work)
2552 struct jcf_partial *state;
2553 struct obstack *work;
2554 {
2555 state->chunk_obstack = work;
2556 state->first = state->chunk = NULL;
2557 CPOOL_INIT (&state->cpool);
2558 BUFFER_INIT (&state->localvars);
2559 BUFFER_INIT (&state->bytecode);
2560 }
2561
2562 void
2563 init_jcf_method (state, method)
2564 struct jcf_partial *state;
2565 tree method;
2566 {
2567 state->current_method = method;
2568 state->blocks = state->last_block = NULL;
2569 state->linenumber_count = 0;
2570 state->first_lvar = state->last_lvar = NULL;
2571 state->lvar_count = 0;
2572 state->labeled_blocks = NULL;
2573 state->code_length = 0;
2574 BUFFER_RESET (&state->bytecode);
2575 BUFFER_RESET (&state->localvars);
2576 state->code_SP = 0;
2577 state->code_SP_max = 0;
2578 state->handlers = NULL;
2579 state->last_handler = NULL;
2580 state->num_handlers = 0;
2581 state->num_finalizers = 0;
2582 state->return_value_decl = NULL_TREE;
2583 }
2584
/* Release the resources held by STATE after a class file has been
   generated: finish the constant pool, then free every chunk
   allocated on the chunk obstack since init_jcf_state recorded
   STATE->first.  */

void
release_jcf_state (state)
     struct jcf_partial *state;
{
  CPOOL_FINISH (&state->cpool);
  obstack_free (state->chunk_obstack, state->first);
}
2592
2593 /* Generate and return a list of chunks containing the class CLAS
2594 in the .class file representation. The list can be written to a
2595 .class file using write_chunks. Allocate chunks from obstack WORK. */
2596
struct chunk *
generate_classfile (clas, state)
     tree clas;
     struct jcf_partial *state;
{
  struct chunk *cpool_chunk;
  char *source_file;
  char *ptr;
  int i;
  char *fields_count_ptr;   /* Back-patch location for fields_count.  */
  int fields_count = 0;
  char *methods_count_ptr;  /* Back-patch location for methods_count.  */
  int methods_count = 0;
  static tree SourceFile_node = NULL_TREE;
  tree part;
  /* Number of direct supertypes (superclass plus interfaces);
     java.lang.Object has none.  */
  int total_supers
    = clas == object_type_node ? 0
    : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));

  /* Emit the 8-byte class file header.  */
  ptr = append_chunk (NULL, 8, state);
  PUT4 (0xCafeBabe);		/* Magic number */
  PUT2 (3);			/* Minor version */
  PUT2 (45);			/* Major version */

  /* Reserve an empty chunk for the constant pool here; its contents
     are filled in at the very end, once all constants are known.  */
  append_chunk (NULL, 0, state);
  cpool_chunk = state->chunk;

  /* Next allocate the chunk containing access_flags through fields_count. */
  if (clas == object_type_node)
    i = 10;
  else
    i = 8 + 2 * total_supers;
  ptr = append_chunk (NULL, i, state);
  i = get_access_flags (TYPE_NAME (clas)) | ACC_SUPER;
  PUT2 (i); /* access_flags */
  i = find_class_constant (&state->cpool, clas);  PUT2 (i);  /* this_class */
  if (clas == object_type_node)
    {
      PUT2(0);  /* super_class */
      PUT2(0);  /* interfaces_count */
    }
  else
    {
      tree basetypes = TYPE_BINFO_BASETYPES (clas);
      tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
      int j = find_class_constant (&state->cpool, base);
      PUT2 (j);  /* super_class */
      PUT2 (total_supers - 1);  /* interfaces_count */
      /* Basetype 0 is the superclass; the rest are interfaces.  */
      for (i = 1;  i < total_supers;  i++)
	{
	  base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
	  j = find_class_constant (&state->cpool, base);
	  PUT2 (j);
	}
    }
  /* PTR now addresses the last 2 bytes of the chunk just filled,
     which hold fields_count; remember the spot for back-patching.  */
  fields_count_ptr = ptr;

  for (part = TYPE_FIELDS (clas);  part;  part = TREE_CHAIN (part))
    {
      int have_value;
      /* Skip compiler-generated fields.  */
      if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
	continue;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, DECL_NAME (part));  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (TREE_TYPE (part)));
      PUT2(i);
      /* Static fields with an initializer get a ConstantValue attribute.  */
      have_value = DECL_INITIAL (part) != NULL_TREE && FIELD_STATIC (part);
      PUT2 (have_value);  /* attributes_count */
      if (have_value)
	{
	  tree init = DECL_INITIAL (part);
	  static tree ConstantValue_node = NULL_TREE;
	  ptr = append_chunk (NULL, 8, state);
	  if (ConstantValue_node == NULL_TREE)
	    ConstantValue_node = get_identifier ("ConstantValue");
	  i = find_utf8_constant (&state->cpool, ConstantValue_node);
	  PUT2 (i);  /* attribute_name_index */
	  PUT4 (2);  /* attribute_length */
	  i = find_constant_index (init, state);  PUT2 (i);
	}
      fields_count++;
    }
  ptr = fields_count_ptr;  PUT2 (fields_count);  /* Back-patch.  */

  ptr = methods_count_ptr = append_chunk (NULL, 2, state);
  PUT2 (0);  /* Placeholder; back-patched to methods_count below.  */

  for (part = TYPE_METHODS (clas);  part;  part = TREE_CHAIN (part))
    {
      struct jcf_block *block;
      tree function_body = DECL_FUNCTION_BODY (part);
      tree body = function_body == NULL_TREE ? NULL_TREE
	: BLOCK_EXPR_BODY (function_body);
      /* Constructors are emitted under the JVM name "<init>".  */
      tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
	: DECL_NAME (part);
      tree type = TREE_TYPE (part);
      tree save_function = current_function_decl;
      current_function_decl = part;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (type));
      PUT2 (i);
      PUT2 (body != NULL_TREE ? 1 : 0);   /* attributes_count */
      if (body != NULL_TREE)
	{
	  /* Emit the Code attribute for this method.  */
	  int code_attributes_count = 0;
	  static tree Code_node = NULL_TREE;
	  tree t;
	  char *attr_len_ptr;
	  struct jcf_handler *handler;
	  if (Code_node == NULL_TREE)
	    Code_node = get_identifier ("Code");
	  ptr = append_chunk (NULL, 14, state);
	  i = find_utf8_constant (&state->cpool, Code_node);  PUT2 (i);
	  /* attribute_length through code_length are back-patched once
	     the bytecode has been generated and relocated.  */
	  attr_len_ptr = ptr;
	  init_jcf_method (state, part);
	  get_jcf_label_here (state);  /* Force a first block. */
	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
	    localvar_alloc (t, state);
	  generate_bytecode_insns (body, IGNORE_TARGET, state);
	  if (CAN_COMPLETE_NORMALLY (body))
	    {
	      /* Only a void method may fall off the end of its body;
		 append the implicit return.  */
	      if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
		abort();
	      RESERVE (1);
	      OP1 (OPCODE_return);
	    }
	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
	    localvar_free (t, state);
	  if (state->return_value_decl != NULL_TREE)
	    localvar_free (state->return_value_decl, state);
	  finish_jcf_block (state);
	  perform_relocations (state);

	  ptr = attr_len_ptr;
	  /* Code attribute size: fixed header (8) + code + exception
	     table (4 + 8 per handler) + optional sub-attributes.  */
	  i = 8 + state->code_length + 4 + 8 * state->num_handlers;
	  if (state->linenumber_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 4 * state->linenumber_count;
	    }
	  if (state->lvar_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 10 * state->lvar_count;
	    }
	  PUT4 (i); /* attribute_length */
	  PUT2 (state->code_SP_max);  /* max_stack */
	  PUT2 (localvar_max);  /* max_locals */
	  PUT4 (state->code_length);

	  /* Emit the exception table. */
	  ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
	  PUT2 (state->num_handlers);  /* exception_table_length */
	  handler = state->handlers;
	  for (;  handler != NULL;  handler = handler->next)
	    {
	      int type_index;
	      PUT2 (handler->start_label->pc);
	      PUT2 (handler->end_label->pc);
	      PUT2 (handler->handler_label->pc);
	      if (handler->type == NULL_TREE)
		type_index = 0;  /* Zero catch_type means catch-all.  */
	      else
		type_index = find_class_constant (&state->cpool,
						  handler->type);
	      PUT2 (type_index);
	    }

	  ptr = append_chunk (NULL, 2, state);
	  PUT2 (code_attributes_count);

	  /* Write the LineNumberTable attribute. */
	  if (state->linenumber_count > 0)
	    {
	      static tree LineNumberTable_node = NULL_TREE;
	      ptr = append_chunk (NULL, 8 + 4 * state->linenumber_count, state);
	      if (LineNumberTable_node == NULL_TREE)
		LineNumberTable_node = get_identifier ("LineNumberTable");
	      i = find_utf8_constant (&state->cpool, LineNumberTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2+4*state->linenumber_count;  PUT4(i); /* attribute_length */
	      i = state->linenumber_count;  PUT2 (i);
	      for (block = state->blocks;  block != NULL;  block = block->next)
		{
		  int line = block->linenumber;
		  if (line > 0)
		    {
		      PUT2 (block->pc);
		      PUT2 (line);
		    }
		}
	    }

	  /* Write the LocalVariableTable attribute. */
	  if (state->lvar_count > 0)
	    {
	      static tree LocalVariableTable_node = NULL_TREE;
	      struct localvar_info *lvar = state->first_lvar;
	      ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
	      if (LocalVariableTable_node == NULL_TREE)
		LocalVariableTable_node = get_identifier("LocalVariableTable");
	      i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2 + 10 * state->lvar_count;  PUT4 (i);  /* attribute_length */
	      i = state->lvar_count;  PUT2 (i);
	      for ( ;  lvar != NULL;  lvar = lvar->next)
		{
		  tree name = DECL_NAME (lvar->decl);
		  tree sig = build_java_signature (TREE_TYPE (lvar->decl));
		  i = lvar->start_label->pc;  PUT2 (i);
		  i = lvar->end_label->pc - i;  PUT2 (i);  /* length */
		  i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
		  i = find_utf8_constant (&state->cpool, sig);  PUT2 (i);
		  i = DECL_LOCAL_INDEX (lvar->decl);  PUT2 (i);
		}
	    }
	}
      methods_count++;
      current_function_decl = save_function;
    }
  ptr = methods_count_ptr;  PUT2 (methods_count);  /* Back-patch.  */

  /* Strip any leading directory components from the source file name
     (the SourceFile attribute wants a bare file name).  */
  source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
  for (ptr = source_file;  ;  ptr++)
    {
      char ch = *ptr;
      if (ch == '\0')
	break;
      if (ch == '/' || ch == '\\')
	source_file = ptr+1;
    }
  ptr = append_chunk (NULL, 10, state);
  PUT2 (1);  /* attributes_count */

  /* generate the SourceFile attribute. */
  if (SourceFile_node == NULL_TREE)
    SourceFile_node = get_identifier ("SourceFile");
  i = find_utf8_constant (&state->cpool, SourceFile_node);
  PUT2 (i);  /* attribute_name_index */
  PUT4 (2);  /* attribute_length */
  i = find_utf8_constant (&state->cpool, get_identifier (source_file));
  PUT2 (i);

  /* Now finally generate the contents of the constant pool chunk. */
  i = count_constant_pool_bytes (&state->cpool);
  ptr = obstack_alloc (state->chunk_obstack, i);
  cpool_chunk->data = ptr;
  cpool_chunk->size = i;
  write_constant_pool (&state->cpool, ptr, i);
  return state->first;
}
2851
/* Return a freshly-allocated file name for writing class CLAS,
   creating any missing intermediate directories.  The result is
   either "<base-dir>/<package-dirs>/<Class>.class" when -d was given,
   or "<source-dir>/<Class>.class" otherwise.  Caller frees.  */

static char *
make_class_file_name (clas)
     tree clas;
{
  char *cname, *dname, *slash, *r;
  struct stat sb;

  /* Translate the class name, mapping `.' to the directory separator
     and appending ".class".  */
  cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
						"", '.', DIR_SEPARATOR,
						".class"));
  if (jcf_write_base_directory == NULL)
    {
      /* Make sure we put the class file into the .java file's
	 directory, and not into some subdirectory thereof. */
      char *t;
      dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
      slash = strrchr (dname, DIR_SEPARATOR);
      if (! slash)
	{
	  /* Source file has no directory part; use ".".  */
	  dname = ".";
	  slash = dname + 1;
	}
      t = strrchr (cname, DIR_SEPARATOR);
      if (t)
	cname = t + 1;
    }
  else
    {
      dname = jcf_write_base_directory;
      slash = dname + strlen (dname);
    }

  /* Build "<dname>/<cname>"; the +2 covers the separator and the
     trailing NUL.  SLASH - DNAME is the directory-part length.  */
  r = xmalloc (slash - dname + strlen (cname) + 2);
  strncpy (r, dname, slash - dname);
  r[slash - dname] = DIR_SEPARATOR;
  strcpy (&r[slash - dname + 1], cname);

  /* We try to make new directories when we need them.  We only do
     this for directories which "might not" exist.  For instance, we
     assume the `-d' directory exists, but we don't assume that any
     subdirectory below it exists.  It might be worthwhile to keep
     track of which directories we've created to avoid gratuitous
     stat()s. */
  dname = r + (slash - dname) + 1;
  while (1)
    {
      /* Walk R one directory component at a time, temporarily
	 NUL-terminating at each separator to stat/mkdir the prefix.  */
      cname = strchr (dname, DIR_SEPARATOR);
      if (cname == NULL)
	break;
      *cname = '\0';
      if (stat (r, &sb) == -1)
	{
	  /* Try to make it. */
	  if (mkdir (r, 0755) == -1)
	    {
	      fatal ("failed to create directory `%s'", r);
	      /* NOTE(review): fatal presumably does not return, which
		 would make this cleanup unreachable -- confirm.  */
	      free (r);
	      return NULL;
	    }
	}
      *cname = DIR_SEPARATOR;
      /* Skip consecutive separators. */
      for (dname = cname + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
	;
    }

  return r;
}
2920
/* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
   The output .class file name is make_class_file_name(CLAS).  */
2923
2924 void
2925 write_classfile (clas)
2926 tree clas;
2927 {
2928 struct obstack *work = &temporary_obstack;
2929 struct jcf_partial state[1];
2930 char *class_file_name = make_class_file_name (clas);
2931 struct chunk *chunks;
2932
2933 if (class_file_name != NULL)
2934 {
2935 FILE* stream = fopen (class_file_name, "wb");
2936 if (stream == NULL)
2937 fatal ("failed to open `%s' for writing", class_file_name);
2938 jcf_dependency_add_target (class_file_name);
2939 init_jcf_state (state, work);
2940 chunks = generate_classfile (clas, state);
2941 write_chunks (stream, chunks);
2942 if (fclose (stream))
2943 fatal ("failed to close after writing `%s'", class_file_name);
2944 free (class_file_name);
2945 }
2946 release_jcf_state (state);
2947 }
2948
2949 /* TODO:
2950 string concatenation
2951 synchronized statement
2952 */
This page took 0.174807 seconds and 6 git commands to generate.