]> gcc.gnu.org Git - gcc.git/blob - gcc/java/jcf-write.c
check-init.c (check_bool2_init, [...]): Handle TRUTH_AND_EXPR, TRUTH_OR_EXPR, and...
[gcc.git] / gcc / java / jcf-write.c
1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
19
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "tree.h"
27 #include "java-tree.h"
28 #include "jcf.h"
29 #include "obstack.h"
30 #undef AND
31 #include "rtl.h"
32 #include "java-opcodes.h"
33 #include "parse.h" /* for BLOCK_EXPR_BODY */
34 #include "buffer.h"
35 #include "toplev.h"
36
37 #ifndef DIR_SEPARATOR
38 #define DIR_SEPARATOR '/'
39 #endif
40
41 extern struct obstack temporary_obstack;
42
43 /* Base directory in which `.class' files should be written.
44 NULL means to put the file into the same directory as the
45 corresponding .java file. */
46 char *jcf_write_base_directory = NULL;
47
/* Make sure bytecode.data is big enough for at least N more bytes.
   NOTE: may call buffer_grow, which can reallocate bytecode.data. */

#define RESERVE(N) \
  do { CHECK_OP(state); \
    if (state->bytecode.ptr + (N) > state->bytecode.limit) \
      buffer_grow (&state->bytecode, N); } while (0)

/* Add a 1-byte instruction/operand I to bytecode.data,
   assuming space has already been RESERVE'd. */

#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))

/* Like OP1, but I is a 2-byte big endian integer. */

#define OP2(I) \
  do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Like OP1, but I is a 4-byte big endian integer. */

#define OP4(I) \
  do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
       OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Macro to call each time we push I words on the JVM stack.
   Maintains the running depth (code_SP) and its high-water mark
   (code_SP_max). */

#define NOTE_PUSH(I) \
  do { state->code_SP += (I); \
    if (state->code_SP > state->code_SP_max) \
      state->code_SP_max = state->code_SP; } while (0)

/* Macro to call each time we pop I words from the JVM stack.
   A negative resulting depth indicates a bug in the bytecode generator. */

#define NOTE_POP(I) \
  do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
82
/* A chunk or segment of a .class file.
   Chunks form a singly-linked list; write_chunks walks it in order. */

struct chunk
{
  /* The next segment of this .class file. */
  struct chunk *next;

  /* The actual data in this segment to be written to the .class file. */
  unsigned char *data;

  /* The size of the segment to be written to the .class file. */
  int size;
};
96
/* Special (negative) `pc' values marking labels that have not yet been
   assigned a bytecode position; see struct jcf_block below. */
#define PENDING_CLEANUP_PC (-3)
#define PENDING_EXIT_PC (-2)
#define UNDEFINED_PC (-1)

/* Each "block" represents a label plus the bytecode instructions following.
   There may be branches out of the block, but no incoming jumps, except
   to the beginning of the block.

   If (pc < 0), the jcf_block is not an actual block (i.e. it has no
   associated code yet), but it is an undefined label.
*/

struct jcf_block
{
  /* For blocks that are defined, the next block (in pc order).
     For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
     or a cleanup expression (from a WITH_CLEANUP_EXPR),
     this is the next (outer) such end label, in a stack headed by
     labeled_blocks in jcf_partial. */
  struct jcf_block *next;

  /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
     pc is PENDING_EXIT_PC.
     In the not-yet-defined end label for pending cleanup subroutine,
     pc is PENDING_CLEANUP_PC.
     For other not-yet-defined labels, pc is UNDEFINED_PC.

     If the label has been defined:
     Until perform_relocations is finished, this is the maximum possible
     value of the bytecode offset at the beginning of this block.
     After perform_relocations, it is the actual offset (pc). */
  int pc;

  /* Source line number for this block, or -1 if none recorded
     (see gen_jcf_label and put_linenumber). */
  int linenumber;

  /* After finish_jcf_block is called, the actual instructions contained
     in this block.  Before then NULL, and the instructions are in
     state->bytecode. */
  union {
    struct chunk *chunk;

    /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
       covered by the cleanup. */
    struct jcf_block *start_label;
  } v;

  union {
    /* Set of relocations (in reverse offset order) for this block. */
    struct jcf_relocation *relocations;

    /* If this block is that of the not-yet-defined end label of
       a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
       If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
    tree labeled_block;
  } u;
};
152
/* A "relocation" type for the 0-3 bytes of padding at the start
   of a tableswitch or a lookupswitch. */
#define SWITCH_ALIGN_RELOC 4

/* A relocation type for the labels in a tableswitch or a lookupswitch;
   these are relative to the start of the instruction, but (due to
   the 0-3 bytes of padding), we don't know the offset before relocation. */
#define BLOCK_START_RELOC 1

/* One pending branch/offset fixup within a block; resolved once final
   block positions are known. */

struct jcf_relocation
{
  /* Next relocation for the current jcf_block. */
  struct jcf_relocation *next;

  /* The (byte) offset within the current block that needs to be relocated. */
  HOST_WIDE_INT offset;

  /* 0 if offset is a 4-byte relative offset.
     4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
     for proper alignment in tableswitch/lookupswitch instructions.
     1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
     to the start of the containing block.
     -1 if offset is a 2-byte relative offset.
     < -1 if offset is the address of an instruction with a 2-byte offset
     that does not have a corresponding 4-byte offset version, in which
     case the absolute value of kind is the inverted opcode.
     > 4 if offset is the address of an instruction (such as jsr) with a
     2-byte offset that does have a corresponding 4-byte offset version,
     in which case kind is the opcode of the 4-byte version (such as jsr_w). */
  int kind;

  /* The label the relocation wants to actually transfer to. */
  struct jcf_block *label;
};
187
/* State for single catch clause. */

struct jcf_handler
{
  /* Next handler of the same method (chain headed by state->handlers). */
  struct jcf_handler *next;

  /* The range of code protected by this handler
     (see alloc_handler: exceptions caught in (start_label, end_label)). */
  struct jcf_block *start_label;
  struct jcf_block *end_label;
  /* Where control transfers when the handler fires. */
  struct jcf_block *handler_label;

  /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
  tree type;
};
201
/* State for the current switch statement. */

struct jcf_switch_state
{
  /* The enclosing (outer) switch, if any (stack headed by state->sw_state). */
  struct jcf_switch_state *prev;
  /* Target when no case matches. */
  struct jcf_block *default_label;

  /* Relocations for the case labels seen so far, and how many there are. */
  struct jcf_relocation *cases;
  int num_cases;
  /* Smallest and largest case values seen so far.
     NOTE(review): presumably used to choose tableswitch vs. lookupswitch --
     confirm where the switch is emitted. */
  HOST_WIDE_INT min_case, max_case;
};
213
/* This structure is used to contain the various pieces that will
   become a .class file. */

struct jcf_partial
{
  /* Head of the chunk list to be written out, and the chunk currently
     being appended to (the tail). */
  struct chunk *first;
  struct chunk *chunk;
  /* Obstack from which chunks, blocks, handlers etc. are allocated. */
  struct obstack *chunk_obstack;
  tree current_method;

  /* List of basic blocks for the current method. */
  struct jcf_block *blocks;
  struct jcf_block *last_block;

  /* Chain of local-variable debug records (see localvar_alloc),
     and its length. */
  struct localvar_info *first_lvar;
  struct localvar_info *last_lvar;
  int lvar_count;

  /* The constant pool for the class being built. */
  CPool cpool;

  /* Number of line-number entries recorded so far (see put_linenumber). */
  int linenumber_count;

  /* Until perform_relocations, this is an upper bound on the number
     of bytes (so far) in the instructions for the current method. */
  int code_length;

  /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
  struct jcf_block *labeled_blocks;

  /* The current stack size (stack pointer) in the current method. */
  int code_SP;

  /* The largest extent of stack size (stack pointer) in the current method. */
  int code_SP_max;

  /* Contains a mapping from local var slot number to localvar_info. */
  struct buffer localvars;

  /* The buffer allocated for bytecode for the current jcf_block. */
  struct buffer bytecode;

  /* Chain of exception handlers for the current method. */
  struct jcf_handler *handlers;

  /* Last element in handlers chain. */
  struct jcf_handler *last_handler;

  /* Number of exception handlers for the current method. */
  int num_handlers;

  /* Number of finalizers we are currently nested within. */
  int num_finalizers;

  /* If non-NULL, use this for the return value. */
  tree return_value_decl;

  /* Information about the current switch statement. */
  struct jcf_switch_state *sw_state;
};

/* Generate bytecode for a tree EXP; the int argument selects what to do
   with the value (e.g. STACK_TARGET).  Defined later in this file. */
static void generate_bytecode_insns PROTO ((tree, int, struct jcf_partial *));
275
/* Utility macros for appending (big-endian) data to a buffer.
   We assume a local variable 'ptr' points into where we want to
   write next, and we assume enough space has been allocated. */

#ifdef ENABLE_CHECKING
/* Checking builds: verify that writing I bytes at PTR stays inside the
   current chunk; die (fatal) rather than corrupt memory. */
int
CHECK_PUT(ptr, state, i)
     void *ptr;
     struct jcf_partial *state;
     int i;
{
  if (ptr < state->chunk->data
      || (char*)ptr + i > state->chunk->data + state->chunk->size)
    fatal ("internal error - CHECK_PUT failed");
  return 0;
}
#else
/* Non-checking builds: the check compiles away entirely. */
#define CHECK_PUT(PTR, STATE, I) ((void)0)
#endif

/* Append a 1/2/4-byte big-endian value, or N raw bytes, advancing `ptr'. */
#define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
#define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
#define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
#define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
300
301 \f
302 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
303 Set the data and size fields to DATA and SIZE, respectively.
304 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
305
306 struct chunk *
307 alloc_chunk (last, data, size, work)
308 struct chunk *last;
309 unsigned char *data;
310 int size;
311 struct obstack *work;
312 {
313 struct chunk *chunk = (struct chunk *)
314 obstack_alloc (work, sizeof(struct chunk));
315
316 if (data == NULL && size > 0)
317 data = obstack_alloc (work, size);
318
319 chunk->next = NULL;
320 chunk->data = data;
321 chunk->size = size;
322 if (last != NULL)
323 last->next = chunk;
324 return chunk;
325 }
326
#ifdef ENABLE_CHECKING
/* Checking builds: verify the bytecode buffer has not been overrun;
   fatal error otherwise.  Returns 0 so it can be used in expressions
   (see OP1).  Compiles away in non-checking builds. */
int
CHECK_OP(struct jcf_partial *state)
{
  if (state->bytecode.ptr > state->bytecode.limit)
    {
      fatal("internal error - CHECK_OP failed");
    }
  return 0;
}
#else
#define CHECK_OP(STATE) ((void)0)
#endif
340
341 unsigned char *
342 append_chunk (data, size, state)
343 unsigned char *data;
344 int size;
345 struct jcf_partial *state;
346 {
347 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
348 if (state->first == NULL)
349 state->first = state->chunk;
350 return state->chunk->data;
351 }
352
/* Like append_chunk, but copy SIZE bytes from DATA into a freshly
   allocated buffer instead of taking ownership of DATA. */

void
append_chunk_copy (data, size, state)
     unsigned char *data;
     int size;
     struct jcf_partial *state;
{
  unsigned char *dest = append_chunk (NULL, size, state);
  memcpy (dest, data, size);
}
362 \f
363 struct jcf_block *
364 gen_jcf_label (state)
365 struct jcf_partial *state;
366 {
367 struct jcf_block *block = (struct jcf_block *)
368 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
369 block->next = NULL;
370 block->linenumber = -1;
371 block->pc = UNDEFINED_PC;
372 return block;
373 }
374
/* Close off the block currently being generated: move the bytecode
   accumulated in state->bytecode into a chunk owned by the block, and
   raise state->code_length to an upper bound on the method's code size
   (relocations may still lengthen instructions later). */

void
finish_jcf_block (state)
     struct jcf_partial *state;
{
  struct jcf_block *block = state->last_block;
  struct jcf_relocation *reloc;
  int code_length = BUFFER_LENGTH (&state->bytecode);
  int pc = state->code_length;
  /* Snapshot the bytecode buffer into a chunk, then reset it for the
     next block. */
  append_chunk_copy (state->bytecode.data, code_length, state);
  BUFFER_RESET (&state->bytecode);
  block->v.chunk = state->chunk;

  /* Calculate code_length to the maximum value it can have. */
  pc += block->v.chunk->size;
  for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
    {
      int kind = reloc->kind;
      if (kind == SWITCH_ALIGN_RELOC)
	pc += 3;	/* tableswitch/lookupswitch padding: up to 3 bytes. */
      else if (kind > BLOCK_START_RELOC)
	pc += 2; /* 2-byte offset may grow to 4-byte offset */
      else if (kind < -1)
	pc += 5; /* May need to add a goto_w. */
    }
  state->code_length = pc;
}
401
402 void
403 define_jcf_label (label, state)
404 struct jcf_block *label;
405 struct jcf_partial *state;
406 {
407 if (state->last_block != NULL)
408 finish_jcf_block (state);
409 label->pc = state->code_length;
410 if (state->blocks == NULL)
411 state->blocks = label;
412 else
413 state->last_block->next = label;
414 state->last_block = label;
415 label->next = NULL;
416 label->u.relocations = NULL;
417 }
418
419 struct jcf_block *
420 get_jcf_label_here (state)
421 struct jcf_partial *state;
422 {
423 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
424 return state->last_block;
425 else
426 {
427 struct jcf_block *label = gen_jcf_label (state);
428 define_jcf_label (label, state);
429 return label;
430 }
431 }
432
433 /* Note a line number entry for the current PC and given LINE. */
434
435 void
436 put_linenumber (line, state)
437 int line;
438 struct jcf_partial *state;
439 {
440 struct jcf_block *label = get_jcf_label_here (state);
441 if (label->linenumber > 0)
442 {
443 label = gen_jcf_label (state);
444 define_jcf_label (label, state);
445 }
446 label->linenumber = line;
447 state->linenumber_count++;
448 }
449
450 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
451 in the range (START_LABEL, END_LABEL). */
452
453 static struct jcf_handler *
454 alloc_handler (start_label, end_label, state)
455 struct jcf_block *start_label;
456 struct jcf_block *end_label;
457 struct jcf_partial *state;
458 {
459 struct jcf_handler *handler = (struct jcf_handler *)
460 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
461 handler->start_label = start_label;
462 handler->end_label = end_label;
463 handler->handler_label = get_jcf_label_here (state);
464 if (state->handlers == NULL)
465 state->handlers = handler;
466 else
467 state->last_handler->next = handler;
468 state->last_handler = handler;
469 handler->next = NULL;
470 state->num_handlers++;
471 return handler;
472 }
473
474 \f
/* The index of jvm local variable allocated for this DECL.
   This is assigned when generating .class files;
   contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
   (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */

#define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)

/* Bookkeeping for one allocated JVM local-variable slot. */

struct localvar_info
{
  /* Next entry in the first_lvar/last_lvar debug-info chain. */
  struct localvar_info *next;

  /* The declaration occupying this slot. */
  tree decl;
  /* The extent of the variable's scope, recorded by localvar_alloc
     and localvar_free respectively. */
  struct jcf_block *start_label;
  struct jcf_block *end_label;
};

/* The slot-number -> localvar_info map (lives in state->localvars),
   and the number of slots currently in the map. */
#define localvar_buffer ((struct localvar_info**) state->localvars.data)
#define localvar_max \
  ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
494
/* Allocate a JVM local-variable slot for DECL (two adjacent slots if its
   type is wide), record the current location as the start of its scope,
   and stash the slot number in DECL_LOCAL_INDEX (DECL). */

void
localvar_alloc (decl, state)
     tree decl;
     struct jcf_partial *state;
{
  struct jcf_block *start_label = get_jcf_label_here (state);
  int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
  int index;
  register struct localvar_info *info;
  register struct localvar_info **ptr = localvar_buffer;
  register struct localvar_info **limit
    = (struct localvar_info**) state->localvars.ptr;
  /* First-fit scan for a free slot (a free adjacent pair if WIDE). */
  for (index = 0; ptr < limit; index++, ptr++)
    {
      if (ptr[0] == NULL
	  && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
	break;
    }
  if (ptr == limit)
    {
      /* No free slot: grow the map by two entries (enough even for a
	 wide variable).  buffer_grow may move the data, so recompute
	 ptr from the (stable) index. */
      buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
      ptr = (struct localvar_info**) state->localvars.data + index;
      state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
    }
  info = (struct localvar_info *)
    obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
  ptr[0] = info;
  if (wide)
    ptr[1] = (struct localvar_info *)(~0);	/* Sentinel for 2nd word. */
  DECL_LOCAL_INDEX (decl) = index;
  info->decl = decl;
  info->start_label = start_label;

  if (DECL_NAME (decl) != NULL_TREE)
    {
      /* Generate debugging info. */
      info->next = NULL;
      if (state->last_lvar != NULL)
	state->last_lvar->next = info;
      else
	state->first_lvar = info;
      state->last_lvar = info;
      state->lvar_count++;
    }
}
540
541 int
542 localvar_free (decl, state)
543 tree decl;
544 struct jcf_partial *state;
545 {
546 struct jcf_block *end_label = get_jcf_label_here (state);
547 int index = DECL_LOCAL_INDEX (decl);
548 register struct localvar_info **ptr = &localvar_buffer [index];
549 register struct localvar_info *info = *ptr;
550 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
551
552 info->end_label = end_label;
553
554 if (info->decl != decl)
555 abort ();
556 ptr[0] = NULL;
557 if (wide)
558 {
559 if (ptr[1] != (struct localvar_info *)(~0))
560 abort ();
561 ptr[1] = NULL;
562 }
563 }
564
565 \f
/* Values for the `target' argument of generate_bytecode_insns:
   leave the expression's value on the JVM stack, or discard it. */
#define STACK_TARGET 1
#define IGNORE_TARGET 2
568
569 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
570 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
571
572 int
573 get_access_flags (decl)
574 tree decl;
575 {
576 int flags = 0;
577 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
578 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
579 flags |= ACC_PUBLIC;
580 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
581 flags |= ACC_FINAL;
582 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
583 {
584 if (TREE_PROTECTED (decl))
585 flags |= ACC_PROTECTED;
586 if (TREE_PRIVATE (decl))
587 flags |= ACC_PRIVATE;
588 }
589 else if (TREE_CODE (decl) == TYPE_DECL)
590 {
591 if (CLASS_SUPER (decl))
592 flags |= ACC_SUPER;
593 if (CLASS_ABSTRACT (decl))
594 flags |= ACC_ABSTRACT;
595 if (CLASS_INTERFACE (decl))
596 flags |= ACC_INTERFACE;
597 }
598 else
599 fatal ("internal error - bad argument to get_access_flags");
600 if (TREE_CODE (decl) == FUNCTION_DECL)
601 {
602 if (METHOD_NATIVE (decl))
603 flags |= ACC_NATIVE;
604 if (METHOD_STATIC (decl))
605 flags |= ACC_STATIC;
606 if (METHOD_SYNCHRONIZED (decl))
607 flags |= ACC_SYNCHRONIZED;
608 if (METHOD_ABSTRACT (decl))
609 flags |= ACC_ABSTRACT;
610 }
611 if (isfield)
612 {
613 if (FIELD_STATIC (decl))
614 flags |= ACC_STATIC;
615 if (FIELD_VOLATILE (decl))
616 flags |= ACC_VOLATILE;
617 if (FIELD_TRANSIENT (decl))
618 flags |= ACC_TRANSIENT;
619 }
620 return flags;
621 }
622
623 /* Write the list of segments starting at CHUNKS to STREAM. */
624
625 void
626 write_chunks (stream, chunks)
627 FILE* stream;
628 struct chunk *chunks;
629 {
630 for (; chunks != NULL; chunks = chunks->next)
631 fwrite (chunks->data, chunks->size, 1, stream);
632 }
633
634 /* Push a 1-word constant in the constant pool at the given INDEX.
635 (Caller is responsible for doing NOTE_PUSH.) */
636
637 static void
638 push_constant1 (index, state)
639 int index;
640 struct jcf_partial *state;
641 {
642 RESERVE (3);
643 if (index < 256)
644 {
645 OP1 (OPCODE_ldc);
646 OP1 (index);
647 }
648 else
649 {
650 OP1 (OPCODE_ldc_w);
651 OP2 (index);
652 }
653 }
654
/* Push a 2-word constant in the constant pool at the given INDEX.
   (Caller is responsible for doing NOTE_PUSH.)
   ldc2_w is the only form; it always takes a 2-byte index. */

static void
push_constant2 (index, state)
     int index;
     struct jcf_partial *state;
{
  RESERVE (3);
  OP1 (OPCODE_ldc2_w);
  OP2 (index);
}
667
668 /* Push 32-bit integer constant on VM stack.
669 Caller is responsible for doing NOTE_PUSH. */
670
671 static void
672 push_int_const (i, state)
673 HOST_WIDE_INT i;
674 struct jcf_partial *state;
675 {
676 RESERVE(3);
677 if (i >= -1 && i <= 5)
678 OP1(OPCODE_iconst_0 + i);
679 else if (i >= -128 && i < 128)
680 {
681 OP1(OPCODE_bipush);
682 OP1(i);
683 }
684 else if (i >= -32768 && i < 32768)
685 {
686 OP1(OPCODE_sipush);
687 OP2(i);
688 }
689 else
690 {
691 i = find_constant1 (&state->cpool, CONSTANT_Integer, i & 0xFFFFFFFF);
692 push_constant1 (i, state);
693 }
694 }
695
/* Find or allocate a CONSTANT_Long pool entry for the 64-bit value whose
   low/high halves are LO and HI, returning its constant-pool index.
   (HOST_WIDE_INT may be wider than 32 bits, hence the masking.) */

static int
find_constant_wide (lo, hi, state)
     HOST_WIDE_INT lo, hi;
     struct jcf_partial *state;
{
  HOST_WIDE_INT w1, w2;
  /* Shift right by 32 bits so w1 receives the high 32-bit word. */
  lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
  return find_constant2 (&state->cpool, CONSTANT_Long,
			 w1 & 0xFFFFFFFF, lo & 0xFFFFFFFF);
}
706
707 /* Find or allocate a constant pool entry for the given VALUE.
708 Return the index in the constant pool. */
709
710 static int
711 find_constant_index (value, state)
712 tree value;
713 struct jcf_partial *state;
714 {
715 if (TREE_CODE (value) == INTEGER_CST)
716 {
717 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
718 return find_constant1 (&state->cpool, CONSTANT_Integer,
719 TREE_INT_CST_LOW (value) & 0xFFFFFFFF);
720 else
721 return find_constant_wide (TREE_INT_CST_LOW (value),
722 TREE_INT_CST_HIGH (value), state);
723 }
724 else if (TREE_CODE (value) == REAL_CST)
725 {
726 long words[2];
727 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
728 {
729 words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
730 return find_constant1 (&state->cpool, CONSTANT_Float, words[0]);
731 }
732 else
733 {
734 etardouble (TREE_REAL_CST (value), words);
735 return find_constant2 (&state->cpool, CONSTANT_Double,
736 words[1-FLOAT_WORDS_BIG_ENDIAN] & 0xFFFFFFFF,
737 words[FLOAT_WORDS_BIG_ENDIAN] & 0xFFFFFFFF);
738 }
739 }
740 else if (TREE_CODE (value) == STRING_CST)
741 {
742 return find_string_constant (&state->cpool, value);
743 }
744 else
745 fatal ("find_constant_index - bad type");
746 }
747
748 /* Push 64-bit long constant on VM stack.
749 Caller is responsible for doing NOTE_PUSH. */
750
751 static void
752 push_long_const (lo, hi, state)
753 HOST_WIDE_INT lo, hi;
754 struct jcf_partial *state;
755 {
756 if (hi == 0 && lo >= 0 && lo <= 1)
757 {
758 RESERVE(1);
759 OP1(OPCODE_lconst_0 + lo);
760 }
761 else if ((hi == 0 && lo < 32768) || (hi == -1 && lo >= -32768))
762 {
763 push_int_const (lo, state);
764 RESERVE (1);
765 OP1 (OPCODE_i2l);
766 }
767 else
768 push_constant2 (find_constant_wide (lo, hi, state), state);
769 }
770
771 static void
772 field_op (field, opcode, state)
773 tree field;
774 int opcode;
775 struct jcf_partial *state;
776 {
777 int index = find_fieldref_index (&state->cpool, field);
778 RESERVE (3);
779 OP1 (opcode);
780 OP2 (index);
781 }
782
/* Returns an integer in the range 0 (for 'int') through 4 (for object
   reference) to 7 (for 'short') which matches the pattern of how JVM
   opcodes typically depend on the operand type.
   MAX caps the adjustment: kinds above MAX collapse to the 'int' form (0). */

int
adjust_typed_op (type, max)
     tree type;
     int max;
{
  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case RECORD_TYPE:   return 4;	/* Object reference ('a' opcodes). */
    case BOOLEAN_TYPE:
      return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
    case CHAR_TYPE:
      return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
    case INTEGER_TYPE:
      switch (TYPE_PRECISION (type))
	{
	case 8:       return max < 5 ? 0 : 5;	/* byte */
	case 16:      return max < 7 ? 0 : 7;	/* short */
	case 32:      return 0;			/* int */
	case 64:      return 1;			/* long */
	}
      break;
    case REAL_TYPE:
      switch (TYPE_PRECISION (type))
	{
	case 32:      return 2;			/* float */
	case 64:      return 3;			/* double */
	}
      break;
    default:
      break;
    }
  /* No JVM typed-opcode mapping exists for TYPE. */
  abort ();
}
821
822 static void
823 maybe_wide (opcode, index, state)
824 int opcode, index;
825 struct jcf_partial *state;
826 {
827 if (index >= 256)
828 {
829 RESERVE (4);
830 OP1 (OPCODE_wide);
831 OP1 (opcode);
832 OP2 (index);
833 }
834 else
835 {
836 RESERVE (2);
837 OP1 (opcode);
838 OP1 (index);
839 }
840 }
841
842 /* Compile code to duplicate with offset, where
843 SIZE is the size of the stack item to duplicate (1 or 2), abd
844 OFFSET is where to insert the result (must be 0, 1, or 2).
845 (The new words get inserted at stack[SP-size-offset].) */
846
847 static void
848 emit_dup (size, offset, state)
849 int size, offset;
850 struct jcf_partial *state;
851 {
852 int kind;
853 if (size == 0)
854 return;
855 RESERVE(1);
856 if (offset == 0)
857 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
858 else if (offset == 1)
859 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
860 else if (offset == 2)
861 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
862 else
863 abort();
864 OP1 (kind);
865 NOTE_PUSH (size);
866 }
867
/* Emit a pop (SIZE == 1) or pop2 (SIZE == 2) instruction.
   NOTE(review): does not call NOTE_POP; stack-depth accounting appears
   to be the caller's responsibility -- confirm at the call sites. */

static void
emit_pop (size, state)
     int size;
     struct jcf_partial *state;
{
  RESERVE (1);
  OP1 (OPCODE_pop - 1 + size);
}
876
877 static void
878 emit_iinc (var, value, state)
879 tree var;
880 int value;
881 struct jcf_partial *state;
882 {
883 int slot = DECL_LOCAL_INDEX (var);
884
885 if (value < -128 || value > 127 || slot >= 256)
886 {
887 RESERVE (6);
888 OP1 (OPCODE_wide);
889 OP1 (OPCODE_iinc);
890 OP2 (slot);
891 OP2 (value);
892 }
893 else
894 {
895 RESERVE (3);
896 OP1 (OPCODE_iinc);
897 OP1 (slot);
898 OP1 (value);
899 }
900 }
901
/* Emit a load or store for local variable VAR, using the type-adjusted
   opcode and, when the slot number is 0..3, the 1-byte short form. */

static void
emit_load_or_store (var, opcode, state)
     tree var;    /* Variable to load from or store into. */
     int opcode;  /* Either OPCODE_iload or OPCODE_istore. */
     struct jcf_partial *state;
{
  tree type = TREE_TYPE (var);
  int kind = adjust_typed_op (type, 4);
  int index = DECL_LOCAL_INDEX (var);
  if (index <= 3)
    {
      RESERVE (1);
      OP1 (opcode + 5 + 4 * kind + index);    /* [ilfda]{load,store}_[0123] */
    }
  else
    maybe_wide (opcode + kind, index, state);	/* [ilfda]{load,store} */
}
919
/* Emit code to push local variable VAR onto the JVM stack,
   accounting for the 1- or 2-word push. */

static void
emit_load (var, state)
     tree var;
     struct jcf_partial *state;
{
  emit_load_or_store (var, OPCODE_iload, state);
  NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
}
928
/* Emit code to pop the top of the JVM stack into local variable VAR,
   accounting for the 1- or 2-word pop. */

static void
emit_store (var, state)
     tree var;
     struct jcf_partial *state;
{
  emit_load_or_store (var, OPCODE_istore, state);
  NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
}
937
/* Emit the 1-byte no-operand instruction OPCODE.
   TYPE is unused here (kept for symmetry with emit_binop);
   stack-depth bookkeeping is the caller's responsibility. */

static void
emit_unop (opcode, type, state)
     enum java_opcode opcode;
     tree type;
     struct jcf_partial *state;
{
  RESERVE(1);
  OP1 (opcode);
}
947
948 static void
949 emit_binop (opcode, type, state)
950 enum java_opcode opcode;
951 tree type;
952 struct jcf_partial *state;
953 {
954 int size = TYPE_IS_WIDE (type) ? 2 : 1;
955 RESERVE(1);
956 OP1 (opcode);
957 NOTE_POP (size);
958 }
959
960 static void
961 emit_reloc (value, kind, target, state)
962 HOST_WIDE_INT value;
963 int kind;
964 struct jcf_block *target;
965 struct jcf_partial *state;
966 {
967 struct jcf_relocation *reloc = (struct jcf_relocation *)
968 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
969 struct jcf_block *block = state->last_block;
970 reloc->next = block->u.relocations;
971 block->u.relocations = reloc;
972 reloc->offset = BUFFER_LENGTH (&state->bytecode);
973 reloc->label = target;
974 reloc->kind = kind;
975 if (kind == 0 || kind == BLOCK_START_RELOC)
976 OP4 (value);
977 else if (kind != SWITCH_ALIGN_RELOC)
978 OP2 (value);
979 }
980
/* Emit a 4-byte placeholder for a switch target LABEL, recorded as a
   BLOCK_START_RELOC (i.e. an offset relative to the start of the
   containing block, fixed up during relocation). */

static void
emit_switch_reloc (label, state)
     struct jcf_block *label;
     struct jcf_partial *state;
{
  emit_reloc (0, BLOCK_START_RELOC, label, state);
}
988
989 /* Similar to emit_switch_reloc,
990 but re-uses an existing case reloc. */
991
992 static void
993 emit_case_reloc (reloc, state)
994 struct jcf_relocation *reloc;
995 struct jcf_partial *state;
996 {
997 struct jcf_block *block = state->last_block;
998 reloc->next = block->u.relocations;
999 block->u.relocations = reloc;
1000 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1001 reloc->kind = BLOCK_START_RELOC;
1002 OP4 (0);
1003 }
1004
/* Emit a conditional jump to TARGET with a 2-byte relative jump offset
   The opcode is OPCODE, the inverted opcode is INV_OPCODE (recorded in
   the relocation so the branch can be rewritten in long form later). */

static void
emit_if (target, opcode, inv_opcode, state)
     struct jcf_block *target;
     int opcode, inv_opcode;
     struct jcf_partial *state;
{
  /* Was missing: OP1 (and the reloc's 2-byte operand) assume space has
     been RESERVE'd, as every other emitter in this file does. */
  RESERVE (3);
  OP1 (opcode);
  /* Value is 1 byte from reloc back to start of instruction. */
  emit_reloc (1, - inv_opcode, target, state);
}
1018
1019 static void
1020 emit_goto (target, state)
1021 struct jcf_block *target;
1022 struct jcf_partial *state;
1023 {
1024 OP1 (OPCODE_goto);
1025 // Value is 1 byte from reloc back to start of instruction.
1026 emit_reloc (1, OPCODE_goto_w, target, state);
1027 }
1028
1029 static void
1030 emit_jsr (target, state)
1031 struct jcf_block *target;
1032 struct jcf_partial *state;
1033 {
1034 OP1 (OPCODE_jsr);
1035 // Value is 1 byte from reloc back to start of instruction.
1036 emit_reloc (1, OPCODE_jsr_w, target, state);
1037 }
1038
1039 /* Generate code to evaluate EXP. If the result is true,
1040 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1041 TRUE_BRANCH_FIRST is a code geneation hint that the
1042 TRUE_LABEL may follow right after this. (The idea is that we
1043 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1044
1045 void
1046 generate_bytecode_conditional (exp, true_label, false_label,
1047 true_branch_first, state)
1048 tree exp;
1049 struct jcf_block *true_label;
1050 struct jcf_block *false_label;
1051 int true_branch_first;
1052 struct jcf_partial *state;
1053 {
1054 tree exp0, exp1, type;
1055 int save_SP = state->code_SP;
1056 enum java_opcode op, negop;
1057 switch (TREE_CODE (exp))
1058 {
1059 case INTEGER_CST:
1060 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1061 break;
1062 case COND_EXPR:
1063 {
1064 struct jcf_block *then_label = gen_jcf_label (state);
1065 struct jcf_block *else_label = gen_jcf_label (state);
1066 int save_SP_before, save_SP_after;
1067 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1068 then_label, else_label, 1, state);
1069 define_jcf_label (then_label, state);
1070 save_SP_before = state->code_SP;
1071 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1072 true_label, false_label, 1, state);
1073 save_SP_after = state->code_SP;
1074 state->code_SP = save_SP_before;
1075 define_jcf_label (else_label, state);
1076 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1077 true_label, false_label,
1078 true_branch_first, state);
1079 if (state->code_SP != save_SP_after)
1080 fatal ("internal error non-matching SP");
1081 }
1082 break;
1083 case TRUTH_NOT_EXPR:
1084 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label, true_label,
1085 ! true_branch_first, state);
1086 break;
1087 case TRUTH_ANDIF_EXPR:
1088 {
1089 struct jcf_block *next_label = gen_jcf_label (state);
1090 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1091 next_label, false_label, 1, state);
1092 define_jcf_label (next_label, state);
1093 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1094 true_label, false_label, 1, state);
1095 }
1096 break;
1097 case TRUTH_ORIF_EXPR:
1098 {
1099 struct jcf_block *next_label = gen_jcf_label (state);
1100 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1101 true_label, next_label, 1, state);
1102 define_jcf_label (next_label, state);
1103 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1104 true_label, false_label, 1, state);
1105 }
1106 break;
1107 compare_1:
1108 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1109 set it to the corresponding 1-operand if<COND> instructions. */
1110 op = op - 6;
1111 /* FALLTHROUGH */
1112 compare_2:
1113 /* The opcodes with their inverses are allocated in pairs.
1114 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1115 negop = (op & 1) ? op + 1 : op - 1;
1116 compare_2_ptr:
1117 if (true_branch_first)
1118 {
1119 emit_if (false_label, negop, op, state);
1120 emit_goto (true_label, state);
1121 }
1122 else
1123 {
1124 emit_if (true_label, op, negop, state);
1125 emit_goto (false_label, state);
1126 }
1127 break;
1128 case EQ_EXPR:
1129 op = OPCODE_if_icmpeq;
1130 goto compare;
1131 case NE_EXPR:
1132 op = OPCODE_if_icmpne;
1133 goto compare;
1134 case GT_EXPR:
1135 op = OPCODE_if_icmpgt;
1136 goto compare;
1137 case LT_EXPR:
1138 op = OPCODE_if_icmplt;
1139 goto compare;
1140 case GE_EXPR:
1141 op = OPCODE_if_icmpge;
1142 goto compare;
1143 case LE_EXPR:
1144 op = OPCODE_if_icmple;
1145 goto compare;
1146 compare:
1147 exp0 = TREE_OPERAND (exp, 0);
1148 exp1 = TREE_OPERAND (exp, 1);
1149 type = TREE_TYPE (exp0);
1150 switch (TREE_CODE (type))
1151 {
1152 int opf;
1153 case POINTER_TYPE: case RECORD_TYPE:
1154 switch (TREE_CODE (exp))
1155 {
1156 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1157 case NE_EXPR: op = OPCODE_if_acmpne; break;
1158 default: abort();
1159 }
1160 if (integer_zerop (exp1) || integer_zerop (exp0))
1161 {
1162 generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp0,
1163 STACK_TARGET, state);
1164 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1165 negop = (op & 1) ? op - 1 : op + 1;
1166 NOTE_POP (1);
1167 goto compare_2_ptr;
1168 }
1169 generate_bytecode_insns (exp0, STACK_TARGET, state);
1170 generate_bytecode_insns (exp1, STACK_TARGET, state);
1171 NOTE_POP (2);
1172 goto compare_2;
1173 case REAL_TYPE:
1174 generate_bytecode_insns (exp0, STACK_TARGET, state);
1175 generate_bytecode_insns (exp1, STACK_TARGET, state);
1176 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1177 opf = OPCODE_fcmpg;
1178 else
1179 opf = OPCODE_fcmpl;
1180 if (TYPE_PRECISION (type) > 32)
1181 {
1182 opf += 2;
1183 NOTE_POP (4);
1184 }
1185 else
1186 NOTE_POP (2);
1187 RESERVE (1);
1188 OP1 (opf);
1189 goto compare_1;
1190 case INTEGER_TYPE:
1191 if (TYPE_PRECISION (type) > 32)
1192 {
1193 generate_bytecode_insns (exp0, STACK_TARGET, state);
1194 generate_bytecode_insns (exp1, STACK_TARGET, state);
1195 NOTE_POP (4);
1196 RESERVE (1);
1197 OP1 (OPCODE_lcmp);
1198 goto compare_1;
1199 }
1200 /* FALLTHOUGH */
1201 default:
1202 if (integer_zerop (exp1))
1203 {
1204 generate_bytecode_insns (exp0, STACK_TARGET, state);
1205 NOTE_POP (1);
1206 goto compare_1;
1207 }
1208 if (integer_zerop (exp0))
1209 {
1210 switch (op)
1211 {
1212 case OPCODE_if_icmplt:
1213 case OPCODE_if_icmpge:
1214 op += 2;
1215 break;
1216 case OPCODE_if_icmpgt:
1217 case OPCODE_if_icmple:
1218 op -= 2;
1219 break;
1220 default:
1221 break;
1222 }
1223 generate_bytecode_insns (exp1, STACK_TARGET, state);
1224 NOTE_POP (1);
1225 goto compare_1;
1226 }
1227 generate_bytecode_insns (exp0, STACK_TARGET, state);
1228 generate_bytecode_insns (exp1, STACK_TARGET, state);
1229 NOTE_POP (2);
1230 goto compare_2;
1231 }
1232
1233 default:
1234 generate_bytecode_insns (exp, STACK_TARGET, state);
1235 NOTE_POP (1);
1236 if (true_branch_first)
1237 {
1238 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1239 emit_goto (true_label, state);
1240 }
1241 else
1242 {
1243 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1244 emit_goto (false_label, state);
1245 }
1246 break;
1247 }
1248 if (save_SP != state->code_SP)
1249 fatal ("internal error - SP mismatch");
1250 }
1251
/* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs,
   but only as far out as LIMIT (since we are about to jump to the
   label that is LIMIT). */

1256 static void
1257 call_cleanups (limit, state)
1258 struct jcf_block *limit;
1259 struct jcf_partial *state;
1260 {
1261 struct jcf_block *block = state->labeled_blocks;
1262 for (; block != limit; block = block->next)
1263 {
1264 if (block->pc == PENDING_CLEANUP_PC)
1265 emit_jsr (block, state);
1266 }
1267 }
1268
1269 /* Generate bytecode for sub-expression EXP of METHOD.
1270 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1271
1272 static void
1273 generate_bytecode_insns (exp, target, state)
1274 tree exp;
1275 int target;
1276 struct jcf_partial *state;
1277 {
1278 tree type;
1279 enum java_opcode jopcode;
1280 int op;
1281 HOST_WIDE_INT value;
1282 int post_op;
1283 int size;
1284 int offset;
1285
1286 if (exp == NULL && target == IGNORE_TARGET)
1287 return;
1288
1289 type = TREE_TYPE (exp);
1290
1291 switch (TREE_CODE (exp))
1292 {
1293 case BLOCK:
1294 if (BLOCK_EXPR_BODY (exp))
1295 {
1296 tree local;
1297 tree body = BLOCK_EXPR_BODY (exp);
1298 for (local = BLOCK_EXPR_DECLS (exp); local; )
1299 {
1300 tree next = TREE_CHAIN (local);
1301 localvar_alloc (local, state);
1302 local = next;
1303 }
1304 /* Avoid deep recursion for long blocks. */
1305 while (TREE_CODE (body) == COMPOUND_EXPR)
1306 {
1307 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1308 body = TREE_OPERAND (body, 1);
1309 }
1310 generate_bytecode_insns (body, target, state);
1311 for (local = BLOCK_EXPR_DECLS (exp); local; )
1312 {
1313 tree next = TREE_CHAIN (local);
1314 localvar_free (local, state);
1315 local = next;
1316 }
1317 }
1318 break;
1319 case COMPOUND_EXPR:
1320 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1321 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1322 break;
1323 case EXPR_WITH_FILE_LOCATION:
1324 {
1325 char *saved_input_filename = input_filename;
1326 tree body = EXPR_WFL_NODE (exp);
1327 int saved_lineno = lineno;
1328 if (body == empty_stmt_node)
1329 break;
1330 input_filename = EXPR_WFL_FILENAME (exp);
1331 lineno = EXPR_WFL_LINENO (exp);
1332 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0)
1333 put_linenumber (lineno, state);
1334 generate_bytecode_insns (body, target, state);
1335 input_filename = saved_input_filename;
1336 lineno = saved_lineno;
1337 }
1338 break;
1339 case INTEGER_CST:
1340 if (target == IGNORE_TARGET) ; /* do nothing */
1341 else if (TREE_CODE (type) == POINTER_TYPE)
1342 {
1343 if (! integer_zerop (exp))
1344 abort();
1345 RESERVE(1);
1346 OP1 (OPCODE_aconst_null);
1347 NOTE_PUSH (1);
1348 }
1349 else if (TYPE_PRECISION (type) <= 32)
1350 {
1351 push_int_const (TREE_INT_CST_LOW (exp), state);
1352 NOTE_PUSH (1);
1353 }
1354 else
1355 {
1356 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1357 state);
1358 NOTE_PUSH (2);
1359 }
1360 break;
1361 case REAL_CST:
1362 offset = find_constant_index (exp, state);
1363 switch (TYPE_PRECISION (type))
1364 {
1365 case 32:
1366 push_constant1 (offset, state);
1367 NOTE_PUSH (1);
1368 break;
1369 case 64:
1370 push_constant2 (offset, state);
1371 NOTE_PUSH (2);
1372 break;
1373 default:
1374 abort ();
1375 }
1376 break;
1377 case STRING_CST:
1378 push_constant1 (find_string_constant (&state->cpool, exp), state);
1379 NOTE_PUSH (1);
1380 break;
1381 case VAR_DECL:
1382 if (TREE_STATIC (exp))
1383 {
1384 field_op (exp, OPCODE_getstatic, state);
1385 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1386 break;
1387 }
1388 /* ... fall through ... */
1389 case PARM_DECL:
1390 emit_load (exp, state);
1391 break;
1392 case NON_LVALUE_EXPR:
1393 case INDIRECT_REF:
1394 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1395 break;
1396 case ARRAY_REF:
1397 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1398 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1399 if (target != IGNORE_TARGET)
1400 {
1401 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1402 RESERVE(1);
1403 OP1 (jopcode);
1404 if (! TYPE_IS_WIDE (type))
1405 NOTE_POP (1);
1406 }
1407 break;
1408 case COMPONENT_REF:
1409 {
1410 tree obj = TREE_OPERAND (exp, 0);
1411 tree field = TREE_OPERAND (exp, 1);
1412 int is_static = FIELD_STATIC (field);
1413 generate_bytecode_insns (obj,
1414 is_static ? IGNORE_TARGET : target, state);
1415 if (target != IGNORE_TARGET)
1416 {
1417 if (DECL_NAME (field) == length_identifier_node && !is_static
1418 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1419 {
1420 RESERVE (1);
1421 OP1 (OPCODE_arraylength);
1422 }
1423 else
1424 {
1425 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1426 state);
1427 if (! is_static)
1428 NOTE_POP (1);
1429 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1430 }
1431 }
1432 }
1433 break;
1434 case TRUTH_ANDIF_EXPR:
1435 case TRUTH_ORIF_EXPR:
1436 case EQ_EXPR:
1437 case NE_EXPR:
1438 case GT_EXPR:
1439 case LT_EXPR:
1440 case GE_EXPR:
1441 case LE_EXPR:
1442 {
1443 struct jcf_block *then_label = gen_jcf_label (state);
1444 struct jcf_block *else_label = gen_jcf_label (state);
1445 struct jcf_block *end_label = gen_jcf_label (state);
1446 generate_bytecode_conditional (exp,
1447 then_label, else_label, 1, state);
1448 define_jcf_label (then_label, state);
1449 push_int_const (1, state);
1450 emit_goto (end_label, state);
1451 define_jcf_label (else_label, state);
1452 push_int_const (0, state);
1453 define_jcf_label (end_label, state);
1454 NOTE_PUSH (1);
1455 }
1456 break;
1457 case COND_EXPR:
1458 {
1459 struct jcf_block *then_label = gen_jcf_label (state);
1460 struct jcf_block *else_label = gen_jcf_label (state);
1461 struct jcf_block *end_label = gen_jcf_label (state);
1462 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1463 then_label, else_label, 1, state);
1464 define_jcf_label (then_label, state);
1465 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1466 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1467 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1468 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1469 emit_goto (end_label, state);
1470 define_jcf_label (else_label, state);
1471 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1472 define_jcf_label (end_label, state);
1473 }
1474 break;
1475 case CASE_EXPR:
1476 {
1477 struct jcf_switch_state *sw_state = state->sw_state;
1478 struct jcf_relocation *reloc = (struct jcf_relocation *)
1479 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1480 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1481 reloc->kind = 0;
1482 reloc->label = get_jcf_label_here (state);
1483 reloc->offset = case_value;
1484 reloc->next = sw_state->cases;
1485 sw_state->cases = reloc;
1486 if (sw_state->num_cases == 0)
1487 {
1488 sw_state->min_case = case_value;
1489 sw_state->max_case = case_value;
1490 }
1491 else
1492 {
1493 if (case_value < sw_state->min_case)
1494 sw_state->min_case = case_value;
1495 if (case_value > sw_state->max_case)
1496 sw_state->max_case = case_value;
1497 }
1498 sw_state->num_cases++;
1499 }
1500 break;
1501 case DEFAULT_EXPR:
1502 state->sw_state->default_label = get_jcf_label_here (state);
1503 break;
1504
1505 case SWITCH_EXPR:
1506 {
1507 /* The SWITCH_EXPR has three parts, generated in the following order:
1508 1. the switch_expression (the value used to select the correct case);
1509 2. the switch_body;
1510 3. the switch_instruction (the tableswitch/loopupswitch instruction.).
1511 After code generation, we will re-order then in the order 1, 3, 2.
1512 This is to avoid an extra GOTOs. */
1513 struct jcf_switch_state sw_state;
1514 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1515 struct jcf_block *body_last; /* Last block of the switch_body. */
1516 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1517 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1518 struct jcf_block *body_block;
1519 int switch_length;
1520 sw_state.prev = state->sw_state;
1521 state->sw_state = &sw_state;
1522 sw_state.cases = NULL;
1523 sw_state.num_cases = 0;
1524 sw_state.default_label = NULL;
1525 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1526 expression_last = state->last_block;
1527 body_block = get_jcf_label_here (state); /* Force a new block here. */
1528 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1529 body_last = state->last_block;
1530
1531 switch_instruction = gen_jcf_label (state);
1532 define_jcf_label (switch_instruction, state);
1533 if (sw_state.default_label == NULL)
1534 sw_state.default_label = gen_jcf_label (state);
1535
1536 if (sw_state.num_cases <= 1)
1537 {
1538 if (sw_state.num_cases == 0)
1539 {
1540 emit_pop (1, state);
1541 NOTE_POP (1);
1542 }
1543 else
1544 {
1545 push_int_const (sw_state.cases->offset, state);
1546 emit_if (sw_state.cases->label,
1547 OPCODE_ifeq, OPCODE_ifne, state);
1548 }
1549 emit_goto (sw_state.default_label, state);
1550 }
1551 else
1552 {
1553 HOST_WIDE_INT i;
1554 /* Copy the chain of relocs into a sorted array. */
1555 struct jcf_relocation **relocs = (struct jcf_relocation **)
1556 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1557 /* The relocs arrays is a buffer with a gap.
1558 The assumption is that cases will normally come in "runs". */
1559 int gap_start = 0;
1560 int gap_end = sw_state.num_cases;
1561 struct jcf_relocation *reloc;
1562 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1563 {
1564 HOST_WIDE_INT case_value = reloc->offset;
1565 while (gap_end < sw_state.num_cases)
1566 {
1567 struct jcf_relocation *end = relocs[gap_end];
1568 if (case_value <= end->offset)
1569 break;
1570 relocs[gap_start++] = end;
1571 gap_end++;
1572 }
1573 while (gap_start > 0)
1574 {
1575 struct jcf_relocation *before = relocs[gap_start-1];
1576 if (case_value >= before->offset)
1577 break;
1578 relocs[--gap_end] = before;
1579 gap_start--;
1580 }
1581 relocs[gap_start++] = reloc;
1582 /* Note we don't check for duplicates. FIXME! */
1583 }
1584
1585 if (2 * sw_state.num_cases
1586 >= sw_state.max_case - sw_state.min_case)
1587 { /* Use tableswitch. */
1588 int index = 0;
1589 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1590 OP1 (OPCODE_tableswitch);
1591 emit_reloc (0, SWITCH_ALIGN_RELOC, NULL, state);
1592 emit_switch_reloc (sw_state.default_label, state);
1593 OP4 (sw_state.min_case);
1594 OP4 (sw_state.max_case);
1595 for (i = sw_state.min_case; ; )
1596 {
1597 reloc = relocs[index];
1598 if (i == reloc->offset)
1599 {
1600 emit_case_reloc (reloc, state);
1601 if (i == sw_state.max_case)
1602 break;
1603 index++;
1604 }
1605 else
1606 emit_switch_reloc (sw_state.default_label, state);
1607 i++;
1608 }
1609 }
1610 else
1611 { /* Use lookupswitch. */
1612 RESERVE(9 + 8 * sw_state.num_cases);
1613 OP1 (OPCODE_lookupswitch);
1614 emit_reloc (0, SWITCH_ALIGN_RELOC, NULL, state);
1615 emit_switch_reloc (sw_state.default_label, state);
1616 OP4 (sw_state.num_cases);
1617 for (i = 0; i < sw_state.num_cases; i++)
1618 {
1619 struct jcf_relocation *reloc = relocs[i];
1620 OP4 (reloc->offset);
1621 emit_case_reloc (reloc, state);
1622 }
1623 }
1624 free (relocs);
1625 }
1626
1627 instruction_last = state->last_block;
1628 if (sw_state.default_label->pc < 0)
1629 define_jcf_label (sw_state.default_label, state);
1630 else /* Force a new block. */
1631 sw_state.default_label = get_jcf_label_here (state);
1632 /* Now re-arrange the blocks so the switch_instruction
1633 comes before the switch_body. */
1634 switch_length = state->code_length - switch_instruction->pc;
1635 switch_instruction->pc = body_block->pc;
1636 instruction_last->next = body_block;
1637 instruction_last->v.chunk->next = body_block->v.chunk;
1638 expression_last->next = switch_instruction;
1639 expression_last->v.chunk->next = switch_instruction->v.chunk;
1640 body_last->next = sw_state.default_label;
1641 body_last->v.chunk->next = NULL;
1642 state->chunk = body_last->v.chunk;
1643 for (; body_block != sw_state.default_label; body_block = body_block->next)
1644 body_block->pc += switch_length;
1645
1646 state->sw_state = sw_state.prev;
1647 break;
1648 }
1649
1650 case RETURN_EXPR:
1651 if (!TREE_OPERAND (exp, 0))
1652 {
1653 op = OPCODE_return;
1654 call_cleanups (NULL_TREE, state);
1655 }
1656 else
1657 {
1658 exp = TREE_OPERAND (exp, 0);
1659 if (TREE_CODE (exp) != MODIFY_EXPR)
1660 abort ();
1661 exp = TREE_OPERAND (exp, 1);
1662 op = OPCODE_ireturn + adjust_typed_op (TREE_TYPE (exp), 4);
1663 generate_bytecode_insns (exp, STACK_TARGET, state);
1664 if (state->num_finalizers > 0)
1665 {
1666 if (state->return_value_decl == NULL_TREE)
1667 {
1668 state->return_value_decl
1669 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1670 localvar_alloc (state->return_value_decl, state);
1671 }
1672 emit_store (state->return_value_decl, state);
1673 call_cleanups (NULL_TREE, state);
1674 emit_load (state->return_value_decl, state);
1675 /* If we call localvar_free (state->return_value_decl, state),
1676 then we risk the save decl erroneously re-used in the
1677 finalizer. Instead, we keep the state->return_value_decl
1678 allocated through the rest of the method. This is not
1679 the greatest solution, but it is at least simple and safe. */
1680 }
1681 }
1682 RESERVE (1);
1683 OP1 (op);
1684 break;
1685 case LABELED_BLOCK_EXPR:
1686 {
1687 struct jcf_block *end_label = gen_jcf_label (state);
1688 end_label->next = state->labeled_blocks;
1689 state->labeled_blocks = end_label;
1690 end_label->pc = PENDING_EXIT_PC;
1691 end_label->u.labeled_block = exp;
1692 if (LABELED_BLOCK_BODY (exp))
1693 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1694 if (state->labeled_blocks != end_label)
1695 abort();
1696 state->labeled_blocks = end_label->next;
1697 define_jcf_label (end_label, state);
1698 }
1699 break;
1700 case LOOP_EXPR:
1701 {
1702 tree body = TREE_OPERAND (exp, 0);
1703 #if 0
1704 if (TREE_CODE (body) == COMPOUND_EXPR
1705 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1706 {
1707 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1708 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1709 struct jcf_block *head_label;
1710 struct jcf_block *body_label;
1711 struct jcf_block *end_label = gen_jcf_label (state);
1712 struct jcf_block *exit_label = state->labeled_blocks;
1713 head_label = gen_jcf_label (state);
1714 emit_goto (head_label, state);
1715 body_label = get_jcf_label_here (state);
1716 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1717 define_jcf_label (head_label, state);
1718 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1719 end_label, body_label, 1, state);
1720 define_jcf_label (end_label, state);
1721 }
1722 else
1723 #endif
1724 {
1725 struct jcf_block *head_label = get_jcf_label_here (state);
1726 generate_bytecode_insns (body, IGNORE_TARGET, state);
1727 emit_goto (head_label, state);
1728 }
1729 }
1730 break;
1731 case EXIT_EXPR:
1732 {
1733 struct jcf_block *label = state->labeled_blocks;
1734 struct jcf_block *end_label = gen_jcf_label (state);
1735 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1736 label, end_label, 0, state);
1737 define_jcf_label (end_label, state);
1738 }
1739 break;
1740 case EXIT_BLOCK_EXPR:
1741 {
1742 struct jcf_block *label = state->labeled_blocks;
1743 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1744 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1745 label = label->next;
1746 call_cleanups (label, state);
1747 emit_goto (label, state);
1748 }
1749 break;
1750
1751 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1752 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1753 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1754 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1755 increment:
1756
1757 exp = TREE_OPERAND (exp, 0);
1758 type = TREE_TYPE (exp);
1759 size = TYPE_IS_WIDE (type) ? 2 : 1;
1760 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1761 && ! TREE_STATIC (exp)
1762 && TREE_CODE (type) == INTEGER_TYPE
1763 && TYPE_PRECISION (type) == 32)
1764 {
1765 if (target != IGNORE_TARGET && post_op)
1766 emit_load (exp, state);
1767 emit_iinc (exp, value, state);
1768 if (target != IGNORE_TARGET && ! post_op)
1769 emit_load (exp, state);
1770 break;
1771 }
1772 if (TREE_CODE (exp) == COMPONENT_REF)
1773 {
1774 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1775 emit_dup (1, 0, state);
1776 /* Stack: ..., objectref, objectref. */
1777 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1778 NOTE_PUSH (size-1);
1779 /* Stack: ..., objectref, oldvalue. */
1780 offset = 1;
1781 }
1782 else if (TREE_CODE (exp) == ARRAY_REF)
1783 {
1784 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1785 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1786 emit_dup (2, 0, state);
1787 /* Stack: ..., array, index, array, index. */
1788 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1789 RESERVE(1);
1790 OP1 (jopcode);
1791 NOTE_POP (2-size);
1792 /* Stack: ..., array, index, oldvalue. */
1793 offset = 2;
1794 }
1795 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1796 {
1797 generate_bytecode_insns (exp, STACK_TARGET, state);
1798 /* Stack: ..., oldvalue. */
1799 offset = 0;
1800 }
1801 else
1802 abort ();
1803
1804 if (target != IGNORE_TARGET && post_op)
1805 emit_dup (size, offset, state);
1806 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1807 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1808 /* Stack, otherwise: ..., [result, ] oldvalue. */
1809 if (size == 1)
1810 push_int_const (value, state);
1811 else
1812 push_long_const (value, value >= 0 ? 0 : -1, state);
1813 NOTE_PUSH (size);
1814 emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state);
1815 if (target != IGNORE_TARGET && ! post_op)
1816 emit_dup (size, offset, state);
1817 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1818 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1819 /* Stack, otherwise: ..., [result, ] newvalue. */
1820 goto finish_assignment;
1821
1822 case MODIFY_EXPR:
1823 {
1824 tree lhs = TREE_OPERAND (exp, 0);
1825 tree rhs = TREE_OPERAND (exp, 1);
1826 int offset = 0;
1827
1828 /* See if we can use the iinc instruction. */
1829 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1830 && ! TREE_STATIC (lhs)
1831 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1832 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1833 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1834 {
1835 tree arg0 = TREE_OPERAND (rhs, 0);
1836 tree arg1 = TREE_OPERAND (rhs, 1);
1837 HOST_WIDE_INT min_value = -32768;
1838 HOST_WIDE_INT max_value = 32767;
1839 if (TREE_CODE (rhs) == MINUS_EXPR)
1840 {
1841 min_value++;
1842 max_value++;
1843 }
1844 else if (arg1 == lhs)
1845 {
1846 arg0 = arg1;
1847 arg1 = TREE_OPERAND (rhs, 0);
1848 }
1849 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1850 {
1851 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1852 value = TREE_INT_CST_LOW (arg1);
1853 if ((hi_value == 0 && value <= max_value)
1854 || (hi_value == -1 && value >= min_value))
1855 {
1856 if (TREE_CODE (rhs) == MINUS_EXPR)
1857 value = -value;
1858 emit_iinc (lhs, value, state);
1859 break;
1860 }
1861 }
1862 }
1863
1864 if (TREE_CODE (lhs) == COMPONENT_REF)
1865 {
1866 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
1867 STACK_TARGET, state);
1868 offset = 1;
1869 }
1870 else if (TREE_CODE (lhs) == ARRAY_REF)
1871 {
1872 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
1873 STACK_TARGET, state);
1874 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
1875 STACK_TARGET, state);
1876 offset = 2;
1877 }
1878 else
1879 offset = 0;
1880 generate_bytecode_insns (rhs, STACK_TARGET, state);
1881 if (target != IGNORE_TARGET)
1882 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
1883 exp = lhs;
1884 }
1885 /* FALLTHOUGH */
1886
1887 finish_assignment:
1888 if (TREE_CODE (exp) == COMPONENT_REF)
1889 {
1890 tree field = TREE_OPERAND (exp, 1);
1891 if (! FIELD_STATIC (field))
1892 NOTE_POP (1);
1893 field_op (field,
1894 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
1895 state);
1896
1897 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1898 }
1899 else if (TREE_CODE (exp) == VAR_DECL
1900 || TREE_CODE (exp) == PARM_DECL)
1901 {
1902 if (FIELD_STATIC (exp))
1903 {
1904 field_op (exp, OPCODE_putstatic, state);
1905 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1906 }
1907 else
1908 emit_store (exp, state);
1909 }
1910 else if (TREE_CODE (exp) == ARRAY_REF)
1911 {
1912 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
1913 RESERVE(1);
1914 OP1 (jopcode);
1915 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
1916 }
1917 else
1918 fatal ("internal error (bad lhs to MODIFY_EXPR)");
1919 break;
1920 case PLUS_EXPR:
1921 jopcode = OPCODE_iadd;
1922 goto binop;
1923 case MINUS_EXPR:
1924 jopcode = OPCODE_isub;
1925 goto binop;
1926 case MULT_EXPR:
1927 jopcode = OPCODE_imul;
1928 goto binop;
1929 case TRUNC_DIV_EXPR:
1930 case RDIV_EXPR:
1931 jopcode = OPCODE_idiv;
1932 goto binop;
1933 case TRUNC_MOD_EXPR:
1934 jopcode = OPCODE_irem;
1935 goto binop;
1936 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
1937 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
1938 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
1939 case TRUTH_AND_EXPR:
1940 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
1941 case TRUTH_OR_EXPR:
1942 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
1943 case TRUTH_XOR_EXPR:
1944 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
1945 binop:
1946 {
1947 tree arg0 = TREE_OPERAND (exp, 0);
1948 tree arg1 = TREE_OPERAND (exp, 1);
1949 jopcode += adjust_typed_op (type, 3);
1950 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
1951 {
1952 /* fold may (e.g) convert 2*x to x+x. */
1953 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
1954 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
1955 }
1956 else
1957 {
1958 generate_bytecode_insns (arg0, target, state);
1959 generate_bytecode_insns (arg1, target, state);
1960 }
1961 /* For most binary operations, both operands and the result have the
1962 same type. Shift operations are different. Using arg1's type
1963 gets us the correct SP adjustment in all casesd. */
1964 if (target == STACK_TARGET)
1965 emit_binop (jopcode, TREE_TYPE (arg1), state);
1966 break;
1967 }
1968 case TRUTH_NOT_EXPR:
1969 case BIT_NOT_EXPR:
1970 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1971 if (target == STACK_TARGET)
1972 {
1973 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
1974 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
1975 RESERVE (2);
1976 if (is_long)
1977 OP1 (OPCODE_i2l);
1978 NOTE_PUSH (1 + is_long);
1979 OP1 (OPCODE_ixor + is_long);
1980 NOTE_POP (1 + is_long);
1981 }
1982 break;
1983 case NEGATE_EXPR:
1984 jopcode = OPCODE_ineg;
1985 jopcode += adjust_typed_op (type, 3);
1986 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1987 if (target == STACK_TARGET)
1988 emit_unop (jopcode, type, state);
1989 break;
1990 case INSTANCEOF_EXPR:
1991 {
1992 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
1993 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1994 RESERVE (3);
1995 OP1 (OPCODE_instanceof);
1996 OP2 (index);
1997 }
1998 break;
1999 case CONVERT_EXPR:
2000 case NOP_EXPR:
2001 case FLOAT_EXPR:
2002 case FIX_TRUNC_EXPR:
2003 {
2004 tree src = TREE_OPERAND (exp, 0);
2005 tree src_type = TREE_TYPE (src);
2006 tree dst_type = TREE_TYPE (exp);
2007 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2008 if (target == IGNORE_TARGET || src_type == dst_type)
2009 break;
2010 if (TREE_CODE (dst_type) == POINTER_TYPE)
2011 {
2012 if (TREE_CODE (exp) == CONVERT_EXPR)
2013 {
2014 int index = find_class_constant (&state->cpool, TREE_TYPE (dst_type));
2015 RESERVE (3);
2016 OP1 (OPCODE_checkcast);
2017 OP2 (index);
2018 }
2019 }
2020 else /* Convert numeric types. */
2021 {
2022 int wide_src = TYPE_PRECISION (src_type) > 32;
2023 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2024 NOTE_POP (1 + wide_src);
2025 RESERVE (1);
2026 if (TREE_CODE (dst_type) == REAL_TYPE)
2027 {
2028 if (TREE_CODE (src_type) == REAL_TYPE)
2029 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2030 else if (TYPE_PRECISION (src_type) == 64)
2031 OP1 (OPCODE_l2f + wide_dst);
2032 else
2033 OP1 (OPCODE_i2f + wide_dst);
2034 }
2035 else /* Convert to integral type. */
2036 {
2037 if (TREE_CODE (src_type) == REAL_TYPE)
2038 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2039 else if (wide_dst)
2040 OP1 (OPCODE_i2l);
2041 else if (wide_src)
2042 OP1 (OPCODE_l2i);
2043 if (TYPE_PRECISION (dst_type) < 32)
2044 {
2045 RESERVE (1);
2046 /* Already converted to int, if needed. */
2047 if (TYPE_PRECISION (dst_type) <= 8)
2048 OP1 (OPCODE_i2b);
2049 else if (TREE_UNSIGNED (dst_type))
2050 OP1 (OPCODE_i2c);
2051 else
2052 OP1 (OPCODE_i2s);
2053 }
2054 }
2055 NOTE_PUSH (1 + wide_dst);
2056 }
2057 }
2058 break;
2059
2060 case CLEANUP_POINT_EXPR:
2061 {
2062 struct jcf_block *save_labeled_blocks = state->labeled_blocks;
2063 int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0));
2064 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2065 if (target != IGNORE_TARGET)
2066 abort ();
2067 while (state->labeled_blocks != save_labeled_blocks)
2068 {
2069 struct jcf_block *finished_label = NULL;
2070 tree return_link;
2071 tree exception_type = build_pointer_type (throwable_type_node);
2072 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
2073 exception_type);
2074 struct jcf_block *end_label = get_jcf_label_here (state);
2075 struct jcf_block *label = state->labeled_blocks;
2076 struct jcf_handler *handler;
2077 tree cleanup = label->u.labeled_block;
2078 state->labeled_blocks = label->next;
2079 state->num_finalizers--;
2080 if (can_complete)
2081 {
2082 finished_label = gen_jcf_label (state);
2083 emit_jsr (label, state);
2084 emit_goto (finished_label, state);
2085 if (! CAN_COMPLETE_NORMALLY (cleanup))
2086 can_complete = 0;
2087 }
2088 handler = alloc_handler (label->v.start_label, end_label, state);
2089 handler->type = NULL_TREE;
2090 localvar_alloc (exception_decl, state);
2091 NOTE_PUSH (1);
2092 emit_store (exception_decl, state);
2093 emit_jsr (label, state);
2094 emit_load (exception_decl, state);
2095 RESERVE (1);
2096 OP1 (OPCODE_athrow);
2097 NOTE_POP (1);
2098
2099 /* The finally block. */
2100 return_link = build_decl (VAR_DECL, NULL_TREE,
2101 return_address_type_node);
2102 define_jcf_label (label, state);
2103 NOTE_PUSH (1);
2104 localvar_alloc (return_link, state);
2105 emit_store (return_link, state);
2106 generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
2107 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2108 localvar_free (return_link, state);
2109 localvar_free (exception_decl, state);
2110 if (finished_label != NULL)
2111 define_jcf_label (finished_label, state);
2112 }
2113 }
2114 break;
2115
2116 case WITH_CLEANUP_EXPR:
2117 {
2118 struct jcf_block *label;
2119 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2120 label = gen_jcf_label (state);
2121 label->pc = PENDING_CLEANUP_PC;
2122 label->next = state->labeled_blocks;
2123 state->labeled_blocks = label;
2124 state->num_finalizers++;
2125 label->u.labeled_block = TREE_OPERAND (exp, 2);
2126 label->v.start_label = get_jcf_label_here (state);
2127 if (target != IGNORE_TARGET)
2128 abort ();
2129 }
2130 break;
2131
2132 case TRY_EXPR:
2133 {
2134 tree try_clause = TREE_OPERAND (exp, 0);
2135 tree finally = TREE_OPERAND (exp, 2);
2136 struct jcf_block *start_label = get_jcf_label_here (state);
2137 struct jcf_block *end_label; /* End of try clause. */
2138 struct jcf_block *finally_label; /* Finally subroutine. */
2139 struct jcf_block *finished_label = gen_jcf_label (state);
2140 tree clause = TREE_OPERAND (exp, 1);
2141 if (finally)
2142 {
2143 finally = FINALLY_EXPR_BLOCK (finally);
2144 finally_label = gen_jcf_label (state);
2145 }
2146 if (target != IGNORE_TARGET)
2147 abort ();
2148 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2149 end_label = get_jcf_label_here (state);
2150 if (CAN_COMPLETE_NORMALLY (try_clause))
2151 emit_goto (finished_label, state);
2152 for ( ; clause != NULL_TREE; clause = TREE_CHAIN (clause))
2153 {
2154 tree catch_clause = TREE_OPERAND (clause, 0);
2155 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2156 struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
2157 if (exception_decl == NULL_TREE)
2158 handler->type = NULL_TREE;
2159 else
2160 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2161 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2162 if (CAN_COMPLETE_NORMALLY (catch_clause))
2163 emit_goto (finished_label, state);
2164 }
2165 if (finally)
2166 {
2167 tree return_link;
2168 tree exception_type = build_pointer_type (throwable_type_node);
2169 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
2170 exception_type);
2171 struct jcf_handler *handler
2172 = alloc_handler (start_label, NULL_TREE, state);
2173 handler->end_label = handler->handler_label;
2174 handler->type = NULL_TREE;
2175 localvar_alloc (exception_decl, state);
2176 NOTE_PUSH (1);
2177 emit_store (exception_decl, state);
2178 emit_jsr (finally_label, state);
2179 emit_load (exception_decl, state);
2180 RESERVE (1);
2181 OP1 (OPCODE_athrow);
2182 NOTE_POP (1);
2183 localvar_free (exception_decl, state);
2184
2185 /* The finally block. */
2186 return_link = build_decl (VAR_DECL, NULL_TREE,
2187 return_address_type_node);
2188 define_jcf_label (finally_label, state);
2189 NOTE_PUSH (1);
2190 localvar_alloc (return_link, state);
2191 emit_store (return_link, state);
2192 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2193 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2194 localvar_free (return_link, state);
2195 }
2196 define_jcf_label (finished_label, state);
2197 if (finally)
2198 emit_jsr (finally_label, state);
2199 }
2200 break;
2201 case THROW_EXPR:
2202 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2203 RESERVE (1);
2204 OP1 (OPCODE_athrow);
2205 break;
2206 case NEW_ARRAY_INIT:
2207 {
2208 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2209 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2210 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2211 HOST_WIDE_INT length = java_array_type_length (array_type);
2212 if (target == IGNORE_TARGET)
2213 {
2214 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2215 generate_bytecode_insns (TREE_VALUE (values), target, state);
2216 break;
2217 }
2218 push_int_const (length, state);
2219 NOTE_PUSH (1);
2220 RESERVE (3);
2221 if (JPRIMITIVE_TYPE_P (element_type))
2222 {
2223 int atype = encode_newarray_type (element_type);
2224 OP1 (OPCODE_newarray);
2225 OP1 (atype);
2226 }
2227 else
2228 {
2229 int index = find_class_constant (&state->cpool,
2230 TREE_TYPE (element_type));
2231 OP1 (OPCODE_anewarray);
2232 OP2 (index);
2233 }
2234 offset = 0;
2235 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2236 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2237 {
2238 int save_SP = state->code_SP;
2239 emit_dup (1, 0, state);
2240 push_int_const (offset, state);
2241 NOTE_PUSH (1);
2242 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2243 RESERVE (1);
2244 OP1 (jopcode);
2245 state->code_SP = save_SP;
2246 }
2247 }
2248 break;
2249 case NEW_CLASS_EXPR:
2250 {
2251 tree class = TREE_TYPE (TREE_TYPE (exp));
2252 int need_result = target != IGNORE_TARGET;
2253 int index = find_class_constant (&state->cpool, class);
2254 RESERVE (4);
2255 OP1 (OPCODE_new);
2256 OP2 (index);
2257 if (need_result)
2258 OP1 (OPCODE_dup);
2259 NOTE_PUSH (1 + need_result);
2260 }
2261 /* ... fall though ... */
2262 case CALL_EXPR:
2263 {
2264 tree f = TREE_OPERAND (exp, 0);
2265 tree x = TREE_OPERAND (exp, 1);
2266 int save_SP = state->code_SP;
2267 int nargs;
2268 if (TREE_CODE (f) == ADDR_EXPR)
2269 f = TREE_OPERAND (f, 0);
2270 if (f == soft_newarray_node)
2271 {
2272 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2273 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2274 STACK_TARGET, state);
2275 RESERVE (2);
2276 OP1 (OPCODE_newarray);
2277 OP1 (type_code);
2278 break;
2279 }
2280 else if (f == soft_multianewarray_node)
2281 {
2282 int ndims;
2283 int idim;
2284 int index = find_class_constant (&state->cpool,
2285 TREE_TYPE (TREE_TYPE (exp)));
2286 x = TREE_CHAIN (x); /* Skip class argument. */
2287 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2288 for (idim = ndims; --idim >= 0; )
2289 {
2290 x = TREE_CHAIN (x);
2291 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2292 }
2293 RESERVE (4);
2294 OP1 (OPCODE_multianewarray);
2295 OP2 (index);
2296 OP1 (ndims);
2297 break;
2298 }
2299 else if (f == soft_anewarray_node)
2300 {
2301 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2302 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2303 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2304 RESERVE (3);
2305 OP1 (OPCODE_anewarray);
2306 OP2 (index);
2307 break;
2308 }
2309 else if (f == soft_monitorenter_node
2310 || f == soft_monitorexit_node
2311 || f == throw_node)
2312 {
2313 if (f == soft_monitorenter_node)
2314 op = OPCODE_monitorenter;
2315 else if (f == soft_monitorexit_node)
2316 op = OPCODE_monitorexit;
2317 else
2318 op = OPCODE_athrow;
2319 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2320 RESERVE (1);
2321 OP1 (op);
2322 NOTE_POP (1);
2323 break;
2324 }
2325 else if (exp == soft_exceptioninfo_call_node)
2326 {
2327 NOTE_PUSH (1); /* Pushed by exception system. */
2328 break;
2329 }
2330 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2331 {
2332 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2333 }
2334 nargs = state->code_SP - save_SP;
2335 state->code_SP = save_SP;
2336 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2337 NOTE_POP (1); /* Pop implicit this. */
2338 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2339 {
2340 int index = find_methodref_index (&state->cpool, f);
2341 int interface = 0;
2342 RESERVE (5);
2343 if (METHOD_STATIC (f))
2344 OP1 (OPCODE_invokestatic);
2345 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2346 || METHOD_PRIVATE (f))
2347 OP1 (OPCODE_invokespecial);
2348 else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f))))
2349 {
2350 OP1 (OPCODE_invokeinterface);
2351 interface = 1;
2352 }
2353 else
2354 OP1 (OPCODE_invokevirtual);
2355 OP2 (index);
2356 f = TREE_TYPE (TREE_TYPE (f));
2357 if (TREE_CODE (f) != VOID_TYPE)
2358 {
2359 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2360 if (target == IGNORE_TARGET)
2361 emit_pop (size, state);
2362 else
2363 NOTE_PUSH (size);
2364 }
2365 if (interface)
2366 {
2367 OP1 (nargs);
2368 OP1 (0);
2369 }
2370 break;
2371 }
2372 }
2373 /* fall through */
2374 notimpl:
2375 default:
2376 error("internal error - tree code not implemented: %s",
2377 tree_code_name [(int) TREE_CODE (exp)]);
2378 }
2379 }
2380
/* Resolve all branch/label relocations recorded while emitting the
   current method's bytecode, choosing short (2-byte) or wide (4-byte)
   branch encodings, and rewrite every block's chunk in place so the
   final code is contiguous and correctly targeted.  Side effect:
   sets state->code_length to the total emitted code size.  */

void
perform_relocations (state)
     struct jcf_partial *state;
{
  struct jcf_block *block;
  struct jcf_relocation *reloc;
  int pc;
  int shrink;

  /* Before we start, the pc field of each block is an upper bound on
     the block's start pc (it may be less, if previous blocks need less
     than their maximum).

     The minimum size of each block is in the block's chunk->size. */

  /* First, figure out the actual locations of each block. */
  pc = 0;
  shrink = 0;
  for (block = state->blocks; block != NULL; block = block->next)
    {
      int block_size = block->v.chunk->size;

      block->pc = pc;

      /* Optimize GOTO L; L: by getting rid of the redundant goto.
	 Assumes relocations are in reverse order.
	 NOTE(review): this dereferences block->next->pc without checking
	 block->next for NULL; presumably the final block never carries a
	 trailing goto relocation -- worth confirming.  */
      reloc = block->u.relocations;
      while (reloc != NULL
	     && reloc->kind == OPCODE_goto_w
	     && reloc->label->pc == block->next->pc
	     && reloc->offset + 2 == block_size)
	{
	  /* Drop the 3-byte goto opcode+offset entirely.  */
	  reloc = reloc->next;
	  block->u.relocations = reloc;
	  block->v.chunk->size -= 3;
	  block_size -= 3;
	  shrink += 3;
	}

      for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
	{
	  if (reloc->kind == SWITCH_ALIGN_RELOC)
	    {
	      /* We assume this is the first relocation in this block,
		 so we know its final pc.  A tableswitch/lookupswitch
		 body must be 4-byte aligned; account for the padding. */
	      int where = pc + reloc->offset;
	      int pad = ((where + 3) & ~3) - where;
	      block_size += pad;
	    }
	  else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
	    {
	      /* A conditional branch (kind < -1) or goto/jsr
		 (kind > BLOCK_START_RELOC) whose width is undecided.  */
	      int delta = reloc->label->pc - (pc + reloc->offset - 1);
	      int expand = reloc->kind > 0 ? 2 : 5;

	      /* Forward branches may still shrink by the bytes saved
		 so far; only then is the pessimistic delta adjusted.  */
	      if (delta > 0)
		delta -= shrink;
	      if (delta >= -32768 && delta <= 32767)
		{
		  /* Fits in a 16-bit offset: keep the short form and
		     bank the saved bytes.  kind == -1 marks "short". */
		  shrink += expand;
		  reloc->kind = -1;
		}
	      else
		block_size += expand;
	    }
	}
      pc += block_size;
    }

  /* Second pass: rewrite each block's byte buffer, patching branch
     offsets and inserting wide forms or alignment padding as decided
     above.  */
  for (block = state->blocks; block != NULL; block = block->next)
    {
      struct chunk *chunk = block->v.chunk;
      int old_size = chunk->size;
      int next_pc = block->next == NULL ? pc : block->next->pc;
      int new_size = next_pc - block->pc;
      unsigned char *new_ptr;
      unsigned char *old_buffer = chunk->data;
      unsigned char *old_ptr = old_buffer + old_size;
      if (new_size != old_size)
	{
	  /* The block grew (wide branches/padding); allocate a fresh
	     buffer for the rewritten bytes.  */
	  chunk->data = (unsigned char *)
	    obstack_alloc (state->chunk_obstack, new_size);
	  chunk->size = new_size;
	}
      new_ptr = chunk->data + new_size;

      /* We do the relocations from back to front, because
	 the relocations are in reverse order. */
      for (reloc = block->u.relocations; ; reloc = reloc->next)
	{
	  /* new_ptr and old_ptr point into the old and new buffers,
	     respectively.  (If no relocations cause the buffer to
	     grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
	     The bytes at higher address have been copied and relocations
	     handled; those at lower addresses remain to process. */

	  /* Lower old index of piece to be copied with no relocation.
	     I.e. high index of the first piece that does need relocation. */
	  int start = reloc == NULL ? 0
	    : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
	    : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
	    ? reloc->offset + 4
	    : reloc->offset + 2;
	  int32 value;
	  int new_offset;
	  int n = (old_ptr - old_buffer) - start;
	  new_ptr -= n;
	  old_ptr -= n;
	  if (n > 0)
	    memcpy (new_ptr, old_ptr, n);
	  if (old_ptr == old_buffer)
	    break;

	  /* Position (within the block) of the branch opcode being
	     patched, used to compute the pc-relative offset.  */
	  new_offset = new_ptr - chunk->data;
	  new_offset -= (reloc->kind == -1 ? 2 : 4);
	  if (reloc->kind == 0)
	    {
	      /* 4-byte absolute-within-method operand (switch entry).  */
	      old_ptr -= 4;
	      value = GET_u4 (old_ptr);
	    }
	  else if (reloc->kind == BLOCK_START_RELOC)
	    {
	      /* Operand becomes the block's own start pc.  */
	      old_ptr -= 4;
	      value = 0;
	      new_offset = 0;
	    }
	  else if (reloc->kind == SWITCH_ALIGN_RELOC)
	    {
	      /* Insert zero padding so the switch body is 4-aligned.  */
	      int where = block->pc + reloc->offset;
	      int pad = ((where + 3) & ~3) - where;
	      while (--pad >= 0)
		*--new_ptr = 0;
	      continue;
	    }
	  else
	    {
	      /* Short branch: 2-byte operand.  */
	      old_ptr -= 2;
	      value = GET_u2 (old_ptr);
	    }
	  /* Convert the operand to a pc-relative offset and store it
	     big-endian, low byte first since we write backwards.  */
	  value += reloc->label->pc - (block->pc + new_offset);
	  *--new_ptr = (unsigned char) value;  value >>= 8;
	  *--new_ptr = (unsigned char) value;  value >>= 8;
	  if (reloc->kind != -1)
	    {
	      /* Wide form: two more operand bytes.  */
	      *--new_ptr = (unsigned char) value;  value >>= 8;
	      *--new_ptr = (unsigned char) value;
	    }
	  if (reloc->kind > BLOCK_START_RELOC)
	    {
	      /* Convert: OP TARGET to: OP_w TARGET;  (OP is goto or jsr). */
	      --old_ptr;
	      *--new_ptr = reloc->kind;
	    }
	  else if (reloc->kind < -1)
	    {
	      /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
	      --old_ptr;
	      *--new_ptr = OPCODE_goto_w;
	      *--new_ptr = 3;
	      *--new_ptr = 0;
	      *--new_ptr = - reloc->kind;
	    }
	}
      if (new_ptr != chunk->data)
	fatal ("internal error - perform_relocations");
    }
  state->code_length = pc;
}
2548
2549 void
2550 init_jcf_state (state, work)
2551 struct jcf_partial *state;
2552 struct obstack *work;
2553 {
2554 state->chunk_obstack = work;
2555 state->first = state->chunk = NULL;
2556 CPOOL_INIT (&state->cpool);
2557 BUFFER_INIT (&state->localvars);
2558 BUFFER_INIT (&state->bytecode);
2559 }
2560
2561 void
2562 init_jcf_method (state, method)
2563 struct jcf_partial *state;
2564 tree method;
2565 {
2566 state->current_method = method;
2567 state->blocks = state->last_block = NULL;
2568 state->linenumber_count = 0;
2569 state->first_lvar = state->last_lvar = NULL;
2570 state->lvar_count = 0;
2571 state->labeled_blocks = NULL;
2572 state->code_length = 0;
2573 BUFFER_RESET (&state->bytecode);
2574 BUFFER_RESET (&state->localvars);
2575 state->code_SP = 0;
2576 state->code_SP_max = 0;
2577 state->handlers = NULL;
2578 state->last_handler = NULL;
2579 state->num_handlers = 0;
2580 state->num_finalizers = 0;
2581 state->return_value_decl = NULL_TREE;
2582 }
2583
/* Release the storage used by STATE: finish (free) the constant
   pool, then free every chunk allocated on the chunk obstack since
   init_jcf_state recorded state->first.  */

void
release_jcf_state (state)
     struct jcf_partial *state;
{
  CPOOL_FINISH (&state->cpool);
  /* obstack_free releases state->first and everything allocated
     after it on the obstack.  */
  obstack_free (state->chunk_obstack, state->first);
}
2591
/* Generate and return a list of chunks containing the class CLAS
   in the .class file representation.  The list can be written to a
   .class file using write_chunks.  Allocate chunks from obstack WORK.
   Note: the PUT2/PUT4 macros write through the local PTR into the
   most recently appended chunk, so the ordering of statements below
   is significant.  */

struct chunk *
generate_classfile (clas, state)
     tree clas;
     struct jcf_partial *state;
{
  struct chunk *cpool_chunk;
  char *source_file;
  char *ptr;
  int i;
  char *fields_count_ptr;
  int fields_count = 0;
  char *methods_count_ptr;
  int methods_count = 0;
  static tree SourceFile_node = NULL_TREE;
  tree part;
  int total_supers
    = clas == object_type_node ? 0
    : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));

  /* Class file header.  */
  ptr = append_chunk (NULL, 8, state);
  PUT4 (0xCafeBabe); /* Magic number */
  PUT2 (3); /* Minor version */
  PUT2 (45); /* Major version */

  /* Reserve an empty chunk for the constant pool; its contents are
     filled in at the very end, once all constants are known.  */
  append_chunk (NULL, 0, state);
  cpool_chunk = state->chunk;

  /* Next allocate the chunk containing access_flags through fields_count. */
  if (clas == object_type_node)
    i = 10;
  else
    i = 8 + 2 * total_supers;
  ptr = append_chunk (NULL, i, state);
  i = get_access_flags (TYPE_NAME (clas)) | ACC_SUPER;
  PUT2 (i); /* access_flags */
  i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
  if (clas == object_type_node)
    {
      /* java.lang.Object has no superclass and no interfaces.  */
      PUT2(0); /* super_class */
      PUT2(0); /* interfaces_count */
    }
  else
    {
      /* Basetype 0 is the superclass; the rest are interfaces.  */
      tree basetypes = TYPE_BINFO_BASETYPES (clas);
      tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
      int j = find_class_constant (&state->cpool, base);
      PUT2 (j); /* super_class */
      PUT2 (total_supers - 1); /* interfaces_count */
      for (i = 1; i < total_supers; i++)
	{
	  base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
	  j = find_class_constant (&state->cpool, base);
	  PUT2 (j);
	}
    }
  /* Remember where fields_count goes; patched after the field loop.  */
  fields_count_ptr = ptr;

  /* Emit one field_info per named, non-artificial field.  */
  for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
    {
      int have_value;
      if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
	continue;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part); PUT2 (i);
      i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (TREE_TYPE (part)));
      PUT2(i);
      /* Static fields with an initializer get a ConstantValue attribute.  */
      have_value = DECL_INITIAL (part) != NULL_TREE && FIELD_STATIC (part);
      PUT2 (have_value); /* attributes_count */
      if (have_value)
	{
	  tree init = DECL_INITIAL (part);
	  static tree ConstantValue_node = NULL_TREE;
	  ptr = append_chunk (NULL, 8, state);
	  if (ConstantValue_node == NULL_TREE)
	    ConstantValue_node = get_identifier ("ConstantValue");
	  i = find_utf8_constant (&state->cpool, ConstantValue_node);
	  PUT2 (i); /* attribute_name_index */
	  PUT4 (2); /* attribute_length */
	  i = find_constant_index (init, state); PUT2 (i);
	}
      fields_count++;
    }
  /* Back-patch the field count.  */
  ptr = fields_count_ptr; PUT2 (fields_count);

  /* Reserve methods_count; back-patched after the method loop.  */
  ptr = methods_count_ptr = append_chunk (NULL, 2, state);
  PUT2 (0);

  for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
    {
      struct jcf_block *block;
      tree function_body = DECL_FUNCTION_BODY (part);
      tree body = function_body == NULL_TREE ? NULL_TREE
	: BLOCK_EXPR_BODY (function_body);
      /* Constructors are named "<init>" in the class file.  */
      tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
	: DECL_NAME (part);
      tree type = TREE_TYPE (part);
      tree save_function = current_function_decl;
      current_function_decl = part;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part); PUT2 (i);
      i = find_utf8_constant (&state->cpool, name); PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (type));
      PUT2 (i);
      PUT2 (body != NULL_TREE ? 1 : 0); /* attributes_count */
      if (body != NULL_TREE)
	{
	  /* Emit the Code attribute: generate bytecode, then
	     back-patch the attribute_length once sizes are known.  */
	  int code_attributes_count = 0;
	  static tree Code_node = NULL_TREE;
	  tree t;
	  char *attr_len_ptr;
	  struct jcf_handler *handler;
	  if (Code_node == NULL_TREE)
	    Code_node = get_identifier ("Code");
	  ptr = append_chunk (NULL, 14, state);
	  i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
	  attr_len_ptr = ptr;
	  init_jcf_method (state, part);
	  get_jcf_label_here (state); /* Force a first block. */
	  for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
	    localvar_alloc (t, state);
	  generate_bytecode_insns (body, IGNORE_TARGET, state);
	  if (CAN_COMPLETE_NORMALLY (body))
	    {
	      /* Only void methods may fall off the end.  */
	      if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
		abort();
	      RESERVE (1);
	      OP1 (OPCODE_return);
	    }
	  for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
	    localvar_free (t, state);
	  if (state->return_value_decl != NULL_TREE)
	    localvar_free (state->return_value_decl, state);
	  finish_jcf_block (state);
	  perform_relocations (state);

	  /* Compute and back-patch attribute_length: fixed header +
	     code + exception table + optional debug attributes.  */
	  ptr = attr_len_ptr;
	  i = 8 + state->code_length + 4 + 8 * state->num_handlers;
	  if (state->linenumber_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 4 * state->linenumber_count;
	    }
	  if (state->lvar_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 10 * state->lvar_count;
	    }
	  PUT4 (i); /* attribute_length */
	  PUT2 (state->code_SP_max); /* max_stack */
	  PUT2 (localvar_max); /* max_locals */
	  PUT4 (state->code_length);

	  /* Emit the exception table. */
	  ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
	  PUT2 (state->num_handlers); /* exception_table_length */
	  handler = state->handlers;
	  for (; handler != NULL; handler = handler->next)
	    {
	      int type_index;
	      PUT2 (handler->start_label->pc);
	      PUT2 (handler->end_label->pc);
	      PUT2 (handler->handler_label->pc);
	      /* catch_type 0 means "catch everything" (finally).  */
	      if (handler->type == NULL_TREE)
		type_index = 0;
	      else
		type_index = find_class_constant (&state->cpool,
						  handler->type);
	      PUT2 (type_index);
	    }

	  ptr = append_chunk (NULL, 2, state);
	  PUT2 (code_attributes_count);

	  /* Write the LineNumberTable attribute. */
	  if (state->linenumber_count > 0)
	    {
	      static tree LineNumberTable_node = NULL_TREE;
	      ptr = append_chunk (NULL, 8 + 4 * state->linenumber_count, state);
	      if (LineNumberTable_node == NULL_TREE)
		LineNumberTable_node = get_identifier ("LineNumberTable");
	      i = find_utf8_constant (&state->cpool, LineNumberTable_node);
	      PUT2 (i); /* attribute_name_index */
	      i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
	      i = state->linenumber_count; PUT2 (i);
	      for (block = state->blocks; block != NULL; block = block->next)
		{
		  int line = block->linenumber;
		  if (line > 0)
		    {
		      PUT2 (block->pc);
		      PUT2 (line);
		    }
		}
	    }

	  /* Write the LocalVariableTable attribute. */
	  if (state->lvar_count > 0)
	    {
	      static tree LocalVariableTable_node = NULL_TREE;
	      struct localvar_info *lvar = state->first_lvar;
	      ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
	      if (LocalVariableTable_node == NULL_TREE)
		LocalVariableTable_node = get_identifier("LocalVariableTable");
	      i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
	      PUT2 (i); /* attribute_name_index */
	      i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
	      i = state->lvar_count; PUT2 (i);
	      for ( ; lvar != NULL; lvar = lvar->next)
		{
		  tree name = DECL_NAME (lvar->decl);
		  tree sig = build_java_signature (TREE_TYPE (lvar->decl));
		  i = lvar->start_label->pc; PUT2 (i);
		  i = lvar->end_label->pc - i; PUT2 (i); /* length = end - start */
		  i = find_utf8_constant (&state->cpool, name); PUT2 (i);
		  i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
		  i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
		}
	    }
	}
      methods_count++;
      current_function_decl = save_function;
    }
  /* Back-patch the method count.  */
  ptr = methods_count_ptr; PUT2 (methods_count);

  /* Strip any directory prefix to get the source file's basename.  */
  source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
  for (ptr = source_file; ; ptr++)
    {
      char ch = *ptr;
      if (ch == '\0')
	break;
      if (ch == '/' || ch == '\\')
	source_file = ptr+1;
    }
  ptr = append_chunk (NULL, 10, state);
  PUT2 (1); /* attributes_count */

  /* Generate the SourceFile attribute. */
  if (SourceFile_node == NULL_TREE)
    SourceFile_node = get_identifier ("SourceFile");
  i = find_utf8_constant (&state->cpool, SourceFile_node);
  PUT2 (i); /* attribute_name_index */
  PUT4 (2);
  i = find_utf8_constant (&state->cpool, get_identifier (source_file));
  PUT2 (i);

  /* Now, finally, generate the contents of the constant pool chunk
     reserved at the top, since all constants have been interned.  */
  i = count_constant_pool_bytes (&state->cpool);
  ptr = obstack_alloc (state->chunk_obstack, i);
  cpool_chunk->data = ptr;
  cpool_chunk->size = i;
  write_constant_pool (&state->cpool, ptr, i);
  return state->first;
}
2850
/* Return a freshly-allocated (xmalloc) file name in which the
   `.class' file for CLAS should be written.  If
   jcf_write_base_directory is set, the name is relative to that
   directory; otherwise the file goes in the same directory as the
   corresponding .java source.  Missing intermediate directories are
   created as needed.  Returns NULL if a directory could not be made
   (although fatal normally exits before that return is reached).  */

static char *
make_class_file_name (clas)
     tree clas;
{
  char *cname, *dname, *slash, *r;
  struct stat sb;

  /* The fully-qualified class name with '.' replaced by the
     directory separator, plus a ".class" suffix.  */
  cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
						"", '.', DIR_SEPARATOR,
						".class"));
  if (jcf_write_base_directory == NULL)
    {
      /* Make sure we put the class file into the .java file's
	 directory, and not into some subdirectory thereof. */
      char *t;
      dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
      slash = strrchr (dname, DIR_SEPARATOR);
      if (! slash)
	{
	  /* Source file has no directory component; use ".".  */
	  dname = ".";
	  slash = dname + 1;
	}
      /* Keep only the basename of the class file.  */
      t = strrchr (cname, DIR_SEPARATOR);
      if (t)
	cname = t + 1;
    }
  else
    {
      dname = jcf_write_base_directory;
      slash = dname + strlen (dname);
    }

  /* Build DNAME + separator + CNAME + NUL.  The +2 covers the
     separator and the terminating NUL.  */
  r = xmalloc (slash - dname + strlen (cname) + 2);
  strncpy (r, dname, slash - dname);
  r[slash - dname] = DIR_SEPARATOR;
  strcpy (&r[slash - dname + 1], cname);

  /* We try to make new directories when we need them.  We only do
     this for directories which "might not" exist.  For instance, we
     assume the `-d' directory exists, but we don't assume that any
     subdirectory below it exists.  It might be worthwhile to keep
     track of which directories we've created to avoid gratuitous
     stat()s. */
  dname = r + (slash - dname) + 1;
  while (1)
    {
      /* Temporarily truncate R at each separator and ensure that
	 directory prefix exists.  */
      cname = strchr (dname, DIR_SEPARATOR);
      if (cname == NULL)
	break;
      *cname = '\0';
      if (stat (r, &sb) == -1)
	{
	  /* Try to make it. */
	  if (mkdir (r, 0755) == -1)
	    {
	      /* NOTE(review): fatal normally does not return, so the
		 free and NULL return are defensive dead code.  */
	      fatal ("failed to create directory `%s'", r);
	      free (r);
	      return NULL;
	    }
	}
      *cname = DIR_SEPARATOR;
      /* Skip consecutive separators. */
      for (dname = cname + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
	;
    }

  return r;
}
2919
/* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
   The output .class file name is make_class_file_name(CLAS). */
2922
2923 void
2924 write_classfile (clas)
2925 tree clas;
2926 {
2927 struct obstack *work = &temporary_obstack;
2928 struct jcf_partial state[1];
2929 char *class_file_name = make_class_file_name (clas);
2930 struct chunk *chunks;
2931
2932 if (class_file_name != NULL)
2933 {
2934 FILE* stream = fopen (class_file_name, "wb");
2935 if (stream == NULL)
2936 fatal ("failed to open `%s' for writing", class_file_name);
2937 jcf_dependency_add_target (class_file_name);
2938 init_jcf_state (state, work);
2939 chunks = generate_classfile (clas, state);
2940 write_chunks (stream, chunks);
2941 if (fclose (stream))
2942 fatal ("failed to close after writing `%s'", class_file_name);
2943 free (class_file_name);
2944 }
2945 release_jcf_state (state);
2946 }
2947
2948 /* TODO:
2949 string concatenation
2950 synchronized statement
2951 */
This page took 0.176197 seconds and 6 git commands to generate.