]> gcc.gnu.org Git - gcc.git/blob - gcc/java/jcf-write.c
jcf-write.c (generate_bytecode_insns): Don't free sw_state.cases.
[gcc.git] / gcc / java / jcf-write.c
1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
19
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
23
24 #include "config.h"
25 #include "system.h"
26 #include <string.h>
27 #include "tree.h"
28 #include "java-tree.h"
29 #include "jcf.h"
30 #include "obstack.h"
31 #undef AND
32 #include "rtl.h"
33 #include "java-opcodes.h"
34 #include "parse.h" /* for BLOCK_EXPR_BODY */
35 #include "buffer.h"
36
37 #include <sys/stat.h>
38
39 #ifndef DIR_SEPARATOR
40 #define DIR_SEPARATOR '/'
41 #endif
42
/* Obstack used for temporary allocations while assembling the class file;
   defined elsewhere in the Java front end.  */
extern struct obstack temporary_obstack;

/* Base directory in which `.class' files should be written.
   NULL means to put the file into the same directory as the
   corresponding .java file.  */
char *jcf_write_base_directory = NULL;
49
/* Make sure bytecode.data is big enough for at least N more bytes.
   Must be invoked before emitting instruction bytes with OP1/OP2/OP4,
   since those macros do not grow the buffer themselves.  */

#define RESERVE(N) \
  do { CHECK_OP(state); \
    if (state->bytecode.ptr + (N) > state->bytecode.limit) \
      buffer_grow (&state->bytecode, N); } while (0)

/* Add a 1-byte instruction/operand I to bytecode.data,
   assuming space has already been RESERVE'd.  */

#define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))

/* Like OP1, but I is a 2-byte big endian integer.  */

#define OP2(I) \
  do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Like OP1, but I is a 4-byte big endian integer.  */

#define OP4(I) \
  do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
       OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)

/* Macro to call each time we push I words on the JVM stack.
   Also tracks the high-water mark used for the method's max_stack.  */

#define NOTE_PUSH(I) \
  do { state->code_SP += (I); \
    if (state->code_SP > state->code_SP_max) \
      state->code_SP_max = state->code_SP; } while (0)

/* Macro to call each time we pop I words from the JVM stack.
   A negative depth indicates a code-generation bug, hence the abort.  */

#define NOTE_POP(I) \
  do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
84
/* A chunk or segment of a .class file.  */

struct chunk
{
  /* The next segment of this .class file.  */
  struct chunk *next;

  /* The actual data in this segment to be written to the .class file.  */
  unsigned char *data;

  /* The size of the segment to be written to the .class file.  */
  int size;
};
98
/* Each "block" represents a label plus the bytecode instructions following.
   There may be branches out of the block, but no incoming jumps, except
   to the beginning of the block.  */

struct jcf_block
{
  /* For blocks that are defined, the next block (in pc order).
     For blocks that are the not-yet-defined end label of a
     LABELED_BLOCK_EXPR, this is the next (outer) such end label, in a
     stack headed by labeled_blocks in jcf_partial.  */
  struct jcf_block *next;

  /* Until perform_relocations is finished, this is the maximum possible
     value of the bytecode offset at the beginning of this block.
     After perform_relocations, it is the actual offset (pc).  */
  int pc;

  /* Source line number for this block's label, or -1 if none was noted.  */
  int linenumber;

  /* After finish_jcf_block is called, the actual instructions contained
     in this block.  Before then NULL, and the instructions are in
     state->bytecode.  */
  struct chunk *chunk;

  union {
    /* Set of relocations (in reverse offset order) for this block.  */
    struct jcf_relocation *relocations;

    /* If this block is that of the not-yet-defined end label of
       a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.  */
    tree labeled_block;
  } u;
};
131
/* A "relocation" type for the 0-3 bytes of padding at the start
   of a tableswitch or a lookupswitch.  */
#define SWITCH_ALIGN_RELOC 4

/* A relocation type for the labels in a tableswitch or a lookupswitch;
   these are relative to the start of the instruction, but (due to
   the 0-3 bytes of padding), we don't know the offset before relocation.  */
#define BLOCK_START_RELOC 1

/* One pending branch-offset (or padding) fixup within a jcf_block;
   resolved once final block pcs are known.  */

struct jcf_relocation
{
  /* Next relocation for the current jcf_block.  */
  struct jcf_relocation *next;

  /* The (byte) offset within the current block that needs to be relocated.  */
  HOST_WIDE_INT offset;

  /* 0 if offset is a 4-byte relative offset.
     4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
     for proper alignment in tableswitch/lookupswitch instructions.
     1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
     to the start of the containing block.
     -1 if offset is a 2-byte relative offset.
     < -1 if offset is the address of an instruction with a 2-byte offset
     that does not have a corresponding 4-byte offset version, in which
     case the absolute value of kind is the inverted opcode.
     > 4 if offset is the address of an instruction (such as jsr) with a
     2-byte offset that does have a corresponding 4-byte offset version,
     in which case kind is the opcode of the 4-byte version (such as jsr_w).  */
  int kind;

  /* The label the relocation wants to actually transfer to.  */
  struct jcf_block *label;
};
166
/* State for single catch clause.  */

struct jcf_handler
{
  /* Next handler in the method's exception-table chain.  */
  struct jcf_handler *next;

  /* Bytecode range protected by this handler...  */
  struct jcf_block *start_label;
  struct jcf_block *end_label;
  /* ...and the label of the handler code itself.  */
  struct jcf_block *handler_label;

  /* The sub-class of Throwable handled, or NULL_TREE (for finally).  */
  tree type;
};
180
/* State for the current switch statement.  */

struct jcf_switch_state
{
  /* Enclosing switch, if any (switches can nest).  */
  struct jcf_switch_state *prev;
  /* Target for values that match no case.  */
  struct jcf_block *default_label;

  /* Chain of relocations, one per case label.  */
  struct jcf_relocation *cases;
  int num_cases;
  /* Smallest and largest case values seen, used to pick
     tableswitch vs. lookupswitch.  */
  HOST_WIDE_INT min_case, max_case;
};
192
/* This structure is used to contain the various pieces that will
   become a .class file.  */

struct jcf_partial
{
  /* Head and tail of the chain of segments making up the .class file.  */
  struct chunk *first;
  struct chunk *chunk;
  /* Obstack from which chunks, blocks, relocations etc. are allocated.  */
  struct obstack *chunk_obstack;
  /* The method currently being compiled.  */
  tree current_method;

  /* List of basic blocks for the current method.  */
  struct jcf_block *blocks;
  struct jcf_block *last_block;

  /* Chain of local-variable debugging info for the current method,
     and its length.  */
  struct localvar_info *first_lvar;
  struct localvar_info *last_lvar;
  int lvar_count;

  /* Constant pool being accumulated for the class.  */
  CPool cpool;

  /* Number of LineNumberTable entries for the current method.  */
  int linenumber_count;

  /* Until perform_relocations, this is an upper bound on the number
     of bytes (so far) in the instructions for the current method.  */
  int code_length;

  /* Stack of undefined ending labels for LABELED_BLOCK_EXPR.  */
  struct jcf_block *labeled_blocks;

  /* The current stack size (stack pointer) in the current method.  */
  int code_SP;

  /* The largest extent of stack size (stack pointer) in the current method.  */
  int code_SP_max;

  /* Contains a mapping from local var slot number to localvar_info.  */
  struct buffer localvars;

  /* The buffer allocated for bytecode for the current jcf_block.  */
  struct buffer bytecode;

  /* Chain of exception handlers for the current method.  */
  struct jcf_handler *handlers;

  /* Last element in handlers chain.  */
  struct jcf_handler *last_handler;

  /* Number of exception handlers for the current method.  */
  int num_handlers;

  /* Information about the current switch statement.  */
  struct jcf_switch_state *sw_state;
};

static void generate_bytecode_insns PROTO ((tree, int, struct jcf_partial *));
248
/* Utility macros for appending (big-endian) data to a buffer.
   We assume a local variable 'ptr' points into where we want to
   write next, and we assume enough space has been allocated.  */

#ifdef ENABLE_CHECKING
/* Verify that writing I bytes at PTR stays within the current chunk.  */
int
CHECK_PUT(ptr, state, i)
     void *ptr;
     struct jcf_partial *state;
     int i;
{
  if (ptr < state->chunk->data
      || (char*)ptr + i > state->chunk->data + state->chunk->size)
    fatal ("internal error - CHECK_PUT failed");
  return 0;
}
#else
#define CHECK_PUT(PTR, STATE, I) 0
#endif

/* Append 1, 2, 4, or N bytes at `ptr', advancing it.  Multi-byte
   values are emitted big-endian, as the class-file format requires.  */
#define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
#define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
#define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
#define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
273
274 \f
275 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
276 Set the data and size fields to DATA and SIZE, respectively.
277 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
278
279 struct chunk *
280 alloc_chunk (last, data, size, work)
281 struct chunk *last;
282 unsigned char *data;
283 int size;
284 struct obstack *work;
285 {
286 struct chunk *chunk = (struct chunk *)
287 obstack_alloc (work, sizeof(struct chunk));
288
289 if (data == NULL && size > 0)
290 data = obstack_alloc (work, size);
291
292 chunk->next = NULL;
293 chunk->data = data;
294 chunk->size = size;
295 if (last != NULL)
296 last->next = chunk;
297 return chunk;
298 }
299
#ifdef ENABLE_CHECKING
/* Verify that the bytecode pointer has not run past the buffer limit.
   (Equality is allowed: ptr == limit just means the buffer is full.)  */
int
CHECK_OP(struct jcf_partial *state)
{
  if (state->bytecode.ptr > state->bytecode.limit)
    {
      fatal("internal error - CHECK_OP failed");
    }
  return 0;
}
#else
#define CHECK_OP(STATE) 0
#endif
313
314 unsigned char *
315 append_chunk (data, size, state)
316 unsigned char *data;
317 int size;
318 struct jcf_partial *state;
319 {
320 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
321 if (state->first == NULL)
322 state->first = state->chunk;
323 return state->chunk->data;
324 }
325
/* Like append_chunk, but copy SIZE bytes from DATA into a freshly
   allocated buffer instead of referencing DATA directly.  */

void
append_chunk_copy (data, size, state)
     unsigned char *data;
     int size;
     struct jcf_partial *state;
{
  unsigned char *dest = append_chunk (NULL, size, state);
  memcpy (dest, data, size);
}
335 \f
336 struct jcf_block *
337 gen_jcf_label (state)
338 struct jcf_partial *state;
339 {
340 struct jcf_block *block = (struct jcf_block *)
341 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
342 block->next = NULL;
343 block->linenumber = -1;
344 block->pc = -1;
345 return block;
346 }
347
/* Finish the current block: move the bytecode accumulated in
   state->bytecode into a chunk attached to state->last_block, and
   raise state->code_length to a worst-case upper bound that assumes
   every relocation in the block expands to its maximum size.  */

void
finish_jcf_block (state)
     struct jcf_partial *state;
{
  struct jcf_block *block = state->last_block;
  struct jcf_relocation *reloc;
  int code_length = BUFFER_LENGTH (&state->bytecode);
  int pc = state->code_length;
  append_chunk_copy (state->bytecode.data, code_length, state);
  BUFFER_RESET (&state->bytecode);
  block->chunk = state->chunk;

  /* Calculate code_length to the maximum value it can have.  */
  pc += block->chunk->size;
  for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
    {
      int kind = reloc->kind;
      if (kind == SWITCH_ALIGN_RELOC)
	pc += 3;  /* Up to 3 bytes of tableswitch/lookupswitch padding.  */
      else if (kind > BLOCK_START_RELOC)
	pc += 2; /* 2-byte offset may grow to 4-byte offset */
      else if (kind < -1)
	pc += 5; /* May need to add a goto_w.  */
    }
  state->code_length = pc;
}
374
375 void
376 define_jcf_label (label, state)
377 struct jcf_block *label;
378 struct jcf_partial *state;
379 {
380 if (state->last_block != NULL)
381 finish_jcf_block (state);
382 label->pc = state->code_length;
383 if (state->blocks == NULL)
384 state->blocks = label;
385 else
386 state->last_block->next = label;
387 state->last_block = label;
388 label->next = NULL;
389 label->u.relocations = NULL;
390 }
391
392 struct jcf_block *
393 get_jcf_label_here (state)
394 struct jcf_partial *state;
395 {
396 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
397 return state->last_block;
398 else
399 {
400 struct jcf_block *label = gen_jcf_label (state);
401 define_jcf_label (label, state);
402 return label;
403 }
404 }
405
406 /* Note a line number entry for the current PC and given LINE. */
407
408 void
409 put_linenumber (line, state)
410 int line;
411 struct jcf_partial *state;
412 {
413 (get_jcf_label_here (state))->linenumber = line;
414 state->linenumber_count++;
415 }
416
417 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
418 in the range (START_LABEL, END_LABEL). */
419
420 static struct jcf_handler *
421 alloc_handler (start_label, end_label, state)
422 struct jcf_block *start_label;
423 struct jcf_block *end_label;
424 struct jcf_partial *state;
425 {
426 struct jcf_handler *handler = (struct jcf_handler *)
427 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
428 handler->start_label = start_label;
429 handler->end_label = end_label;
430 handler->handler_label = get_jcf_label_here (state);
431 if (state->handlers == NULL)
432 state->handlers = handler;
433 else
434 state->last_handler->next = handler;
435 state->last_handler = handler;
436 handler->next = NULL;
437 state->num_handlers++;
438 return handler;
439 }
440
441 \f
/* The index of jvm local variable allocated for this DECL.
   This is assigned when generating .class files;
   contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
   (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */

#define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)

/* Debugging information for one live range of one JVM local variable.  */

struct localvar_info
{
  /* Next entry in the per-method debugging-info chain.  */
  struct localvar_info *next;

  /* The declaration occupying the slot.  */
  tree decl;
  /* Labels delimiting the bytecode range where the variable is live.  */
  struct jcf_block *start_label;
  struct jcf_block *end_label;
};

/* Vector mapping a JVM local slot number to its localvar_info.  */
#define localvar_buffer ((struct localvar_info**) state->localvars.data)
/* Number of slots currently allocated in localvar_buffer.  */
#define localvar_max \
  ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
461
/* Assign a JVM local-variable slot to DECL, recording the index in
   DECL_LOCAL_INDEX and, for named variables, chaining debugging info
   for the LocalVariableTable.  Wide (two-word) types take two
   consecutive slots; the second slot is marked with ~0.
   NOTE(review): declared `int' but never returns a value; callers
   apparently ignore the result -- confirm before relying on it.  */

int
localvar_alloc (decl, state)
     tree decl;
     struct jcf_partial *state;
{
  struct jcf_block *start_label = get_jcf_label_here (state);
  int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
  int index;
  register struct localvar_info *info;
  register struct localvar_info **ptr = localvar_buffer;
  register struct localvar_info **limit
    = (struct localvar_info**) state->localvars.ptr;
  /* First-fit scan for a free slot (a free adjacent pair, if wide).  */
  for (index = 0; ptr < limit; index++, ptr++)
    {
      if (ptr[0] == NULL
	  && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
	break;
    }
  if (ptr == limit)
    {
      /* No free slot: extend the slot vector.  */
      buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
      ptr = (struct localvar_info**) state->localvars.data + index;
      state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
    }
  info = (struct localvar_info *)
    obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
  ptr[0] = info;
  if (wide)
    ptr[1] = (struct localvar_info *)(~0);  /* Reserve the second word.  */
  DECL_LOCAL_INDEX (decl) = index;
  info->decl = decl;
  info->start_label = start_label;

  if (DECL_NAME (decl) != NULL_TREE)
    {
      /* Generate debugging info.  */
      info->next = NULL;
      if (state->last_lvar != NULL)
	state->last_lvar->next = info;
      else
	state->first_lvar = info;
      state->last_lvar = info;
      state->lvar_count++;
    }
}
507
508 int
509 localvar_free (decl, state)
510 tree decl;
511 struct jcf_partial *state;
512 {
513 struct jcf_block *end_label = get_jcf_label_here (state);
514 int index = DECL_LOCAL_INDEX (decl);
515 register struct localvar_info **ptr = &localvar_buffer [index];
516 register struct localvar_info *info = *ptr;
517 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
518 int i;
519
520 info->end_label = end_label;
521
522 if (info->decl != decl)
523 abort ();
524 ptr[0] = NULL;
525 if (wide)
526 {
527 if (ptr[1] != (struct localvar_info *)(~0))
528 abort ();
529 ptr[1] = NULL;
530 }
531 }
532
533 \f
/* Values for the TARGET parameter of generate_bytecode_insns:
   leave the expression's value on the JVM stack, or discard it.  */
#define STACK_TARGET 1
#define IGNORE_TARGET 2
536
537 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
538 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
539
540 int
541 get_access_flags (decl)
542 tree decl;
543 {
544 int flags = 0;
545 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
546 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
547 flags |= ACC_PUBLIC;
548 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
549 flags |= ACC_PUBLIC;
550 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
551 {
552 if (TREE_PROTECTED (decl))
553 flags |= ACC_PROTECTED;
554 if (TREE_PRIVATE (decl))
555 flags |= ACC_PRIVATE;
556 }
557 else if (TREE_CODE (decl) == TYPE_DECL)
558 {
559 if (CLASS_SUPER (decl))
560 flags |= ACC_SUPER;
561 if (CLASS_ABSTRACT (decl))
562 flags |= ACC_ABSTRACT;
563 if (CLASS_INTERFACE (decl))
564 flags |= ACC_INTERFACE;
565 }
566 else
567 fatal ("internal error - bad argument to get_access_flags");
568 if (TREE_CODE (decl) == FUNCTION_DECL)
569 {
570 if (METHOD_NATIVE (decl))
571 flags |= ACC_NATIVE;
572 if (METHOD_STATIC (decl))
573 flags |= ACC_STATIC;
574 if (METHOD_FINAL (decl))
575 flags |= ACC_FINAL;
576 if (METHOD_SYNCHRONIZED (decl))
577 flags |= ACC_SYNCHRONIZED;
578 if (METHOD_ABSTRACT (decl))
579 flags |= ACC_ABSTRACT;
580 }
581 if (isfield)
582 {
583 if (FIELD_STATIC (decl))
584 flags |= ACC_STATIC;
585 if (FIELD_VOLATILE (decl))
586 flags |= ACC_VOLATILE;
587 if (FIELD_TRANSIENT (decl))
588 flags |= ACC_TRANSIENT;
589 }
590 return flags;
591 }
592
593 /* Write the list of segments starting at CHUNKS to STREAM. */
594
595 void
596 write_chunks (stream, chunks)
597 FILE* stream;
598 struct chunk *chunks;
599 {
600 for (; chunks != NULL; chunks = chunks->next)
601 fwrite (chunks->data, chunks->size, 1, stream);
602 }
603
604 /* Push a 1-word constant in the constant pool at the given INDEX.
605 (Caller is responsible for doing NOTE_PUSH.) */
606
607 static void
608 push_constant1 (index, state)
609 int index;
610 struct jcf_partial *state;
611 {
612 RESERVE (3);
613 if (index < 256)
614 {
615 OP1 (OPCODE_ldc);
616 OP1 (index);
617 }
618 else
619 {
620 OP1 (OPCODE_ldc_w);
621 OP2 (index);
622 }
623 }
624
625 /* Push a 2-word constant in the constant pool at the given INDEX.
626 (Caller is responsible for doing NOTE_PUSH.) */
627
628 static void
629 push_constant2 (index, state)
630 int index;
631 struct jcf_partial *state;
632 {
633 RESERVE (3);
634 OP1 (OPCODE_ldc2_w);
635 OP2 (index);
636 }
637
638 /* Push 32-bit integer constant on VM stack.
639 Caller is responsible for doing NOTE_PUSH. */
640
641 static void
642 push_int_const (i, state)
643 HOST_WIDE_INT i;
644 struct jcf_partial *state;
645 {
646 RESERVE(3);
647 if (i >= -1 && i <= 5)
648 OP1(OPCODE_iconst_0 + i);
649 else if (i >= -128 && i < 128)
650 {
651 OP1(OPCODE_bipush);
652 OP1(i);
653 }
654 else if (i >= -32768 && i < 32768)
655 {
656 OP1(OPCODE_sipush);
657 OP2(i);
658 }
659 else
660 {
661 i = find_constant1 (&state->cpool, CONSTANT_Integer, i & 0xFFFFFFFF);
662 push_constant1 (i, state);
663 }
664 }
665
/* Push 64-bit long constant (HI, LO) on VM stack.
   Caller is responsible for doing NOTE_PUSH.  */

static void
push_long_const (lo, hi, state)
     HOST_WIDE_INT lo, hi;
     struct jcf_partial *state;
{
  if (hi == 0 && lo >= 0 && lo <= 1)
    {
      /* lconst_0 / lconst_1 are single bytes.  */
      RESERVE(1);
      OP1(OPCODE_lconst_0 + lo);
    }
  else if ((hi == 0 && lo < 32768) || (hi == -1 && lo >= -32768))
    {
      /* Fits in a 16-bit int: push as int and sign-extend with i2l.  */
      push_int_const (lo, state);
      RESERVE (1);
      OP1 (OPCODE_i2l);
    }
  else
    {
      /* Shift right 32 bits so w1 holds the high word of the value.
	 NOTE(review): w2 is never used and the low word passed is `lo';
	 this appears consistent with find_constant2 taking (high, low),
	 but confirm w2 was not intended here.  */
      HOST_WIDE_INT w1, w2;
      lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
      hi = find_constant2 (&state->cpool, CONSTANT_Long,
			   w1 & 0xFFFFFFFF, lo & 0xFFFFFFFF);
      push_constant2 (hi, state);
    }
}
694
695 static void
696 field_op (field, opcode, state)
697 tree field;
698 int opcode;
699 struct jcf_partial *state;
700 {
701 int index = find_fieldref_index (&state->cpool, field);
702 RESERVE (3);
703 OP1 (opcode);
704 OP2 (index);
705 }
706
/* Returns an integer in the range 0 (for 'int') through 4 (for object
   reference) to 7 (for 'short') which matches the pattern of how JVM
   opcodes typically depend on the operand type.
   MAX limits the result: types whose adjustment would exceed MAX are
   treated as 'int' (0).  Aborts on an unexpected type.  */

int
adjust_typed_op (type, max)
     tree type;
     int max;
{
  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case RECORD_TYPE: return 4;  /* Object reference.  */
    case BOOLEAN_TYPE:
      /* 32-bit booleans (promoted) behave as int.  */
      return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
    case CHAR_TYPE:
      return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
    case INTEGER_TYPE:
      switch (TYPE_PRECISION (type))
	{
	case 8:  return max < 5 ? 0 : 5;  /* byte */
	case 16: return max < 7 ? 0 : 7;  /* short */
	case 32: return 0;  /* int */
	case 64: return 1;  /* long */
	}
      break;
    case REAL_TYPE:
      switch (TYPE_PRECISION (type))
	{
	case 32: return 2;  /* float */
	case 64: return 3;  /* double */
	}
      break;
    default:
      break;
    }
  abort ();
}
745
746 static void
747 maybe_wide (opcode, index, state)
748 int opcode, index;
749 struct jcf_partial *state;
750 {
751 if (index >= 256)
752 {
753 RESERVE (4);
754 OP1 (OPCODE_wide);
755 OP1 (opcode);
756 OP2 (index);
757 }
758 else
759 {
760 RESERVE (2);
761 OP1 (opcode);
762 OP1 (index);
763 }
764 }
765
766 /* Compile code to duplicate with offset, where
767 SIZE is the size of the stack item to duplicate (1 or 2), abd
768 OFFSET is where to insert the result (must be 0, 1, or 2).
769 (The new words get inserted at stack[SP-size-offset].) */
770
771 static void
772 emit_dup (size, offset, state)
773 int size, offset;
774 struct jcf_partial *state;
775 {
776 int kind;
777 if (size == 0)
778 return;
779 RESERVE(1);
780 if (offset == 0)
781 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
782 else if (offset == 1)
783 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
784 else if (offset == 2)
785 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
786 else
787 abort();
788 OP1 (kind);
789 NOTE_PUSH (size);
790 }
791
792 static void
793 emit_pop (size, state)
794 int size;
795 struct jcf_partial *state;
796 {
797 RESERVE (1);
798 OP1 (OPCODE_pop - 1 + size);
799 }
800
801 static void
802 emit_iinc (var, value, state)
803 tree var;
804 int value;
805 struct jcf_partial *state;
806 {
807 int slot = DECL_LOCAL_INDEX (var);
808
809 if (value < -128 || value > 127 || slot >= 256)
810 {
811 RESERVE (6);
812 OP1 (OPCODE_wide);
813 OP1 (OPCODE_iinc);
814 OP2 (slot);
815 OP2 (value);
816 }
817 else
818 {
819 RESERVE (3);
820 OP1 (OPCODE_iinc);
821 OP1 (slot);
822 OP1 (value);
823 }
824 }
825
/* Emit a load or store for local variable VAR, selecting the
   type-specific opcode family and the short _0.._3 forms when the
   slot number permits.  Caller handles NOTE_PUSH/NOTE_POP.  */

static void
emit_load_or_store (var, opcode, state)
     tree var;    /* Variable to load from or store into.  */
     int opcode;  /* Either OPCODE_iload or OPCODE_istore.  */
     struct jcf_partial *state;
{
  tree type = TREE_TYPE (var);
  int kind = adjust_typed_op (type, 4);  /* 0..4 selects i/l/f/d/a family.  */
  int index = DECL_LOCAL_INDEX (var);
  if (index <= 3)
    {
      RESERVE (1);
      OP1 (opcode + 5 + 4 * kind + index);    /* [ilfda]{load,store}_[0123] */
    }
  else
    maybe_wide (opcode + kind, index, state);  /* [ilfda]{load,store} */
}
843
844 static void
845 emit_load (var, state)
846 tree var;
847 struct jcf_partial *state;
848 {
849 emit_load_or_store (var, OPCODE_iload, state);
850 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
851 }
852
853 static void
854 emit_store (var, state)
855 tree var;
856 struct jcf_partial *state;
857 {
858 emit_load_or_store (var, OPCODE_istore, state);
859 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
860 }
861
862 static void
863 emit_unop (opcode, type, state)
864 enum java_opcode opcode;
865 tree type;
866 struct jcf_partial *state;
867 {
868 int size = TYPE_IS_WIDE (type) ? 2 : 1;
869 RESERVE(1);
870 OP1 (opcode);
871 }
872
873 static void
874 emit_binop (opcode, type, state)
875 enum java_opcode opcode;
876 tree type;
877 struct jcf_partial *state;
878 {
879 int size = TYPE_IS_WIDE (type) ? 2 : 1;
880 RESERVE(1);
881 OP1 (opcode);
882 NOTE_POP (size);
883 }
884
885 static struct jcf_relocation *
886 emit_reloc (value, kind, target, state)
887 HOST_WIDE_INT value;
888 int kind;
889 struct jcf_block *target;
890 struct jcf_partial *state;
891 {
892 struct jcf_relocation *reloc = (struct jcf_relocation *)
893 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
894 struct jcf_block *block = state->last_block;
895 reloc->next = block->u.relocations;
896 block->u.relocations = reloc;
897 reloc->offset = BUFFER_LENGTH (&state->bytecode);
898 reloc->label = target;
899 reloc->kind = kind;
900 if (kind == 0 || kind == BLOCK_START_RELOC)
901 OP4 (value);
902 else if (kind != SWITCH_ALIGN_RELOC)
903 OP2 (value);
904 }
905
906 static void
907 emit_switch_reloc (label, state)
908 struct jcf_block *label;
909 struct jcf_partial *state;
910 {
911 emit_reloc (0, BLOCK_START_RELOC, label, state);
912 }
913
914 /* Similar to emit_switch_reloc,
915 but re-uses an existing case reloc. */
916
917 static void
918 emit_case_reloc (reloc, state)
919 struct jcf_relocation *reloc;
920 struct jcf_partial *state;
921 {
922 struct jcf_block *block = state->last_block;
923 reloc->next = block->u.relocations;
924 block->u.relocations = reloc;
925 reloc->offset = BUFFER_LENGTH (&state->bytecode);
926 reloc->kind = BLOCK_START_RELOC;
927 OP4 (0);
928 }
929
/* Emit a conditional jump to TARGET with a 2-byte relative jump offset
   The opcode is OPCODE, the inverted opcode is INV_OPCODE.  */

static void
emit_if (target, opcode, inv_opcode, state)
     struct jcf_block *target;
     int opcode, inv_opcode;
     struct jcf_partial *state;
{
  OP1 (opcode);
  /* Value is 1: the reloc is 1 byte past the start of the instruction.
     The negated inverse opcode (kind < -1) tells relocation to rewrite
     the branch as inverse-branch-around-goto_w if the 2-byte offset
     overflows; see jcf_relocation.kind.  */
  emit_reloc (1, - inv_opcode, target, state);
}
943
/* Emit an unconditional goto to TARGET with a 2-byte relative offset.  */

static void
emit_goto (target, state)
     struct jcf_block *target;
     struct jcf_partial *state;
{
  OP1 (OPCODE_goto);
  /* Value is 1: the reloc is 1 byte past the start of the instruction.
     Kind OPCODE_goto_w (> 4) lets relocation widen to goto_w if needed.  */
  emit_reloc (1, OPCODE_goto_w, target, state);
}
953
/* Emit a jsr (subroutine call) to TARGET with a 2-byte relative offset.  */

static void
emit_jsr (target, state)
     struct jcf_block *target;
     struct jcf_partial *state;
{
  OP1 (OPCODE_jsr);
  /* Value is 1: the reloc is 1 byte past the start of the instruction.
     Kind OPCODE_jsr_w (> 4) lets relocation widen to jsr_w if needed.  */
  emit_reloc (1, OPCODE_jsr_w, target, state);
}
963
964 /* Generate code to evaluate EXP. If the result is true,
965 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
966 TRUE_BRANCH_FIRST is a code geneation hint that the
967 TRUE_LABEL may follow right after this. (The idea is that we
968 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
969
970 void
971 generate_bytecode_conditional (exp, true_label, false_label,
972 true_branch_first, state)
973 tree exp;
974 struct jcf_block *true_label;
975 struct jcf_block *false_label;
976 int true_branch_first;
977 struct jcf_partial *state;
978 {
979 int kind;
980 tree exp0, exp1, type;
981 int save_SP = state->code_SP;
982 enum java_opcode op, negop;
983 switch (TREE_CODE (exp))
984 {
985 case INTEGER_CST:
986 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
987 break;
988 case COND_EXPR:
989 {
990 struct jcf_block *then_label = gen_jcf_label (state);
991 struct jcf_block *else_label = gen_jcf_label (state);
992 int save_SP_before, save_SP_after;
993 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
994 then_label, else_label, 1, state);
995 define_jcf_label (then_label, state);
996 save_SP_before = state->code_SP;
997 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
998 true_label, false_label, 1, state);
999 save_SP_after = state->code_SP;
1000 state->code_SP = save_SP_before;
1001 define_jcf_label (else_label, state);
1002 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1003 true_label, false_label,
1004 true_branch_first, state);
1005 if (state->code_SP != save_SP_after)
1006 fatal ("internal error non-matching SP");
1007 }
1008 break;
1009 case TRUTH_NOT_EXPR:
1010 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label, true_label,
1011 ! true_branch_first, state);
1012 break;
1013 case TRUTH_ANDIF_EXPR:
1014 {
1015 struct jcf_block *next_label = gen_jcf_label (state);
1016 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1017 next_label, false_label, 1, state);
1018 define_jcf_label (next_label, state);
1019 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1020 true_label, false_label, 1, state);
1021 }
1022 break;
1023 case TRUTH_ORIF_EXPR:
1024 {
1025 struct jcf_block *next_label = gen_jcf_label (state);
1026 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1027 true_label, next_label, 1, state);
1028 define_jcf_label (next_label, state);
1029 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1030 true_label, false_label, 1, state);
1031 }
1032 break;
1033 compare_1:
1034 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1035 set it to the corresponding 1-operand if<COND> instructions. */
1036 op = op - 6;
1037 /* FALLTHROUGH */
1038 compare_2:
1039 /* The opcodes with their inverses are allocated in pairs.
1040 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1041 negop = (op & 1) ? op + 1 : op - 1;
1042 compare_2_ptr:
1043 if (true_branch_first)
1044 {
1045 emit_if (false_label, negop, op, state);
1046 emit_goto (true_label, state);
1047 }
1048 else
1049 {
1050 emit_if (true_label, op, negop, state);
1051 emit_goto (false_label, state);
1052 }
1053 break;
1054 case EQ_EXPR:
1055 op = OPCODE_if_icmpeq;
1056 goto compare;
1057 case NE_EXPR:
1058 op = OPCODE_if_icmpne;
1059 goto compare;
1060 case GT_EXPR:
1061 op = OPCODE_if_icmpgt;
1062 goto compare;
1063 case LT_EXPR:
1064 op = OPCODE_if_icmplt;
1065 goto compare;
1066 case GE_EXPR:
1067 op = OPCODE_if_icmpge;
1068 goto compare;
1069 case LE_EXPR:
1070 op = OPCODE_if_icmple;
1071 goto compare;
1072 compare:
1073 exp0 = TREE_OPERAND (exp, 0);
1074 exp1 = TREE_OPERAND (exp, 1);
1075 type = TREE_TYPE (exp0);
1076 switch (TREE_CODE (type))
1077 {
1078 int opf;
1079 case POINTER_TYPE: case RECORD_TYPE:
1080 switch (TREE_CODE (exp))
1081 {
1082 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1083 case NE_EXPR: op = OPCODE_if_acmpne; break;
1084 default: abort();
1085 }
1086 if (integer_zerop (exp1) || integer_zerop (exp0))
1087 {
1088 generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp0,
1089 STACK_TARGET, state);
1090 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1091 negop = (op & 1) ? op - 1 : op + 1;
1092 NOTE_POP (1);
1093 goto compare_2_ptr;
1094 }
1095 generate_bytecode_insns (exp0, STACK_TARGET, state);
1096 generate_bytecode_insns (exp1, STACK_TARGET, state);
1097 NOTE_POP (2);
1098 goto compare_2;
1099 case REAL_TYPE:
1100 generate_bytecode_insns (exp0, STACK_TARGET, state);
1101 generate_bytecode_insns (exp1, STACK_TARGET, state);
1102 if (op == OPCODE_if_icmplt || op == op == OPCODE_if_icmple)
1103 opf = OPCODE_fcmpg;
1104 else
1105 opf = OPCODE_fcmpl;
1106 if (TYPE_PRECISION (type) > 32)
1107 {
1108 opf += 2;
1109 NOTE_POP (4);
1110 }
1111 else
1112 NOTE_POP (2);
1113 RESERVE (1);
1114 OP1 (opf);
1115 goto compare_1;
1116 case INTEGER_TYPE:
1117 if (TYPE_PRECISION (type) > 32)
1118 {
1119 generate_bytecode_insns (exp0, STACK_TARGET, state);
1120 generate_bytecode_insns (exp1, STACK_TARGET, state);
1121 NOTE_POP (4);
1122 RESERVE (1);
1123 OP1 (OPCODE_lcmp);
1124 goto compare_1;
1125 }
1126 /* FALLTHOUGH */
1127 default:
1128 if (integer_zerop (exp1))
1129 {
1130 generate_bytecode_insns (exp0, STACK_TARGET, state);
1131 NOTE_POP (1);
1132 goto compare_1;
1133 }
1134 if (integer_zerop (exp0))
1135 {
1136 switch (op)
1137 {
1138 case OPCODE_if_icmplt:
1139 case OPCODE_if_icmpge:
1140 op += 2;
1141 break;
1142 case OPCODE_if_icmpgt:
1143 case OPCODE_if_icmple:
1144 op -= 2;
1145 break;
1146 default:
1147 break;
1148 }
1149 generate_bytecode_insns (exp1, STACK_TARGET, state);
1150 NOTE_POP (1);
1151 goto compare_1;
1152 }
1153 generate_bytecode_insns (exp0, STACK_TARGET, state);
1154 generate_bytecode_insns (exp1, STACK_TARGET, state);
1155 NOTE_POP (2);
1156 goto compare_2;
1157 }
1158
1159 default:
1160 generate_bytecode_insns (exp, STACK_TARGET, state);
1161 NOTE_POP (1);
1162 if (true_branch_first)
1163 {
1164 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1165 emit_goto (true_label, state);
1166 }
1167 else
1168 {
1169 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1170 emit_goto (false_label, state);
1171 }
1172 break;
1173 }
1174 if (save_SP != state->code_SP)
1175 fatal ("internal error - SP mismatch");
1176 }
1177
1178 /* Generate bytecode for sub-expression EXP of METHOD.
1179 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1180
1181 static void
1182 generate_bytecode_insns (exp, target, state)
1183 tree exp;
1184 int target;
1185 struct jcf_partial *state;
1186 {
1187 tree type;
1188 enum java_opcode jopcode;
1189 int op;
1190 HOST_WIDE_INT value;
1191 int post_op;
1192 int size;
1193 int offset;
1194
1195 if (exp == NULL && target == IGNORE_TARGET)
1196 return;
1197
1198 type = TREE_TYPE (exp);
1199
1200 switch (TREE_CODE (exp))
1201 {
1202 case BLOCK:
1203 if (BLOCK_EXPR_BODY (exp))
1204 {
1205 tree local;
1206 tree body = BLOCK_EXPR_BODY (exp);
1207 for (local = BLOCK_EXPR_DECLS (exp); local; )
1208 {
1209 tree next = TREE_CHAIN (local);
1210 localvar_alloc (local, state);
1211 local = next;
1212 }
1213 /* Avoid deep recursion for long blocks. */
1214 while (TREE_CODE (body) == COMPOUND_EXPR)
1215 {
1216 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1217 body = TREE_OPERAND (body, 1);
1218 }
1219 generate_bytecode_insns (body, target, state);
1220 for (local = BLOCK_EXPR_DECLS (exp); local; )
1221 {
1222 tree next = TREE_CHAIN (local);
1223 localvar_free (local, state);
1224 local = next;
1225 }
1226 }
1227 break;
1228 case COMPOUND_EXPR:
1229 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1230 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1231 break;
1232 case EXPR_WITH_FILE_LOCATION:
1233 {
1234 char *saved_input_filename = input_filename;
1235 int saved_lineno = lineno;
1236 input_filename = EXPR_WFL_FILENAME (exp);
1237 lineno = EXPR_WFL_LINENO (exp);
1238 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
1239 put_linenumber (EXPR_WFL_LINENO (exp), state);
1240 generate_bytecode_insns (EXPR_WFL_NODE (exp), target, state);
1241 input_filename = saved_input_filename;
1242 lineno = saved_lineno;
1243 }
1244 break;
1245 case INTEGER_CST:
1246 if (target == IGNORE_TARGET) ; /* do nothing */
1247 else if (TREE_CODE (type) == POINTER_TYPE)
1248 {
1249 if (! integer_zerop (exp))
1250 abort();
1251 RESERVE(1);
1252 OP1 (OPCODE_aconst_null);
1253 NOTE_PUSH (1);
1254 }
1255 else if (TYPE_PRECISION (type) <= 32)
1256 {
1257 push_int_const (TREE_INT_CST_LOW (exp), state);
1258 NOTE_PUSH (1);
1259 }
1260 else
1261 {
1262 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1263 state);
1264 NOTE_PUSH (2);
1265 }
1266 break;
1267 case REAL_CST:
1268 switch (TYPE_PRECISION (type))
1269 {
1270 long words[2];
1271 int index;
1272 case 32:
1273 words[0] = etarsingle (TREE_REAL_CST (exp)) & 0xFFFFFFFF;
1274 index = find_constant1 (&state->cpool, CONSTANT_Float, words[0]);
1275 push_constant1 (index, state);
1276 NOTE_PUSH (1);
1277 break;
1278 case 64:
1279 etardouble (TREE_REAL_CST (exp), words);
1280 index = find_constant2 (&state->cpool, CONSTANT_Double,
1281 words[1-FLOAT_WORDS_BIG_ENDIAN] & 0xFFFFFFFF,
1282 words[FLOAT_WORDS_BIG_ENDIAN] & 0xFFFFFFFF);
1283 push_constant2 (index, state);
1284 NOTE_PUSH (2);
1285 break;
1286 default:
1287 abort ();
1288 }
1289 break;
1290 case STRING_CST:
1291 push_constant1 (find_string_constant (&state->cpool, exp), state);
1292 NOTE_PUSH (1);
1293 break;
1294 case VAR_DECL:
1295 if (TREE_STATIC (exp))
1296 {
1297 field_op (exp, OPCODE_getstatic, state);
1298 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1299 break;
1300 }
1301 /* ... fall through ... */
1302 case PARM_DECL:
1303 emit_load (exp, state);
1304 break;
1305 case NON_LVALUE_EXPR:
1306 case INDIRECT_REF:
1307 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1308 break;
1309 case ARRAY_REF:
1310 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1311 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1312 if (target != IGNORE_TARGET)
1313 {
1314 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1315 RESERVE(1);
1316 OP1 (jopcode);
1317 if (! TYPE_IS_WIDE (type))
1318 NOTE_POP (1);
1319 }
1320 break;
1321 case COMPONENT_REF:
1322 {
1323 tree obj = TREE_OPERAND (exp, 0);
1324 tree field = TREE_OPERAND (exp, 1);
1325 int is_static = FIELD_STATIC (field);
1326 generate_bytecode_insns (obj,
1327 is_static ? IGNORE_TARGET : target, state);
1328 if (target != IGNORE_TARGET)
1329 {
1330 if (DECL_NAME (field) == length_identifier_node && !is_static
1331 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1332 {
1333 RESERVE (1);
1334 OP1 (OPCODE_arraylength);
1335 }
1336 else
1337 {
1338 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1339 state);
1340 if (! is_static)
1341 NOTE_POP (1);
1342 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1343 }
1344 }
1345 }
1346 break;
1347 case TRUTH_ANDIF_EXPR:
1348 case TRUTH_ORIF_EXPR:
1349 case EQ_EXPR:
1350 case NE_EXPR:
1351 case GT_EXPR:
1352 case LT_EXPR:
1353 case GE_EXPR:
1354 case LE_EXPR:
1355 {
1356 struct jcf_block *then_label = gen_jcf_label (state);
1357 struct jcf_block *else_label = gen_jcf_label (state);
1358 struct jcf_block *end_label = gen_jcf_label (state);
1359 generate_bytecode_conditional (exp,
1360 then_label, else_label, 1, state);
1361 define_jcf_label (then_label, state);
1362 push_int_const (1, state);
1363 emit_goto (end_label, state);
1364 define_jcf_label (else_label, state);
1365 push_int_const (0, state);
1366 define_jcf_label (end_label, state);
1367 NOTE_PUSH (1);
1368 }
1369 break;
1370 case COND_EXPR:
1371 {
1372 struct jcf_block *then_label = gen_jcf_label (state);
1373 struct jcf_block *else_label = gen_jcf_label (state);
1374 struct jcf_block *end_label = gen_jcf_label (state);
1375 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1376 then_label, else_label, 1, state);
1377 define_jcf_label (then_label, state);
1378 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1379 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1380 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1381 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1382 emit_goto (end_label, state);
1383 define_jcf_label (else_label, state);
1384 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1385 define_jcf_label (end_label, state);
1386 }
1387 break;
1388 case CASE_EXPR:
1389 {
1390 struct jcf_switch_state *sw_state = state->sw_state;
1391 struct jcf_relocation *reloc = (struct jcf_relocation *)
1392 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1393 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1394 reloc->kind = 0;
1395 reloc->label = get_jcf_label_here (state);
1396 reloc->offset = case_value;
1397 reloc->next = sw_state->cases;
1398 sw_state->cases = reloc;
1399 if (sw_state->num_cases == 0)
1400 {
1401 sw_state->min_case = case_value;
1402 sw_state->max_case = case_value;
1403 }
1404 else
1405 {
1406 if (case_value < sw_state->min_case)
1407 sw_state->min_case = case_value;
1408 if (case_value > sw_state->max_case)
1409 sw_state->max_case = case_value;
1410 }
1411 sw_state->num_cases++;
1412 }
1413 break;
1414 case DEFAULT_EXPR:
1415 state->sw_state->default_label = get_jcf_label_here (state);
1416 break;
1417
1418 case SWITCH_EXPR:
1419 {
1420 /* The SWITCH_EXPR has three parts, generated in the following order:
1421 1. the switch_expression (the value used to select the correct case);
1422 2. the switch_body;
1423 3. the switch_instruction (the tableswitch/loopupswitch instruction.).
1424 After code generation, we will re-order then in the order 1, 3, 2.
1425 This is to avoid an extra GOTOs. */
1426 struct jcf_switch_state sw_state;
1427 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1428 struct jcf_block *body_last; /* Last block of the switch_body. */
1429 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1430 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1431 struct jcf_block *body_block;
1432 int switch_length;
1433 sw_state.prev = state->sw_state;
1434 state->sw_state = &sw_state;
1435 sw_state.cases = NULL;
1436 sw_state.num_cases = 0;
1437 sw_state.default_label = NULL;
1438 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1439 expression_last = state->last_block;
1440 body_block = get_jcf_label_here (state); /* Force a new block here. */
1441 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1442 body_last = state->last_block;
1443
1444 if (sw_state.default_label == NULL)
1445 sw_state.default_label = gen_jcf_label (state);
1446 switch_instruction = get_jcf_label_here (state);
1447
1448 if (sw_state.num_cases <= 1)
1449 {
1450 if (sw_state.num_cases == 0)
1451 {
1452 emit_pop (1, state);
1453 NOTE_POP (1);
1454 }
1455 else
1456 {
1457 push_int_const (sw_state.cases->offset, state);
1458 emit_if (sw_state.cases->label,
1459 OPCODE_ifeq, OPCODE_ifne, state);
1460 }
1461 emit_goto (sw_state.default_label, state);
1462 }
1463 else
1464 {
1465 HOST_WIDE_INT i;
1466 /* Copy the chain of relocs into a sorted array. */
1467 struct jcf_relocation **relocs = (struct jcf_relocation **)
1468 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1469 /* The relocs arrays is a buffer with a gap.
1470 The assumption is that cases will normally come in "runs". */
1471 int gap_start = 0;
1472 int gap_end = sw_state.num_cases;
1473 struct jcf_relocation *reloc;
1474 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1475 {
1476 HOST_WIDE_INT case_value = reloc->offset;
1477 while (gap_end < sw_state.num_cases)
1478 {
1479 struct jcf_relocation *end = relocs[gap_end];
1480 if (case_value <= end->offset)
1481 break;
1482 relocs[gap_start++] = end;
1483 gap_end++;
1484 }
1485 while (gap_start > 0)
1486 {
1487 struct jcf_relocation *before = relocs[gap_start-1];
1488 if (case_value >= before->offset)
1489 break;
1490 relocs[--gap_end] = before;
1491 gap_start--;
1492 }
1493 relocs[gap_start++] = reloc;
1494 /* Note we don't check for duplicates. FIXME! */
1495 }
1496
1497 if (2 * sw_state.num_cases
1498 >= sw_state.max_case - sw_state.min_case)
1499 { /* Use tableswitch. */
1500 int index = 0;
1501 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1502 OP1 (OPCODE_tableswitch);
1503 emit_reloc (0, SWITCH_ALIGN_RELOC, NULL, state);
1504 emit_switch_reloc (sw_state.default_label, state);
1505 OP4 (sw_state.min_case);
1506 OP4 (sw_state.max_case);
1507 for (i = sw_state.min_case; ; )
1508 {
1509 reloc = relocs[index];
1510 if (i == reloc->offset)
1511 {
1512 emit_case_reloc (reloc, state);
1513 if (i == sw_state.max_case)
1514 break;
1515 index++;
1516 }
1517 else
1518 emit_switch_reloc (sw_state.default_label, state);
1519 i++;
1520 }
1521 }
1522 else
1523 { /* Use lookupswitch. */
1524 RESERVE(9 + 8 * sw_state.num_cases);
1525 OP1 (OPCODE_lookupswitch);
1526 emit_reloc (0, SWITCH_ALIGN_RELOC, NULL, state);
1527 emit_switch_reloc (sw_state.default_label, state);
1528 OP4 (sw_state.num_cases);
1529 for (i = 0; i < sw_state.num_cases; i++)
1530 {
1531 struct jcf_relocation *reloc = relocs[i];
1532 OP4 (reloc->offset);
1533 emit_case_reloc (reloc, state);
1534 }
1535 }
1536 free (relocs);
1537 }
1538
1539 instruction_last = state->last_block;
1540 if (sw_state.default_label->pc < 0)
1541 define_jcf_label (sw_state.default_label, state);
1542 else /* Force a new block. */
1543 sw_state.default_label = get_jcf_label_here (state);
1544 /* Now re-arrange the blocks so the switch_instruction
1545 comes before the switch_body. */
1546 switch_length = state->code_length - switch_instruction->pc;
1547 switch_instruction->pc = body_block->pc;
1548 instruction_last->next = body_block;
1549 instruction_last->chunk->next = body_block->chunk;
1550 expression_last->next = switch_instruction;
1551 expression_last->chunk->next = switch_instruction->chunk;
1552 body_last->next = sw_state.default_label;
1553 body_last->chunk->next = NULL;
1554 state->chunk = body_last->chunk;
1555 for (; body_block != sw_state.default_label; body_block = body_block->next)
1556 body_block->pc += switch_length;
1557
1558 state->sw_state = sw_state.prev;
1559 break;
1560 }
1561
1562 case RETURN_EXPR:
1563 if (!TREE_OPERAND (exp, 0))
1564 op = OPCODE_return;
1565 else
1566 {
1567 exp = TREE_OPERAND (exp, 0);
1568 if (TREE_CODE (exp) != MODIFY_EXPR)
1569 abort ();
1570 exp = TREE_OPERAND (exp, 1);
1571 op = OPCODE_ireturn + adjust_typed_op (TREE_TYPE (exp), 4);
1572 generate_bytecode_insns (exp, STACK_TARGET, state);
1573 }
1574 RESERVE (1);
1575 OP1 (op);
1576 break;
1577 case LABELED_BLOCK_EXPR:
1578 {
1579 struct jcf_block *end_label = gen_jcf_label (state);
1580 end_label->next = state->labeled_blocks;
1581 state->labeled_blocks = end_label;
1582 end_label->u.labeled_block = exp;
1583 if (LABELED_BLOCK_BODY (exp))
1584 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1585 if (state->labeled_blocks != end_label)
1586 abort();
1587 state->labeled_blocks = end_label->next;
1588 define_jcf_label (end_label, state);
1589 }
1590 break;
1591 case LOOP_EXPR:
1592 {
1593 tree body = TREE_OPERAND (exp, 0);
1594 #if 0
1595 if (TREE_CODE (body) == COMPOUND_EXPR
1596 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1597 {
1598 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1599 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1600 struct jcf_block *head_label;
1601 struct jcf_block *body_label;
1602 struct jcf_block *end_label = gen_jcf_label (state);
1603 struct jcf_block *exit_label = state->labeled_blocks;
1604 head_label = gen_jcf_label (state);
1605 emit_goto (head_label, state);
1606 body_label = get_jcf_label_here (state);
1607 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1608 define_jcf_label (head_label, state);
1609 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1610 end_label, body_label, 1, state);
1611 define_jcf_label (end_label, state);
1612 }
1613 else
1614 #endif
1615 {
1616 struct jcf_block *head_label = get_jcf_label_here (state);
1617 generate_bytecode_insns (body, IGNORE_TARGET, state);
1618 emit_goto (head_label, state);
1619 }
1620 }
1621 break;
1622 case EXIT_EXPR:
1623 {
1624 struct jcf_block *label = state->labeled_blocks;
1625 struct jcf_block *end_label = gen_jcf_label (state);
1626 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1627 label, end_label, 0, state);
1628 define_jcf_label (end_label, state);
1629 }
1630 break;
1631 case EXIT_BLOCK_EXPR:
1632 {
1633 struct jcf_block *label = state->labeled_blocks;
1634 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1635 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1636 label = label->next;
1637 emit_goto (label, state);
1638 }
1639 break;
1640
1641 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1642 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1643 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1644 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1645 increment:
1646
1647 exp = TREE_OPERAND (exp, 0);
1648 type = TREE_TYPE (exp);
1649 size = TYPE_IS_WIDE (type) ? 2 : 1;
1650 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1651 && ! TREE_STATIC (exp)
1652 && TREE_CODE (type) == INTEGER_TYPE
1653 && TYPE_PRECISION (type) == 32)
1654 {
1655 if (target != IGNORE_TARGET && post_op)
1656 emit_load (exp, state);
1657 emit_iinc (exp, value, state);
1658 if (target != IGNORE_TARGET && ! post_op)
1659 emit_load (exp, state);
1660 break;
1661 }
1662 if (TREE_CODE (exp) == COMPONENT_REF)
1663 {
1664 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1665 emit_dup (1, 0, state);
1666 /* Stack: ..., objectref, objectref. */
1667 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1668 NOTE_PUSH (size);
1669 /* Stack: ..., objectref, oldvalue. */
1670 offset = 1;
1671 }
1672 else if (TREE_CODE (exp) == ARRAY_REF)
1673 {
1674 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1675 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1676 emit_dup (2, 0, state);
1677 /* Stack: ..., array, index, array, index. */
1678 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1679 RESERVE(1);
1680 OP1 (jopcode);
1681 NOTE_POP (2-size);
1682 /* Stack: ..., array, index, oldvalue. */
1683 offset = 2;
1684 }
1685 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1686 {
1687 generate_bytecode_insns (exp, STACK_TARGET, state);
1688 /* Stack: ..., oldvalue. */
1689 offset = 0;
1690 }
1691 else
1692 abort ();
1693
1694 if (target != IGNORE_TARGET && post_op)
1695 emit_dup (size, offset, state);
1696 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1697 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1698 /* Stack, otherwise: ..., [result, ] oldvalue. */
1699 push_int_const (value, state); /* FIXME - assumes int! */
1700 NOTE_PUSH (1);
1701 emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state);
1702 if (target != IGNORE_TARGET && ! post_op)
1703 emit_dup (size, offset, state);
1704 /* Stack: ..., [result,] newvalue. */
1705 goto finish_assignment;
1706
1707 case MODIFY_EXPR:
1708 {
1709 tree lhs = TREE_OPERAND (exp, 0);
1710 tree rhs = TREE_OPERAND (exp, 1);
1711
1712 /* See if we can use the iinc instruction. */
1713 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1714 && ! TREE_STATIC (lhs)
1715 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1716 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1717 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1718 {
1719 tree arg0 = TREE_OPERAND (rhs, 0);
1720 tree arg1 = TREE_OPERAND (rhs, 1);
1721 HOST_WIDE_INT min_value = -32768;
1722 HOST_WIDE_INT max_value = 32767;
1723 if (TREE_CODE (rhs) == MINUS_EXPR)
1724 {
1725 min_value++;
1726 max_value++;
1727 }
1728 else if (arg1 == lhs)
1729 {
1730 arg0 = arg1;
1731 arg1 = TREE_OPERAND (rhs, 0);
1732 }
1733 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1734 {
1735 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1736 value = TREE_INT_CST_LOW (arg1);
1737 if ((hi_value == 0 && value <= max_value)
1738 || (hi_value == -1 && value >= min_value))
1739 {
1740 if (TREE_CODE (rhs) == MINUS_EXPR)
1741 value = -value;
1742 emit_iinc (lhs, value, state);
1743 break;
1744 }
1745 }
1746 }
1747
1748 if (TREE_CODE (lhs) == COMPONENT_REF)
1749 generate_bytecode_insns (TREE_OPERAND (lhs, 0), STACK_TARGET, state);
1750 else if (TREE_CODE (lhs) == ARRAY_REF)
1751 {
1752 generate_bytecode_insns (TREE_OPERAND(lhs, 0), STACK_TARGET, state);
1753 generate_bytecode_insns (TREE_OPERAND(lhs, 1), STACK_TARGET, state);
1754 }
1755 generate_bytecode_insns (rhs, STACK_TARGET, state);
1756 if (target != IGNORE_TARGET)
1757 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , 1, state);
1758 exp = lhs;
1759 }
1760 /* FALLTHOUGH */
1761
1762 finish_assignment:
1763 if (TREE_CODE (exp) == COMPONENT_REF)
1764 {
1765 tree field = TREE_OPERAND (exp, 1);
1766 if (! FIELD_STATIC (field))
1767 NOTE_POP (1);
1768 field_op (field,
1769 FIELD_STATIC (field) ? OPCODE_putstatic
1770 : OPCODE_putfield,
1771 state);
1772
1773 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1774 }
1775 else if (TREE_CODE (exp) == VAR_DECL
1776 || TREE_CODE (exp) == PARM_DECL)
1777 {
1778 if (FIELD_STATIC (exp))
1779 {
1780 field_op (exp, OPCODE_putstatic, state);
1781 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1782 }
1783 else
1784 emit_store (exp, state);
1785 }
1786 else if (TREE_CODE (exp) == ARRAY_REF)
1787 {
1788 NOTE_POP (2);
1789 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
1790 RESERVE(1);
1791 OP1 (jopcode);
1792 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1793 }
1794 else
1795 fatal ("internal error (bad lhs to MODIFY_EXPR)");
1796 break;
1797 case PLUS_EXPR:
1798 jopcode = OPCODE_iadd;
1799 goto binop;
1800 case MINUS_EXPR:
1801 jopcode = OPCODE_isub;
1802 goto binop;
1803 case MULT_EXPR:
1804 jopcode = OPCODE_imul;
1805 goto binop;
1806 case TRUNC_DIV_EXPR:
1807 case RDIV_EXPR:
1808 jopcode = OPCODE_idiv;
1809 goto binop;
1810 case TRUNC_MOD_EXPR:
1811 jopcode = OPCODE_irem;
1812 goto binop;
1813 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
1814 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
1815 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
1816 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
1817 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
1818 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
1819 binop:
1820 {
1821 tree arg0 = TREE_OPERAND (exp, 0);
1822 tree arg1 = TREE_OPERAND (exp, 1);
1823 jopcode += adjust_typed_op (type, 3);
1824 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
1825 {
1826 /* fold may (e.g) convert 2*x to x+x. */
1827 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
1828 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
1829 }
1830 else
1831 {
1832 generate_bytecode_insns (arg0, target, state);
1833 generate_bytecode_insns (arg1, target, state);
1834 }
1835 if (target == STACK_TARGET)
1836 emit_binop (jopcode, type, state);
1837 break;
1838 }
1839 case TRUTH_NOT_EXPR:
1840 case BIT_NOT_EXPR:
1841 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1842 if (target == STACK_TARGET)
1843 {
1844 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
1845 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
1846 RESERVE (2);
1847 if (is_long)
1848 OP1 (OPCODE_i2l);
1849 OP1 (OPCODE_ixor + is_long);
1850 }
1851 break;
1852 case NEGATE_EXPR:
1853 jopcode = OPCODE_ineg;
1854 jopcode += adjust_typed_op (type, 3);
1855 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1856 if (target == STACK_TARGET)
1857 emit_unop (jopcode, type, state);
1858 break;
1859 case INSTANCEOF_EXPR:
1860 {
1861 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
1862 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1863 RESERVE (3);
1864 OP1 (OPCODE_instanceof);
1865 OP2 (index);
1866 }
1867 break;
1868 case CONVERT_EXPR:
1869 case NOP_EXPR:
1870 case FLOAT_EXPR:
1871 case FIX_TRUNC_EXPR:
1872 {
1873 tree src = TREE_OPERAND (exp, 0);
1874 tree src_type = TREE_TYPE (src);
1875 tree dst_type = TREE_TYPE (exp);
1876 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1877 if (target == IGNORE_TARGET || src_type == dst_type)
1878 break;
1879 if (TREE_CODE (dst_type) == POINTER_TYPE)
1880 {
1881 if (TREE_CODE (exp) == CONVERT_EXPR)
1882 {
1883 int index = find_class_constant (&state->cpool, TREE_TYPE (dst_type));
1884 RESERVE (3);
1885 OP1 (OPCODE_checkcast);
1886 OP2 (index);
1887 }
1888 }
1889 else /* Convert numeric types. */
1890 {
1891 int wide_src = TYPE_PRECISION (src_type) > 32;
1892 int wide_dst = TYPE_PRECISION (dst_type) > 32;
1893 NOTE_POP (1 + wide_src);
1894 RESERVE (1);
1895 if (TREE_CODE (dst_type) == REAL_TYPE)
1896 {
1897 if (TREE_CODE (src_type) == REAL_TYPE)
1898 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
1899 else if (TYPE_PRECISION (src_type) == 64)
1900 OP1 (OPCODE_l2f + wide_dst);
1901 else
1902 OP1 (OPCODE_i2f + wide_dst);
1903 }
1904 else /* Convert to integral type. */
1905 {
1906 if (TREE_CODE (src_type) == REAL_TYPE)
1907 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
1908 else if (wide_dst)
1909 OP1 (OPCODE_i2l);
1910 else if (wide_src)
1911 OP1 (OPCODE_l2i);
1912 if (TYPE_PRECISION (dst_type) < 32)
1913 {
1914 RESERVE (1);
1915 /* Already converted to int, if needed. */
1916 if (TYPE_PRECISION (dst_type) <= 8)
1917 OP1 (OPCODE_i2b);
1918 else if (TREE_UNSIGNED (dst_type))
1919 OP1 (OPCODE_i2c);
1920 else
1921 OP1 (OPCODE_i2s);
1922 }
1923 }
1924 NOTE_PUSH (1 + wide_dst);
1925 }
1926 }
1927 break;
1928 case TRY_EXPR:
1929 {
1930 tree try_clause = TREE_OPERAND (exp, 0);
1931 tree finally = TREE_OPERAND (exp, 2);
1932 struct jcf_block *start_label = get_jcf_label_here (state);
1933 struct jcf_block *end_label; /* End of try clause. */
1934 struct jcf_block *finally_label; /* Finally subroutine. */
1935 struct jcf_block *finished_label = gen_jcf_label (state);
1936 tree clause = TREE_OPERAND (exp, 1);
1937 if (finally)
1938 {
1939 finally = FINALLY_EXPR_BLOCK (finally);
1940 finally_label = gen_jcf_label (state);
1941 }
1942 if (target != IGNORE_TARGET)
1943 abort ();
1944 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
1945 end_label = get_jcf_label_here (state);
1946 if (CAN_COMPLETE_NORMALLY (try_clause))
1947 emit_goto (finished_label, state);
1948 for ( ; clause != NULL_TREE; clause = TREE_CHAIN (clause))
1949 {
1950 tree catch_clause = TREE_OPERAND (clause, 0);
1951 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
1952 struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
1953 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
1954 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
1955 if (CAN_COMPLETE_NORMALLY (catch_clause))
1956 emit_goto (finished_label, state);
1957 }
1958 if (finally)
1959 {
1960 tree return_link;
1961 tree exception_type = build_pointer_type (throwable_type_node);
1962 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
1963 exception_type);
1964 struct jcf_handler *handler
1965 = alloc_handler (start_label, NULL_TREE, state);
1966 handler->end_label = handler->handler_label;
1967 handler->type = NULL_TREE;
1968 localvar_alloc (exception_decl, state);
1969 NOTE_PUSH (1);
1970 emit_store (exception_decl, state);
1971 emit_jsr (finally_label, state);
1972 emit_load (exception_decl, state);
1973 RESERVE (1);
1974 OP1 (OPCODE_athrow);
1975 NOTE_POP (1);
1976 localvar_free (exception_decl, state);
1977
1978 /* The finally block. */
1979 return_link = build_decl (VAR_DECL, NULL_TREE,
1980 return_address_type_node);
1981 define_jcf_label (finally_label, state);
1982 NOTE_PUSH (1);
1983 localvar_alloc (return_link, state);
1984 emit_store (return_link, state);
1985 generate_bytecode_insns (finally, IGNORE_TARGET, state);
1986 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
1987 localvar_free (return_link, state);
1988 }
1989 define_jcf_label (finished_label, state);
1990 if (finally)
1991 emit_jsr (finally_label, state);
1992 }
1993 break;
1994 case THROW_EXPR:
1995 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1996 RESERVE (1);
1997 OP1 (OPCODE_athrow);
1998 break;
1999 case NEW_CLASS_EXPR:
2000 {
2001 tree class = TREE_TYPE (TREE_TYPE (exp));
2002 int index = find_class_constant (&state->cpool, class);
2003 RESERVE (4);
2004 OP1 (OPCODE_new);
2005 OP2 (index);
2006 OP1 (OPCODE_dup);
2007 NOTE_PUSH (1);
2008 }
2009 /* ... fall though ... */
2010 case CALL_EXPR:
2011 {
2012 tree f = TREE_OPERAND (exp, 0);
2013 tree x = TREE_OPERAND (exp, 1);
2014 int save_SP = state->code_SP;
2015 if (TREE_CODE (f) == ADDR_EXPR)
2016 f = TREE_OPERAND (f, 0);
2017 if (f == soft_newarray_node)
2018 {
2019 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2020 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2021 STACK_TARGET, state);
2022 RESERVE (2);
2023 OP1 (OPCODE_newarray);
2024 OP1 (type_code);
2025 break;
2026 }
2027 else if (f == soft_multianewarray_node)
2028 {
2029 int ndims;
2030 int idim;
2031 int index = find_class_constant (&state->cpool,
2032 TREE_TYPE (TREE_TYPE (exp)));
2033 x = TREE_CHAIN (x); /* Skip class argument. */
2034 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2035 for (idim = ndims; --idim >= 0; )
2036 {
2037 x = TREE_CHAIN (x);
2038 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2039 }
2040 RESERVE (4);
2041 OP1 (OPCODE_multianewarray);
2042 OP2 (index);
2043 OP1 (ndims);
2044 break;
2045 }
2046 else if (f == soft_anewarray_node)
2047 {
2048 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2049 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2050 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2051 RESERVE (3);
2052 OP1 (OPCODE_anewarray);
2053 OP2 (index);
2054 break;
2055 }
2056 else if (exp == soft_exceptioninfo_call_node)
2057 {
2058 NOTE_PUSH (1); /* Pushed by exception system. */
2059 break;
2060 }
2061 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2062 {
2063 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2064 }
2065 state->code_SP = save_SP;
2066 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2067 {
2068 int index = find_methodref_index (&state->cpool, f);
2069 RESERVE (3);
2070 if (DECL_CONSTRUCTOR_P (f))
2071 OP1 (OPCODE_invokespecial);
2072 else if (METHOD_STATIC (f))
2073 OP1 (OPCODE_invokestatic);
2074 else
2075 OP1 (OPCODE_invokevirtual);
2076 OP2 (index);
2077 f = TREE_TYPE (TREE_TYPE (f));
2078 if (TREE_CODE (f) != VOID_TYPE)
2079 {
2080 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2081 if (target == IGNORE_TARGET)
2082 emit_pop (size, state);
2083 else
2084 NOTE_PUSH (size);
2085 }
2086 break;
2087 }
2088 }
2089 /* fall through */
2090 notimpl:
2091 default:
2092 error("internal error - tree code not implemented: %s",
2093 tree_code_name [(int) TREE_CODE (exp)]);
2094 }
2095 }
2096
2097 void
2098 perform_relocations (state)
2099 struct jcf_partial *state;
2100 {
2101 struct jcf_block *block;
2102 struct jcf_relocation *reloc;
2103 int pc;
2104 int shrink;
2105
2106 /* Before we start, the pc field of each block is an upper bound on
2107 the block's start pc (it may be less, if previous blocks need less
2108 than their maximum).
2109
2110 The minimum size of each block is in the block's chunk->size. */
2111
2112 /* First, figure out the actual locations of each block. */
2113 pc = 0;
2114 shrink = 0;
2115 for (block = state->blocks; block != NULL; block = block->next)
2116 {
2117 int block_size = block->chunk->size;
2118
2119 block->pc = pc;
2120
2121 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2122 Assumes relocations are in reverse order. */
2123 reloc = block->u.relocations;
2124 while (reloc != NULL
2125 && reloc->kind == OPCODE_goto_w
2126 && reloc->label->pc == block->next->pc
2127 && reloc->offset + 2 == block_size)
2128 {
2129 reloc = reloc->next;
2130 block->u.relocations = reloc;
2131 block->chunk->size -= 3;
2132 block_size -= 3;
2133 shrink += 3;
2134 }
2135
2136 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2137 {
2138 if (reloc->kind == SWITCH_ALIGN_RELOC)
2139 {
2140 /* We assume this is the first relocation in this block,
2141 so we know its final pc. */
2142 int where = pc + reloc->offset;
2143 int pad = ((where + 3) & ~3) - where;
2144 block_size += pad;
2145 }
2146 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2147 {
2148 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2149 int expand = reloc->kind > 0 ? 2 : 5;
2150
2151 if (delta > 0)
2152 delta -= shrink;
2153 if (delta >= -32768 && delta <= 32767)
2154 {
2155 shrink += expand;
2156 reloc->kind = -1;
2157 }
2158 else
2159 block_size += expand;
2160 }
2161 }
2162 pc += block_size;
2163 }
2164
2165 for (block = state->blocks; block != NULL; block = block->next)
2166 {
2167 struct chunk *chunk = block->chunk;
2168 int old_size = chunk->size;
2169 int next_pc = block->next == NULL ? pc : block->next->pc;
2170 int new_size = next_pc - block->pc;
2171 int offset = 0;
2172 unsigned char *new_ptr;
2173 unsigned char *old_buffer = chunk->data;
2174 unsigned char *old_ptr = old_buffer + old_size;
2175 int new_end = new_size;
2176 if (new_size != old_size)
2177 {
2178 chunk->data = (unsigned char *)
2179 obstack_alloc (state->chunk_obstack, new_size);
2180 chunk->size = new_size;
2181 }
2182 new_ptr = chunk->data + new_size;
2183
2184 /* We do the relocations from back to front, because
2185 the relocations are in reverse order. */
2186 for (reloc = block->u.relocations; ; reloc = reloc->next)
2187 {
2188 /* new_ptr and old_ptr point into the old and new buffers,
2189 respectively. (If no relocations cause the buffer to
2190 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2191 The bytes at higher adress have been copied and relocations
2192 handled; those at lower addresses remain to process. */
2193
2194 /* Lower old index of piece to be copied with no relocation.
2195 I.e. high index of the first piece that does need relocation. */
2196 int start = reloc == NULL ? 0
2197 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2198 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2199 ? reloc->offset + 4
2200 : reloc->offset + 2;
2201 int32 value;
2202 int new_offset;
2203 int n = (old_ptr - old_buffer) - start;
2204 new_ptr -= n;
2205 old_ptr -= n;
2206 if (n > 0)
2207 memcpy (new_ptr, old_ptr, n);
2208 if (old_ptr == old_buffer)
2209 break;
2210
2211 new_offset = new_ptr - chunk->data;
2212 new_offset -= (reloc->kind == -1 ? 2 : 4);
2213 if (reloc->kind == 0)
2214 {
2215 old_ptr -= 4;
2216 value = GET_u4 (old_ptr);
2217 }
2218 else if (reloc->kind == BLOCK_START_RELOC)
2219 {
2220 old_ptr -= 4;
2221 value = 0;
2222 new_offset = 0;
2223 }
2224 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2225 {
2226 int where = block->pc + reloc->offset;
2227 int pad = ((where + 3) & ~3) - where;
2228 while (--pad >= 0)
2229 *--new_ptr = 0;
2230 continue;
2231 }
2232 else
2233 {
2234 old_ptr -= 2;
2235 value = GET_u2 (old_ptr);
2236 }
2237 value += reloc->label->pc - (block->pc + new_offset);
2238 *--new_ptr = (unsigned char) value; value >>= 8;
2239 *--new_ptr = (unsigned char) value; value >>= 8;
2240 if (reloc->kind != -1)
2241 {
2242 *--new_ptr = (unsigned char) value; value >>= 8;
2243 *--new_ptr = (unsigned char) value;
2244 }
2245 if (reloc->kind > BLOCK_START_RELOC)
2246 {
2247 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2248 --old_ptr;
2249 *--new_ptr = reloc->kind;
2250 }
2251 else if (reloc->kind < -1)
2252 {
2253 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2254 --old_ptr;
2255 *--new_ptr = OPCODE_goto_w;
2256 *--new_ptr = 3;
2257 *--new_ptr = 0;
2258 *--new_ptr = - reloc->kind;
2259 }
2260 }
2261 if (new_ptr != chunk->data)
2262 fatal ("internal error - perform_relocations");
2263 }
2264 state->code_length = pc;
2265 }
2266
2267 void
2268 init_jcf_state (state, work)
2269 struct jcf_partial *state;
2270 struct obstack *work;
2271 {
2272 state->chunk_obstack = work;
2273 state->first = state->chunk = NULL;
2274 CPOOL_INIT (&state->cpool);
2275 BUFFER_INIT (&state->localvars);
2276 BUFFER_INIT (&state->bytecode);
2277 }
2278
2279 void
2280 init_jcf_method (state, method)
2281 struct jcf_partial *state;
2282 tree method;
2283 {
2284 state->current_method = method;
2285 state->blocks = state->last_block = NULL;
2286 state->linenumber_count = 0;
2287 state->first_lvar = state->last_lvar = NULL;
2288 state->lvar_count = 0;
2289 state->labeled_blocks = NULL;
2290 state->code_length = 0;
2291 BUFFER_RESET (&state->bytecode);
2292 BUFFER_RESET (&state->localvars);
2293 state->code_SP = 0;
2294 state->code_SP_max = 0;
2295 state->handlers = NULL;
2296 state->last_handler = NULL;
2297 state->num_handlers = 0;
2298 }
2299
/* Release the memory held by STATE: finish (free) the constant pool,
   then free state->first on the chunk obstack, which also releases
   everything allocated after it.  STATE must have been initialized
   with init_jcf_state.  */

void
release_jcf_state (state)
     struct jcf_partial *state;
{
  CPOOL_FINISH (&state->cpool);
  obstack_free (state->chunk_obstack, state->first);
}
2307
/* Generate and return a list of chunks containing the class CLAS
   in the .class file representation.  The list can be written to a
   .class file using write_chunks.  Chunks are allocated from
   STATE's chunk obstack.

   Several counts (fields_count, methods_count, attribute_length) are
   written as placeholders and patched afterwards through saved
   pointers (fields_count_ptr, methods_count_ptr, attr_len_ptr), since
   PUT2/PUT4 write through the local `ptr'.  */

struct chunk *
generate_classfile (clas, state)
     tree clas;
     struct jcf_partial *state;
{
  struct chunk *cpool_chunk;
  char *source_file;
  char *ptr;
  int i;
  char *fields_count_ptr;
  int fields_count = 0;
  char *methods_count_ptr;
  int methods_count = 0;
  static tree SourceFile_node = NULL_TREE;
  tree part;
  int total_supers
    = clas == object_type_node ? 0
    : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));

  /* Header: magic number and class file version 45.3 (JDK 1.0).  */
  ptr = append_chunk (NULL, 8, state);
  PUT4 (0xCafeBabe);  /* Magic number */
  PUT2 (3);  /* Minor version */
  PUT2 (45);  /* Major version */

  /* Reserve an empty chunk for the constant pool; its contents are
     only known (and filled in) at the very end.  */
  append_chunk (NULL, 0, state);
  cpool_chunk = state->chunk;

  /* Next allocate the chunk containing access_flags through fields_count. */
  if (clas == object_type_node)
    i = 10;
  else
    i = 8 + 2 * total_supers;
  ptr = append_chunk (NULL, i, state);
  i = get_access_flags (TYPE_NAME (clas));  PUT2 (i); /* access_flags */
  i = find_class_constant (&state->cpool, clas);  PUT2 (i);  /* this_class */
  if (clas == object_type_node)
    {
      /* java.lang.Object has no superclass and no interfaces.  */
      PUT2(0);  /* super_class */
      PUT2(0);  /* interfaces_count */
    }
  else
    {
      /* Basetype 0 is the superclass; the remaining basetypes are the
	 implemented interfaces.  */
      tree basetypes = TYPE_BINFO_BASETYPES (clas);
      tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
      int j = find_class_constant (&state->cpool, base);
      PUT2 (j);  /* super_class */
      PUT2 (total_supers - 1);  /* interfaces_count */
      for (i = 1;  i < total_supers;  i++)
	{
	  base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
	  j = find_class_constant (&state->cpool, base);
	  PUT2 (j);
	}
    }
  /* The last 2 bytes of the chunk hold fields_count; remember where,
     and patch it once the loop below has counted the fields.  */
  fields_count_ptr = ptr;

  for (part = TYPE_FIELDS (clas);  part;  part = TREE_CHAIN (part))
    {
      /* Skip unnamed (compiler-internal) fields.  */
      if (DECL_NAME (part) == NULL_TREE)
	continue;
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, DECL_NAME (part));  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (TREE_TYPE (part)));
      PUT2(i);
      PUT2 (0);  /* attributes_count */
      /* FIXME - emit ConstantValue attribute when appropriate */
      fields_count++;
    }
  ptr = fields_count_ptr;  PUT2 (fields_count);

  /* Placeholder methods_count, patched after the loop below.  */
  ptr = methods_count_ptr = append_chunk (NULL, 2, state);
  PUT2 (0);

  for (part = TYPE_METHODS (clas);  part;  part = TREE_CHAIN (part))
    {
      struct jcf_block *block;
      tree function_body = DECL_FUNCTION_BODY (part);
      tree body = function_body == NULL_TREE ? NULL_TREE
	: BLOCK_EXPR_BODY (function_body);
      /* Constructors are written under the special name "<init>".  */
      tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
	: DECL_NAME (part);
      tree type = TREE_TYPE (part);
      ptr = append_chunk (NULL, 8, state);
      i = get_access_flags (part);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
      i = find_utf8_constant (&state->cpool, build_java_signature (type));
      PUT2 (i);
      PUT2 (body != NULL_TREE ? 1 : 0);   /* attributes_count */
      if (body != NULL_TREE)
	{
	  /* Emit a Code attribute containing this method's bytecode.  */
	  int code_attributes_count = 0;
	  static tree Code_node = NULL_TREE;
	  tree t;
	  char *attr_len_ptr;
	  struct jcf_handler *handler;
	  if (Code_node == NULL_TREE)
	    Code_node = get_identifier ("Code");
	  ptr = append_chunk (NULL, 14, state);
	  i = find_utf8_constant (&state->cpool, Code_node);  PUT2 (i);
	  /* attribute_length through code_length are patched below,
	     once the bytecode has been generated and relocated.  */
	  attr_len_ptr = ptr;
	  init_jcf_method (state, part);
	  get_jcf_label_here (state);  /* Force a first block. */
	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
	    localvar_alloc (t, state);
	  generate_bytecode_insns (body, IGNORE_TARGET, state);
	  if (CAN_COMPLETE_NORMALLY (body))
	    {
	      /* Falling off the end is only legal for a void method;
		 append the implicit `return'.  */
	      if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
		abort();
	      RESERVE (1);
	      OP1 (OPCODE_return);
	    }
	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
	    localvar_free (t, state);
	  finish_jcf_block (state);
	  perform_relocations (state);

	  ptr = attr_len_ptr;
	  /* attribute_length = max_stack (2) + max_locals (2)
	     + code_length (4) + the code itself
	     + exception_table_length (2) + 8 per handler
	     + attributes_count (2).  */
	  i = 8 + state->code_length + 4 + 8 * state->num_handlers;
	  if (state->linenumber_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 4 * state->linenumber_count;
	    }
	  if (state->lvar_count > 0)
	    {
	      code_attributes_count++;
	      i += 8 + 10 * state->lvar_count;
	    }
	  PUT4 (i); /* attribute_length */
	  PUT2 (state->code_SP_max);  /* max_stack */
	  PUT2 (localvar_max);  /* max_locals */
	  PUT4 (state->code_length);

	  /* Emit the exception table. */
	  ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
	  PUT2 (state->num_handlers);  /* exception_table_length */
	  handler = state->handlers;
	  for (; handler != NULL;  handler = handler->next)
	    {
	      int type_index;
	      PUT2 (handler->start_label->pc);
	      PUT2 (handler->end_label->pc);
	      PUT2 (handler->handler_label->pc);
	      /* A catch_type of 0 means "catch any exception".  */
	      if (handler->type == NULL_TREE)
		type_index = 0;
	      else
		type_index = find_class_constant (&state->cpool,
						  handler->type);
	      PUT2 (type_index);
	    }

	  ptr = append_chunk (NULL, 2, state);
	  PUT2 (code_attributes_count);

	  /* Write the LineNumberTable attribute. */
	  if (state->linenumber_count > 0)
	    {
	      static tree LineNumberTable_node = NULL_TREE;
	      ptr = append_chunk (NULL, 8 + 4 * state->linenumber_count, state);
	      if (LineNumberTable_node == NULL_TREE)
		LineNumberTable_node = get_identifier ("LineNumberTable");
	      i = find_utf8_constant (&state->cpool, LineNumberTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2+4*state->linenumber_count;  PUT4(i); /* attribute_length */
	      i = state->linenumber_count;  PUT2 (i);
	      /* One entry per basic block that carries a line number.  */
	      for (block = state->blocks;  block != NULL;  block = block->next)
		{
		  int line = block->linenumber;
		  if (line > 0)
		    {
		      PUT2 (block->pc);
		      PUT2 (line);
		    }
		}
	    }

	  /* Write the LocalVariableTable attribute. */
	  if (state->lvar_count > 0)
	    {
	      static tree LocalVariableTable_node = NULL_TREE;
	      struct localvar_info *lvar = state->first_lvar;
	      ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
	      if (LocalVariableTable_node == NULL_TREE)
		LocalVariableTable_node = get_identifier("LocalVariableTable");
	      i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
	      PUT2 (i);  /* attribute_name_index */
	      i = 2 + 10 * state->lvar_count;  PUT4 (i); /* attribute_length */
	      i = state->lvar_count;  PUT2 (i);
	      for ( ; lvar != NULL;  lvar = lvar->next)
		{
		  tree name = DECL_NAME (lvar->decl);
		  tree sig = build_java_signature (TREE_TYPE (lvar->decl));
		  i = lvar->start_label->pc;  PUT2 (i);  /* start_pc */
		  i = lvar->end_label->pc - i;  PUT2 (i);  /* length */
		  i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
		  i = find_utf8_constant (&state->cpool, sig);  PUT2 (i);
		  i = DECL_LOCAL_INDEX (lvar->decl);  PUT2 (i);
		}
	    }
	}
      methods_count++;
    }
  ptr = methods_count_ptr;  PUT2 (methods_count);

  /* Strip any directory prefix (either separator style) from the
     source file name.  */
  source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
  for (ptr = source_file;  ;  ptr++)
    {
      char ch = *ptr;
      if (ch == '\0')
	break;
      if (ch == '/' || ch == '\\')
	source_file = ptr+1;
    }
  ptr = append_chunk (NULL, 10, state);
  PUT2 (1);  /* attributes_count */

  /* Generate the SourceFile attribute. */
  if (SourceFile_node == NULL_TREE)
    SourceFile_node = get_identifier ("SourceFile");
  i = find_utf8_constant (&state->cpool, SourceFile_node);
  PUT2 (i);  /* attribute_name_index */
  PUT4 (2);  /* attribute_length */
  i = find_utf8_constant (&state->cpool, get_identifier (source_file));
  PUT2 (i);

  /* Now finally generate the contents of the constant pool chunk. */
  i = count_constant_pool_bytes (&state->cpool);
  ptr = obstack_alloc (state->chunk_obstack, i);
  cpool_chunk->data = ptr;
  cpool_chunk->size = i;
  write_constant_pool (&state->cpool, ptr, i);
  return state->first;
}
2547
/* Construct the path of the `.class' file for CLAS in a freshly
   xmalloc'd string, creating missing intermediate directories as
   needed.  When jcf_write_base_directory is set, the class's
   package-qualified name is mapped to a path under that directory;
   otherwise only the last path component is kept and the file is
   placed next to the `.java' source file.  Returns NULL if a
   directory could not be created (but see the NOTE below).  */

static char *
make_class_file_name (clas)
     tree clas;
{
  char *cname, *dname, *slash, *r;
  struct stat sb;

  /* Translate the qualified class name, with `.' replaced by
     DIR_SEPARATOR and ".class" appended.  */
  cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
						"", '.', DIR_SEPARATOR,
						".class"));
  if (jcf_write_base_directory == NULL)
    {
      /* Make sure we put the class file into the .java file's
	 directory, and not into some subdirectory thereof. */
      char *t;
      dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
      slash = strrchr (dname, DIR_SEPARATOR);
      if (! slash)
	{
	  /* Source file name has no directory part; use ".".  */
	  dname = ".";
	  slash = dname + 1;
	}
      /* Keep only the basename of CNAME.  */
      t = strrchr (cname, DIR_SEPARATOR);
      if (t)
	cname = t + 1;
    }
  else
    {
      dname = jcf_write_base_directory;
      slash = dname + strlen (dname);
    }

  /* Room for directory + separator + cname + NUL.  */
  r = xmalloc (slash - dname + strlen (cname) + 2);
  strncpy (r, dname, slash - dname);
  r[slash - dname] = DIR_SEPARATOR;
  strcpy (&r[slash - dname + 1], cname);

  /* We try to make new directories when we need them.  We only do
     this for directories which "might not" exist.  For instance, we
     assume the `-d' directory exists, but we don't assume that any
     subdirectory below it exists.  It might be worthwhile to keep
     track of which directories we've created to avoid gratuitous
     stat()s. */
  dname = r + (slash - dname) + 1;
  while (1)
    {
      /* Temporarily truncate R at each internal separator, so R names
	 the directory prefix to be checked/created.  */
      cname = strchr (dname, DIR_SEPARATOR);
      if (cname == NULL)
	break;
      *cname = '\0';
      if (stat (r, &sb) == -1)
	{
	  /* Try to make it. */
	  if (mkdir (r, 0755) == -1)
	    {
	      /* NOTE(review): fatal presumably does not return, which
		 would make the cleanup below unreachable -- confirm.  */
	      fatal ("failed to create directory `%s'", r);
	      free (r);
	      return NULL;
	    }
	}
      *cname = DIR_SEPARATOR;
      /* Skip consecutive separators. */
      for (dname = cname + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
	;
    }

  return r;
}
2616
2617 /* Write out the contens of a class (RECORD_TYPE) CLAS, as a .class file.
2618 The output .class file name is make_class_file_name(CLAS). */
2619
2620 void
2621 write_classfile (clas)
2622 tree clas;
2623 {
2624 struct obstack *work = &temporary_obstack;
2625 struct jcf_partial state[1];
2626 char *class_file_name = make_class_file_name (clas);
2627 struct chunk *chunks;
2628
2629 if (class_file_name != NULL)
2630 {
2631 FILE* stream = fopen (class_file_name, "wb");
2632 if (stream == NULL)
2633 fatal ("failed to open `%s' for writing", class_file_name);
2634 jcf_dependency_add_target (class_file_name);
2635 init_jcf_state (state, work);
2636 chunks = generate_classfile (clas, state);
2637 write_chunks (stream, chunks);
2638 if (fclose (stream))
2639 fatal ("failed to close after writing `%s'", class_file_name);
2640 free (class_file_name);
2641 }
2642 release_jcf_state (state);
2643 }
2644
2645 /* TODO:
2646 string concatenation
2647 synchronized statement
2648 */
This page took 0.155835 seconds and 6 git commands to generate.