1 /* Handle verification of bytecoded methods for the GNU compiler for
2 the Java(TM) language.
3 Copyright (C) 1997 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA.
21
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "tree.h"
29 #include "java-tree.h"
30 #include "javaop.h"
31 #include "java-opcodes.h"
32 #include "jcf.h"
33 #include "java-except.h"
34 #include "toplev.h"
35
36 extern int stack_pointer;
37
38 /* During verification, start of the current subroutine (jsr target). */
39 tree current_subr;
40
41 /* A list of pending blocks, chained using LABEL_PENDING_CHAIN.
42 A pending block is one that has LABEL_CHANGED set, which means
43 it requires (re-) verification. */
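/* Blocks are popped off this stack by the main loop of
   verify_jvm_instructions whenever the current PC is invalidated. */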
44 tree pending_blocks;
45
46 /* Append TARGET_LABEL to the pending_blocks stack unless already in it. */
47
48 void
49 push_pending_label (target_label)
50 tree target_label;
51 {
52 if (! LABEL_CHANGED (target_label))
53 {
54 LABEL_PENDING_CHAIN (target_label) = pending_blocks;
55 pending_blocks = target_label;
56 LABEL_CHANGED (target_label) = 1;
57 }
58 }
59
60 /* Note that TARGET_LABEL is a possible successor instruction.
61 Merge the type state etc.
62 Return NULL on success, or an error message on failure. */
63
64 static char *
65 check_pending_block (target_label)
66 tree target_label;
67 {
68 int changed = merge_type_state (target_label);
69
70 if (changed)
71 {
72 if (changed < 0)
73 return "types could not be merged";
74 push_pending_label (target_label);
75 }
76
77 if (current_subr == NULL)
78 {
79 if (LABEL_IN_SUBR (target_label))
80 return "might transfer control into subroutine";
81 }
82 else
83 {
84 if (LABEL_IN_SUBR (target_label))
85 {
86 if (LABEL_SUBR_START (target_label) != current_subr)
87 return "transfer out of subroutine";
88 }
89 else if (! LABEL_VERIFIED (target_label))
90 {
91 LABEL_IN_SUBR (target_label) = 1;
92 LABEL_SUBR_START (target_label) = current_subr;
93 }
94 else
95 return "transfer out of subroutine";
96 }
97 return NULL;
98 }
99
100 /* Return the "merged" types of TYPE1 and TYPE2.
101 If either is primitive, the other must match (after promotion to int).
102 For reference types, return the common super-class.
103 Return TYPE_UNKNOWN if the types cannot be merged. */
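/* For example, merging java.lang.String with java.lang.StringBuffer
   yields java.lang.Object (their common super-class), and merging
   int with short yields int. */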
104
105 tree
106 merge_types (type1, type2)
107 tree type1, type2;
108 {
109 if (type1 == type2)
110 return type1;
111 if (type1 == TYPE_UNKNOWN || type2 == TYPE_UNKNOWN
112 || type1 == TYPE_RETURN_ADDR || type2 == TYPE_RETURN_ADDR)
113 return TYPE_UNKNOWN;
114 if (TREE_CODE (type1) == POINTER_TYPE && TREE_CODE (type2) == POINTER_TYPE)
115 {
116 int depth1, depth2;
117 tree tt1, tt2;
118 /* ptr_type_node is only used for a null reference,
119 which is compatible with any reference type. */
120 if (type1 == ptr_type_node || type2 == object_ptr_type_node)
121 return type2;
122 if (type2 == ptr_type_node || type1 == object_ptr_type_node)
123 return type1;
124
125 tt1 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type1));
126 tt2 = HANDLE_TO_CLASS_TYPE (TREE_TYPE (type2));
127
128 if (TYPE_ARRAY_P (tt1) || TYPE_ARRAY_P (tt2))
129 {
130 if (TYPE_ARRAY_P (tt1) == TYPE_ARRAY_P (tt2))
131 {
132 tree el_type1 = TYPE_ARRAY_ELEMENT (tt1);
133 tree el_type2 = TYPE_ARRAY_ELEMENT (tt2);
134 tree el_type = NULL_TREE;
135 if (el_type1 == el_type2)
136 el_type = el_type1;
137 else if (TREE_CODE (el_type1) == POINTER_TYPE
138 && TREE_CODE (el_type2) == POINTER_TYPE)
139 el_type = merge_types (el_type1, el_type2);
140 if (el_type != NULL_TREE)
141 {
142 HOST_WIDE_INT len1 = java_array_type_length (tt1);
143 HOST_WIDE_INT len2 = java_array_type_length (tt2);
144 if (len1 != len2)
145 len1 = -1;
146 else if (el_type1 == el_type2)
147 return type1;
148 return promote_type (build_java_array_type (el_type, len1));
149 }
150 }
151 return object_ptr_type_node;
152 }
153
154 if (CLASS_INTERFACE (TYPE_NAME (tt1)))
155 {
156 if (CLASS_INTERFACE (TYPE_NAME (tt2)))
157 {
158 /* This is a kludge, but matches what Sun's verifier does.
159 It can be tricked, but is safe as long as type errors
160 (i.e. interface method calls) are caught at run-time. */
161 return object_ptr_type_node;
162 }
163 else
164 {
165 if (can_widen_reference_to (tt2, tt1))
166 return type1;
167 else
168 return TYPE_UNKNOWN;
169 }
170 }
171 else if (CLASS_INTERFACE (TYPE_NAME (tt2)))
172 {
173 if (can_widen_reference_to (tt1, tt2))
174 return type2;
175 else
176 return TYPE_UNKNOWN;
177 }
178
179 type1 = tt1;
180 type2 = tt2;
181
182 depth1 = class_depth (type1);
183 depth2 = class_depth (type2);
184 for ( ; depth1 > depth2; depth1--)
185 type1 = TYPE_BINFO_BASETYPE (type1, 0);
186 for ( ; depth2 > depth1; depth2--)
187 type2 = TYPE_BINFO_BASETYPE (type2, 0);
188 while (type1 != type2)
189 {
190 type1 = TYPE_BINFO_BASETYPE (type1, 0);
191 type2 = TYPE_BINFO_BASETYPE (type2, 0);
192 }
193 return promote_type (type1);
194 }
195 if (INTEGRAL_TYPE_P (type1) && INTEGRAL_TYPE_P (type2)
196 && TYPE_PRECISION (type1) <= 32 && TYPE_PRECISION (type2) <= 32)
197 return int_type_node;
198 return TYPE_UNKNOWN;
199 }
200
201 /* Merge the current type state with that at LABEL.
202 Return -1 if the states are incompatible (i.e. on error),
203 0 if there was no change, and 1 if there was a change. */
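/* The type state at a label is a TREE_VEC holding the types of the
   local variables followed by the types on the operand stack, for a
   total of DECL_MAX_LOCALS + stack_pointer entries. */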
204
205 int
206 merge_type_state (label)
207 tree label;
208 {
209 int nlocals = DECL_MAX_LOCALS(current_function_decl);
210 int cur_length = stack_pointer + nlocals;
211 tree vec = LABEL_TYPE_STATE (label);
212 tree return_map;
213 if (vec == NULL_TREE)
214 {
215 vec = make_tree_vec (cur_length);
216 LABEL_TYPE_STATE (label) = vec;
217 while (--cur_length >= 0)
218 TREE_VEC_ELT (vec, cur_length) = type_map [cur_length];
219 return 1;
220 }
221 else
222 {
223 int i;
224 int changed = 0;
225 if (LABEL_IS_SUBR_START (label) && LABEL_VERIFIED (label)
226 && current_subr != label)
227 return_map = LABEL_RETURN_TYPE_STATE (label);
228 else
229 return_map = NULL_TREE;
230 if (TREE_VEC_LENGTH (vec) != cur_length)
231 {
232 return -1;
233 }
234 for (i = 0; i < cur_length; i++)
235 {
236 tree old_type = TREE_VEC_ELT (vec, i);
237 tree new_type = merge_types (old_type, type_map [i]);
238 if (TREE_VEC_ELT (vec, i) != new_type)
239 {
240 /* If there has been a change, note that since we must re-verify.
241 However, if the label is the start of a subroutine,
242 we don't care about local variables that are neither
243 set nor used in the sub-routine. */
244 if (return_map == NULL_TREE || i >= nlocals
245 || TREE_VEC_ELT (return_map, i) != TYPE_UNUSED
246 || (TYPE_IS_WIDE (new_type)
247 && TREE_VEC_ELT (return_map, i+1) != TYPE_UNUSED))
248 changed = 1;
249 }
250 TREE_VEC_ELT (vec, i) = new_type;
251 if (new_type == TYPE_UNKNOWN)
252 {
253 if (i >= nlocals)
254 return -1;
255 }
256 else if (TYPE_IS_WIDE (new_type))
257 i++;
258 }
259 return changed;
260 }
261 }
262
263 /* Handle dup-like operations. */
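/* SIZE is the number of top-of-stack slots to duplicate (1 or 2) and
   OFFSET is how many additional slots below them the copy is inserted;
   for instance dup is (1, 0), dup_x1 is (1, 1) and dup2_x2 is (2, 2),
   matching the calls in verify_jvm_instructions below. */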
264
265 static void
266 type_stack_dup (size, offset)
267 int size, offset;
268 {
269 tree type[4];
270 int index;
271 if (size + offset > stack_pointer)
272 error ("stack underflow - dup* operation");
273 for (index = 0; index < size + offset; index++)
274 {
275 type[index] = stack_type_map[stack_pointer - 1];
276 if (type[index] == void_type_node)
277 {
278 index++;
279 type[index] = stack_type_map[stack_pointer - 2];
280 if (! TYPE_IS_WIDE (type[index]))
281 fatal ("internal error - dup operation");
282 if (index == size || index == size + offset)
283 fatal ("dup operation splits 64-bit number");
284 }
285 pop_type (type[index]);
286 }
287 for (index = size; --index >= 0; )
288 {
289 if (type[index] != void_type_node)
290 push_type (type[index]);
291 }
292
293 for (index = size + offset; --index >= 0; )
294 {
295 if (type[index] != void_type_node)
296 push_type (type[index]);
297 }
298 }
299
300 /* This causes the next iteration to ignore the next instruction
301 and look for some other unhandled instruction. */
302 #define INVALIDATE_PC (prevpc = -1, oldpc = PC, PC = INVALID_PC)
303 #define INVALID_PC (-1)
304
305 #define VERIFICATION_ERROR(MESSAGE) \
306 do { message = MESSAGE; goto verify_error; } while (0)
307
308 #define PUSH_PENDING(LABEL) \
309 do { if ((message = check_pending_block (LABEL)) != NULL) \
310 goto verify_error; } while (0)
311
312 #ifdef __GNUC__
313 #define CHECK_PC_IN_RANGE(PC) ({if (PC < 0 || PC > length) goto bad_pc; 1;})
314 #else
315 #define CHECK_PC_IN_RANGE(PC) (PC < 0 || PC > length ? \
316 (fatal("Bad byte codes.\n"), 0) : 1)
317 #endif
318
319 #define BCODE byte_ops
320
321 /* Verify the bytecodes of the current method.
322 Return 1 on success, 0 on failure. */
323 int
324 verify_jvm_instructions (jcf, byte_ops, length)
325 JCF* jcf;
326 unsigned char* byte_ops;
327 long length;
328 {
329 tree label;
330 int wide = 0;
331 int op_code;
332 int PC;
333 int oldpc; /* PC of start of instruction. */
334 int prevpc; /* If >= 0, PC of previous instruction. */
335 char *message;
336 int i;
337 register unsigned char *p;
338 struct eh_range *prev_eh_ranges = NULL_EH_RANGE;
339 struct eh_range *eh_ranges;
340
341 jint int_value = -1;
342
343 pending_blocks = NULL_TREE;
344
345 /* Handle the exception table. */
346 method_init_exceptions ();
347 JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length);
348 i = JCF_readu2 (jcf);
349
350 /* We read the exception table backwards. */
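/* Each exception_table entry is 8 bytes: four u2 fields giving
   start_pc, end_pc, handler_pc and catch_type. */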
351 p = jcf->read_ptr + 8 * i;
352 while (--i >= 0)
353 {
354 int start_pc = GET_u2 (p-8);
355 int end_pc = GET_u2 (p-6);
356 int handler_pc = GET_u2 (p-4);
357 int catch_type = GET_u2 (p-2);
358 p -= 8;
359
360 if (start_pc < 0 || start_pc >= length
361 || end_pc < 0 || end_pc > length || start_pc >= end_pc
362 || handler_pc < 0 || handler_pc >= length
363 || (handler_pc >= start_pc && handler_pc < end_pc)
364 || ! (instruction_bits [start_pc] & BCODE_INSTRUCTION_START)
365 || ! (instruction_bits [end_pc] & BCODE_INSTRUCTION_START)
366 || ! (instruction_bits [handler_pc] & BCODE_INSTRUCTION_START))
367 {
368 error ("bad pc in exception_table");
369 return 0;
370 }
371
372 if (! add_handler (start_pc, end_pc,
373 lookup_label (handler_pc),
374 catch_type == 0 ? NULL_TREE
375 : get_class_constant (jcf, catch_type)))
376 {
377 error ("overlapping exception ranges are not supported");
378 return 0;
379 }
380
381 instruction_bits [handler_pc] |= BCODE_EXCEPTION_TARGET;
382 }
383
384 for (PC = 0;;)
385 {
386 int index;
387 tree type, tmp;
388 if (((PC != INVALID_PC
389 && instruction_bits [PC] & BCODE_TARGET) != 0)
390 || PC == 0)
391 {
392 PUSH_PENDING (lookup_label (PC));
393 INVALIDATE_PC;
394 }
395 if (PC == INVALID_PC)
396 {
397 label = pending_blocks;
398 if (label == NULL_TREE)
399 break; /* We're done! */
400 pending_blocks = LABEL_PENDING_CHAIN (label);
401 LABEL_CHANGED (label) = 0;
402
403 if (LABEL_IN_SUBR (label))
404 current_subr = LABEL_SUBR_START (label);
405 else
406 current_subr = NULL_TREE;
407
408 /* Restore type_map and stack_pointer from
409 LABEL_TYPE_STATE (label), and continue
410 compiling from there. */
411 load_type_state (label);
412 PC = LABEL_PC (label);
413 }
414 else if (PC >= length)
415 VERIFICATION_ERROR ("falling through end of method");
416
417 oldpc = PC;
418
419 if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide)
420 VERIFICATION_ERROR ("PC not at instruction start");
421
422 instruction_bits[PC] |= BCODE_VERIFIED;
423
424 eh_ranges = find_handler (oldpc);
425
426 op_code = byte_ops[PC++];
427 switch (op_code)
428 {
429 int is_static, is_putting;
430 case OPCODE_nop:
431 break;
432 case OPCODE_iconst_m1:
433 case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2:
434 case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5:
435 i = op_code - OPCODE_iconst_0;
436 goto push_int;
437 push_int:
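/* If the next opcode allocates an array, remember the constant so the
   newarray handler below can give the array type a known length. */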
438 if (byte_ops[PC] == OPCODE_newarray
439 || byte_ops[PC] == OPCODE_anewarray)
440 int_value = i;
441 push_type (int_type_node); break;
442 case OPCODE_lconst_0: case OPCODE_lconst_1:
443 push_type (long_type_node); break;
444 case OPCODE_fconst_0: case OPCODE_fconst_1: case OPCODE_fconst_2:
445 push_type (float_type_node); break;
446 case OPCODE_dconst_0: case OPCODE_dconst_1:
447 push_type (double_type_node); break;
448 case OPCODE_bipush:
449 i = IMMEDIATE_s1;
450 goto push_int;
451 case OPCODE_sipush:
452 i = IMMEDIATE_s2;
453 goto push_int;
454 case OPCODE_iload: type = int_type_node; goto general_load;
455 case OPCODE_lload: type = long_type_node; goto general_load;
456 case OPCODE_fload: type = float_type_node; goto general_load;
457 case OPCODE_dload: type = double_type_node; goto general_load;
458 case OPCODE_aload: type = ptr_type_node; goto general_load;
459 general_load:
460 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
461 wide = 0;
462 goto load;
463 case OPCODE_iload_0: type = int_type_node; index = 0; goto load;
464 case OPCODE_iload_1: type = int_type_node; index = 1; goto load;
465 case OPCODE_iload_2: type = int_type_node; index = 2; goto load;
466 case OPCODE_iload_3: type = int_type_node; index = 3; goto load;
467 case OPCODE_lload_0: type = long_type_node; index = 0; goto load;
468 case OPCODE_lload_1: type = long_type_node; index = 1; goto load;
469 case OPCODE_lload_2: type = long_type_node; index = 2; goto load;
470 case OPCODE_lload_3: type = long_type_node; index = 3; goto load;
471 case OPCODE_fload_0: type = float_type_node; index = 0; goto load;
472 case OPCODE_fload_1: type = float_type_node; index = 1; goto load;
473 case OPCODE_fload_2: type = float_type_node; index = 2; goto load;
474 case OPCODE_fload_3: type = float_type_node; index = 3; goto load;
475 case OPCODE_dload_0: type = double_type_node; index = 0; goto load;
476 case OPCODE_dload_1: type = double_type_node; index = 1; goto load;
477 case OPCODE_dload_2: type = double_type_node; index = 2; goto load;
478 case OPCODE_dload_3: type = double_type_node; index = 3; goto load;
479 case OPCODE_aload_0: type = ptr_type_node; index = 0; goto load;
480 case OPCODE_aload_1: type = ptr_type_node; index = 1; goto load;
481 case OPCODE_aload_2: type = ptr_type_node; index = 2; goto load;
482 case OPCODE_aload_3: type = ptr_type_node; index = 3; goto load;
483 load:
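/* Check that the local variable index is in range and that the
   recorded type of that local matches what the opcode expects. */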
484 if (index < 0
485 || (index + TYPE_IS_WIDE (type)
486 >= DECL_MAX_LOCALS (current_function_decl)))
487 VERIFICATION_ERROR ("invalid local variable index in load");
488 tmp = type_map[index];
489 if (tmp == TYPE_UNKNOWN || tmp == TYPE_SECOND
490 || (TYPE_IS_WIDE (type)
491 && type_map[index+1] != void_type_node)
492 || (type == ptr_type_node
493 ? TREE_CODE (tmp) != POINTER_TYPE
494 : type == int_type_node
495 ? (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
496 : type != tmp))
497 VERIFICATION_ERROR("invalid local variable type in load");
498 push_type (tmp);
499 goto note_used;
500 case OPCODE_istore: type = int_type_node; goto general_store;
501 case OPCODE_lstore: type = long_type_node; goto general_store;
502 case OPCODE_fstore: type = float_type_node; goto general_store;
503 case OPCODE_dstore: type = double_type_node; goto general_store;
504 case OPCODE_astore: type = ptr_type_node; goto general_store;
505 general_store:
506 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
507 wide = 0;
508 goto store;
509 case OPCODE_istore_0: type = int_type_node; index = 0; goto store;
510 case OPCODE_istore_1: type = int_type_node; index = 1; goto store;
511 case OPCODE_istore_2: type = int_type_node; index = 2; goto store;
512 case OPCODE_istore_3: type = int_type_node; index = 3; goto store;
513 case OPCODE_lstore_0: type = long_type_node; index=0; goto store;
514 case OPCODE_lstore_1: type = long_type_node; index=1; goto store;
515 case OPCODE_lstore_2: type = long_type_node; index=2; goto store;
516 case OPCODE_lstore_3: type = long_type_node; index=3; goto store;
517 case OPCODE_fstore_0: type=float_type_node; index=0; goto store;
518 case OPCODE_fstore_1: type=float_type_node; index=1; goto store;
519 case OPCODE_fstore_2: type=float_type_node; index=2; goto store;
520 case OPCODE_fstore_3: type=float_type_node; index=3; goto store;
521 case OPCODE_dstore_0: type=double_type_node; index=0; goto store;
522 case OPCODE_dstore_1: type=double_type_node; index=1; goto store;
523 case OPCODE_dstore_2: type=double_type_node; index=2; goto store;
524 case OPCODE_dstore_3: type=double_type_node; index=3; goto store;
525 case OPCODE_astore_0: type = ptr_type_node; index = 0; goto store;
526 case OPCODE_astore_1: type = ptr_type_node; index = 1; goto store;
527 case OPCODE_astore_2: type = ptr_type_node; index = 2; goto store;
528 case OPCODE_astore_3: type = ptr_type_node; index = 3; goto store;
529 store:
530 if (index < 0
531 || (index + TYPE_IS_WIDE (type)
532 >= DECL_MAX_LOCALS (current_function_decl)))
533 {
534 VERIFICATION_ERROR ("invalid local variable index in store");
535 return 0;
536 }
537 type = pop_type (type);
538 type_map[index] = type;
539
540 /* If local variable changed, we need to reconsider eh handlers. */
541 prev_eh_ranges = NULL_EH_RANGE;
542
543 /* Allocate decl and rtx for this variable now, so if we're not
544 optimizing, we get a temporary that survives the whole method. */
545 find_local_variable (index, type, oldpc);
546
547 if (TYPE_IS_WIDE (type))
548 type_map[index+1] = TYPE_SECOND;
549 /* ... fall through to note_used ... */
550 note_used:
551 /* For store or load, note that local variable INDEX is used.
552 This is needed to verify try-finally sub-routines. */
553 if (current_subr)
554 {
555 tree vec = LABEL_RETURN_TYPE_STATE (current_subr);
556 tree subr_vec = LABEL_TYPE_STATE (current_subr);
557 int len = 1 + TYPE_IS_WIDE (type);
558 while (--len >= 0)
559 {
560 if (TREE_VEC_ELT (vec, index) == TYPE_UNUSED)
561 TREE_VEC_ELT (vec, index) = TREE_VEC_ELT (subr_vec, index);
562 }
563 }
564 break;
565 case OPCODE_iadd:
566 case OPCODE_iand:
567 case OPCODE_idiv:
568 case OPCODE_imul:
569 case OPCODE_ior:
570 case OPCODE_irem:
571 case OPCODE_ishl:
572 case OPCODE_ishr:
573 case OPCODE_isub:
574 case OPCODE_iushr:
575 case OPCODE_ixor:
576 type = int_type_node; goto binop;
577 case OPCODE_ineg:
578 case OPCODE_i2c:
579 case OPCODE_i2b:
580 case OPCODE_i2s:
581 type = int_type_node; goto unop;
582 case OPCODE_ladd:
583 case OPCODE_land:
584 case OPCODE_ldiv:
585 case OPCODE_lsub:
586 case OPCODE_lmul:
587 case OPCODE_lrem:
588 case OPCODE_lor:
589 case OPCODE_lxor:
590 type = long_type_node; goto binop;
591 case OPCODE_lneg:
592 type = long_type_node; goto unop;
593 case OPCODE_fadd: case OPCODE_fsub:
594 case OPCODE_fmul: case OPCODE_fdiv: case OPCODE_frem:
595 type = float_type_node; goto binop;
596 case OPCODE_fneg:
597 type = float_type_node; goto unop;
598 case OPCODE_dadd: case OPCODE_dsub:
599 case OPCODE_dmul: case OPCODE_ddiv: case OPCODE_drem:
600 type = double_type_node; goto binop;
601 case OPCODE_dneg:
602 type = double_type_node; goto unop;
603 unop:
604 pop_type (type);
605 push_type (type);
606 break;
607 binop:
608 pop_type (type);
609 pop_type (type);
610 push_type (type);
611 break;
612 case OPCODE_lshl:
613 case OPCODE_lshr:
614 case OPCODE_lushr:
615 pop_type (int_type_node);
616 pop_type (long_type_node);
617 push_type (long_type_node);
618 break;
619 case OPCODE_iinc:
620 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
621 PC += wide + 1;
622 wide = 0;
623 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl))
624 VERIFICATION_ERROR ("invalid local variable index in iinc");
625 tmp = type_map[index];
626 if (! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32)
627 VERIFICATION_ERROR ("invalid local variable type in iinc");
628 break;
629 case OPCODE_i2l:
630 pop_type (int_type_node); push_type (long_type_node); break;
631 case OPCODE_i2f:
632 pop_type (int_type_node); push_type (float_type_node); break;
633 case OPCODE_i2d:
634 pop_type (int_type_node); push_type (double_type_node); break;
635 case OPCODE_l2i:
636 pop_type (long_type_node); push_type (int_type_node); break;
637 case OPCODE_l2f:
638 pop_type (long_type_node); push_type (float_type_node); break;
639 case OPCODE_l2d:
640 pop_type (long_type_node); push_type (double_type_node); break;
641 case OPCODE_f2i:
642 pop_type (float_type_node); push_type (int_type_node); break;
643 case OPCODE_f2l:
644 pop_type (float_type_node); push_type (long_type_node); break;
645 case OPCODE_f2d:
646 pop_type (float_type_node); push_type (double_type_node); break;
647 case OPCODE_d2i:
648 pop_type (double_type_node); push_type (int_type_node); break;
649 case OPCODE_d2l:
650 pop_type (double_type_node); push_type (long_type_node); break;
651 case OPCODE_d2f:
652 pop_type (double_type_node); push_type (float_type_node); break;
653 case OPCODE_lcmp:
654 type = long_type_node; goto compare;
655 case OPCODE_fcmpl:
656 case OPCODE_fcmpg:
657 type = float_type_node; goto compare;
658 case OPCODE_dcmpl:
659 case OPCODE_dcmpg:
660 type = double_type_node; goto compare;
661 compare:
662 pop_type (type); pop_type (type);
663 push_type (int_type_node); break;
664 case OPCODE_ifeq:
665 case OPCODE_ifne:
666 case OPCODE_iflt:
667 case OPCODE_ifge:
668 case OPCODE_ifgt:
669 case OPCODE_ifle:
670 pop_type (int_type_node); goto cond;
671 case OPCODE_ifnull:
672 case OPCODE_ifnonnull:
673 pop_type (ptr_type_node ); goto cond;
674 case OPCODE_if_icmpeq:
675 case OPCODE_if_icmpne:
676 case OPCODE_if_icmplt:
677 case OPCODE_if_icmpge:
678 case OPCODE_if_icmpgt:
679 case OPCODE_if_icmple:
680 pop_type (int_type_node); pop_type (int_type_node); goto cond;
681 case OPCODE_if_acmpeq:
682 case OPCODE_if_acmpne:
683 pop_type (object_ptr_type_node); pop_type (object_ptr_type_node);
684 goto cond;
685 cond:
686 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
687 break;
688 case OPCODE_goto:
689 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2));
690 INVALIDATE_PC;
691 break;
692 case OPCODE_wide:
693 switch (byte_ops[PC])
694 {
695 case OPCODE_iload: case OPCODE_lload:
696 case OPCODE_fload: case OPCODE_dload: case OPCODE_aload:
697 case OPCODE_istore: case OPCODE_lstore:
698 case OPCODE_fstore: case OPCODE_dstore: case OPCODE_astore:
699 case OPCODE_iinc:
700 case OPCODE_ret:
701 wide = 1;
702 break;
703 default:
704 VERIFICATION_ERROR ("invalid use of wide instruction");
705 }
706 break;
707 case OPCODE_ireturn: type = int_type_node; goto ret;
708 case OPCODE_lreturn: type = long_type_node; goto ret;
709 case OPCODE_freturn: type = float_type_node; goto ret;
710 case OPCODE_dreturn: type = double_type_node; goto ret;
711 case OPCODE_areturn: type = ptr_type_node; goto ret;
712 ret:
713 pop_type (type);
714 /* ... fall through ... */
715 case OPCODE_return:
716 INVALIDATE_PC;
717 break;
718 case OPCODE_getstatic: is_putting = 0; is_static = 1; goto field;
719 case OPCODE_putstatic: is_putting = 1; is_static = 1; goto field;
720 case OPCODE_getfield: is_putting = 0; is_static = 0; goto field;
721 case OPCODE_putfield: is_putting = 1; is_static = 0; goto field;
722 field:
723 {
724 int index = IMMEDIATE_u2;
725 tree self_type = get_class_constant
726 (jcf, COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool, index));
727 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, index);
728 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
729 tree field_type = get_type_from_signature (field_signature);
730 if (is_putting)
731 pop_type (field_type);
732 if (! is_static)
733 {
734 /* Defer actual checking until next pass. */
735 pop_type (ptr_type_node);
736 }
737 if (! is_putting)
738 push_type (field_type);
739 break;
740 }
741 case OPCODE_new:
742 push_type (get_class_constant (jcf, IMMEDIATE_u2));
743 break;
744 case OPCODE_dup: type_stack_dup (1, 0); break;
745 case OPCODE_dup_x1: type_stack_dup (1, 1); break;
746 case OPCODE_dup_x2: type_stack_dup (1, 2); break;
747 case OPCODE_dup2: type_stack_dup (2, 0); break;
748 case OPCODE_dup2_x1: type_stack_dup (2, 1); break;
749 case OPCODE_dup2_x2: type_stack_dup (2, 2); break;
750 case OPCODE_pop: index = 1; goto pop;
751 case OPCODE_pop2: index = 2; goto pop;
752 pop:
753 if (stack_pointer < index)
754 VERIFICATION_ERROR ("stack underflow");
755 stack_pointer -= index;
756 break;
757 case OPCODE_swap:
758 if (stack_pointer < 2)
759 VERIFICATION_ERROR ("stack underflow (in swap)");
760 else
761 {
762 tree type1 = stack_type_map[stack_pointer - 1];
763 tree type2 = stack_type_map[stack_pointer - 2];
764 if (type1 == void_type_node || type2 == void_type_node)
765 VERIFICATION_ERROR ("verifier (swap): double or long value");
766 stack_type_map[stack_pointer - 2] = type1;
767 stack_type_map[stack_pointer - 1] = type2;
768 }
769 break;
770 case OPCODE_ldc: index = IMMEDIATE_u1; goto ldc;
771 case OPCODE_ldc2_w:
772 case OPCODE_ldc_w:
773 index = IMMEDIATE_u2; goto ldc;
774 ldc:
775 if (index <= 0 || index >= JPOOL_SIZE(current_jcf))
776 VERIFICATION_ERROR ("bad constant pool index in ldc");
777 int_value = -1;
778 switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag)
779 {
780 case CONSTANT_Integer: type = int_type_node; goto check_ldc;
781 case CONSTANT_Float: type = float_type_node; goto check_ldc;
782 case CONSTANT_String: type = string_type_node; goto check_ldc;
783 case CONSTANT_Long: type = long_type_node; goto check_ldc;
784 case CONSTANT_Double: type = double_type_node; goto check_ldc;
785 check_ldc:
786 if (TYPE_IS_WIDE (type) == (op_code == OPCODE_ldc2_w))
787 break;
788 /* ... else fall through ... */
789 default:
790 bad_ldc:
791 VERIFICATION_ERROR ("bad constant pool tag in ldc");
792 }
793 if (type == int_type_node)
794 {
795 i = TREE_INT_CST_LOW (get_constant (current_jcf, index));
796 goto push_int;
797 }
798 push_type (type);
799 break;
800
801 case OPCODE_invokevirtual:
802 case OPCODE_invokespecial:
803 case OPCODE_invokestatic:
804 case OPCODE_invokeinterface:
805 {
806 int index = IMMEDIATE_u2;
807 tree sig = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index);
808 tree self_type = get_class_constant
809 (current_jcf, COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
810 index));
811 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool, index);
812 tree method_type;
813 method_type = parse_signature_string (IDENTIFIER_POINTER (sig),
814 IDENTIFIER_LENGTH (sig));
815 if (TREE_CODE (method_type) != FUNCTION_TYPE)
816 VERIFICATION_ERROR ("bad method signature");
817 pop_argument_types (TYPE_ARG_TYPES (method_type));
818
819 /* Can't invoke <clinit> */
820 if (method_name == clinit_identifier_node)
821 VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>");
822 /* Apart from invokespecial, can't invoke <init> */
823 if (op_code != OPCODE_invokespecial
824 && method_name == init_identifier_node)
825 VERIFICATION_ERROR ("invoke opcode can't invoke <init>");
826
827 if (op_code != OPCODE_invokestatic)
828 pop_type (self_type);
829
830 switch (op_code)
831 {
832 case OPCODE_invokeinterface:
833 {
834 int nargs = IMMEDIATE_u1;
835 int notZero = IMMEDIATE_u1;
836
837 if (!nargs || notZero)
838 VERIFICATION_ERROR
839 ("invalid argument number in invokeinterface");
840 break;
841 }
842 }
843
844 if (TREE_TYPE (method_type) != void_type_node)
845 push_type (TREE_TYPE (method_type));
846 break;
847 }
848
849 case OPCODE_arraylength:
850 /* Type checking is actually done during code generation. */
851 pop_type( ptr_type_node );
852 push_type( int_type_node );
853 break;
854
855 /* Q&D verification here; more checking is done during code generation.
856 For byte/boolean/char/short, the value popped is an int coerced
857 into the right type before being stored. */
858 case OPCODE_iastore: type = int_type_node; goto astore;
859 case OPCODE_lastore: type = long_type_node; goto astore;
860 case OPCODE_fastore: type = float_type_node; goto astore;
861 case OPCODE_dastore: type = double_type_node; goto astore;
862 case OPCODE_aastore: type = ptr_type_node; goto astore;
863 case OPCODE_bastore: type = int_type_node; goto astore;
864 case OPCODE_castore: type = int_type_node; goto astore;
865 case OPCODE_sastore: type = int_type_node; goto astore;
866 astore:
867 /* FIXME - need better verification here */
868 pop_type (type); /* new value */
869 pop_type (int_type_node); /* index */
870 pop_type (ptr_type_node); /* array */
871 break;
872
873 /* Q&D verification here; more checking is done during code generation.
874 For byte/boolean/char/short, the value pushed is an int. */
875 case OPCODE_iaload: type = int_type_node; goto aload;
876 case OPCODE_laload: type = long_type_node; goto aload;
877 case OPCODE_faload: type = float_type_node; goto aload;
878 case OPCODE_daload: type = double_type_node; goto aload;
879 case OPCODE_aaload: type = ptr_type_node; goto aload;
880 case OPCODE_baload: type = promote_type (byte_type_node); goto aload;
881 case OPCODE_caload: type = promote_type (char_type_node); goto aload;
882 case OPCODE_saload: type = promote_type (short_type_node); goto aload;
883 aload:
884 pop_type (int_type_node);
885 type = pop_type (ptr_type_node);
886 if (! is_array_type_p (type))
887 VERIFICATION_ERROR ("array load from non-array type");
888 push_type (TYPE_ARRAY_ELEMENT (TREE_TYPE (type)));
889 break;
890
891 case OPCODE_anewarray:
892 type = get_class_constant (current_jcf, IMMEDIATE_u2);
893 type = promote_type (type);
894 goto newarray;
895
896 case OPCODE_newarray:
897 index = IMMEDIATE_u1;
898 type = decode_newarray_type (index);
899 if (type == NULL_TREE)
900 VERIFICATION_ERROR ("invalid type code in newarray opcode");
901 goto newarray;
902
903 newarray:
904 if (int_value >= 0 && prevpc >= 0)
905 {
906 /* If the previous instruction pushed an int constant,
907 we want to use it. */
908 switch (byte_ops[prevpc])
909 {
910 case OPCODE_iconst_0: case OPCODE_iconst_1:
911 case OPCODE_iconst_2: case OPCODE_iconst_3:
912 case OPCODE_iconst_4: case OPCODE_iconst_5:
913 case OPCODE_bipush: case OPCODE_sipush:
914 case OPCODE_ldc: case OPCODE_ldc_w:
915 break;
916 default:
917 int_value = -1;
918 }
919 }
920 else
921 int_value = -1;
922 type = build_java_array_type (type, int_value);
923 pop_type (int_type_node);
924 push_type (type);
925 break;
926
927 case OPCODE_multianewarray:
928 {
929 int ndim, i;
930 index = IMMEDIATE_u2;
931 ndim = IMMEDIATE_u1;
932
933 if( ndim < 1 )
934 VERIFICATION_ERROR ("number of dimensions lower than 1 in multianewarray");
935
936 for( i = 0; i < ndim; i++ )
937 pop_type (int_type_node);
938 push_type (get_class_constant (current_jcf, index));
939 break;
940 }
941
942 case OPCODE_aconst_null:
943 push_type (ptr_type_node);
944 break;
945
946 case OPCODE_athrow:
947 pop_type (throwable_type_node);
948 INVALIDATE_PC;
949 break;
950
951 case OPCODE_checkcast:
952 pop_type (ptr_type_node);
953 type = get_class_constant (current_jcf, IMMEDIATE_u2);
954 push_type (type);
955 break;
956 case OPCODE_instanceof:
957 pop_type (ptr_type_node);
958 get_class_constant (current_jcf, IMMEDIATE_u2);
959 push_type (int_type_node);
960 break;
961
962 case OPCODE_tableswitch:
963 {
964 jint default_val, low, high;
965
966 pop_type (int_type_node);
967 while (PC%4)
968 {
969 if (byte_ops[PC++])
970 VERIFICATION_ERROR ("bad alignment in tableswitch pad");
971 }
972 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
973 low = IMMEDIATE_s4;
974 high = IMMEDIATE_s4;
975
976 if (low > high)
977 VERIFICATION_ERROR ("unsorted low/high value in tableswitch");
978
979 while (low++ <= high)
980 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
981 break;
982 }
983
984 case OPCODE_lookupswitch:
985 {
986 jint npairs, last, not_registered = 1;
987
988 pop_type (int_type_node);
989 while (PC%4)
990 {
991 if (byte_ops[PC++])
992 VERIFICATION_ERROR ("bad alignment in lookupswitch pad");
993 }
994
995 PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4));
996 npairs = IMMEDIATE_s4;
997
998 if (npairs < 0)
999 VERIFICATION_ERROR ("invalid number of targets in lookupswitch");
1000
1001 while (npairs--)
1002 {
1003 int match = IMMEDIATE_s4;
1004 if (not_registered)
1005 not_registered = 0;
1006 else if (last >= match)
1007 VERIFICATION_ERROR ("unsorted match value in lookupswitch");
1008
1009 last = match;
1010 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1011 }
1012 break;
1013 }
1014
1015 case OPCODE_monitorenter:
1016 /* fall thru */
1017 case OPCODE_monitorexit:
1018 pop_type (ptr_type_node);
1019 break;
1020
1021 case OPCODE_goto_w:
1022 PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4));
1023 INVALIDATE_PC;
1024 break;
1025
1026 case OPCODE_jsr:
1027 {
1028 tree target = lookup_label (oldpc + IMMEDIATE_s2);
1029 tree return_label = lookup_label (PC);
1030 push_type (return_address_type_node);
1031 if (! LABEL_VERIFIED (target))
1032 {
1033 /* first time seen */
1034 tree return_type_map;
1035 int nlocals = DECL_MAX_LOCALS (current_function_decl);
1036 index = nlocals + DECL_MAX_STACK (current_function_decl);
1037 return_type_map = make_tree_vec (index);
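/* Stack slots start out as TYPE_UNKNOWN; local slots start out as
   TYPE_UNUSED, marking locals the subroutine has not yet loaded or stored. */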
1038 while (index > nlocals)
1039 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
1040 while (index > 0)
1041 TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
1042 LABEL_RETURN_LABEL (target)
1043 = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
1044 LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;
1045 LABEL_RETURN_TYPE_STATE (target) = return_type_map;
1046 LABEL_IS_SUBR_START (target) = 1;
1047 LABEL_IN_SUBR (target) = 1;
1048 LABEL_SUBR_START (target) = target;
1049 LABEL_SUBR_CONTEXT (target) = current_subr;
1050 }
1051 else if (! LABEL_IS_SUBR_START (target)
1052 || LABEL_SUBR_CONTEXT (target) != current_subr)
1053 VERIFICATION_ERROR ("label part of different subroutines");
1054
1055 i = merge_type_state (target);
1056 if (i != 0)
1057 {
1058 if (i < 0)
1059 VERIFICATION_ERROR ("types could not be merged at jsr");
1060 push_pending_label (target);
1061 }
1062 current_subr = target;
1063
1064 /* Chain return_pc onto LABEL_RETURN_LABELS (target) if needed. */
1065 if (! value_member (return_label, LABEL_RETURN_LABELS (target)))
1066 {
1067 LABEL_RETURN_LABELS (target)
1068 = tree_cons (NULL_TREE, return_label,
1069 LABEL_RETURN_LABELS (target));
1070 }
1071
1072 if (LABEL_VERIFIED (target))
1073 {
1074 tree return_map = LABEL_RETURN_TYPE_STATE (target);
1075 int len = TREE_VEC_LENGTH (return_map);
1076 stack_pointer = len - DECL_MAX_LOCALS (current_function_decl);
1077 while (--len >= 0)
1078 {
1079 if (TREE_VEC_ELT (return_map, len) != TYPE_UNUSED)
1080 type_map[len] = TREE_VEC_ELT (return_map, len);
1081 }
1082 current_subr = LABEL_SUBR_CONTEXT (target);
1083 PUSH_PENDING (return_label);
1084 }
1085
1086 INVALIDATE_PC;
1087 }
1088 break;
1089 case OPCODE_ret:
1090 if (current_subr == NULL)
1091 VERIFICATION_ERROR ("ret instruction not in a jsr subroutine");
1092 else
1093 {
1094 tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr);
1095 tree caller = LABEL_SUBR_CONTEXT (current_subr);
1096 int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer;
1097 index = wide ? IMMEDIATE_u2 : IMMEDIATE_u1;
1098 wide = 0;
1099 INVALIDATE_PC;
1100 if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl)
1101 || type_map[index] != TYPE_RETURN_ADDR)
1102 VERIFICATION_ERROR ("invalid ret index");
1103
1104 /* The next chunk of code is similar to an inlined version of
1105 * merge_type_state (LABEL_RETURN_LABEL (current_subr)).
1106 * The main differences are that LABEL_RETURN_LABEL is
1107 * pre-allocated by the jsr (but we don't know the size then);
1108 * and that we have to handle TYPE_UNUSED. */
1109
1110 if (! RETURN_MAP_ADJUSTED (ret_map))
1111 { /* First return from this subroutine - fix stack pointer. */
1112 TREE_VEC_LENGTH (ret_map) = size;
1113 for (index = size; --index >= 0; )
1114 {
1115 if (TREE_VEC_ELT (ret_map, index) != TYPE_UNUSED)
1116 TREE_VEC_ELT (ret_map, index) = type_map[index];
1117 }
1118 RETURN_MAP_ADJUSTED (ret_map) = 1;
1119 }
1120 else
1121 {
1122 if (TREE_VEC_LENGTH (ret_map) != size)
1123 VERIFICATION_ERROR ("inconsistent stack size on ret");
1124 for (index = 0; index < size; index++)
1125 {
1126 tree type = TREE_VEC_ELT (ret_map, index);
1127 if (type != TYPE_UNUSED)
1128 {
1129 type = merge_types (type, type_map [index]);
1130 TREE_VEC_ELT (ret_map, index) = type;
1131 if (type == TYPE_UNKNOWN)
1132 {
1133 if (index >= size - stack_pointer)
1134 VERIFICATION_ERROR
1135 ("inconsistent types on ret from jsr");
1136 }
1137 else if (TYPE_IS_WIDE (type))
1138 index++;
1139 }
1140 }
1141 }
1142
1143 /* Check if there are any more pending blocks in this subroutine.
1144 Because we push pending blocks in a last-in-first-out order,
1145 and because we don't push anything from our caller until we
1146 are done with this subroutine or anything nested in it,
1147 we are done if the top of the pending_blocks stack is
1148 not in a subroutine, or it is in our caller. */
1149 if (pending_blocks == NULL_TREE
1150 || ! LABEL_IN_SUBR (pending_blocks)
1151 || LABEL_SUBR_START (pending_blocks) == caller)
1152 {
1153 /* Since we are done with this subroutine (i.e. this is the
1154 last ret from it), set up the (so far known) return
1155 address as pending - with the merged type state. */
1156 tmp = LABEL_RETURN_LABELS (current_subr);
1157 current_subr = caller;
1158 for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp))
1159 {
1160 tree return_label = TREE_VALUE (tmp);
1161 tree return_state = LABEL_TYPE_STATE (return_label);
1162 if (return_state == NULL_TREE)
1163 {
1164 /* This means we had not verified the
1165 subroutine earlier, so this is the first jsr to
1166 call it. In this case, the type_map of the return
1167 address is just the current type_map - and that
1168 is handled by the following PUSH_PENDING. */
1169 }
1170 else
1171 {
1172 /* In this case we have to do a merge. But first
1173 restore the type_map for unused slots to those
1174 that were in effect at the jsr. */
1175 for (index = size; --index >= 0; )
1176 {
1177 type_map[index] = TREE_VEC_ELT (ret_map, index);
1178 if (type_map[index] == TYPE_UNUSED)
1179 type_map[index]
1180 = TREE_VEC_ELT (return_state, index);
1181 }
1182 }
1183 PUSH_PENDING (return_label);
1184 }
1185 }
1186 }
1187 break;
1188 case OPCODE_jsr_w:
1189 case OPCODE_ret_w:
1190 default:
1191 error ("unknown opcode %d@pc=%d during verification", op_code, PC-1);
1192 return 0;
1193 }
1194
1195 prevpc = oldpc;
1196
1197 /* The following test is true if we have entered or exited an exception
1198 handler range *or* we have done a store to a local variable.
1199 In either case we need to consider any exception handlers that
1200 might "follow" this instruction. */
1201
1202 if (eh_ranges != prev_eh_ranges)
1203 {
1204 int save_stack_pointer = stack_pointer;
1205 int index = DECL_MAX_LOCALS (current_function_decl);
1206 tree save_type = type_map[index];
1207 tree save_current_subr = current_subr;
1208 struct eh_range *ranges = find_handler (oldpc);
1209 stack_pointer = 1;
1210 for (; ranges != NULL_EH_RANGE; ranges = ranges->outer)
1211 {
1212 tree chain = ranges->handlers;
1213
1214 /* We need to determine if the handler is part of current_subr.
1215 There are two cases: (1) The exception catch range
1216 is entirely within current_subr. In that case the handler
1217 is also part of current_subr.
1218 (2) Some of the catch range is not in current_subr.
1219 In that case, the handler is *not* part of current_subr.
1220
1221 Figuring out which is the case is not necessarily obvious,
1222 in the presence of clever code generators (and obfuscators).
1223 We make the simplifying assumption that in case (2)
1224 the current_subr is entirely within the catch range.
1225 In that case we can assume that if a caller (the jsr) of
1226 a subroutine is within the catch range, then the handler is
1227 *not* part of the subroutine, and vice versa. */
1228
1229 current_subr = save_current_subr;
1230 for ( ; current_subr != NULL_TREE;
1231 current_subr = LABEL_SUBR_CONTEXT (current_subr))
1232 {
1233 tree return_labels = LABEL_RETURN_LABELS (current_subr);
1234 /* There could be multiple return_labels, but
1235 we only need to check one. */
1236 int return_pc = LABEL_PC (TREE_VALUE (return_labels));
1237 if (return_pc <= ranges->start_pc
1238 || return_pc > ranges->end_pc)
1239 break;
1240 }
1241
1242 for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain))
1243 {
1244 tree handler = TREE_VALUE (chain);
1245 tree type = TREE_PURPOSE (chain);
1246 if (type == NULL_TREE) /* a finally handler */
1247 type = throwable_type_node;
1248 type_map[index] = promote_type (type);
1249
1250 PUSH_PENDING (handler);
1251 }
1252 }
1253 stack_pointer = save_stack_pointer;
1254 current_subr = save_current_subr;
1255 type_map[index] = save_type;
1256 prev_eh_ranges = eh_ranges;
1257 }
1258 }
1259 return 1;
1260 bad_pc:
1261 message = "program counter out of range";
1262 goto verify_error;
1263 verify_error:
1264 error ("verification error at PC=%d: %s", oldpc, message);
1265 return 0;
1266 }