* alias.c: Fix comment typos.
* builtins.c: Likewise.
* cfg.c: Likewise.
* df.c: Likewise.
* dominance.c: Likewise.
* dwarf2out.c: Likewise.
* emit-rtl.c: Likewise.
* expr.c: Likewise.
* final.c: Likewise.
* fold-const.c: Likewise.
* gcse.c: Likewise.
* genattrtab.c: Likewise.
* genrecog.c: Likewise.
* gensupport.c: Likewise.
* ggc-zone.c: Likewise.
* integrate.c: Likewise.
* local-alloc.c: Likewise.
* loop.c: Likewise.
* recog.c: Likewise.
* regmove.c: Likewise.
* reg-stack.c: Likewise.
* reorg.c: Likewise.
* rtlanal.c: Likewise.
* rtl.h: Likewise.
* sched-ebb.c: Likewise.
* simplify-rtx.c: Likewise.
* toplev.c: Likewise.
* varasm.c: Likewise.
From-SVN: r75475
+2004-01-06 Kazu Hirata <kazu@cs.umass.edu>
+
+ * alias.c: Fix comment typos.
+ * builtins.c: Likewise.
+ * cfg.c: Likewise.
+ * df.c: Likewise.
+ * dominance.c: Likewise.
+ * dwarf2out.c: Likewise.
+ * emit-rtl.c: Likewise.
+ * expr.c: Likewise.
+ * final.c: Likewise.
+ * fold-const.c: Likewise.
+ * gcse.c: Likewise.
+ * genattrtab.c: Likewise.
+ * genrecog.c: Likewise.
+ * gensupport.c: Likewise.
+ * ggc-zone.c: Likewise.
+ * integrate.c: Likewise.
+ * local-alloc.c: Likewise.
+ * loop.c: Likewise.
+ * recog.c: Likewise.
+ * regmove.c: Likewise.
+ * reg-stack.c: Likewise.
+ * reorg.c: Likewise.
+ * rtlanal.c: Likewise.
+ * rtl.h: Likewise.
+ * sched-ebb.c: Likewise.
+ * simplify-rtx.c: Likewise.
+ * toplev.c: Likewise.
+ * varasm.c: Likewise.
+
2004-01-06 Kazu Hirata <kazu@cs.umass.edu>
* doc/install.texi: Fix typos.
if (MEM_VOLATILE_P (x))
return 1;
- /* FALLTHROUGH */
+ /* Fall through. */
default:
break;
if (MEM_VOLATILE_P (x))
return 1;
- /* FALLTHROUGH */
+ /* Fall through. */
default:
break;
if (MEM_VOLATILE_P (x))
return 1;
- /* FALLTHROUGH */
+ /* Fall through. */
default:
break;
break;
}
}
- /* FALLTHROUGH */
+ /* Fall through. */
case 1: /* length is greater than 1, call fwrite. */
{
tree string_arg;
if (flags == 0)
return NULL;
- /* FALLTHRU */
+ /* Fall through. */
case 0:
for (e = src->succ; e; e = e->succ_next)
if (e->dest == dst)
insn, DF_REF_READ_WRITE);
break;
}
- /* ... FALLTHRU ... */
+ /* Fall through. */
case REG:
case PARALLEL:
case PC:
node->dfs_num_out = (*num)++;
}
-/* Compute the data neccesary for fast resolving of dominator queries in a
+/* Compute the data necessary for fast resolving of dominator queries in a
static dominator tree. */
static void
dom_computed[dir] = DOM_NO_FAST_QUERY;
}
-/* Store all basic blocks immediatelly dominated by BB into BBS and return
+/* Store all basic blocks immediately dominated by BB into BBS and return
their number. */
int
get_dominated_by (enum cdi_direction dir, basic_block bb, basic_block **bbs)
indirect_p = 1;
break;
}
- /* FALLTHRU */
+ /* Fall through. */
case PARM_DECL:
{
if (copied && len > 0)
XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
- /* Call recsusively on all inside the vector. */
+ /* Call recursively on all inside the vector. */
for (j = 0; j < len; j++)
{
if (last_ptr)
case '<':
if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
return 0;
- /* FALLTHRU */
+ /* Fall through. */
case '1':
return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
Those routines may also be called from a higher level intercepted routine. So
to prevent recording data for an inner call to one of these for an intercept,
- we maintain a intercept nesting counter (debug_nesting). We only save the
+ we maintain an intercept nesting counter (debug_nesting). We only save the
intercepted arguments if the nesting is 1. */
int debug_nesting = 0;
case WITH_CLEANUP_EXPR: len = 2; break;
default: break;
}
- /* FALLTHROUGH */
+ /* Fall through. */
case 'r':
case '<':
case '1':
partial redundancy elimination. */
free_gcse_mem ();
- /* It does not make sense to run code hoisting unless we optimizing
+ /* It does not make sense to run code hoisting unless we are optimizing
for code size -- it rarely makes programs faster, and can make
them bigger if we did partial redundancy elimination (when optimizing
for space, we use a classic gcse algorithm instead of partial
have_error = 1;
break;
}
- /* FALLTHRU */
+ /* Fall through. */
case IOR:
case AND:
case 'V':
if (! XVEC (pattern, i))
break;
- /* FALLTHRU */
+ /* Fall through. */
case 'E':
for (j = 0; j < XVECLEN (pattern, i); j++)
case 'V':
if (! XVEC (pattern, i))
break;
- /* FALLTHRU */
+ /* Fall through. */
case 'E':
for (j = 0; j < XVECLEN (pattern, i); j++)
beyond the end of the vector. */
test = new_decision_test (DT_veclen_ge, &place);
test->u.veclen = XVECLEN (pattern, 2);
- /* FALLTHRU */
+ /* Fall through. */
case MATCH_OPERAND:
case MATCH_SCRATCH:
case MATCH_OPERAND:
i = n_alternatives (XSTR (pattern, 2));
*palt = (i > *palt ? i : *palt);
- /* FALLTHRU */
+ /* Fall through. */
case MATCH_OPERATOR:
case MATCH_SCRATCH:
case 'V':
if (XVEC (pattern, i) == NULL)
break;
- /* FALLTHRU */
+ /* Fall through. */
case 'E':
for (j = XVECLEN (pattern, i) - 1; j >= 0; --j)
collect_insn_data (XVECEXP (pattern, i, j), palt, pmax);
XSTR (pattern, 2) = new_c;
}
}
- /* FALLTHRU */
+ /* Fall through. */
case MATCH_OPERATOR:
case MATCH_SCRATCH:
Similar with increasing max_free_bin_size without increasing num_free_bins.
After much histogramming of allocation sizes and time spent on gc,
- on a powerpc G4 7450 - 667 mhz, and an pentium 4 - 2.8ghz,
+ on a PowerPC G4 7450 - 667 MHz, and a Pentium 4 - 2.8 GHz,
these were determined to be the optimal values. */
#define NUM_FREE_BINS 64
#define MAX_FREE_BIN_SIZE 256
if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
break;
- /* ... FALLTHRU ... */
+ /* Fall through. */
case CODE_LABEL:
LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
= LABEL_PRESERVE_P (orig);
if (MEM_VOLATILE_P (x))
return 1;
- /* FALLTHROUGH */
+ /* Fall through. */
default:
break;
if (MEM_VOLATILE_P (x))
return 0;
- /* FALLTHROUGH */
+ /* Fall through. */
default:
break;
if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
&& !EXTRA_ADDRESS_CONSTRAINT (c, p))
break;
- /* FALLTHRU */
+ /* Fall through. */
case 'p':
case 'g': case 'r':
reg_allowed = 1;
loop_info->has_multiple_exit_targets = 1;
}
}
- /* FALLTHRU */
+ /* Fall through. */
case INSN:
if (volatile_refs_p (PATTERN (insn)))
|| (GET_CODE (op) == CONST_DOUBLE
&& GET_MODE (op) == VOIDmode))
break;
- /* FALLTHRU */
+ /* Fall through. */
case 'i':
if (CONSTANT_P (op)
if (GET_CODE (pat_src) != UNSPEC
|| XINT (pat_src, 1) != UNSPEC_FNSTSW)
abort ();
- /* FALLTHRU */
+ /* Fall through. */
case UNSPEC_FNSTSW:
/* Combined fcomp+fnstsw generated for doing well with
very conservative. */
if (nonlocal_goto_handler_labels)
return 1;
- /* FALLTHRU */
+ /* Fall through. */
default:
return can_throw_internal (insn);
}
If we are not careful, this routine can take up a significant fraction
of the total compilation time (4%), but only wins rarely. Hence we
speed this routine up by making two passes. The first pass goes back
- until it hits a label and sees if it find an insn with an identical
+ until it hits a label and sees if it finds an insn with an identical
pattern. Only in this (relatively rare) event does it check for
data conflicts.
N times that of a fast register-to-register instruction. */
#define COSTS_N_INSNS(N) ((N) * 4)
-/* Maximum cost of a rtl expression. This value has the special meaning
+/* Maximum cost of an rtl expression. This value has the special meaning
not to use an rtx with this cost under any circumstances. */
#define MAX_COST INT_MAX
if (MEM_VOLATILE_P (x))
return 1;
- /* FALLTHROUGH */
+ /* Fall through. */
default:
break;
if (MEM_VOLATILE_P (x))
return 1;
- /* FALLTHROUGH */
+ /* Fall through. */
default:
break;
prev = BB_END (bb);
}
}
- /* FALLTHRU */
+ /* Fall through. */
case TRAP_RISKY:
case IRISKY:
case PRISKY_CANDIDATE:
pretend this is actually an integer. */
innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
- /* FALLTHROUGH */
+ /* Fall through. */
case CONST_INT:
if (GET_CODE (op) == CONST_INT)
val = INTVAL (op);
#if defined (HAVE_ATTR_length)
/* If flow2 creates new instructions which need splitting
and scheduling after reload is not done, they might not be
- splitten until final which doesn't allow splitting
+ split until final which doesn't allow splitting
if HAVE_ATTR_length. */
#ifdef INSN_SCHEDULING
if (optimize && !flag_schedule_insns_after_reload)
break;
case rvc_normal:
value->un.du.exp = r->exp;
- /* FALLTHRU */
+ /* Fall through. */
case rvc_nan:
memcpy (value->un.du.sig, r->sig, sizeof (r->sig));
break;
break;
case rvc_normal:
d->exp = r->exp;
- /* FALLTHRU */
+ /* Fall through. */
case rvc_nan:
memcpy (d->sig, r->sig, sizeof (r->sig));
break;
|| GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
break;
tmp = XEXP (XEXP (x, 0), 0);
- /* FALLTHRU */
+ /* Fall through. */
case LABEL_REF:
tmp = XEXP (x, 0);