/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2013 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "expr.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   the instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
   For an 8- or 16-byte load accessing address X:
     ShadowAddr = (X >> 3) + Offset
     ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
     if (ShadowValue)
       __asan_report_load8(X);
   For a load of N bytes (N=1, 2 or 4) from address X:
     ShadowAddr = (X >> 3) + Offset
     ShadowValue = *(char*)ShadowAddr;
     if (ShadowValue)
       if ((X & 7) + N - 1 >= ShadowValue)
         __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init() is inserted into the list of module CTORs.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

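   As a purely illustrative sketch (this is not code GCC emits, and
   SHADOW_OFFSET below stands in for whatever targetm.asan_shadow_offset ()
   returns on the target), the slow-path check above corresponds to this C:

     unsigned char *shadow
       = (unsigned char *) (((uintptr_t) addr >> 3) + SHADOW_OFFSET);
     if (*shadow != 0
         && ((uintptr_t) addr & 7) + (n - 1) >= *shadow)
       __asan_report_loadN (addr);
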
   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, added to the space of 'a' to make the
           next slot 32-byte aligned; this one is called the Partial
           Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
           'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

   1/ The first 8 bytes contain a magical asan number that is always
      0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be parsed
      at run time by the asan run-time library), whose format is the
      following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> <space> <length-of-var-name> <space>
      <var-name> ){n} "

      where '(...){n}' means the content inside the parentheses occurs 'n'
      times, with 'n' being the number of variables on the stack.

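      For instance, for the foo function above, and with purely
      hypothetical offsets (the real layout is computed by
      expand_used_vars in cfgexpand.c), that string could look like:

        "foo 2 32 24 1 a 64 8 1 b "
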
   3/ The following 16 bytes of the red zone have no particular
      format.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This is the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes holding variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone
   between them.  The size of the red zones is chosen so that each
   variable starts on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // This is always set to 0 for now.
       uptr __has_dynamic_init;
     }

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */

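/* For illustration only (the field values are hypothetical; the real
   descriptors are built by asan_add_global below): for a global 'int g;'
   the constructor would conceptually be handed something like

     static struct __asan_global desc
       = { &g, sizeof g,
           /* sizeof g plus its red zone, 32-byte aligned */ 32,
           "g", 0 };

   together with the number of such descriptors.  */
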
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}

/* Asan pretty-printer, used for building the description STRING_CSTs.  */
static pretty_printer asan_pp;
static bool asan_pp_initialized;

/* Initialize asan_pp.  */

static void
asan_pp_initialize (void)
{
  pp_construct (&asan_pp, /* prefix */NULL, /* line-width */0);
  asan_pp_initialized = true;
}

/* Create ADDR_EXPR of STRING_CST with asan_pp text.  */

static tree
asan_pp_string (void)
{
  const char *buf = pp_base_formatted_text (&asan_pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Return a CONST_INT representing 4 consecutive shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return GEN_INT (trunc_int_for_mode (val, SImode));
}

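/* For example (illustration only): on a little-endian target the four
   shadow bytes { 0x00, 0xF4, 0xF4, 0xF4 }, describing 8 addressable bytes
   followed by 24 bytes of partial red zone, are packed by asan_shadow_cst
   into the SImode constant 0xF4F4F400.  */
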
/* Emit code to clear shadow memory at SHADOW_MEM, LEN bytes.
   We can't emit a library call here, though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx insn, insns, top_label, end, addr, tmp, jump;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = force_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, GEN_INT (4), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_reg_note (jump, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE * 80 / 100));
}

/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence is returned.  BASE is the register holding
   the stack base, relative to which the OFFSETS array offsets are expressed.
   The OFFSETS array contains pairs of offsets in reverse order, always the
   end offset of some gap that needs protection followed by its starting
   offset, and DECLS is an array of representative decls for each var
   partition.  LENGTH is the length of the OFFSETS array; the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include the gap before the first
   variable as well as the gaps after each stack variable).  */

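/* A conceptual sketch only (the real implementation below emits RTL):

     base += base_offset;
     ((uptr *) base)[0] = ASAN_STACK_FRAME_MAGIC;	// 0x41B58AB3
     ((uptr *) base)[1] = (uptr) frame_description_string;
     shadow_base = (base >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ();
     ... fill shadow_base[] with red-zone magic bytes and zeros ...

   The returned epilogue sequence clears those shadow bytes again.  */
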
322rtx
323asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
324 int length)
325{
326 rtx shadow_base, shadow_mem, ret, mem;
327 unsigned char shadow_bytes[4];
328 HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
329 HOST_WIDE_INT last_offset, last_size;
330 int l;
331 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
332 tree str_cst;
333
334 if (shadow_ptr_types[0] == NULL_TREE)
335 asan_init_shadow_ptr_types ();
336
f3ddd692 337 /* First of all, prepare the description string. */
338 if (!asan_pp_initialized)
339 asan_pp_initialize ();
340
341 pp_clear_output_area (&asan_pp);
f3ddd692 342 if (DECL_NAME (current_function_decl))
8240018b 343 pp_base_tree_identifier (&asan_pp, DECL_NAME (current_function_decl));
f3ddd692 344 else
345 pp_string (&asan_pp, "<unknown>");
346 pp_space (&asan_pp);
347 pp_decimal_int (&asan_pp, length / 2 - 1);
348 pp_space (&asan_pp);
349 for (l = length - 2; l; l -= 2)
350 {
351 tree decl = decls[l / 2 - 1];
352 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
353 pp_space (&asan_pp);
354 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
355 pp_space (&asan_pp);
356 if (DECL_P (decl) && DECL_NAME (decl))
357 {
358 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
359 pp_space (&asan_pp);
360 pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
361 }
362 else
363 pp_string (&asan_pp, "9 <unknown>");
364 pp_space (&asan_pp);
f3ddd692 365 }
8240018b 366 str_cst = asan_pp_string ();
367
368 /* Emit the prologue sequence. */
369 base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
370 NULL_RTX, 1, OPTAB_DIRECT);
371 mem = gen_rtx_MEM (ptr_mode, base);
372 emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
373 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
374 emit_move_insn (mem, expand_normal (str_cst));
375 shadow_base = expand_binop (Pmode, lshr_optab, base,
376 GEN_INT (ASAN_SHADOW_SHIFT),
377 NULL_RTX, 1, OPTAB_DIRECT);
378 shadow_base = expand_binop (Pmode, add_optab, shadow_base,
379 GEN_INT (targetm.asan_shadow_offset ()),
380 NULL_RTX, 1, OPTAB_DIRECT);
381 gcc_assert (asan_shadow_set != -1
382 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
383 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
384 set_mem_alias_set (shadow_mem, asan_shadow_set);
385 prev_offset = base_offset;
386 for (l = length; l; l -= 2)
387 {
388 if (l == 2)
389 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
390 offset = offsets[l - 1];
391 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
392 {
393 int i;
394 HOST_WIDE_INT aoff
395 = base_offset + ((offset - base_offset)
396 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
397 shadow_mem = adjust_address (shadow_mem, VOIDmode,
398 (aoff - prev_offset)
399 >> ASAN_SHADOW_SHIFT);
400 prev_offset = aoff;
401 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
402 if (aoff < offset)
403 {
404 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
405 shadow_bytes[i] = 0;
406 else
407 shadow_bytes[i] = offset - aoff;
408 }
409 else
410 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
411 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
412 offset = aoff;
413 }
414 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
415 {
416 shadow_mem = adjust_address (shadow_mem, VOIDmode,
417 (offset - prev_offset)
418 >> ASAN_SHADOW_SHIFT);
419 prev_offset = offset;
420 memset (shadow_bytes, cur_shadow_byte, 4);
421 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
422 offset += ASAN_RED_ZONE_SIZE;
423 }
424 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
425 }
426 do_pending_stack_adjust ();
427
428 /* Construct epilogue sequence. */
429 start_sequence ();
430
431 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
432 set_mem_alias_set (shadow_mem, asan_shadow_set);
433 prev_offset = base_offset;
434 last_offset = base_offset;
435 last_size = 0;
436 for (l = length; l; l -= 2)
437 {
438 offset = base_offset + ((offsets[l - 1] - base_offset)
439 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
440 if (last_offset + last_size != offset)
441 {
442 shadow_mem = adjust_address (shadow_mem, VOIDmode,
443 (last_offset - prev_offset)
444 >> ASAN_SHADOW_SHIFT);
445 prev_offset = last_offset;
aeb7e7c1 446 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
447 last_offset = offset;
448 last_size = 0;
449 }
450 last_size += base_offset + ((offsets[l - 2] - base_offset)
451 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
452 - offset;
453 }
454 if (last_size)
455 {
456 shadow_mem = adjust_address (shadow_mem, VOIDmode,
457 (last_offset - prev_offset)
458 >> ASAN_SHADOW_SHIFT);
aeb7e7c1 459 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
460 }
461
462 do_pending_stack_adjust ();
463
464 ret = get_insns ();
465 end_sequence ();
466 return ret;
467}
468
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */
471
472static bool
473asan_needs_local_alias (tree decl)
474{
475 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
476}
477
478/* Return true if DECL is a VAR_DECL that should be protected
479 by Address Sanitizer, by appending a red zone with protected
480 shadow memory after it and aligning it to at least
481 ASAN_RED_ZONE_SIZE bytes. */
482
483bool
484asan_protect_global (tree decl)
485{
486 rtx rtl, symbol;
8240018b 487
488 if (TREE_CODE (decl) == STRING_CST)
489 {
490 /* Instrument all STRING_CSTs except those created
491 by asan_pp_string here. */
492 if (shadow_ptr_types[0] != NULL_TREE
493 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
494 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
495 return false;
496 return true;
497 }
498 if (TREE_CODE (decl) != VAR_DECL
499 /* TLS vars aren't statically protectable. */
500 || DECL_THREAD_LOCAL_P (decl)
501 /* Externs will be protected elsewhere. */
502 || DECL_EXTERNAL (decl)
503 || !DECL_RTL_SET_P (decl)
504 /* Comdat vars pose an ABI problem, we can't know if
505 the var that is selected by the linker will have
506 padding or not. */
507 || DECL_ONE_ONLY (decl)
508 /* Similarly for common vars. People can use -fno-common. */
a8a6fd74 509 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
510 /* Don't protect if using user section, often vars placed
511 into user section from multiple TUs are then assumed
512 to be an array of such vars, putting padding in there
513 breaks this assumption. */
514 || (DECL_SECTION_NAME (decl) != NULL_TREE
515 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
516 || DECL_SIZE (decl) == 0
517 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
518 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
519 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
520 return false;
521
522 rtl = DECL_RTL (decl);
523 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
524 return false;
525 symbol = XEXP (rtl, 0);
526
527 if (CONSTANT_POOL_ADDRESS_P (symbol)
528 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
529 return false;
530
531 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
532 return false;
533
534#ifndef ASM_OUTPUT_DEF
535 if (asan_needs_local_alias (decl))
536 return false;
537#endif
538
497a1c66 539 return true;
540}
541
542/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
543 IS_STORE is either 1 (for a store) or 0 (for a load).
544 SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
545
546static tree
f6d98484 547report_error_func (bool is_store, int size_in_bytes)
37d6f666 548{
549 static enum built_in_function report[2][5]
550 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
551 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
552 BUILT_IN_ASAN_REPORT_LOAD16 },
553 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
554 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
555 BUILT_IN_ASAN_REPORT_STORE16 } };
556 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
557}
558
559#define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
560#define PROB_ALWAYS (REG_BR_PROB_BASE)
561
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function if BEFORE_P is
   true; otherwise, it is its following statement.  */
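
/* A sketch of the CFG shape this creates (illustration only):

	cond_bb  --(false edge)-->  fallthru_bb
	   |                          ^
	   +--(true edge)--> then_bb -+

   where then_bb is *THEN_BLOCK (new and initially empty) and fallthru_bb
   is *FALLTHROUGH_BLOCK (it starts with the statements from *ITER).  */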
583
584static gimple_stmt_iterator
585create_cond_insert_point (gimple_stmt_iterator *iter,
586 bool before_p,
587 bool then_more_likely_p,
588 basic_block *then_block,
589 basic_block *fallthrough_block)
590{
591 gimple_stmt_iterator gsi = *iter;
592
25ae5027 593 if (!gsi_end_p (gsi) && before_p)
594 gsi_prev (&gsi);
595
596 basic_block cur_bb = gsi_bb (*iter);
597
598 edge e = split_block (cur_bb, gsi_stmt (gsi));
599
600 /* Get a hold on the 'condition block', the 'then block' and the
601 'else block'. */
602 basic_block cond_bb = e->src;
603 basic_block fallthru_bb = e->dest;
604 basic_block then_bb = create_empty_bb (cond_bb);
605
606 /* Set up the newly created 'then block'. */
607 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
608 int fallthrough_probability
609 = then_more_likely_p
610 ? PROB_VERY_UNLIKELY
611 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
612 e->probability = PROB_ALWAYS - fallthrough_probability;
613 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
614
615 /* Set up the fallthrough basic block. */
616 e = find_edge (cond_bb, fallthru_bb);
617 e->flags = EDGE_FALSE_VALUE;
618 e->count = cond_bb->count;
619 e->probability = fallthrough_probability;
620
621 /* Update dominance info for the newly created then_bb; note that
622 fallthru_bb's dominance info has already been updated by
     split_block.  */
624 if (dom_info_available_p (CDI_DOMINATORS))
625 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
626
627 *then_block = then_bb;
628 *fallthrough_block = fallthru_bb;
629 *iter = gsi_start_bb (fallthru_bb);
630
631 return gsi_last_bb (cond_bb);
632}
633
634/* Insert an if condition followed by a 'then block' right before the
635 statement pointed to by ITER. The fallthrough block -- which is the
636 else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
638 pointed to by ITER.
639
497a1c66 640 COND is the condition of the if.
641
642 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
643 'then block' is higher than the probability of the edge to the
644 fallthrough block.
645
646 Upon completion of the function, *THEN_BB is set to the newly
647 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
648 fallthrough block.
649
650 *ITER is adjusted to still point to the same statement it was
651 pointing to initially. */
652
653static void
654insert_if_then_before_iter (gimple cond,
655 gimple_stmt_iterator *iter,
656 bool then_more_likely_p,
657 basic_block *then_bb,
658 basic_block *fallthrough_bb)
659{
660 gimple_stmt_iterator cond_insert_point =
661 create_cond_insert_point (iter,
662 /*before_p=*/true,
663 then_more_likely_p,
664 then_bb,
665 fallthrough_bb);
666 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
667}
668
dc29bf1e 669/* Instrument the memory access instruction BASE. Insert new
25ae5027 670 statements before or after ITER.
671
672 Note that the memory access represented by BASE can be either an
673 SSA_NAME, or a non-SSA expression. LOCATION is the source code
674 location. IS_STORE is TRUE for a store, FALSE for a load.
675 BEFORE_P is TRUE for inserting the instrumentation code before
676 ITER, FALSE for inserting it after ITER. SIZE_IN_BYTES is one of
677 1, 2, 4, 8, 16.
678
679 If BEFORE_P is TRUE, *ITER is arranged to still point to the
680 statement it was pointing to prior to calling this function,
681 otherwise, it points to the statement logically following it. */
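
/* As a rough sketch (illustration only, C-like pseudocode rather than the
   GIMPLE actually built below), the emitted check for an access of
   SIZE_IN_BYTES bytes at BASE is:

     base_addr = (uintptr_t) base;
     shadow = *(char *) ((base_addr >> ASAN_SHADOW_SHIFT)
			 + targetm.asan_shadow_offset ());
     if (shadow != 0
	 && (size_in_bytes >= 8
	     || ((base_addr & 7) + size_in_bytes - 1) >= shadow))
       __asan_report_loadN (base_addr);	  // or __asan_report_storeN.  */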
682
683static void
684build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
685 bool before_p, bool is_store, int size_in_bytes)
686{
687 gimple_stmt_iterator gsi;
01452015 688 basic_block then_bb, else_bb;
f6d98484 689 tree t, base_addr, shadow;
37d6f666 690 gimple g;
691 tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
692 tree shadow_type = TREE_TYPE (shadow_ptr_type);
693 tree uintptr_type
694 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
dc29bf1e 695 tree base_ssa = base;
37d6f666 696
697 /* Get an iterator on the point where we can add the condition
698 statement for the instrumentation. */
699 gsi = create_cond_insert_point (iter, before_p,
700 /*then_more_likely_p=*/false,
701 &then_bb,
702 &else_bb);
37d6f666 703
f6d98484 704 base = unshare_expr (base);
37d6f666 705
706 /* BASE can already be an SSA_NAME; in that case, do not create a
707 new SSA_NAME for it. */
708 if (TREE_CODE (base) != SSA_NAME)
709 {
710 g = gimple_build_assign_with_ops (TREE_CODE (base),
711 make_ssa_name (TREE_TYPE (base), NULL),
712 base, NULL_TREE);
713 gimple_set_location (g, location);
714 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
715 base_ssa = gimple_assign_lhs (g);
716 }
37d6f666 717
718 g = gimple_build_assign_with_ops (NOP_EXPR,
719 make_ssa_name (uintptr_type, NULL),
dc29bf1e 720 base_ssa, NULL_TREE);
37d6f666 721 gimple_set_location (g, location);
722 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
723 base_addr = gimple_assign_lhs (g);
37d6f666 724
725 /* Build
726 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
37d6f666 727
728 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
729 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
730 make_ssa_name (uintptr_type, NULL),
731 base_addr, t);
37d6f666 732 gimple_set_location (g, location);
733 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
734
735 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
736 g = gimple_build_assign_with_ops (PLUS_EXPR,
737 make_ssa_name (uintptr_type, NULL),
738 gimple_assign_lhs (g), t);
37d6f666 739 gimple_set_location (g, location);
f6d98484 740 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
37d6f666 741
742 g = gimple_build_assign_with_ops (NOP_EXPR,
743 make_ssa_name (shadow_ptr_type, NULL),
744 gimple_assign_lhs (g), NULL_TREE);
745 gimple_set_location (g, location);
746 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
37d6f666 747
748 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
749 build_int_cst (shadow_ptr_type, 0));
750 g = gimple_build_assign_with_ops (MEM_REF,
751 make_ssa_name (shadow_type, NULL),
752 t, NULL_TREE);
753 gimple_set_location (g, location);
754 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
755 shadow = gimple_assign_lhs (g);
756
757 if (size_in_bytes < 8)
758 {
      /* Slow path for 1, 2 and 4 byte accesses.
	 Test (shadow != 0)
	      & (((base_addr & 7) + (size_in_bytes - 1)) >= shadow).  */
762 g = gimple_build_assign_with_ops (NE_EXPR,
763 make_ssa_name (boolean_type_node,
764 NULL),
765 shadow,
766 build_int_cst (shadow_type, 0));
767 gimple_set_location (g, location);
768 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
769 t = gimple_assign_lhs (g);
770
771 g = gimple_build_assign_with_ops (BIT_AND_EXPR,
772 make_ssa_name (uintptr_type,
773 NULL),
774 base_addr,
775 build_int_cst (uintptr_type, 7));
776 gimple_set_location (g, location);
777 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
778
779 g = gimple_build_assign_with_ops (NOP_EXPR,
780 make_ssa_name (shadow_type,
781 NULL),
782 gimple_assign_lhs (g), NULL_TREE);
783 gimple_set_location (g, location);
784 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
785
786 if (size_in_bytes > 1)
787 {
788 g = gimple_build_assign_with_ops (PLUS_EXPR,
789 make_ssa_name (shadow_type,
790 NULL),
791 gimple_assign_lhs (g),
792 build_int_cst (shadow_type,
793 size_in_bytes - 1));
794 gimple_set_location (g, location);
795 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
796 }
797
798 g = gimple_build_assign_with_ops (GE_EXPR,
799 make_ssa_name (boolean_type_node,
800 NULL),
801 gimple_assign_lhs (g),
802 shadow);
803 gimple_set_location (g, location);
804 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
805
806 g = gimple_build_assign_with_ops (BIT_AND_EXPR,
807 make_ssa_name (boolean_type_node,
808 NULL),
809 t, gimple_assign_lhs (g));
810 gimple_set_location (g, location);
811 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
812 t = gimple_assign_lhs (g);
813 }
814 else
815 t = shadow;
37d6f666 816
817 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
818 NULL_TREE, NULL_TREE);
819 gimple_set_location (g, location);
820 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
37d6f666 821
f6d98484 822 /* Generate call to the run-time library (e.g. __asan_report_load8). */
37d6f666 823 gsi = gsi_start_bb (then_bb);
824 g = gimple_build_call (report_error_func (is_store, size_in_bytes),
825 1, base_addr);
826 gimple_set_location (g, location);
827 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
37d6f666 828
dfb9e332 829 *iter = gsi_start_bb (else_bb);
830}
831
832/* If T represents a memory access, add instrumentation code before ITER.
833 LOCATION is source code location.
25ae5027 834 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
835
836static void
837instrument_derefs (gimple_stmt_iterator *iter, tree t,
497a1c66 838 location_t location, bool is_store)
839{
840 tree type, base;
f6d98484 841 HOST_WIDE_INT size_in_bytes;
842
843 type = TREE_TYPE (t);
844 switch (TREE_CODE (t))
845 {
846 case ARRAY_REF:
847 case COMPONENT_REF:
848 case INDIRECT_REF:
849 case MEM_REF:
850 break;
851 default:
852 return;
853 }
854
855 size_in_bytes = int_size_in_bytes (type);
856 if ((size_in_bytes & (size_in_bytes - 1)) != 0
857 || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
858 return;
859
860 HOST_WIDE_INT bitsize, bitpos;
861 tree offset;
862 enum machine_mode mode;
863 int volatilep = 0, unsignedp = 0;
864 get_inner_reference (t, &bitsize, &bitpos, &offset,
865 &mode, &unsignedp, &volatilep, false);
866 if (bitpos % (size_in_bytes * BITS_PER_UNIT)
867 || bitsize != size_in_bytes * BITS_PER_UNIT)
868 {
869 if (TREE_CODE (t) == COMPONENT_REF
870 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
871 {
872 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
873 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
874 TREE_OPERAND (t, 0), repr,
875 NULL_TREE), location, is_store);
876 }
877 return;
878 }
879
880 base = build_fold_addr_expr (t);
881 build_check_stmt (location, base, iter, /*before_p=*/true,
882 is_store, size_in_bytes);
883}
884
885/* Instrument an access to a contiguous memory region that starts at
886 the address pointed to by BASE, over a length of LEN (expressed in
887 the sizeof (*BASE) bytes). ITER points to the instruction before
888 which the instrumentation instructions must be inserted. LOCATION
889 is the source location that the instrumentation instructions must
890 have. If IS_STORE is true, then the memory access is a store;
891 otherwise, it's a load. */
892
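/* Illustration only: for a region (BASE, LEN) this instruments the first
   byte, BASE[0], and the last byte, BASE[LEN - 1], guarding everything
   with "if (LEN != 0)" when LEN is not a compile-time constant.
   instrument_builtin_call below invokes this separately for the source
   and destination regions of calls such as memcpy or memmove.  */
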
893static void
894instrument_mem_region_access (tree base, tree len,
895 gimple_stmt_iterator *iter,
896 location_t location, bool is_store)
897{
898 if (!POINTER_TYPE_P (TREE_TYPE (base))
899 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
900 || integer_zerop (len))
901 return;
902
903 gimple_stmt_iterator gsi = *iter;
904
905 basic_block fallthrough_bb = NULL, then_bb = NULL;
906 if (!is_gimple_constant (len))
907 {
908 /* So, the length of the memory area to asan-protect is
909 non-constant. Let's guard the generated instrumentation code
910 like:
911
912 if (len != 0)
913 {
914 //asan instrumentation code goes here.
497a1c66 915 }
         // fallthrough instructions, starting with *ITER.  */
917
918 gimple g = gimple_build_cond (NE_EXPR,
919 len,
920 build_int_cst (TREE_TYPE (len), 0),
921 NULL_TREE, NULL_TREE);
922 gimple_set_location (g, location);
923 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
924 &then_bb, &fallthrough_bb);
925 /* Note that fallthrough_bb starts with the statement that was
926 pointed to by ITER. */
927
      /* The 'then block' of the 'if (len != 0)' condition is where
929 we'll generate the asan instrumentation code now. */
930 gsi = gsi_start_bb (then_bb);
931 }
932
933 /* Instrument the beginning of the memory region to be accessed,
     and arrange for the rest of the instrumentation code to be
935 inserted in the then block *after* the current gsi. */
936 build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
937
938 if (then_bb)
939 /* We are in the case where the length of the region is not
940 constant; so instrumentation code is being generated in the
       'then block' of the 'if (len != 0)' condition.  Let's arrange
942 for the subsequent instrumentation statements to go in the
943 'then block'. */
944 gsi = gsi_last_bb (then_bb);
945 else
946 *iter = gsi;
947
948 /* We want to instrument the access at the end of the memory region,
949 which is at (base + len - 1). */
950
951 /* offset = len - 1; */
952 len = unshare_expr (len);
953 tree offset;
954 gimple_seq seq = NULL;
955 if (TREE_CODE (len) == INTEGER_CST)
956 offset = fold_build2 (MINUS_EXPR, size_type_node,
957 fold_convert (size_type_node, len),
958 build_int_cst (size_type_node, 1));
959 else
960 {
961 gimple g;
962 tree t;
963
964 if (TREE_CODE (len) != SSA_NAME)
965 {
966 t = make_ssa_name (TREE_TYPE (len), NULL);
967 g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
968 gimple_set_location (g, location);
969 gimple_seq_add_stmt_without_update (&seq, g);
970 len = t;
971 }
972 if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
973 {
974 t = make_ssa_name (size_type_node, NULL);
975 g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
976 gimple_set_location (g, location);
977 gimple_seq_add_stmt_without_update (&seq, g);
978 len = t;
979 }
980
981 t = make_ssa_name (size_type_node, NULL);
982 g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
983 build_int_cst (size_type_node, 1));
984 gimple_set_location (g, location);
985 gimple_seq_add_stmt_without_update (&seq, g);
986 offset = gimple_assign_lhs (g);
987 }
988
989 /* _1 = base; */
990 base = unshare_expr (base);
991 gimple region_end =
992 gimple_build_assign_with_ops (TREE_CODE (base),
993 make_ssa_name (TREE_TYPE (base), NULL),
994 base, NULL);
995 gimple_set_location (region_end, location);
996 gimple_seq_add_stmt_without_update (&seq, region_end);
997 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
998 gsi_prev (&gsi);
999
1000 /* _2 = _1 + offset; */
1001 region_end =
1002 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1003 make_ssa_name (TREE_TYPE (base), NULL),
497a1c66 1004 gimple_assign_lhs (region_end),
c63d3b96 1005 offset);
1006 gimple_set_location (region_end, location);
1007 gsi_insert_after (&gsi, region_end, GSI_NEW_STMT);
1008
1009 /* instrument access at _2; */
1010 build_check_stmt (location, gimple_assign_lhs (region_end),
1011 &gsi, /*before_p=*/false, is_store, 1);
1012}
1013
1014/* Instrument the call (to the builtin strlen function) pointed to by
1015 ITER.
1016
1017 This function instruments the access to the first byte of the
1018 argument, right before the call. After the call it instruments the
1019 access to the last byte of the argument; it uses the result of the
1020 call to deduce the offset of that last byte.
1021
   Upon completion, iff the call has actually been instrumented, this
1023 function returns TRUE and *ITER points to the statement logically
1024 following the built-in strlen function call *ITER was initially
1025 pointing to. Otherwise, the function returns FALSE and *ITER
1026 remains unchanged. */
1027
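/* Illustration only: given a statement like

     n_1 = __builtin_strlen (str_2);

   the code below checks the byte at str_2[0] before the call and the byte
   at str_2 + n_1 right after it, i.e. the terminating NUL that strlen had
   to read.  */
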
1028static bool
1029instrument_strlen_call (gimple_stmt_iterator *iter)
1030{
1031 gimple call = gsi_stmt (*iter);
1032 gcc_assert (is_gimple_call (call));
1033
1034 tree callee = gimple_call_fndecl (call);
1035 gcc_assert (is_builtin_fn (callee)
1036 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1037 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
1038
1039 tree len = gimple_call_lhs (call);
1040 if (len == NULL)
1041 /* Some passes might clear the return value of the strlen call;
1042 bail out in that case. Return FALSE as we are not advancing
1043 *ITER. */
1044 return false;
1045 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
1046
1047 location_t loc = gimple_location (call);
1048 tree str_arg = gimple_call_arg (call, 0);
1049
1050 /* Instrument the access to the first byte of str_arg. i.e:
1051
1052 _1 = str_arg; instrument (_1); */
1053 gimple str_arg_ssa =
1054 gimple_build_assign_with_ops (NOP_EXPR,
1055 make_ssa_name (build_pointer_type
1056 (char_type_node), NULL),
1057 str_arg, NULL);
1058 gimple_set_location (str_arg_ssa, loc);
1059 gimple_stmt_iterator gsi = *iter;
1060 gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
1061 build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
1062 /*before_p=*/false, /*is_store=*/false, 1);
1063
1064 /* If we initially had an instruction like:
1065
1066 int n = strlen (str)
1067
1068 we now want to instrument the access to str[n], after the
1069 instruction above.*/
1070
1071 /* So let's build the access to str[n] that is, access through the
1072 pointer_plus expr: (_1 + len). */
1073 gimple stmt =
1074 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1075 make_ssa_name (TREE_TYPE (str_arg),
1076 NULL),
1077 gimple_assign_lhs (str_arg_ssa),
1078 len);
1079 gimple_set_location (stmt, loc);
1080 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
1081
1082 build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
1083 /*before_p=*/false, /*is_store=*/false, 1);
1084
1085 /* Ensure that iter points to the statement logically following the
1086 one it was initially pointing to. */
1087 *iter = gsi;
1088 /* As *ITER has been advanced to point to the next statement, let's
1089 return true to inform transform_statements that it shouldn't
     advance *ITER anymore; otherwise it will skip that next
1091 statement, which wouldn't be instrumented. */
1092 return true;
1093}
1094
1095/* Instrument the call to a built-in memory access function that is
1096 pointed to by the iterator ITER.
1097
1098 Upon completion, return TRUE iff *ITER has been advanced to the
1099 statement following the one it was originally pointing to. */
1100
1101static bool
1102instrument_builtin_call (gimple_stmt_iterator *iter)
1103{
1104 gimple call = gsi_stmt (*iter);
1105
2b2571c9 1106 gcc_checking_assert (is_gimple_builtin_call (call));
1107
1108 tree callee = gimple_call_fndecl (call);
1109 location_t loc = gimple_location (call);
1110 tree source0 = NULL_TREE, source1 = NULL_TREE,
1111 dest = NULL_TREE, len = NULL_TREE;
1112 bool is_store = true;
1113
1114 switch (DECL_FUNCTION_CODE (callee))
1115 {
1116 /* (s, s, n) style memops. */
1117 case BUILT_IN_BCMP:
1118 case BUILT_IN_MEMCMP:
1119 source0 = gimple_call_arg (call, 0);
1120 source1 = gimple_call_arg (call, 1);
42b04d70 1121 len = gimple_call_arg (call, 2);
1122 break;
1123
1124 /* (src, dest, n) style memops. */
1125 case BUILT_IN_BCOPY:
25ae5027 1126 source0 = gimple_call_arg (call, 0);
1127 dest = gimple_call_arg (call, 1);
1128 len = gimple_call_arg (call, 2);
1129 break;
1130
1131 /* (dest, src, n) style memops. */
1132 case BUILT_IN_MEMCPY:
1133 case BUILT_IN_MEMCPY_CHK:
1134 case BUILT_IN_MEMMOVE:
1135 case BUILT_IN_MEMMOVE_CHK:
1136 case BUILT_IN_MEMPCPY:
1137 case BUILT_IN_MEMPCPY_CHK:
1138 dest = gimple_call_arg (call, 0);
1139 source0 = gimple_call_arg (call, 1);
1140 len = gimple_call_arg (call, 2);
1141 break;
1142
1143 /* (dest, n) style memops. */
1144 case BUILT_IN_BZERO:
1145 dest = gimple_call_arg (call, 0);
1146 len = gimple_call_arg (call, 1);
1147 break;
1148
    /* (dest, x, n) style memops.  */
1150 case BUILT_IN_MEMSET:
1151 case BUILT_IN_MEMSET_CHK:
1152 dest = gimple_call_arg (call, 0);
1153 len = gimple_call_arg (call, 2);
1154 break;
1155
1156 case BUILT_IN_STRLEN:
1157 return instrument_strlen_call (iter);
1158
1159 /* And now the __atomic* and __sync builtins.
       These are handled differently from the classical memory
1161 access builtins above. */
1162
1163 case BUILT_IN_ATOMIC_LOAD_1:
1164 case BUILT_IN_ATOMIC_LOAD_2:
1165 case BUILT_IN_ATOMIC_LOAD_4:
1166 case BUILT_IN_ATOMIC_LOAD_8:
1167 case BUILT_IN_ATOMIC_LOAD_16:
1168 is_store = false;
1169 /* fall through. */
1170
1171 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
1172 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
1173 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
1174 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
1175 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
1176
1177 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
1178 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
1179 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
1180 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
1181 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
1182
1183 case BUILT_IN_SYNC_FETCH_AND_OR_1:
1184 case BUILT_IN_SYNC_FETCH_AND_OR_2:
1185 case BUILT_IN_SYNC_FETCH_AND_OR_4:
1186 case BUILT_IN_SYNC_FETCH_AND_OR_8:
1187 case BUILT_IN_SYNC_FETCH_AND_OR_16:
1188
1189 case BUILT_IN_SYNC_FETCH_AND_AND_1:
1190 case BUILT_IN_SYNC_FETCH_AND_AND_2:
1191 case BUILT_IN_SYNC_FETCH_AND_AND_4:
1192 case BUILT_IN_SYNC_FETCH_AND_AND_8:
1193 case BUILT_IN_SYNC_FETCH_AND_AND_16:
1194
1195 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
1196 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
1197 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
1198 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
1199 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
1200
1201 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
1202 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
1203 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
1204 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
1205
1206 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
1207 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
1208 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
1209 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
1210 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
1211
1212 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
1213 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
1214 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
1215 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
1216 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
1217
1218 case BUILT_IN_SYNC_OR_AND_FETCH_1:
1219 case BUILT_IN_SYNC_OR_AND_FETCH_2:
1220 case BUILT_IN_SYNC_OR_AND_FETCH_4:
1221 case BUILT_IN_SYNC_OR_AND_FETCH_8:
1222 case BUILT_IN_SYNC_OR_AND_FETCH_16:
1223
1224 case BUILT_IN_SYNC_AND_AND_FETCH_1:
1225 case BUILT_IN_SYNC_AND_AND_FETCH_2:
1226 case BUILT_IN_SYNC_AND_AND_FETCH_4:
1227 case BUILT_IN_SYNC_AND_AND_FETCH_8:
1228 case BUILT_IN_SYNC_AND_AND_FETCH_16:
1229
1230 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
1231 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
1232 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
1233 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
1234 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
1235
1236 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
1237 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
1238 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
1239 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
1240
1241 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
1242 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
1243 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
1244 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
1245 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1246
1247 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
1248 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
1249 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
1250 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
1251 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
1252
1253 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
1254 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
1255 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
1256 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
1257 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
1258
1259 case BUILT_IN_SYNC_LOCK_RELEASE_1:
1260 case BUILT_IN_SYNC_LOCK_RELEASE_2:
1261 case BUILT_IN_SYNC_LOCK_RELEASE_4:
1262 case BUILT_IN_SYNC_LOCK_RELEASE_8:
1263 case BUILT_IN_SYNC_LOCK_RELEASE_16:
1264
1265 case BUILT_IN_ATOMIC_EXCHANGE_1:
1266 case BUILT_IN_ATOMIC_EXCHANGE_2:
1267 case BUILT_IN_ATOMIC_EXCHANGE_4:
1268 case BUILT_IN_ATOMIC_EXCHANGE_8:
1269 case BUILT_IN_ATOMIC_EXCHANGE_16:
1270
1271 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
1272 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
1273 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
1274 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
1275 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1276
1277 case BUILT_IN_ATOMIC_STORE_1:
1278 case BUILT_IN_ATOMIC_STORE_2:
1279 case BUILT_IN_ATOMIC_STORE_4:
1280 case BUILT_IN_ATOMIC_STORE_8:
1281 case BUILT_IN_ATOMIC_STORE_16:
1282
1283 case BUILT_IN_ATOMIC_ADD_FETCH_1:
1284 case BUILT_IN_ATOMIC_ADD_FETCH_2:
1285 case BUILT_IN_ATOMIC_ADD_FETCH_4:
1286 case BUILT_IN_ATOMIC_ADD_FETCH_8:
1287 case BUILT_IN_ATOMIC_ADD_FETCH_16:
1288
1289 case BUILT_IN_ATOMIC_SUB_FETCH_1:
1290 case BUILT_IN_ATOMIC_SUB_FETCH_2:
1291 case BUILT_IN_ATOMIC_SUB_FETCH_4:
1292 case BUILT_IN_ATOMIC_SUB_FETCH_8:
1293 case BUILT_IN_ATOMIC_SUB_FETCH_16:
1294
1295 case BUILT_IN_ATOMIC_AND_FETCH_1:
1296 case BUILT_IN_ATOMIC_AND_FETCH_2:
1297 case BUILT_IN_ATOMIC_AND_FETCH_4:
1298 case BUILT_IN_ATOMIC_AND_FETCH_8:
1299 case BUILT_IN_ATOMIC_AND_FETCH_16:
1300
1301 case BUILT_IN_ATOMIC_NAND_FETCH_1:
1302 case BUILT_IN_ATOMIC_NAND_FETCH_2:
1303 case BUILT_IN_ATOMIC_NAND_FETCH_4:
1304 case BUILT_IN_ATOMIC_NAND_FETCH_8:
1305 case BUILT_IN_ATOMIC_NAND_FETCH_16:
1306
1307 case BUILT_IN_ATOMIC_XOR_FETCH_1:
1308 case BUILT_IN_ATOMIC_XOR_FETCH_2:
1309 case BUILT_IN_ATOMIC_XOR_FETCH_4:
1310 case BUILT_IN_ATOMIC_XOR_FETCH_8:
1311 case BUILT_IN_ATOMIC_XOR_FETCH_16:
1312
1313 case BUILT_IN_ATOMIC_OR_FETCH_1:
1314 case BUILT_IN_ATOMIC_OR_FETCH_2:
1315 case BUILT_IN_ATOMIC_OR_FETCH_4:
1316 case BUILT_IN_ATOMIC_OR_FETCH_8:
1317 case BUILT_IN_ATOMIC_OR_FETCH_16:
1318
1319 case BUILT_IN_ATOMIC_FETCH_ADD_1:
1320 case BUILT_IN_ATOMIC_FETCH_ADD_2:
1321 case BUILT_IN_ATOMIC_FETCH_ADD_4:
1322 case BUILT_IN_ATOMIC_FETCH_ADD_8:
1323 case BUILT_IN_ATOMIC_FETCH_ADD_16:
1324
1325 case BUILT_IN_ATOMIC_FETCH_SUB_1:
1326 case BUILT_IN_ATOMIC_FETCH_SUB_2:
1327 case BUILT_IN_ATOMIC_FETCH_SUB_4:
1328 case BUILT_IN_ATOMIC_FETCH_SUB_8:
1329 case BUILT_IN_ATOMIC_FETCH_SUB_16:
1330
1331 case BUILT_IN_ATOMIC_FETCH_AND_1:
1332 case BUILT_IN_ATOMIC_FETCH_AND_2:
1333 case BUILT_IN_ATOMIC_FETCH_AND_4:
1334 case BUILT_IN_ATOMIC_FETCH_AND_8:
1335 case BUILT_IN_ATOMIC_FETCH_AND_16:
1336
1337 case BUILT_IN_ATOMIC_FETCH_NAND_1:
1338 case BUILT_IN_ATOMIC_FETCH_NAND_2:
1339 case BUILT_IN_ATOMIC_FETCH_NAND_4:
1340 case BUILT_IN_ATOMIC_FETCH_NAND_8:
1341 case BUILT_IN_ATOMIC_FETCH_NAND_16:
1342
1343 case BUILT_IN_ATOMIC_FETCH_XOR_1:
1344 case BUILT_IN_ATOMIC_FETCH_XOR_2:
1345 case BUILT_IN_ATOMIC_FETCH_XOR_4:
1346 case BUILT_IN_ATOMIC_FETCH_XOR_8:
1347 case BUILT_IN_ATOMIC_FETCH_XOR_16:
1348
1349 case BUILT_IN_ATOMIC_FETCH_OR_1:
1350 case BUILT_IN_ATOMIC_FETCH_OR_2:
1351 case BUILT_IN_ATOMIC_FETCH_OR_4:
1352 case BUILT_IN_ATOMIC_FETCH_OR_8:
1353 case BUILT_IN_ATOMIC_FETCH_OR_16:
1354 {
1355 dest = gimple_call_arg (call, 0);
1356 /* So DEST represents the address of a memory location.
1357 instrument_derefs wants the memory location, so lets
1358 dereference the address DEST before handing it to
1359 instrument_derefs. */
1360 if (TREE_CODE (dest) == ADDR_EXPR)
1361 dest = TREE_OPERAND (dest, 0);
1362 else if (TREE_CODE (dest) == SSA_NAME)
1363 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
1364 dest, build_int_cst (TREE_TYPE (dest), 0));
1365 else
1366 gcc_unreachable ();
1367
1368 instrument_derefs (iter, dest, loc, is_store);
1369 return false;
1370 }
1371
1372 default:
      /* The other memory access builtins are not instrumented in this
1374 function because they either don't have any length parameter,
1375 or their length parameter is just a limit. */
1376 break;
1377 }
1378
1379 if (len != NULL_TREE)
1380 {
1381 if (source0 != NULL_TREE)
1382 instrument_mem_region_access (source0, len, iter,
1383 loc, /*is_store=*/false);
1384 if (source1 != NULL_TREE)
1385 instrument_mem_region_access (source1, len, iter,
1386 loc, /*is_store=*/false);
1387 else if (dest != NULL_TREE)
1388 instrument_mem_region_access (dest, len, iter,
1389 loc, /*is_store=*/true);
1390
1391 *iter = gsi_for_stmt (call);
1392 return false;
1393 }
1394 return false;
1395}
1396
1397/* Instrument the assignment statement ITER if it is subject to
1398 instrumentation. */
1399
1400static void
1401instrument_assignment (gimple_stmt_iterator *iter)
1402{
1403 gimple s = gsi_stmt (*iter);
1404
1405 gcc_assert (gimple_assign_single_p (s));
1406
1407 if (gimple_store_p (s))
1408 instrument_derefs (iter, gimple_assign_lhs (s),
1409 gimple_location (s), true);
1410 if (gimple_assign_load_p (s))
1411 instrument_derefs (iter, gimple_assign_rhs1 (s),
1412 gimple_location (s), false);
1413}
1414
1415/* Instrument the function call pointed to by the iterator ITER, if it
1416 is subject to instrumentation. At the moment, the only function
1417 calls that are instrumented are some built-in functions that access
1418 memory. Look at instrument_builtin_call to learn more.
1419
1420 Upon completion return TRUE iff *ITER was advanced to the statement
1421 following the one it was originally pointing to. */
1422
1423static bool
1424maybe_instrument_call (gimple_stmt_iterator *iter)
1425{
1426 gimple stmt = gsi_stmt (*iter);
1427 bool is_builtin = is_gimple_builtin_call (stmt);
1428 if (is_builtin
1429 && instrument_builtin_call (iter))
1430 return true;
1431 if (gimple_call_noreturn_p (stmt))
1432 {
1433 if (is_builtin)
1434 {
1435 tree callee = gimple_call_fndecl (stmt);
1436 switch (DECL_FUNCTION_CODE (callee))
1437 {
1438 case BUILT_IN_UNREACHABLE:
1439 case BUILT_IN_TRAP:
1440 /* Don't instrument these. */
1441 return false;
1442 }
1443 }
1444 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
1445 gimple g = gimple_build_call (decl, 0);
1446 gimple_set_location (g, gimple_location (stmt));
1447 gsi_insert_before (iter, g, GSI_SAME_STMT);
1448 }
25ae5027 1449 return false;
1450}
1451
1452/* asan: this looks too complex. Can this be done simpler? */
1453/* Transform
1454 1) Memory references.
1455 2) BUILTIN_ALLOCA calls.
1456*/
1457
1458static void
1459transform_statements (void)
1460{
1461 basic_block bb;
1462 gimple_stmt_iterator i;
1463 int saved_last_basic_block = last_basic_block;
1464
1465 FOR_EACH_BB (bb)
1466 {
1467 if (bb->index >= saved_last_basic_block) continue;
25ae5027 1468 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
497a1c66 1469 {
1470 gimple s = gsi_stmt (i);
1471
1472 if (gimple_assign_single_p (s))
1473 instrument_assignment (&i);
1474 else if (is_gimple_call (s))
1475 {
1476 if (maybe_instrument_call (&i))
1477 /* Avoid gsi_next (&i), because maybe_instrument_call
1478 advanced the I iterator already. */
1479 continue;
1480 }
1481 gsi_next (&i);
497a1c66 1482 }
1483 }
1484}
1485
1486/* Build
1487 struct __asan_global
1488 {
1489 const void *__beg;
1490 uptr __size;
1491 uptr __size_with_redzone;
1492 const void *__name;
1493 uptr __has_dynamic_init;
1494 } type. */
1495
1496static tree
1497asan_global_struct (void)
1498{
1499 static const char *field_names[5]
1500 = { "__beg", "__size", "__size_with_redzone",
1501 "__name", "__has_dynamic_init" };
1502 tree fields[5], ret;
1503 int i;
1504
1505 ret = make_node (RECORD_TYPE);
1506 for (i = 0; i < 5; i++)
1507 {
1508 fields[i]
1509 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
1510 get_identifier (field_names[i]),
1511 (i == 0 || i == 3) ? const_ptr_type_node
1512 : build_nonstandard_integer_type (POINTER_SIZE, 1));
1513 DECL_CONTEXT (fields[i]) = ret;
1514 if (i)
1515 DECL_CHAIN (fields[i - 1]) = fields[i];
1516 }
1517 TYPE_FIELDS (ret) = fields[0];
1518 TYPE_NAME (ret) = get_identifier ("__asan_global");
1519 layout_type (ret);
1520 return ret;
1521}
1522
1523/* Append description of a single global DECL into vector V.
1524 TYPE is __asan_global struct type as returned by asan_global_struct. */
1525
1526static void
9771b263 1527asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
1528{
1529 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
1530 unsigned HOST_WIDE_INT size;
1531 tree str_cst, refdecl = decl;
9771b263 1532 vec<constructor_elt, va_gc> *vinner = NULL;
1533
1534 if (!asan_pp_initialized)
1535 asan_pp_initialize ();
1536
1537 pp_clear_output_area (&asan_pp);
1538 if (DECL_NAME (decl))
1539 pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
1540 else
1541 pp_string (&asan_pp, "<unknown>");
1542 pp_space (&asan_pp);
1543 pp_left_paren (&asan_pp);
1544 pp_string (&asan_pp, main_input_filename);
1545 pp_right_paren (&asan_pp);
1546 str_cst = asan_pp_string ();
1547
1548 if (asan_needs_local_alias (decl))
1549 {
1550 char buf[20];
9771b263 1551 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
1552 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
1553 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
1554 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
1555 TREE_READONLY (refdecl) = TREE_READONLY (decl);
1556 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
1557 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
1558 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
1559 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
1560 TREE_STATIC (refdecl) = 1;
1561 TREE_PUBLIC (refdecl) = 0;
1562 TREE_USED (refdecl) = 1;
1563 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
1564 }
1565
1566 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
1567 fold_convert (const_ptr_type_node,
1568 build_fold_addr_expr (refdecl)));
1569 size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
1570 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
1571 size += asan_red_zone_size (size);
1572 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
1573 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
1574 fold_convert (const_ptr_type_node, str_cst));
1575 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
1576 init = build_constructor (type, vinner);
1577 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
1578}
1579
1580/* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
1581void
1582initialize_sanitizer_builtins (void)
1583{
1584 tree decl;
1585
1586 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
1587 return;
1588
1589 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
1590 tree BT_FN_VOID_PTR
1591 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1592 tree BT_FN_VOID_PTR_PTRMODE
1593 = build_function_type_list (void_type_node, ptr_type_node,
1594 build_nonstandard_integer_type (POINTER_SIZE,
1595 1), NULL_TREE);
1596 tree BT_FN_VOID_INT
1597 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
1598 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
1599 tree BT_FN_IX_CONST_VPTR_INT[5];
1600 tree BT_FN_IX_VPTR_IX_INT[5];
1601 tree BT_FN_VOID_VPTR_IX_INT[5];
1602 tree vptr
1603 = build_pointer_type (build_qualified_type (void_type_node,
1604 TYPE_QUAL_VOLATILE));
1605 tree cvptr
1606 = build_pointer_type (build_qualified_type (void_type_node,
1607 TYPE_QUAL_VOLATILE
 1608 | TYPE_QUAL_CONST));
1609 tree boolt
1610 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
1611 int i;
1612 for (i = 0; i < 5; i++)
1613 {
1614 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
1615 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
1616 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
1617 integer_type_node, integer_type_node,
1618 NULL_TREE);
1619 BT_FN_IX_CONST_VPTR_INT[i]
1620 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
1621 BT_FN_IX_VPTR_IX_INT[i]
1622 = build_function_type_list (ix, vptr, ix, integer_type_node,
1623 NULL_TREE);
1624 BT_FN_VOID_VPTR_IX_INT[i]
1625 = build_function_type_list (void_type_node, vptr, ix,
1626 integer_type_node, NULL_TREE);
1627 }
1628#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
1629#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
1630#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
1631#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
1632#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
1633#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
1634#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
1635#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
1636#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
1637#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
1638#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
1639#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
1640#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
1641#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
1642#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
1643#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
1644#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
1645#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
1646#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
1647#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
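/* Reading the names above: BT_FN_<ret>_<args>, where I<N> is an unsigned
   integer N bytes wide, VPTR is "volatile void *", CVPTR is "const
   volatile void *", PTR is "void *" and BOOL is the type from the
   language hook above.  As an illustrative plain-C sketch only (these
   typedefs exist nowhere in GCC, and the exact integer widths are
   target-dependent), two of the names would read:

       typedef uint32_t (*I4_CONST_VPTR_INT_fn) (const volatile void *, int);
       typedef void (*VOID_VPTR_I8_INT_fn) (volatile void *, uint64_t, int);  */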
1648#undef ATTR_NOTHROW_LEAF_LIST
1649#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
1650#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
1651#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
1652#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
1653#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
1654#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
1655#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
1656 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
1657#undef DEF_SANITIZER_BUILTIN
1658#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
1659 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
1660 BUILT_IN_NORMAL, NAME, NULL_TREE); \
1661 set_call_expr_flags (decl, ATTRS); \
1662 set_builtin_decl (ENUM, decl, true);
1663
1664#include "sanitizer.def"
1665
1666#undef DEF_SANITIZER_BUILTIN
1667}
1668
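/* For illustration, a sanitizer.def entry of the (assumed) form

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
			    BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands under the macro above into

     decl = add_builtin_function ("__builtin___asan_init", BT_FN_VOID,
				  BUILT_IN_ASAN_INIT, BUILT_IN_NORMAL,
				  "__asan_init", NULL_TREE);
     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);

   so each sanitizer builtin is registered under its __builtin_ alias and
   its library name, given its call flags, and made visible to
   builtin_decl_implicit.  */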
1669/* Called via htab_traverse. Count number of emitted
1670 STRING_CSTs in the constant hash table. */
1671
1672static int
1673count_string_csts (void **slot, void *data)
1674{
1675 struct constant_descriptor_tree *desc
1676 = (struct constant_descriptor_tree *) *slot;
1677 if (TREE_CODE (desc->value) == STRING_CST
1678 && TREE_ASM_WRITTEN (desc->value)
1679 && asan_protect_global (desc->value))
1680 ++*((unsigned HOST_WIDE_INT *) data);
1681 return 1;
1682}
1683
1684/* Helper structure to pass two parameters to
1685 add_string_csts. */
1686
1687struct asan_add_string_csts_data
1688{
1689 tree type;
1690 vec<constructor_elt, va_gc> *v;
1691};
1692
1693/* Called via htab_traverse. Call asan_add_global
1694 on emitted STRING_CSTs from the constant hash table. */
1695
1696static int
1697add_string_csts (void **slot, void *data)
1698{
1699 struct constant_descriptor_tree *desc
1700 = (struct constant_descriptor_tree *) *slot;
1701 if (TREE_CODE (desc->value) == STRING_CST
1702 && TREE_ASM_WRITTEN (desc->value)
1703 && asan_protect_global (desc->value))
1704 {
1705 struct asan_add_string_csts_data *aascd
1706 = (struct asan_add_string_csts_data *) data;
1707 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
1708 aascd->type, aascd->v);
1709 }
1710 return 1;
1711}
1712
1713/* Needs to be GTY(()), because cgraph_build_static_cdtor may
1714 invoke ggc_collect. */
1715static GTY(()) tree asan_ctor_statements;
1716
 1717/* Module-level instrumentation.
 1718 - Insert __asan_init () into the list of CTORs.
 1719 - Describe and register the protected globals with the runtime in a
 1720 CTOR, and unregister them again in a DTOR. */
1721
1722void
1723asan_finish_file (void)
1724{
1725 struct varpool_node *vnode;
1726 unsigned HOST_WIDE_INT gcount = 0;
1727
1728 if (shadow_ptr_types[0] == NULL_TREE)
1729 asan_init_shadow_ptr_types ();
1730 /* Avoid instrumenting code in the asan ctors/dtors.
1731 We don't need to insert padding after the description strings,
 1732 nor after the .LASAN* array. */
1733 flag_asan = 0;
1734
1735 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
1736 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
8240018b 1737 FOR_EACH_DEFINED_VARIABLE (vnode)
1738 if (TREE_ASM_WRITTEN (vnode->symbol.decl)
1739 && asan_protect_global (vnode->symbol.decl))
8240018b 1740 ++gcount;
1741 htab_t const_desc_htab = constant_pool_htab ();
1742 htab_traverse (const_desc_htab, count_string_csts, &gcount);
1743 if (gcount)
1744 {
0e668eaf 1745 tree type = asan_global_struct (), var, ctor;
1746 tree uptr = build_nonstandard_integer_type (POINTER_SIZE, 1);
1747 tree dtor_statements = NULL_TREE;
9771b263 1748 vec<constructor_elt, va_gc> *v;
1749 char buf[20];
1750
1751 type = build_array_type_nelts (type, gcount);
1752 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
1753 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
1754 type);
1755 TREE_STATIC (var) = 1;
1756 TREE_PUBLIC (var) = 0;
1757 DECL_ARTIFICIAL (var) = 1;
1758 DECL_IGNORED_P (var) = 1;
9771b263 1759 vec_alloc (v, gcount);
8240018b 1760 FOR_EACH_DEFINED_VARIABLE (vnode)
1761 if (TREE_ASM_WRITTEN (vnode->symbol.decl)
1762 && asan_protect_global (vnode->symbol.decl))
8240018b 1763 asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
1764 struct asan_add_string_csts_data aascd;
1765 aascd.type = TREE_TYPE (type);
1766 aascd.v = v;
1767 htab_traverse (const_desc_htab, add_string_csts, &aascd);
1768 ctor = build_constructor (type, v);
1769 TREE_CONSTANT (ctor) = 1;
1770 TREE_STATIC (ctor) = 1;
1771 DECL_INITIAL (var) = ctor;
1772 varpool_assemble_decl (varpool_node_for_decl (var));
1773
1774 fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
1775 append_to_statement_list (build_call_expr (fn, 2,
1776 build_fold_addr_expr (var),
1777 build_int_cst (uptr, gcount)),
1778 &asan_ctor_statements);
1779
1780 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
1781 append_to_statement_list (build_call_expr (fn, 2,
1782 build_fold_addr_expr (var),
1783 build_int_cst (uptr, gcount)),
1784 &dtor_statements);
1785 cgraph_build_static_cdtor ('D', dtor_statements,
1786 MAX_RESERVED_INIT_PRIORITY - 1);
1787 }
1788 cgraph_build_static_cdtor ('I', asan_ctor_statements,
1789 MAX_RESERVED_INIT_PRIORITY - 1);
94fce891 1790 flag_asan = 1;
1791}
1792
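/* A minimal source-level sketch of what the constructor and destructor
   built above amount to.  The names and prototypes below are illustrative
   stand-ins (the real array is the internal .LASAN0 symbol, the count is
   gcount, the callees live in libasan, and both functions are emitted
   with priority MAX_RESERVED_INIT_PRIORITY - 1).  */
#include <stddef.h>

extern void __asan_init (void);
extern void __asan_register_globals (void *globals, size_t n);
extern void __asan_unregister_globals (void *globals, size_t n);

static char sketch_lasan0[1];	/* stands in for the .LASAN0 array  */
enum { SKETCH_N_GLOBALS = 1 };	/* stands in for gcount             */

__attribute__ ((constructor)) static void
asan_module_ctor_sketch (void)
{
  __asan_init ();
  __asan_register_globals (sketch_lasan0, SKETCH_N_GLOBALS);
}

__attribute__ ((destructor)) static void
asan_module_dtor_sketch (void)
{
  __asan_unregister_globals (sketch_lasan0, SKETCH_N_GLOBALS);
}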
1793/* Instrument the current function. */
1794
1795static unsigned int
1796asan_instrument (void)
1797{
f6d98484 1798 if (shadow_ptr_types[0] == NULL_TREE)
94fce891 1799 asan_init_shadow_ptr_types ();
37d6f666 1800 transform_statements ();
1801 return 0;
1802}
1803
1804static bool
1805gate_asan (void)
1806{
1807 return flag_asan != 0
1808 && !lookup_attribute ("no_address_safety_analysis",
1809 DECL_ATTRIBUTES (current_function_decl));
1810}
1811
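/* Usage sketch for the gate above: with the address sanitizer enabled
   (-faddress-sanitizer in this era of GCC, setting flag_asan), every
   function is instrumented unless it carries the attribute checked in
   gate_asan.  A hypothetical user function opting out looks like:  */
__attribute__ ((no_address_safety_analysis))
int
read_raw_byte (const char *p, long i)
{
  return p[i];	/* memory accesses here get no shadow-memory checks */
}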
1812struct gimple_opt_pass pass_asan =
1813{
1814 {
1815 GIMPLE_PASS,
1816 "asan", /* name */
1817 OPTGROUP_NONE, /* optinfo_flags */
1818 gate_asan, /* gate */
1819 asan_instrument, /* execute */
1820 NULL, /* sub */
1821 NULL, /* next */
1822 0, /* static_pass_number */
1823 TV_NONE, /* tv_id */
37d6f666 1824 PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
1825 0, /* properties_provided */
1826 0, /* properties_destroyed */
1827 0, /* todo_flags_start */
37d6f666 1828 TODO_verify_flow | TODO_verify_stmts
f6d98484 1829 | TODO_update_ssa /* todo_flags_finish */
1830 }
1831};
f6d98484 1832
1833static bool
1834gate_asan_O0 (void)
1835{
77bc5132 1836 return !optimize && gate_asan ();
1837}
1838
1839struct gimple_opt_pass pass_asan_O0 =
1840{
1841 {
1842 GIMPLE_PASS,
1843 "asan0", /* name */
497a1c66 1844 OPTGROUP_NONE, /* optinfo_flags */
1845 gate_asan_O0, /* gate */
1846 asan_instrument, /* execute */
1847 NULL, /* sub */
1848 NULL, /* next */
1849 0, /* static_pass_number */
1850 TV_NONE, /* tv_id */
1851 PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
1852 0, /* properties_provided */
1853 0, /* properties_destroyed */
1854 0, /* todo_flags_start */
1855 TODO_verify_flow | TODO_verify_stmts
1856 | TODO_update_ssa /* todo_flags_finish */
1857 }
1858};
1859
f6d98484 1860#include "gt-asan.h"