/* Builtins' description for AArch64 SIMD architecture.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "calls.h"
#include "expr.h"
#include "tm_p.h"
#include "recog.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "optabs.h"
#include "pointer-set.h"
#include "hash-table.h"
#include "vec.h"
#include "ggc.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"

enum aarch64_simd_builtin_type_mode
{
  T_V8QI,
  T_V4HI,
  T_V2SI,
  T_V2SF,
  T_DI,
  T_DF,
  T_V16QI,
  T_V8HI,
  T_V4SI,
  T_V4SF,
  T_V2DI,
  T_V2DF,
  T_TI,
  T_EI,
  T_OI,
  T_XI,
  T_SI,
  T_SF,
  T_HI,
  T_QI,
  T_MAX
};

#define v8qi_UP  T_V8QI
#define v4hi_UP  T_V4HI
#define v2si_UP  T_V2SI
#define v2sf_UP  T_V2SF
#define di_UP    T_DI
#define df_UP    T_DF
#define v16qi_UP T_V16QI
#define v8hi_UP  T_V8HI
#define v4si_UP  T_V4SI
#define v4sf_UP  T_V4SF
#define v2di_UP  T_V2DI
#define v2df_UP  T_V2DF
#define ti_UP    T_TI
#define ei_UP    T_EI
#define oi_UP    T_OI
#define xi_UP    T_XI
#define si_UP    T_SI
#define sf_UP    T_SF
#define hi_UP    T_HI
#define qi_UP    T_QI

#define UP(X) X##_UP

#define SIMD_MAX_BUILTIN_ARGS 5

enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* const T *foo.  */
  qualifier_const_pointer = 0x6, /* qualifier_const | qualifier_pointer  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100
};

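/* Note: each builtin's prototype is described by one of the
   aarch64_type_qualifiers arrays below, with element 0 giving the return
   type and the remaining elements the arguments.  For example, the
   TYPES_BINOP array { qualifier_none, qualifier_none,
   qualifier_maybe_immediate } reads as "T foo (T, T)", where the second
   argument may also be accepted as an immediate operand.  */
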
typedef struct
{
  const char *name;
  enum aarch64_simd_builtin_type_mode mode;
  const enum insn_code code;
  unsigned int fcode;
  enum aarch64_type_qualifiers *qualifiers;
} aarch64_simd_builtin_datum;

static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
#define TYPES_CREATE (aarch64_types_unop_qualifiers)
#define TYPES_REINTERP_SS (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_su_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned };
#define TYPES_REINTERP_SU (aarch64_types_unop_su_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_sp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_poly };
#define TYPES_REINTERP_SP (aarch64_types_unop_sp_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_us_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none };
#define TYPES_REINTERP_US (aarch64_types_unop_us_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_ps_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_none };
#define TYPES_REINTERP_PS (aarch64_types_unop_ps_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopv_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_none, qualifier_none };
#define TYPES_BINOPV (aarch64_types_binopv_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_none };
#define TYPES_QUADOP (aarch64_types_quadop_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETLANE (aarch64_types_getlane_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_getlane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_SETLANE (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_setlane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define TYPES_COMBINE (aarch64_types_combine_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_none };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)

#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

#define VAR1(T, N, MAP, A) \
  {#N, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)

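/* As an illustration of how the VARn/CFn macros compose (the builtin name
   "foo" here is purely hypothetical, not necessarily an entry in
   aarch64-simd-builtins.def):

     VAR2 (BINOP, foo, 0, v8qi, v16qi)

   expands through VAR1 and CF0 into two table entries,

     {"foo", T_V8QI, CODE_FOR_aarch64_foov8qi, 0, TYPES_BINOP},
     {"foo", T_V16QI, CODE_FOR_aarch64_foov16qi, 0, TYPES_BINOP},

   i.e. one aarch64_simd_builtin_datum per mode listed.  */
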
/* BUILTIN_<ITERATOR> macros should expand to cover the same range of
   modes as is given for each define_mode_iterator in
   config/aarch64/iterators.md.  */

#define BUILTIN_DX(T, N, MAP) \
  VAR2 (T, N, MAP, di, df)
#define BUILTIN_GPF(T, N, MAP) \
  VAR2 (T, N, MAP, sf, df)
#define BUILTIN_SDQ_I(T, N, MAP) \
  VAR4 (T, N, MAP, qi, hi, si, di)
#define BUILTIN_SD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, hi, si)
#define BUILTIN_V2F(T, N, MAP) \
  VAR2 (T, N, MAP, v2sf, v2df)
#define BUILTIN_VALL(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df)
#define BUILTIN_VALLDI(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df, di)
#define BUILTIN_VALLDIF(T, N, MAP) \
  VAR12 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df, di, df)
#define BUILTIN_VB(T, N, MAP) \
  VAR2 (T, N, MAP, v8qi, v16qi)
#define BUILTIN_VD(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v4hi, v2si, v2sf)
#define BUILTIN_VDC(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VDIC(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VDN(T, N, MAP) \
  VAR3 (T, N, MAP, v4hi, v2si, di)
#define BUILTIN_VDQ(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDQF(T, N, MAP) \
  VAR3 (T, N, MAP, v2sf, v4sf, v2df)
#define BUILTIN_VDQF_DF(T, N, MAP) \
  VAR4 (T, N, MAP, v2sf, v4sf, v2df, df)
#define BUILTIN_VDQH(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v8hi)
#define BUILTIN_VDQHS(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQIF(T, N, MAP) \
  VAR9 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2sf, v4sf, v2df)
#define BUILTIN_VDQM(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQV(T, N, MAP) \
  VAR5 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v4si)
#define BUILTIN_VDQQH(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v16qi, v4hi, v8hi)
#define BUILTIN_VDQ_BHSI(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQ_I(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDW(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_BHSI(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v2si)
#define BUILTIN_VD_RE(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VQ(T, N, MAP) \
  VAR6 (T, N, MAP, v16qi, v8hi, v4si, v2di, v4sf, v2df)
#define BUILTIN_VQN(T, N, MAP) \
  VAR3 (T, N, MAP, v8hi, v4si, v2di)
#define BUILTIN_VQW(T, N, MAP) \
  VAR3 (T, N, MAP, v16qi, v8hi, v4si)
#define BUILTIN_VQ_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v8hi, v4si)
#define BUILTIN_VQ_S(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VSDQ_HSI(T, N, MAP) \
  VAR6 (T, N, MAP, v4hi, v8hi, v2si, v4si, hi, si)
#define BUILTIN_VSDQ_I(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si, di)
#define BUILTIN_VSDQ_I_BHSI(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si)
#define BUILTIN_VSDQ_I_DI(T, N, MAP) \
  VAR8 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, di)
#define BUILTIN_VSD_HSI(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v2si, hi, si)
#define BUILTIN_VSQN_HSDI(T, N, MAP) \
  VAR6 (T, N, MAP, v8hi, v4si, v2di, hi, si, di)
#define BUILTIN_VSTRUCT(T, N, MAP) \
  VAR3 (T, N, MAP, oi, ci, xi)

static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};

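/* The .def file is re-included below with VAR1 redefined, so that every
   entry in aarch64_simd_builtin_data also gets a matching
   AARCH64_SIMD_BUILTIN_<T>_<N><A> enumerator in enum aarch64_builtins.  */
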
#undef VAR1
#define VAR1(T, N, MAP, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_SIMD_BUILTIN_BASE,
#include "aarch64-simd-builtins.def"
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_BUILTIN_BASE
			     + ARRAY_SIZE (aarch64_simd_builtin_data),
  AARCH64_BUILTIN_MAX
};

static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6

/* Return a tree for a signed or unsigned argument of either
   the mode specified by MODE, or the inner mode of MODE.  */
tree
aarch64_build_scalar_type (enum machine_mode mode,
			   bool unsigned_p,
			   bool poly_p)
{
#undef INT_TYPES
#define INT_TYPES \
  AARCH64_TYPE_BUILDER (QI) \
  AARCH64_TYPE_BUILDER (HI) \
  AARCH64_TYPE_BUILDER (SI) \
  AARCH64_TYPE_BUILDER (DI) \
  AARCH64_TYPE_BUILDER (EI) \
  AARCH64_TYPE_BUILDER (OI) \
  AARCH64_TYPE_BUILDER (CI) \
  AARCH64_TYPE_BUILDER (XI) \
  AARCH64_TYPE_BUILDER (TI) \

/* Statically declare all the possible types we might need.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_p = NULL; \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL;

  INT_TYPES

  static tree float_aarch64_type_node = NULL;
  static tree double_aarch64_type_node = NULL;

  gcc_assert (!VECTOR_MODE_P (mode));

/* If we've already initialised this type, don't initialise it again,
   otherwise ask for a new type of the correct size.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return (X##_aarch64_type_node_u \
	      ? X##_aarch64_type_node_u \
	      : X##_aarch64_type_node_u \
		  = make_unsigned_type (GET_MODE_PRECISION (mode))); \
    else if (poly_p) \
      return (X##_aarch64_type_node_p \
	      ? X##_aarch64_type_node_p \
	      : X##_aarch64_type_node_p \
		  = make_unsigned_type (GET_MODE_PRECISION (mode))); \
    else \
      return (X##_aarch64_type_node_s \
	      ? X##_aarch64_type_node_s \
	      : X##_aarch64_type_node_s \
		  = make_signed_type (GET_MODE_PRECISION (mode))); \
    break;

  switch (mode)
    {
      INT_TYPES
    case SFmode:
      if (!float_aarch64_type_node)
	{
	  float_aarch64_type_node = make_node (REAL_TYPE);
	  TYPE_PRECISION (float_aarch64_type_node) = FLOAT_TYPE_SIZE;
	  layout_type (float_aarch64_type_node);
	}
      return float_aarch64_type_node;
      break;
    case DFmode:
      if (!double_aarch64_type_node)
	{
	  double_aarch64_type_node = make_node (REAL_TYPE);
	  TYPE_PRECISION (double_aarch64_type_node) = DOUBLE_TYPE_SIZE;
	  layout_type (double_aarch64_type_node);
	}
      return double_aarch64_type_node;
      break;
    default:
      gcc_unreachable ();
    }
}

tree
aarch64_build_vector_type (enum machine_mode mode,
			   bool unsigned_p,
			   bool poly_p)
{
  tree eltype;

#define VECTOR_TYPES \
  AARCH64_TYPE_BUILDER (V16QI) \
  AARCH64_TYPE_BUILDER (V8HI) \
  AARCH64_TYPE_BUILDER (V4SI) \
  AARCH64_TYPE_BUILDER (V2DI) \
  AARCH64_TYPE_BUILDER (V8QI) \
  AARCH64_TYPE_BUILDER (V4HI) \
  AARCH64_TYPE_BUILDER (V2SI) \
  \
  AARCH64_TYPE_BUILDER (V4SF) \
  AARCH64_TYPE_BUILDER (V2DF) \
  AARCH64_TYPE_BUILDER (V2SF) \
/* Declare our "cache" of values.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL; \
  static tree X##_aarch64_type_node_p = NULL;

  VECTOR_TYPES

  gcc_assert (VECTOR_MODE_P (mode));

#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return X##_aarch64_type_node_u \
	     ? X##_aarch64_type_node_u \
	     : X##_aarch64_type_node_u \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
						(GET_MODE_INNER (mode), \
						 unsigned_p, poly_p), mode); \
    else if (poly_p) \
      return X##_aarch64_type_node_p \
	     ? X##_aarch64_type_node_p \
	     : X##_aarch64_type_node_p \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
						(GET_MODE_INNER (mode), \
						 unsigned_p, poly_p), mode); \
    else \
      return X##_aarch64_type_node_s \
	     ? X##_aarch64_type_node_s \
	     : X##_aarch64_type_node_s \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
						(GET_MODE_INNER (mode), \
						 unsigned_p, poly_p), mode); \
    break;

  switch (mode)
    {
    default:
      eltype = aarch64_build_scalar_type (GET_MODE_INNER (mode),
					  unsigned_p, poly_p);
      return build_vector_type_for_mode (eltype, mode);
      break;
      VECTOR_TYPES
    }
}

tree
aarch64_build_type (enum machine_mode mode, bool unsigned_p, bool poly_p)
{
  if (VECTOR_MODE_P (mode))
    return aarch64_build_vector_type (mode, unsigned_p, poly_p);
  else
    return aarch64_build_scalar_type (mode, unsigned_p, poly_p);
}

tree
aarch64_build_signed_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, false, false);
}

tree
aarch64_build_unsigned_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, true, false);
}

tree
aarch64_build_poly_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, false, true);
}

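/* Register one __builtin_aarch64_* function for each entry in
   aarch64_simd_builtin_data, building its prototype from the insn_data of
   the underlying pattern and the entry's qualifiers.  Names take the form
   __builtin_aarch64_<name><mode>, with a _<type signature> suffix appended
   when any operand is unsigned ('u') or polynomial ('p') rather than
   signed ('s').  */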
static void
aarch64_init_simd_builtins (void)
{
  unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;

  /* Signed scalar type nodes.  */
  tree aarch64_simd_intQI_type_node = aarch64_build_signed_type (QImode);
  tree aarch64_simd_intHI_type_node = aarch64_build_signed_type (HImode);
  tree aarch64_simd_intSI_type_node = aarch64_build_signed_type (SImode);
  tree aarch64_simd_intDI_type_node = aarch64_build_signed_type (DImode);
  tree aarch64_simd_intTI_type_node = aarch64_build_signed_type (TImode);
  tree aarch64_simd_intEI_type_node = aarch64_build_signed_type (EImode);
  tree aarch64_simd_intOI_type_node = aarch64_build_signed_type (OImode);
  tree aarch64_simd_intCI_type_node = aarch64_build_signed_type (CImode);
  tree aarch64_simd_intXI_type_node = aarch64_build_signed_type (XImode);

  /* Unsigned scalar type nodes.  */
  tree aarch64_simd_intUQI_type_node = aarch64_build_unsigned_type (QImode);
  tree aarch64_simd_intUHI_type_node = aarch64_build_unsigned_type (HImode);
  tree aarch64_simd_intUSI_type_node = aarch64_build_unsigned_type (SImode);
  tree aarch64_simd_intUDI_type_node = aarch64_build_unsigned_type (DImode);

  /* Poly scalar type nodes.  */
  tree aarch64_simd_polyQI_type_node = aarch64_build_poly_type (QImode);
  tree aarch64_simd_polyHI_type_node = aarch64_build_poly_type (HImode);
  tree aarch64_simd_polyDI_type_node = aarch64_build_poly_type (DImode);
  tree aarch64_simd_polyTI_type_node = aarch64_build_poly_type (TImode);

  /* Float type nodes.  */
  tree aarch64_simd_float_type_node = aarch64_build_signed_type (SFmode);
  tree aarch64_simd_double_type_node = aarch64_build_signed_type (DFmode);

  /* Define typedefs which exactly correspond to the modes we are basing vector
     types on.  If you change these names you'll need to change
     the table used by aarch64_mangle_type too.  */
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyHI_type_node,
					     "__builtin_aarch64_simd_poly16");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyDI_type_node,
					     "__builtin_aarch64_simd_poly64");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyTI_type_node,
					     "__builtin_aarch64_simd_poly128");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intEI_type_node,
					     "__builtin_aarch64_simd_ei");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intOI_type_node,
					     "__builtin_aarch64_simd_oi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intCI_type_node,
					     "__builtin_aarch64_simd_ci");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intXI_type_node,
					     "__builtin_aarch64_simd_xi");

  /* Unsigned integer types for various mode sizes.  */
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUDI_type_node,
					     "__builtin_aarch64_simd_udi");

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      const char *const modenames[] =
	{
	  "v8qi", "v4hi", "v2si", "v2sf", "di", "df",
	  "v16qi", "v8hi", "v4si", "v4sf", "v2di", "v2df",
	  "ti", "ei", "oi", "xi", "si", "sf", "hi", "qi"
	};
      const enum machine_mode modes[] =
	{
	  V8QImode, V4HImode, V2SImode, V2SFmode, DImode, DFmode,
	  V16QImode, V8HImode, V4SImode, V4SFmode, V2DImode,
	  V2DFmode, TImode, EImode, OImode, XImode, SImode,
	  SFmode, HImode, QImode
	};
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      gcc_assert (ARRAY_SIZE (modenames) == T_MAX);

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  enum machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[arg_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[arg_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[arg_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = modes[d->mode];

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_build_type (op_mode,
				       qualifiers & qualifier_unsigned,
				       qualifiers & qualifier_poly);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s%s_%s",
		  d->name, modenames[d->mode], type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s%s",
		  d->name, modenames[d->mode]);

      fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				     NULL, NULL_TREE);
      aarch64_builtin_decls[fcode] = fndecl;
    }
}

void
aarch64_init_builtins (void)
{
  tree ftype_set_fpr
    = build_function_type_list (void_type_node, unsigned_type_node, NULL);
  tree ftype_get_fpr
    = build_function_type_list (unsigned_type_node, NULL);

  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
    = add_builtin_function ("__builtin_aarch64_get_fpcr", ftype_get_fpr,
			    AARCH64_BUILTIN_GET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
    = add_builtin_function ("__builtin_aarch64_set_fpcr", ftype_set_fpr,
			    AARCH64_BUILTIN_SET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
    = add_builtin_function ("__builtin_aarch64_get_fpsr", ftype_get_fpr,
			    AARCH64_BUILTIN_GET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
    = add_builtin_function ("__builtin_aarch64_set_fpsr", ftype_set_fpr,
			    AARCH64_BUILTIN_SET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);

  if (TARGET_SIMD)
    aarch64_init_simd_builtins ();
}

tree
aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= AARCH64_BUILTIN_MAX)
    return error_mark_node;

  return aarch64_builtin_decls[code];
}

typedef enum
{
  SIMD_ARG_COPY_TO_REG,
  SIMD_ARG_CONSTANT,
  SIMD_ARG_STOP
} builtin_simd_arg;

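/* Expand the arguments of the builtin call EXP for the pattern ICODE.  Each
   trailing builtin_simd_arg code (terminated by SIMD_ARG_STOP) says how the
   corresponding operand is handled: copied into a register (with pointer
   arguments converted to Pmode) or required to satisfy the pattern's
   predicate as a constant.  HAVE_RETVAL says whether operand 0 of the
   pattern is a result; if so the expansion is placed in TARGET when
   convenient.  The generated insn is emitted and TARGET returned.  */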
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, ...)
{
  va_list ap;
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  int argc = 0;

  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  va_start (ap, exp);

  for (;;)
    {
      builtin_simd_arg thisarg = (builtin_simd_arg) va_arg (ap, int);

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  op[argc] = expand_normal (arg[argc]);
	  mode[argc] = insn_data[icode].operand[argc + have_retval].mode;

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
		op[argc] = convert_memory_address (Pmode, op[argc]);
	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case SIMD_ARG_CONSTANT:
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
			  "expected %<const int%>", argc + 1);
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  va_end (ap);

  if (have_retval)
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return 0;

  emit_insn (pat);

  return target;
}

/* Expand an AArch64 AdvSIMD builtin (intrinsic).  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  aarch64_simd_builtin_datum *d =
    &aarch64_simd_builtin_data[fcode - (AARCH64_SIMD_BUILTIN_BASE + 1)];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	   the function has a non-void return type) or the operand for the
	   first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;
    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
	  (target, icode, !is_void, exp,
	   args[1],
	   args[2],
	   args[3],
	   args[4],
	   SIMD_ARG_STOP);
}

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient.  */
rtx
aarch64_expand_builtin (tree exp,
			rtx target,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  int fcode = DECL_FUNCTION_CODE (fndecl);
  int icode;
  rtx pat, op0;
  tree arg0;

  switch (fcode)
    {
    case AARCH64_BUILTIN_GET_FPCR:
    case AARCH64_BUILTIN_SET_FPCR:
    case AARCH64_BUILTIN_GET_FPSR:
    case AARCH64_BUILTIN_SET_FPSR:
      if ((fcode == AARCH64_BUILTIN_GET_FPCR)
	  || (fcode == AARCH64_BUILTIN_GET_FPSR))
	{
	  icode = (fcode == AARCH64_BUILTIN_GET_FPSR) ?
	    CODE_FOR_get_fpsr : CODE_FOR_get_fpcr;
	  target = gen_reg_rtx (SImode);
	  pat = GEN_FCN (icode) (target);
	}
      else
	{
	  target = NULL_RTX;
	  icode = (fcode == AARCH64_BUILTIN_SET_FPSR) ?
	    CODE_FOR_set_fpsr : CODE_FOR_set_fpcr;
	  arg0 = CALL_EXPR_ARG (exp, 0);
	  op0 = expand_normal (arg0);
	  pat = GEN_FCN (icode) (op0);
	}
      emit_insn (pat);
      return target;
    }

  if (fcode >= AARCH64_SIMD_BUILTIN_BASE)
    return aarch64_simd_expand_builtin (fcode, exp, target);

  return NULL_RTX;
}

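/* Return the decl of an AArch64 SIMD builtin that implements, element-wise
   on vectors of type TYPE_IN producing TYPE_OUT, the standard scalar
   builtin FNDECL (floor, ceil, trunc, round, nearbyint, sqrt, clz, the
   lfloor/lceil/lround conversions and bswap), or NULL_TREE when no
   suitable vector form exists.  */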
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  enum machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
    ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
    : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
	? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
	: (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	   : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (floor);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (ceil);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (round);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == SImode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	case BUILT_IN_CLZ:
	  {
	    if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
	    return NULL_TREE;
	  }
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOOR:
	case BUILT_IN_IFLOORF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LCEIL:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEIL:
	case BUILT_IN_ICEILF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LROUND:
	case BUILT_IN_IROUNDF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_BSWAP16:
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	  if (AARCH64_CHECK_BUILTIN_MODE (4, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4hi];
	  else if (AARCH64_CHECK_BUILTIN_MODE (8, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv8hi];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP32:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2si];
	  else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4si];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP64:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2di];
	  else
	    return NULL_TREE;
	default:
	  return NULL_TREE;
	}
    }

  return NULL_TREE;
}

#undef VAR1
#define VAR1(T, N, MAP, A) \
  case AARCH64_SIMD_BUILTIN_##T##_##N##A:

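/* Fold a call to the AArch64 SIMD builtin FNDECL with arguments ARGS into
   an equivalent GENERIC tree where possible (ABS_EXPR for the abs builtins,
   comparison codes for cmge/cmgt/cmeq/cmtst, VIEW_CONVERT_EXPR for the
   reinterpret builtins, FLOAT_EXPR for the int-to-float conversions), or
   return NULL_TREE if no folding applies.  */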
tree
aarch64_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
		      bool ignore ATTRIBUTE_UNUSED)
{
  int fcode = DECL_FUNCTION_CODE (fndecl);
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  switch (fcode)
    {
      BUILTIN_VALLDI (UNOP, abs, 2)
	return fold_build1 (ABS_EXPR, type, args[0]);
	break;
      BUILTIN_VALLDI (BINOP, cmge, 0)
	return fold_build2 (GE_EXPR, type, args[0], args[1]);
	break;
      BUILTIN_VALLDI (BINOP, cmgt, 0)
	return fold_build2 (GT_EXPR, type, args[0], args[1]);
	break;
      BUILTIN_VALLDI (BINOP, cmeq, 0)
	return fold_build2 (EQ_EXPR, type, args[0], args[1]);
	break;
      BUILTIN_VSDQ_I_DI (BINOP, cmtst, 0)
	{
	  tree and_node = fold_build2 (BIT_AND_EXPR, type, args[0], args[1]);
	  tree vec_zero_node = build_zero_cst (type);
	  return fold_build2 (NE_EXPR, type, and_node, vec_zero_node);
	  break;
	}
      VAR1 (REINTERP_SS, reinterpretdi, 0, df)
      VAR1 (REINTERP_SS, reinterpretv8qi, 0, df)
      VAR1 (REINTERP_SS, reinterpretv4hi, 0, df)
      VAR1 (REINTERP_SS, reinterpretv2si, 0, df)
      VAR1 (REINTERP_SS, reinterpretv2sf, 0, df)
      BUILTIN_VD (REINTERP_SS, reinterpretdf, 0)
      BUILTIN_VD (REINTERP_SU, reinterpretdf, 0)
      VAR1 (REINTERP_US, reinterpretdi, 0, df)
      VAR1 (REINTERP_US, reinterpretv8qi, 0, df)
      VAR1 (REINTERP_US, reinterpretv4hi, 0, df)
      VAR1 (REINTERP_US, reinterpretv2si, 0, df)
      VAR1 (REINTERP_US, reinterpretv2sf, 0, df)
      BUILTIN_VD (REINTERP_SP, reinterpretdf, 0)
      VAR1 (REINTERP_PS, reinterpretdi, 0, df)
      VAR1 (REINTERP_PS, reinterpretv8qi, 0, df)
      VAR1 (REINTERP_PS, reinterpretv4hi, 0, df)
      VAR1 (REINTERP_PS, reinterpretv2si, 0, df)
      VAR1 (REINTERP_PS, reinterpretv2sf, 0, df)
	return fold_build1 (VIEW_CONVERT_EXPR, type, args[0]);
      VAR1 (UNOP, floatv2si, 2, v2sf)
      VAR1 (UNOP, floatv4si, 2, v4sf)
      VAR1 (UNOP, floatv2di, 2, v2df)
	return fold_build1 (FLOAT_EXPR, type, args[0]);
      default:
	break;
    }

  return NULL_TREE;
}

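/* Try to fold the call in the statement at GSI when it is a call to one of
   the AArch64 SIMD reduction builtins, rewriting it as an assignment that
   uses the corresponding REDUC_*_EXPR tree code.  Return true if the
   statement was replaced.  */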
bool
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  tree call = gimple_call_fn (stmt);
  tree fndecl;
  gimple new_stmt = NULL;
  if (call)
    {
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  int fcode = DECL_FUNCTION_CODE (fndecl);
	  int nargs = gimple_call_num_args (stmt);
	  tree *args = (nargs > 0
			? gimple_call_arg_ptr (stmt, 0)
			: &error_mark_node);

	  switch (fcode)
	    {
	      BUILTIN_VALL (UNOP, reduc_splus_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_PLUS_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smax_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MAX_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smin_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MIN_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;

	      default:
		break;
	    }
	}
    }

  if (new_stmt)
    {
      gsi_replace (gsi, new_stmt, true);
      changed = true;
    }

  return changed;
}

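/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV: build the *HOLD, *CLEAR and
   *UPDATE sequences that save the FPCR/FPSR state, mask and clear the
   exception flags, and afterwards re-raise any newly set exceptions via
   __atomic_feraiseexcept, using the FPCR/FPSR builtins registered above.  */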
void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
							| AARCH64_FE_DIVBYZERO
							| AARCH64_FE_OVERFLOW
							| AARCH64_FE_UNDERFLOW
							| AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalent of:
       unsigned int fenv_cr;
       fenv_cr = __builtin_aarch64_get_fpcr ();

       unsigned int fenv_sr;
       fenv_sr = __builtin_aarch64_get_fpsr ();

       Now set all exceptions to non-stop:
       unsigned int mask_cr
	 = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
       unsigned int masked_cr;
       masked_cr = fenv_cr & mask_cr;

       And clear all exception flags:
       unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
       unsigned int masked_sr;
       masked_sr = fenv_sr & mask_sr;

       __builtin_aarch64_set_fpcr (masked_cr);
       __builtin_aarch64_set_fpsr (masked_sr);  */

  fenv_cr = create_tmp_var (unsigned_type_node, NULL);
  fenv_sr = create_tmp_var (unsigned_type_node, NULL);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT));

  ld_fenv_cr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_cr, build_call_expr (get_fpcr, 0));
  ld_fenv_sr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_sr, build_call_expr (get_fpsr, 0));

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
			hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
			masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
		  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
       __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of:
       unsigned int new_fenv_var;
       new_fenv_var = __builtin_aarch64_get_fpsr ();

       __builtin_aarch64_set_fpsr (fenv_sr);

       __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var (unsigned_type_node, NULL);
  reload_fenv = build2 (MODIFY_EXPR, unsigned_type_node,
			new_fenv_var, build_call_expr (get_fpsr, 0));
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
				 fold_convert (integer_type_node, new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
		    build2 (COMPOUND_EXPR, void_type_node,
			    reload_fenv, restore_fnenv), update_call);
}

#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
#undef BUILTIN_DX
#undef BUILTIN_SDQ_I
#undef BUILTIN_SD_HSI
#undef BUILTIN_V2F
#undef BUILTIN_VALL
#undef BUILTIN_VB
#undef BUILTIN_VD
#undef BUILTIN_VDC
#undef BUILTIN_VDIC
#undef BUILTIN_VDN
#undef BUILTIN_VDQ
#undef BUILTIN_VDQF
#undef BUILTIN_VDQH
#undef BUILTIN_VDQHS
#undef BUILTIN_VDQIF
#undef BUILTIN_VDQM
#undef BUILTIN_VDQV
#undef BUILTIN_VDQ_BHSI
#undef BUILTIN_VDQ_I
#undef BUILTIN_VDW
#undef BUILTIN_VD_BHSI
#undef BUILTIN_VD_HSI
#undef BUILTIN_VD_RE
#undef BUILTIN_VQ
#undef BUILTIN_VQN
#undef BUILTIN_VQW
#undef BUILTIN_VQ_HSI
#undef BUILTIN_VQ_S
#undef BUILTIN_VSDQ_HSI
#undef BUILTIN_VSDQ_I
#undef BUILTIN_VSDQ_I_BHSI
#undef BUILTIN_VSDQ_I_DI
#undef BUILTIN_VSD_HSI
#undef BUILTIN_VSQN_HSDI
#undef BUILTIN_VSTRUCT
#undef CF0
#undef CF1
#undef CF2
#undef CF3
#undef CF4
#undef CF10
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11