Attachment 'C11atomic5.patch'
Download 1 * c/c-parser.c (c_parser_expr_no_commas): Call build_atomic_load on RHS.
2 * c/c-typeck.c (build_modify_expr): Call build_atomic_assign for atomic
3 expressions.
4 (build_atomic_assign): New. Construct appropriate sequences for
5 atomic assignment operations.
6 (build_atomic_load): New. Construct an atomic load.
7 * c-family/c-common.h (build_atomic_load): Prototype.
8
9 Index: gcc/c/c-parser.c
10 ===================================================================
11 *** gcc/c/c-parser.c (revision 201248)
12 --- gcc/c/c-parser.c (working copy)
13 *************** c_parser_expr_no_commas (c_parser *parse
14 *** 5440,5445 ****
15 --- 5440,5472 ----
16 code = BIT_IOR_EXPR;
17 break;
18 default:
19 + /* TODO
20 + This doesn't work. Expressions like ~a where a is atomic do not function
21 + properly, since the rhs is parsed/consumed as an entire expression,
22 + and at the time, we don't know if this expression is a RHS or LHS, so
23 + we don't know if we need a load or not.
24 +
25 + I think we need to introduce an ATOMIC_EXPR node, and whenever the
26 + type of an expression becomes TYPE_ATOMIC(), we immediately hang the
27 + expression off a new ATOMIC_EXPR node as operand 0, and change the
28 + type of the ATOMIC_EXPR node to TYPE_MAIN_VARIANT(atomic_type). This
29 + will encapsulate all the expressions which need to be handled with an
30 + ATOMIC_EXPR node, and then at this point, scan the rhs and see if there
31 + are any ATOMIC_EXPR, and replace those nodes with atomic_loads's of
32 + the ATOMIC_EXPR operand.
33 +
34 + This will also change the LHS processing in build_modify_expr...
35 + although *in theory* the top level expression *ought* to be the
36 + only thing that should have an ATOMIC_EXPR(), so it may be as
37 + simple as checking the LHS is an ATOMIC_EXPR node rather than
38 + the current check of ATOMIC_TYPE (lhs).
39 +
40 + This also means the TYPE_ATOMIC flag in expressions should ONLY
41 + occur on the operand of an ATOMIC_EXPR() nodes... anywhere else
42 + would be an error. */
43 + if (TREE_CODE (lhs.value) != ERROR_MARK
44 + && TYPE_ATOMIC (TREE_TYPE (lhs.value)))
45 + lhs.value = build_atomic_load (op_location, lhs.value);
46 return lhs;
47 }
48 c_parser_consume_token (parser);
49 Index: gcc/c/c-typeck.c
50 ===================================================================
51 *** gcc/c/c-typeck.c (revision 201248)
52 --- gcc/c/c-typeck.c (working copy)
53 *************** static void readonly_warning (tree, enum
54 *** 103,108 ****
55 --- 103,110 ----
56 static int lvalue_or_else (location_t, const_tree, enum lvalue_use);
57 static void record_maybe_used_decl (tree);
58 static int comptypes_internal (const_tree, const_tree, bool *, bool *);
59 + static tree build_atomic_assign (location_t, tree, enum tree_code, tree);
60 +
61
62 /* Return true if EXP is a null pointer constant, false otherwise. */
63
64 *************** build_modify_expr (location_t location,
65 *** 4830,4835 ****
66 --- 4832,4838 ----
67 tree lhstype = TREE_TYPE (lhs);
68 tree olhstype = lhstype;
69 bool npc;
70 + bool is_atomic_op;
71
72 /* Types that aren't fully specified cannot be used in assignments. */
73 lhs = require_complete_type (lhs);
74 *************** build_modify_expr (location_t location,
75 *** 4842,4847 ****
76 --- 4845,4852 ----
77 if (!objc_is_property_ref (lhs) && !lvalue_or_else (location, lhs, lv_assign))
78 return error_mark_node;
79
80 + is_atomic_op = TYPE_ATOMIC (TREE_TYPE (lhs));
81 +
82 if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
83 {
84 rhs_semantic_type = TREE_TYPE (rhs);
85 *************** build_modify_expr (location_t location,
86 *** 4872,4883 ****
87 {
88 lhs = c_fully_fold (lhs, false, NULL);
89 lhs = stabilize_reference (lhs);
90 - newrhs = build_binary_op (location,
91 - modifycode, lhs, rhs, 1);
92
93 ! /* The original type of the right hand side is no longer
94 ! meaningful. */
95 ! rhs_origtype = NULL_TREE;
96 }
97
98 if (c_dialect_objc ())
99 --- 4877,4893 ----
100 {
101 lhs = c_fully_fold (lhs, false, NULL);
102 lhs = stabilize_reference (lhs);
103
104 ! /* Construct the RHS for any non-atomic compound assignment. */
105 ! if (!is_atomic_op)
106 ! {
107 ! newrhs = build_binary_op (location,
108 ! modifycode, lhs, rhs, 1);
109 !
110 ! /* The original type of the right hand side is no longer
111 ! meaningful. */
112 ! rhs_origtype = NULL_TREE;
113 ! }
114 }
115
116 if (c_dialect_objc ())
117 *************** build_modify_expr (location_t location,
118 *** 4944,4949 ****
119 --- 4954,4968 ----
120 "enum conversion in assignment is invalid in C++");
121 }
122
123 + /* If the lhs is atomic, remove that qualifier. */
124 + if (is_atomic_op)
125 + {
126 + lhstype = build_qualified_type (lhstype,
127 + TYPE_QUALS(lhstype) & ~TYPE_QUAL_ATOMIC);
128 + olhstype = build_qualified_type (olhstype,
129 + TYPE_QUALS(lhstype) & ~TYPE_QUAL_ATOMIC);
130 + }
131 +
132 /* Convert new value to destination type. Fold it first, then
133 restore any excess precision information, for the sake of
134 conversion warnings. */
135 *************** build_modify_expr (location_t location,
136 *** 4970,4978 ****
137
138 /* Scan operands. */
139
140 ! result = build2 (MODIFY_EXPR, lhstype, lhs, newrhs);
141 ! TREE_SIDE_EFFECTS (result) = 1;
142 ! protected_set_expr_location (result, location);
143
144 /* If we got the LHS in a different type for storing in,
145 convert the result back to the nominal type of LHS
146 --- 4989,5002 ----
147
148 /* Scan operands. */
149
150 ! if (is_atomic_op)
151 ! result = build_atomic_assign (location, lhs, modifycode, newrhs);
152 ! else
153 ! {
154 ! result = build2 (MODIFY_EXPR, lhstype, lhs, newrhs);
155 ! TREE_SIDE_EFFECTS (result) = 1;
156 ! protected_set_expr_location (result, location);
157 ! }
158
159 /* If we got the LHS in a different type for storing in,
160 convert the result back to the nominal type of LHS
161 *************** c_build_va_arg (location_t loc, tree exp
162 *** 10972,10974 ****
163 --- 10996,11207 ----
164 "C++ requires promoted type, not enum type, in %<va_arg%>");
165 return build_va_arg (loc, expr, type);
166 }
167 +
168 +
169 + /* Expand atomic compound assignments into an appropriate sequence as
170 + specified by the C11 standard section 6.5.16.2.
171 + given
172 + _Atomic T1 E1
173 + T2 E2
174 + E1 op= E2
175 +
176 + This sequence is used for integer, floating point and complex types.
177 +
178 + In addition the 'fe' prefixed routines may need to be invoked for
179 + floating point and complex when annex F is in effect (regarding floating
180 + point or exceptional conditions) See 6.5.16.2 footnote 113:
181 +
182 + TODO these are not implemented as yet, but the comments are placed at the
183 + correct locations in the code for the appropriate calls to be made. They
184 + should only be issued if the expression type is !INTEGRAL_TYPE_P().
185 +
186 + T1 newval;
187 + T1 old;
188 + T1 *addr
189 + T2 val
190 + fenv_t fenv
191 +
192 + addr = &E1;
193 + val = (E2);
194 + __atomic_load (addr, &old, SEQ_CST);
195 + feholdexcept (&fenv); <<-- float & complex only
196 + loop:
197 + newval = old op val;
198 + if (__atomic_compare_exchange_strong (addr, &old, &newval, SEQ_CST,
199 + SEQ_CST))
200 + goto done;
201 + feclearexcept (FE_ALL_EXCEPT); <<-- float & complex only
202 + goto loop:
203 + done:
204 + feupdateenv (&fenv); <<-- float & complex only
205 +
206 +
207 + Also note that the compiler is simply issuing the generic form of the atomic
208 + operations. This requires temp(s) and has their address taken. The atomic
209 + processing is smart enough to figure out when the size of an object can
210 + utilize a lock free version, and convert the built-in call to the appropriate
211 + lockfree routine. The optimizers will then dispose of any temps that are no
212 + longer required, and lock free implementations are utilized for integer, float
213 + and complex as long as there is target support for the required size.
214 +
215 + If the operator is NOP_EXPR, then this is a simple assignment, and an
216 + __atomic_store is issued to perform the assignment rather than the above loop.
217 +
218 + */
219 +
220 + /* Build an atomic assignment at LOC, expanding into the proper sequence to
221 + store LHS MODIFYCODE= RHS. Return a value representing the result of
222 + the operation. */
223 + tree
224 + build_atomic_assign (location_t loc, tree lhs, enum tree_code modifycode,
225 + tree rhs)
226 + {
227 + tree fndecl, func_call;
228 + vec<tree, va_gc> *params;
229 + tree val, nonatomic_type, newval, newval_addr;
230 + tree old, old_addr;
231 + tree stmt, goto_stmt;
232 + tree loop_label, loop_decl, done_label, done_decl;
233 +
234 + tree lhs_type = TREE_TYPE (lhs);
235 + tree lhs_addr = build_unary_op (loc, ADDR_EXPR, lhs, 0);
236 + tree seq_cst = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
237 +
238 + gcc_assert (TYPE_ATOMIC (lhs_type));
239 +
240 + /* allocate enough vector items for a compare_exchange. */
241 + vec_alloc (params, 6);
242 +
243 + /* Remove the qualifiers for the rest of the expressions and create
244 + the VAL temp variable to hold the RHS. */
245 + nonatomic_type = build_qualified_type (lhs_type, TYPE_UNQUALIFIED);
246 + val = create_tmp_var (nonatomic_type, NULL);
247 + TREE_ADDRESSABLE (val) = 1;
248 + rhs = build2 (MODIFY_EXPR, nonatomic_type, val, rhs);
249 + SET_EXPR_LOCATION (rhs, loc);
250 + add_stmt (rhs);
251 +
252 + /* NOP_EXPR indicates it's a straight store of the RHS. Simply issue
253 + an atomic_store. */
254 + if (modifycode == NOP_EXPR)
255 + {
256 + /* Build __atomic_store (&lhs, &val, SEQ_CST) */
257 + rhs = build_unary_op (loc, ADDR_EXPR, val, 0);
258 + fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_STORE);
259 + params->quick_push (lhs_addr);
260 + params->quick_push (rhs);
261 + params->quick_push (seq_cst);
262 + func_call = build_function_call_vec (loc, fndecl, params, NULL);
263 + add_stmt (func_call);
264 +
265 + /* Val is the value which was stored, return it for any further value
266 + propagation. */
267 + return val;
268 + }
269 +
270 + /* Create the variables and labels required for the op= form. */
271 + old = create_tmp_var (nonatomic_type, NULL);
272 + old_addr = build_unary_op (loc, ADDR_EXPR, old, 0);
273 + TREE_ADDRESSABLE (old) = 1;
274 +
275 + newval = create_tmp_var (nonatomic_type, NULL);
276 + newval_addr = build_unary_op (loc, ADDR_EXPR, newval, 0);
277 + TREE_ADDRESSABLE (newval) = 1;
278 +
279 + loop_decl = create_artificial_label (loc);
280 + loop_label = build1 (LABEL_EXPR, void_type_node, loop_decl);
281 +
282 + done_decl = create_artificial_label (loc);
283 + done_label = build1 (LABEL_EXPR, void_type_node, done_decl);
284 +
285 + /* __atomic_load (addr, &old, SEQ_CST). */
286 + fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_LOAD);
287 + params->quick_push (lhs_addr);
288 + params->quick_push (old_addr);
289 + params->quick_push (seq_cst);
290 + func_call = build_function_call_vec (loc, fndecl, params, NULL);
291 + add_stmt (func_call);
292 + params->truncate (0);
293 +
294 + /* TODO if (!integral) issue feholdexcept (&fenv); */
295 +
296 + /* loop: */
297 + add_stmt (loop_label);
298 +
299 + /* newval = old + val; */
300 + rhs = build_binary_op (loc, modifycode, old, val, 1);
301 + rhs = build2 (MODIFY_EXPR, nonatomic_type, newval, rhs);
302 + SET_EXPR_LOCATION (rhs, loc);
303 + add_stmt (rhs);
304 +
305 + /* if (__atomic_compare_exchange (addr, &old, &new, false, SEQ_CST, SEQ_CST))
306 + goto done; */
307 + fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_COMPARE_EXCHANGE);
308 + params->quick_push (lhs_addr);
309 + params->quick_push (old_addr);
310 + params->quick_push (newval_addr);
311 + params->quick_push (integer_zero_node);
312 + params->quick_push (seq_cst);
313 + params->quick_push (seq_cst);
314 + func_call = build_function_call_vec (loc, fndecl, params, NULL);
315 +
316 + goto_stmt = build1 (GOTO_EXPR, void_type_node, done_decl);
317 + SET_EXPR_LOCATION (goto_stmt, loc);
318 +
319 + stmt = build3 (COND_EXPR, void_type_node, func_call, goto_stmt, NULL_TREE);
320 + SET_EXPR_LOCATION (stmt, loc);
321 + add_stmt (stmt);
322 +
323 + /* TODO if (!integral) issue feclearexcept (FE_ALL_EXCEPT); */
324 +
325 + /* goto loop; */
326 + goto_stmt = build1 (GOTO_EXPR, void_type_node, loop_decl);
327 + SET_EXPR_LOCATION (goto_stmt, loc);
328 + add_stmt (goto_stmt);
329 +
330 + /* done: */
331 + add_stmt (done_label);
332 +
333 + /* TODO If (!integral) issue feupdateenv (&fenv) */
334 +
335 + /* Newval is the value that was successfully stored, return that. */
336 + return newval;
337 + }
338 +
339 +
340 + /* This simply performs an atomic load from EXPR and returns the temp it was
341 + loaded into. */
342 +
343 + tree
344 + build_atomic_load (location_t loc, tree expr)
345 + {
346 + vec<tree, va_gc> *params;
347 + tree nonatomic_type, tmp, tmp_addr, fndecl, func_call;
348 + tree expr_type = TREE_TYPE (expr);
349 + tree expr_addr = build_unary_op (loc, ADDR_EXPR, expr, 0);
350 + tree seq_cst = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
351 +
352 + gcc_assert (TYPE_ATOMIC (expr_type));
353 +
354 + /* Expansion of a generic atomoic load may require an addition element, so
355 + allocate enough to prevent a resize. */
356 + vec_alloc (params, 4);
357 +
358 + /* Remove the qualifiers for the rest of the expressions and create
359 + the VAL temp variable to hold the RHS. */
360 + nonatomic_type = build_qualified_type (expr_type, TYPE_UNQUALIFIED);
361 + tmp = create_tmp_var (nonatomic_type, NULL);
362 + tmp_addr = build_unary_op (loc, ADDR_EXPR, tmp, 0);
363 + TREE_ADDRESSABLE (tmp) = 1;
364 +
365 + /* Issue __atomic_load (&expr, &tmp, SEQ_CST); */
366 + fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_LOAD);
367 + params->quick_push (expr_addr);
368 + params->quick_push (tmp_addr);
369 + params->quick_push (seq_cst);
370 + func_call = build_function_call_vec (loc, fndecl, params, NULL);
371 + add_stmt (func_call);
372 +
373 + /* Return tmp which contains the value loaded. */
374 + return tmp;
375 + }
376 Index: gcc/c-family/c-common.h
377 ===================================================================
378 *** gcc/c-family/c-common.h (revision 201248)
379 --- gcc/c-family/c-common.h (working copy)
380 *************** extern int field_decl_cmp (const void *,
381 *** 547,552 ****
382 --- 547,554 ----
383 extern void resort_sorted_fields (void *, void *, gt_pointer_operator,
384 void *);
385 extern bool has_c_linkage (const_tree decl);
386 + extern tree build_atomic_load (location_t, tree);
387 +
388
389 /* Switches common to the C front ends. */
390
Attached Files
To refer to attachments on a page, use attachment:filename, as shown below in the list of files. Do NOT use the URL of the [get] link, since this is subject to change and can break easily. You are not allowed to attach a file to this page.