1 | // RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp -x c++ -emit-llvm %s -o - | FileCheck %s |
2 | // RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s |
3 | // RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s |
4 | |
5 | // RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp-simd -x c++ -emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY0 %s |
6 | // RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s |
7 | // RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck --check-prefix SIMD-ONLY0 %s |
8 | // SIMD-ONLY0-NOT: {{__kmpc|__tgt}} |
9 | // expected-no-diagnostics |
10 | #ifndef HEADER |
11 | #define HEADER |
12 | |
13 | // CHECK-DAG: [[IDENT_T:%.+]] = type { i32, i32, i32, i32, i8* } |
14 | // CHECK-DAG: [[STRUCT_SHAREDS:%.+]] = type { i8*, [2 x [[STRUCT_S:%.+]]]* } |
15 | // CHECK-DAG: [[STRUCT_SHAREDS1:%.+]] = type { [2 x [[STRUCT_S:%.+]]]* } |
16 | // CHECK-DAG: [[KMP_TASK_T:%.+]] = type { i8*, i32 (i32, i8*)*, i32, %union{{.+}}, %union{{.+}} } |
17 | // CHECK-DAG: [[KMP_DEPEND_INFO:%.+]] = type { i64, i64, i8 } |
// S is deliberately non-trivial: the user-provided default constructor,
// copy constructor, and destructor force the OpenMP lowering to emit real
// copy-construction/destruction for captured S objects, which the CHECK
// lines in main() rely on (e.g. the 40-byte kmp_task_t with destructor
// thunk slots).
struct S {
  int a;
  S() : a(0) {}
  S(const S &s) : a(s.a) {}
  ~S() {}
};
// Global shared across all tasks below; also used as the runtime bound of
// the VLA dimension in main(), so array-section dependences need runtime
// size math (the mul/sub sequences checked later).
int a;
25 | // CHECK-LABEL: @main |
// Each '#pragma omp task' below is paired with the CHECK lines directly
// above it; together they verify the IR sequence for that task:
// __kmpc_omp_task_alloc, capture-struct setup, optional kmp_depend_info
// array, then __kmpc_omp_task / __kmpc_omp_task_with_deps.
int main() {
// CHECK: [[B:%.+]] = alloca i8
// CHECK: [[S:%.+]] = alloca [2 x [[STRUCT_S]]]
  char b;
  S s[2];
  int arr[10][a];
// CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num([[IDENT_T]]* @{{.+}})
// CHECK: [[B_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store i8* [[B]], i8** [[B_REF]]
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES]], i32 0, i32 1
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 33, i64 40, i64 16, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY1:@.+]] to i32 (i32, i8*)*))
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[SHAREDS_REF]], i8* align 8 [[BITCAST]], i64 16, i1 false)
// CHECK: [[PRIORITY_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR]], i32 0, i32 4
// CHECK: [[PRIORITY:%.+]] = bitcast %union{{.+}}* [[PRIORITY_REF_PTR]] to i32*
// CHECK: store i32 {{.+}}, i32* [[PRIORITY]]
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Task with priority clause: alloc flags are i32 33 above (presumably
// tied | priority bits — matches the priority slot store that follows).
#pragma omp task shared(a, b, s) priority(b)
  {
    a = 15;
    b = a;
    s[0].a = 10;
  }
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS1]], [[STRUCT_SHAREDS1]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{[^,]+}}, i32 [[GTID]], i32 1, i64 40, i64 8,
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS1]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* align 8 [[SHAREDS_REF]], i8* align 8 [[BITCAST]], i64 8, i1 false)
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES:%.*]], i64 0, i64 0
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 4, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i64 0, i64 1
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint i8* [[B]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 1, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i64 0, i64 2
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint [2 x [[STRUCT_S]]]* [[S]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 8, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL:%.+]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr i32, i32* [[END]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END1]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 3
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: [[T1:%.*]] = ptrtoint i32* [[START]] to i64
// CHECK: store i64 [[T1]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEPS:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i{{32|64}} 0, i{{32|64}} 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* [[DEPS]] to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 4, i8* %{{[^,]+}}, i32 0, i8* null)
// Four 'in' dependences (a, b, s, and the whole section arr[:]); the
// kind byte stored for each entry is i8 1 per the CHECK lines above.
// The arr[:] entry needs the runtime VLA size computation checked above.
#pragma omp task shared(a, s) depend(in : a, b, s, arr[:])
  {
    a = 15;
    s[1].a = 10;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY2:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Untied task (alloc flags i32 0) containing a critical region.
#pragma omp task untied
  {
#pragma omp critical
    a = 1;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1,
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 0
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 4, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 [[IDX2]]
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i{{32|64}} 0, i{{32|64}} 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 2, i8* %{{[^,]+}}, i32 0, i8* null)
// Two 'out' dependences (s[0] and the section arr[4:][b]); kind byte i8 3.
// arr[4:][b] indexes with runtime 'b', hence the sext/mul address math.
#pragma omp task untied depend(out : s[0], arr[4:][b])
  {
    a = 1;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1,
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 0
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 4, i8*
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 4, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 [[IDX2]]
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 4, i8*
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i{{32|64}} 0, i{{32|64}} 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 2, i8* %{{[^,]+}}, i32 0, i8* null)
// Same two dependences as above but with kind 'mutexinoutset', which is
// encoded as kind byte i8 4 in each kmp_depend_info entry.
#pragma omp task untied depend(mutexinoutset: s[0], arr[4:][b])
  {
    a = 1;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 3, i64 40, i64 1,
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 1
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 3
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[IDX2:%.+]] = sub nsw i64 [[NEW_A_VAL_I64]], 1
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[SUB:%.+]] = add nsw i64 -1, [[NEW_A_VAL_I64]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 [[SUB]], [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 2
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i{{32|64}} 0, i{{32|64}} 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 3, i8* %{{[^,]+}}, i32 0, i8* null)
// final(true) with three 'inout' dependences: alloc flags i32 3 above
// (presumably tied | final), kind byte i8 3 for inout, and the section
// arr[:a][3:] needs both VLA bounds loaded from the global 'a'.
#pragma omp task final(true) depend(inout: a, s[1], arr[:a][3:])
  {
    a = 2;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 3, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY3:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Constant-true final clause folds into the alloc flags (i32 3).
#pragma omp task final(true)
  {
    a = 2;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 1, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY4:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Constant-false final clause also folds: plain flags i32 1 above.
  const bool flag = false;
#pragma omp task final(flag)
  {
    a = 3;
  }
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[CMP:%.+]] = icmp ne i8 [[B_VAL]], 0
// CHECK: [[FINAL:%.+]] = select i1 [[CMP]], i32 2, i32 0
// CHECK: [[FLAGS:%.+]] = or i32 [[FINAL]], 1
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 [[FLAGS]], i64 40, i64 8, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY5:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Runtime final(b): flags computed at run time via icmp/select/or above.
// 'c' is over-aligned to check the 128-byte store alignment in TASK_ENTRY5.
  int c __attribute__((aligned(128)));
#pragma omp task final(b) shared(c)
  {
    a = 4;
    c = 5;
  }
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY6:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
// Untied task with nested task / taskyield / taskwait: TASK_ENTRY6's
// switch-based resumption is verified after main's body.
#pragma omp task untied
  {
    S s1;
#pragma omp task
    a = 4;
#pragma omp taskyield
    s1 = S();
#pragma omp taskwait
  }
  return a;
}
277 | // CHECK: define internal i32 [[TASK_ENTRY1]](i32, [[KMP_TASK_T]]{{.*}}* noalias) |
278 | // CHECK: store i32 15, i32* [[A_PTR:@.+]] |
279 | // CHECK: [[A_VAL:%.+]] = load i32, i32* [[A_PTR]] |
280 | // CHECK: [[A_VAL_I8:%.+]] = trunc i32 [[A_VAL]] to i8 |
281 | // CHECK: store i8 [[A_VAL_I8]], i8* %{{.+}} |
282 | // CHECK: store i32 10, i32* %{{.+}} |
283 | |
284 | // CHECK: define internal i32 [[TASK_ENTRY2]](i32, [[KMP_TASK_T]]{{.*}}* noalias) |
285 | // CHECK: store i32 1, i32* [[A_PTR]] |
286 | |
287 | // CHECK: define internal i32 [[TASK_ENTRY3]](i32, [[KMP_TASK_T]]{{.*}}* noalias) |
288 | // CHECK: store i32 2, i32* [[A_PTR]] |
289 | |
290 | // CHECK: define internal i32 [[TASK_ENTRY4]](i32, [[KMP_TASK_T]]{{.*}}* noalias) |
291 | // CHECK: store i32 3, i32* [[A_PTR]] |
292 | |
293 | // CHECK: define internal i32 [[TASK_ENTRY5]](i32, [[KMP_TASK_T]]{{.*}}* noalias) |
294 | // CHECK: store i32 4, i32* [[A_PTR]] |
295 | // CHECK: store i32 5, i32* [[C_PTR:%.+]], align 128 |
296 | |
297 | // CHECK: define internal i32 |
298 | // CHECK: store i32 4, i32* [[A_PTR]] |
299 | |
300 | // CHECK: define internal i32 [[TASK_ENTRY6]](i32, [[KMP_TASK_T]]{{.*}}* noalias) |
301 | // CHECK: switch i32 %{{.+}}, label |
302 | // CHECK: load i32*, i32** % |
303 | // CHECK: store i32 1, i32* % |
304 | // CHECK: call i32 @__kmpc_omp_task(% |
305 | |
306 | // CHECK: call i8* @__kmpc_omp_task_alloc( |
307 | // CHECK: call i32 @__kmpc_omp_task(% |
308 | // CHECK: load i32*, i32** % |
309 | // CHECK: store i32 2, i32* % |
310 | // CHECK: call i32 @__kmpc_omp_task(% |
311 | |
312 | // CHECK: call i32 @__kmpc_omp_taskyield(% |
313 | // CHECK: load i32*, i32** % |
314 | // CHECK: store i32 3, i32* % |
315 | // CHECK: call i32 @__kmpc_omp_task(% |
316 | |
317 | // CHECK: call i32 @__kmpc_omp_taskwait(% |
318 | // CHECK: load i32*, i32** % |
319 | // CHECK: store i32 4, i32* % |
320 | // CHECK: call i32 @__kmpc_omp_task(% |
321 | |
// Global object whose constructor runs a member function containing a
// task: verifies that task allocation is emitted in code reached from a
// global initializer (see the CHECK-LABEL: taskinit below).
struct S1 {
  int a;
  S1() { taskinit(); }
  void taskinit() {
#pragma omp task
    a = 0;
  }
} s1;
330 | |
331 | // CHECK-LABEL: taskinit |
332 | // CHECK: call i8* @__kmpc_omp_task_alloc( |
333 | |
334 | #endif |
335 | |