//== StackAddrEscapeChecker.cpp ------------------------------*- C++ -*--==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines stack address leak checker, which checks if an invalid
//  stack address is stored into a global or heap location. See CERT DCL30-C.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h"
#include "clang/AST/ExprCXX.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/raw_ostream.h"

using namespace clang;
using namespace ento;

namespace {
class StackAddrEscapeChecker
    : public Checker<check::PreCall, check::PreStmt<ReturnStmt>,
                     check::EndFunction> {
  mutable IdentifierInfo *dispatch_semaphore_tII;
  mutable std::unique_ptr<BuiltinBug> BT_stackleak;
  mutable std::unique_ptr<BuiltinBug> BT_returnstack;
  mutable std::unique_ptr<BuiltinBug> BT_capturedstackasync;
  mutable std::unique_ptr<BuiltinBug> BT_capturedstackret;

public:
  enum CheckKind {
    CK_StackAddrEscapeChecker,
    CK_StackAddrAsyncEscapeChecker,
    CK_NumCheckKinds
  };

  DefaultBool ChecksEnabled[CK_NumCheckKinds];

  void checkPreCall(const CallEvent &Call, CheckerContext &C) const;
  void checkPreStmt(const ReturnStmt *RS, CheckerContext &C) const;
  void checkEndFunction(const ReturnStmt *RS, CheckerContext &Ctx) const;

private:
  void checkReturnedBlockCaptures(const BlockDataRegion &B,
                                  CheckerContext &C) const;
  void checkAsyncExecutedBlockCaptures(const BlockDataRegion &B,
                                       CheckerContext &C) const;
  void EmitStackError(CheckerContext &C, const MemRegion *R,
                      const Expr *RetE) const;
  bool isSemaphoreCaptured(const BlockDecl &B) const;
  static SourceRange genName(raw_ostream &os, const MemRegion *R,
                             ASTContext &Ctx);
  static SmallVector<const MemRegion *, 4>
  getCapturedStackRegions(const BlockDataRegion &B, CheckerContext &C);
  static bool isArcManagedBlock(const MemRegion *R, CheckerContext &C);
  static bool isNotInCurrentFrame(const MemRegion *R, CheckerContext &C);
};
} // namespace
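
// Illustrative example of the primary pattern this checker reports
// (hypothetical code, not part of this source file):
//
//   int *f() {
//     int x = 0;
//     return &x; // "Address of stack memory associated with local variable
//   }            //  'x' returned to caller"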

SourceRange StackAddrEscapeChecker::genName(raw_ostream &os, const MemRegion *R,
                                            ASTContext &Ctx) {
  // Get the base region, stripping away fields and elements.
  R = R->getBaseRegion();
  SourceManager &SM = Ctx.getSourceManager();
  SourceRange range;
  os << "Address of ";

  // Check if the region is a compound literal.
  if (const auto *CR = dyn_cast<CompoundLiteralRegion>(R)) {
    const CompoundLiteralExpr *CL = CR->getLiteralExpr();
    os << "stack memory associated with a compound literal "
          "declared on line "
       << SM.getExpansionLineNumber(CL->getBeginLoc());
    range = CL->getSourceRange();
  } else if (const auto *AR = dyn_cast<AllocaRegion>(R)) {
    const Expr *ARE = AR->getExpr();
    SourceLocation L = ARE->getBeginLoc();
    range = ARE->getSourceRange();
    os << "stack memory allocated by call to alloca() on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *BR = dyn_cast<BlockDataRegion>(R)) {
    const BlockDecl *BD = BR->getCodeRegion()->getDecl();
    SourceLocation L = BD->getBeginLoc();
    range = BD->getSourceRange();
    os << "stack-allocated block declared on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *VR = dyn_cast<VarRegion>(R)) {
    os << "stack memory associated with local variable '" << VR->getString()
       << '\'';
    range = VR->getDecl()->getSourceRange();
  } else if (const auto *TOR = dyn_cast<CXXTempObjectRegion>(R)) {
    QualType Ty = TOR->getValueType().getLocalUnqualifiedType();
    os << "stack memory associated with temporary object of type '";
    Ty.print(os, Ctx.getPrintingPolicy());
    os << "'";
    range = TOR->getExpr()->getSourceRange();
  } else {
    llvm_unreachable("Invalid region in ReturnStackAddressChecker.");
  }

  return range;
}

bool StackAddrEscapeChecker::isArcManagedBlock(const MemRegion *R,
                                               CheckerContext &C) {
  assert(R && "MemRegion should not be null");
  return C.getASTContext().getLangOpts().ObjCAutoRefCount &&
         isa<BlockDataRegion>(R);
}

bool StackAddrEscapeChecker::isNotInCurrentFrame(const MemRegion *R,
                                                 CheckerContext &C) {
  const StackSpaceRegion *S = cast<StackSpaceRegion>(R->getMemorySpace());
  return S->getStackFrame() != C.getStackFrame();
}

bool StackAddrEscapeChecker::isSemaphoreCaptured(const BlockDecl &B) const {
  if (!dispatch_semaphore_tII)
    dispatch_semaphore_tII =
        &B.getASTContext().Idents.get("dispatch_semaphore_t");
  for (const auto &C : B.captures()) {
    const auto *T = C.getVariable()->getType()->getAs<TypedefType>();
    if (T && T->getDecl()->getIdentifier() == dispatch_semaphore_tII)
      return true;
  }
  return false;
}

SmallVector<const MemRegion *, 4>
StackAddrEscapeChecker::getCapturedStackRegions(const BlockDataRegion &B,
                                                CheckerContext &C) {
  // Collect the stack regions (if any) bound to the block's captured
  // variables.
  SmallVector<const MemRegion *, 4> Regions;
  BlockDataRegion::referenced_vars_iterator I = B.referenced_vars_begin();
  BlockDataRegion::referenced_vars_iterator E = B.referenced_vars_end();
  for (; I != E; ++I) {
    SVal Val = C.getState()->getSVal(I.getCapturedRegion());
    const MemRegion *Region = Val.getAsRegion();
    if (Region && isa<StackSpaceRegion>(Region->getMemorySpace()))
      Regions.push_back(Region);
  }
  return Regions;
}

void StackAddrEscapeChecker::EmitStackError(CheckerContext &C,
                                            const MemRegion *R,
                                            const Expr *RetE) const {
  ExplodedNode *N = C.generateNonFatalErrorNode();
  if (!N)
    return;
  if (!BT_returnstack)
    BT_returnstack = llvm::make_unique<BuiltinBug>(
        this, "Return of address to stack-allocated memory");

  SmallString<128> buf;
  llvm::raw_svector_ostream os(buf);
  SourceRange range = genName(os, R, C.getASTContext());
  os << " returned to caller";
  auto report = llvm::make_unique<BugReport>(*BT_returnstack, os.str(), N);
  report->addRange(RetE->getSourceRange());
  if (range.isValid())
    report->addRange(range);
  C.emitReport(std::move(report));
}

void StackAddrEscapeChecker::checkAsyncExecutedBlockCaptures(
    const BlockDataRegion &B, CheckerContext &C) const {
  // A block that captures a dispatch_semaphore_t is commonly used to
  // synchronize with the enclosing function: the block signals the semaphore
  // and the caller waits on it, so the block effectively finishes before the
  // enclosing stack frame is torn down. Captured stack addresses are unlikely
  // to be dangerous in that case, so skip such blocks to avoid false
  // positives.
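  // Illustrative instance of the idiom (hypothetical code; 'queue', 'local',
  // and 'use' are placeholders, not from this source file):
  //
  //   dispatch_semaphore_t sema = dispatch_semaphore_create(0);
  //   dispatch_async(queue, ^{
  //     use(&local);                     // capture of a stack address
  //     dispatch_semaphore_signal(sema); // block is done
  //   });
  //   dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);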
  if (isSemaphoreCaptured(*B.getDecl()))
    return;
  for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
    // The block passed to dispatch_async may capture another block
    // created on the stack. However, there is no leak in this situation:
    // dispatch_async copies the passed "outer" block (via Block_copy),
    // and if the outer block has captured another "inner" block,
    // the inner block is copied along with it.
    if (isa<BlockDataRegion>(Region))
      continue;
    ExplodedNode *N = C.generateNonFatalErrorNode();
    if (!N)
      continue;
    if (!BT_capturedstackasync)
      BT_capturedstackasync = llvm::make_unique<BuiltinBug>(
          this, "Address of stack-allocated memory is captured");
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, Region, C.getASTContext());
    Out << " is captured by an asynchronously-executed block";
    auto Report =
        llvm::make_unique<BugReport>(*BT_capturedstackasync, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    C.emitReport(std::move(Report));
  }
}

void StackAddrEscapeChecker::checkReturnedBlockCaptures(
    const BlockDataRegion &B, CheckerContext &C) const {
  // A block returned to the caller outlives the current frame, so any stack
  // address it captures from this frame (and that ARC does not manage) is
  // about to become dangling.
  for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
    if (isArcManagedBlock(Region, C) || isNotInCurrentFrame(Region, C))
      continue;
    ExplodedNode *N = C.generateNonFatalErrorNode();
    if (!N)
      continue;
    if (!BT_capturedstackret)
      BT_capturedstackret = llvm::make_unique<BuiltinBug>(
          this, "Address of stack-allocated memory is captured");
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, Region, C.getASTContext());
    Out << " is captured by a returned block";
    auto Report =
        llvm::make_unique<BugReport>(*BT_capturedstackret, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    C.emitReport(std::move(Report));
  }
}
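
// Illustrative case for checkReturnedBlockCaptures (hypothetical code; 'use'
// is a placeholder):
//
//   dispatch_block_t f() {
//     int x = 0;
//     int *px = &x;
//     return ^{ use(px); }; // the returned block captures the address of 'x'
//   }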

void StackAddrEscapeChecker::checkPreCall(const CallEvent &Call,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrAsyncEscapeChecker])
    return;
  if (!Call.isGlobalCFunction("dispatch_after") &&
      !Call.isGlobalCFunction("dispatch_async"))
    return;
  for (unsigned Idx = 0, NumArgs = Call.getNumArgs(); Idx < NumArgs; ++Idx) {
    if (const BlockDataRegion *B = dyn_cast_or_null<BlockDataRegion>(
            Call.getArgSVal(Idx).getAsRegion()))
      checkAsyncExecutedBlockCaptures(*B, C);
  }
}
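
// Illustrative trigger for checkPreCall (hypothetical code; 'queue' and 'use'
// are placeholders): the block may run after 'x' has gone out of scope, so
// the captured stack address is reported.
//
//   int x = 0;
//   int *px = &x;
//   dispatch_async(queue, ^{ use(px); });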

void StackAddrEscapeChecker::checkPreStmt(const ReturnStmt *RS,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  const Expr *RetE = RS->getRetValue();
  if (!RetE)
    return;
  RetE = RetE->IgnoreParens();

  SVal V = C.getSVal(RetE);
  const MemRegion *R = V.getAsRegion();
  if (!R)
    return;

  if (const BlockDataRegion *B = dyn_cast<BlockDataRegion>(R))
    checkReturnedBlockCaptures(*B, C);

  if (!isa<StackSpaceRegion>(R->getMemorySpace()) ||
      isNotInCurrentFrame(R, C) || isArcManagedBlock(R, C))
    return;

  // Returning a record by value is fine. (In this case, the returned
  // expression will be a copy-constructor, possibly wrapped in an
  // ExprWithCleanups node.)
  if (const ExprWithCleanups *Cleanup = dyn_cast<ExprWithCleanups>(RetE))
    RetE = Cleanup->getSubExpr();
  if (isa<CXXConstructExpr>(RetE) && RetE->getType()->isRecordType())
    return;

  // The CK_CopyAndAutoreleaseBlockObject cast causes the block to be copied
  // to the heap, so the stack address is not escaping here.
  if (auto *ICE = dyn_cast<ImplicitCastExpr>(RetE)) {
    if (isa<BlockDataRegion>(R) &&
        ICE->getCastKind() == CK_CopyAndAutoreleaseBlockObject) {
      return;
    }
  }

  EmitStackError(C, R, RetE);
}
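
// Illustrative cases for checkPreStmt (hypothetical code):
//
//   int *f() { int x = 0; return &x; } // reported: stack address returned
//   std::string g() { return "ok"; }   // not reported: record returned by
//                                      // value (a copy, not a stack address)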

void StackAddrEscapeChecker::checkEndFunction(const ReturnStmt *RS,
                                              CheckerContext &Ctx) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  ProgramStateRef State = Ctx.getState();

  // Iterate over all bindings to global variables and see if they contain
  // a memory region in the stack space.
  class CallBack : public StoreManager::BindingsHandler {
  private:
    CheckerContext &Ctx;
    const StackFrameContext *CurSFC;

  public:
    SmallVector<std::pair<const MemRegion *, const MemRegion *>, 10> V;

    CallBack(CheckerContext &CC) : Ctx(CC), CurSFC(CC.getStackFrame()) {}

    bool HandleBinding(StoreManager &SMgr, Store S, const MemRegion *Region,
                       SVal Val) override {
      if (!isa<GlobalsSpaceRegion>(Region->getMemorySpace()))
        return true;
      const MemRegion *VR = Val.getAsRegion();
      if (VR && isa<StackSpaceRegion>(VR->getMemorySpace()) &&
          !isArcManagedBlock(VR, Ctx) && !isNotInCurrentFrame(VR, Ctx))
        V.emplace_back(Region, VR);
      return true;
    }
  };

  CallBack Cb(Ctx);
  State->getStateManager().getStoreManager().iterBindings(State->getStore(),
                                                          Cb);

  if (Cb.V.empty())
    return;

  // Generate an error node.
  ExplodedNode *N = Ctx.generateNonFatalErrorNode(State);
  if (!N)
    return;

  if (!BT_stackleak)
    BT_stackleak = llvm::make_unique<BuiltinBug>(
        this, "Stack address stored into global variable",
        "Stack address was saved into a global variable. "
        "This is dangerous because the address will become "
        "invalid after returning from the function");

  for (const auto &P : Cb.V) {
    // Generate a report for this bug.
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, P.second, Ctx.getASTContext());
    Out << " is still referred to by the ";
    if (isa<StaticGlobalSpaceRegion>(P.first->getMemorySpace()))
      Out << "static";
    else
      Out << "global";
    Out << " variable '";
    const VarRegion *VR = cast<VarRegion>(P.first->getBaseRegion());
    Out << *VR->getDecl()
        << "' upon returning to the caller. This will be a dangling reference";
    auto Report = llvm::make_unique<BugReport>(*BT_stackleak, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);

    Ctx.emitReport(std::move(Report));
  }
}
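
// Illustrative trigger for checkEndFunction (hypothetical code):
//
//   int *gp;
//   void h() {
//     int x = 0;
//     gp = &x;
//   } // reported: 'x' is still referred to by the global variable 'gp'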

void ento::registerStackAddrEscapeBase(CheckerManager &mgr) {
  mgr.registerChecker<StackAddrEscapeChecker>();
}

bool ento::shouldRegisterStackAddrEscapeBase(const LangOptions &LO) {
  return true;
}

#define REGISTER_CHECKER(name)                                                 \
  void ento::register##name(CheckerManager &Mgr) {                             \
    StackAddrEscapeChecker *Chk =                                              \
        Mgr.getChecker<StackAddrEscapeChecker>();                              \
    Chk->ChecksEnabled[StackAddrEscapeChecker::CK_##name] = true;              \
  }                                                                            \
                                                                               \
  bool ento::shouldRegister##name(const LangOptions &LO) { return true; }

REGISTER_CHECKER(StackAddrEscapeChecker)
REGISTER_CHECKER(StackAddrAsyncEscapeChecker)