Newer
Older
Ted Kremenek
committed
EvalStore(TmpDst, StoreE, Pred, St, location, Val);
for (NodeSet::iterator I=TmpDst.begin(), E=TmpDst.end(); I!=E; ++I)
MakeNode(Dst, Ex, *I, (*I)->getState());
}
// NOTE(review): this definition is corrupted in this copy -- the parameter
// list after 'NodeTy* Pred' (state, location, isLoad) and the opening brace
// are missing, VCS blame text ("Zhongxing Xu"/"Ted Kremenek"/"committed") is
// interleaved with the code, and the closing brace is absent. Recover the
// original from upstream history before building.
//
// EvalLocation - Pre-check a load/store target 'location' and return the
// state on which evaluation should continue, or NULL when evaluation must
// stop (a sink error node was generated, or the remaining path is
// infeasible).
const GRState* GRExprEngine::EvalLocation(Stmt* Ex, NodeTy* Pred,
// Check for loads/stores from/to undefined values.
if (location.isUndef()) {
ProgramPoint::Kind K =
isLoad ? ProgramPoint::PostLoadKind : ProgramPoint::PostStmtKind;
// Generate a sink node: dereferencing an undefined value ends the path.
if (NodeTy* Succ = Builder->generateNode(Ex, St, Pred, K)) {
Succ->markAsSink();
UndefDeref.insert(Succ);
}
return NULL;
}
// Check for loads/stores from/to unknown locations. Treat as No-Ops.
if (location.isUnknown())
return St;
// During a load, one of two possible situations arise:
// (1) A crash, because the location (pointer) was NULL.
// (2) The location (pointer) is not NULL, and the dereference works.
//
// We add these assumptions.
Zhongxing Xu
committed
Loc LV = cast<Loc>(location);
// "Assume" that the pointer is not NULL.
bool isFeasibleNotNull = false;
const GRState* StNotNull = Assume(St, LV, true, isFeasibleNotNull);
// "Assume" that the pointer is NULL.
bool isFeasibleNull = false;
Ted Kremenek
committed
GRStateRef StNull = GRStateRef(Assume(St, LV, false, isFeasibleNull),
getStateManager());
if (isFeasibleNull) {
Ted Kremenek
committed
// Use the Generic Data Map to mark in the state what lval was null.
Zhongxing Xu
committed
const SVal* PersistentLV = getBasicVals().getPersistentSVal(LV);
Ted Kremenek
committed
StNull = StNull.set<GRState::NullDerefTag>(PersistentLV);
// We don't use "MakeNode" here because the node will be a sink
// and we have no intention of processing it later.
ProgramPoint::Kind K =
isLoad ? ProgramPoint::PostLoadKind : ProgramPoint::PostStmtKind;
NodeTy* NullNode = Builder->generateNode(Ex, StNull, Pred, K);
if (NullNode) {
NullNode->markAsSink();
// If the not-null branch is also feasible the null deref is only
// implicit (path-dependent); otherwise it is an explicit null deref.
if (isFeasibleNotNull) ImplicitNullDeref.insert(NullNode);
else ExplicitNullDeref.insert(NullNode);
}
}
Zhongxing Xu
committed
// Check for out-of-bound array access.
if (isFeasibleNotNull && isa<loc::MemRegionVal>(LV)) {
const MemRegion* R = cast<loc::MemRegionVal>(LV).getRegion();
if (const ElementRegion* ER = dyn_cast<ElementRegion>(R)) {
// Get the index of the accessed element.
SVal Idx = ER->getIndex();
// Get the extent of the array.
SVal NumElements = StateMgr.getStoreManager().getSizeInElements(StNotNull,
ER->getSuperRegion());
bool isFeasibleInBound = false;
const GRState* StInBound = AssumeInBound(StNotNull, Idx, NumElements,
true, isFeasibleInBound);
bool isFeasibleOutBound = false;
const GRState* StOutBound = AssumeInBound(StNotNull, Idx, NumElements,
false, isFeasibleOutBound);
if (isFeasibleOutBound) {
// Report warning.
// NOTE(review): StOutBound is computed and then discarded; the
// out-of-bounds diagnostic emission appears unimplemented here.
StOutBound = 0;
}
// Continue only on the in-bounds state; NULL stops evaluation.
return isFeasibleInBound ? StInBound : NULL;
Zhongxing Xu
committed
}
}
return isFeasibleNotNull ? StNotNull : NULL;
//===----------------------------------------------------------------------===//
// Transfer function: Function calls.
//===----------------------------------------------------------------------===//
// VisitCall - Entry point for evaluating a function call. Recovers the
// callee's prototype (when statically known) so that arguments bound to
// reference parameters can be visited as lvalues, then recurses over the
// argument list via VisitCallRec.
void GRExprEngine::VisitCall(CallExpr* CE, NodeTy* Pred,
                             CallExpr::arg_iterator AI,
                             CallExpr::arg_iterator AE,
                             NodeSet& Dst) {
  const FunctionTypeProto* Proto = NULL;
  QualType CalleeTy = CE->getCallee()->IgnoreParens()->getType();
  if (const PointerType* PT = CalleeTy->getAsPointerType())
    Proto = PT->getPointeeType()->getAsFunctionTypeProto();

  VisitCallRec(CE, Pred, AI, AE, Dst, Proto, /*ParamIdx=*/0);
}
// NOTE(review): this block is corrupted in this copy -- VCS blame text is
// interleaved, several declarations are missing (e.g. the 'const GRState*
// St = GetState(*DI);' lines, the Builtin-ID lookup that should define
// 'id', the 'case 6:' and 'case 14:' labels, and closing braces in the
// strlen switch). Recover from upstream history; annotations below describe
// the visible intent only.
//
// VisitCallRec - Recursively evaluate call arguments left-to-right (as
// lvalues when bound to reference parameters), then evaluate the callee,
// screen for bad calls / noreturn functions / builtins, and finally
// dispatch to the plug-in transfer function via EvalCall.
void GRExprEngine::VisitCallRec(CallExpr* CE, NodeTy* Pred,
CallExpr::arg_iterator AI,
CallExpr::arg_iterator AE,
NodeSet& Dst, const FunctionTypeProto *Proto,
unsigned ParamIdx) {
// Process the arguments.
if (AI != AE) {
// If the call argument is being bound to a reference parameter,
// visit it as an lvalue, not an rvalue.
bool VisitAsLvalue = false;
if (Proto && ParamIdx < Proto->getNumArgs())
VisitAsLvalue = Proto->getArgType(ParamIdx)->isReferenceType();
NodeSet DstTmp;
if (VisitAsLvalue)
VisitLValue(*AI, Pred, DstTmp);
else
Visit(*AI, Pred, DstTmp);
++AI;
// Recurse on the remaining arguments from every node the argument
// evaluation produced (the path may have split).
for (NodeSet::iterator DI=DstTmp.begin(), DE=DstTmp.end(); DI != DE; ++DI)
VisitCallRec(CE, *DI, AI, AE, Dst, Proto, ParamIdx + 1);
return;
}
// If we reach here we have processed all of the arguments. Evaluate
// the callee expression.
NodeSet DstTmp;
Expr* Callee = CE->getCallee()->IgnoreParens();
Zhongxing Xu
committed
Visit(Callee, Pred, DstTmp);
// Finally, evaluate the function call.
for (NodeSet::iterator DI = DstTmp.begin(), DE = DstTmp.end(); DI!=DE; ++DI) {
Zhongxing Xu
committed
// NOTE(review): 'St' is used below but its declaration is missing here.
SVal L = GetSVal(St, Callee);
// FIXME: Add support for symbolic function calls (calls involving
// function pointer values that are symbolic).
// Check for undefined control-flow or calls to NULL.
Zhongxing Xu
committed
if (L.isUndef() || isa<loc::ConcreteInt>(L)) {
NodeTy* N = Builder->generateNode(CE, St, *DI);
if (N) {
N->markAsSink();
BadCalls.insert(N);
}
Ted Kremenek
committed
}
// Check for the "noreturn" attribute.
SaveAndRestore<bool> OldSink(Builder->BuildSinks);
Zhongxing Xu
committed
if (isa<loc::FuncVal>(L)) {
Zhongxing Xu
committed
FunctionDecl* FD = cast<loc::FuncVal>(L).getDecl();
if (FD->getAttr<NoReturnAttr>())
Ted Kremenek
committed
Builder->BuildSinks = true;
else {
// HACK: Some functions are not marked noreturn, and don't return.
// Here are a few hardwired ones. If this takes too long, we can
// potentially cache these results.
const char* s = FD->getIdentifier()->getName();
unsigned n = strlen(s);
// Switch on the name length first to avoid many memcmp calls.
switch (n) {
default:
break;
case 4:
if (!memcmp(s, "exit", 4)) Builder->BuildSinks = true;
break;
case 5:
if (!memcmp(s, "panic", 5)) Builder->BuildSinks = true;
else if (!memcmp(s, "error", 5)) {
if (CE->getNumArgs() > 0) {
Zhongxing Xu
committed
SVal X = GetSVal(St, *CE->arg_begin());
// FIXME: use Assume to inspect the possible symbolic value of
// X. Also check the specific signature of error().
Zhongxing Xu
committed
nonloc::ConcreteInt* CI = dyn_cast<nonloc::ConcreteInt>(&X);
if (CI && CI->getValue() != 0)
Builder->BuildSinks = true;
// NOTE(review): a 'case 6:' label (and closing braces for the
// 'error' branch) should precede the 6-byte comparisons below.
if (!memcmp(s, "Assert", 6)) {
Builder->BuildSinks = true;
break;
}
// FIXME: This is just a wrapper around throwing an exception.
// Eventually inter-procedural analysis should handle this easily.
if (!memcmp(s, "ziperr", 6)) Builder->BuildSinks = true;
break;
case 7:
if (!memcmp(s, "assfail", 7)) Builder->BuildSinks = true;
case 8:
if (!memcmp(s ,"db_error", 8)) Builder->BuildSinks = true;
break;
case 12:
if (!memcmp(s, "__assert_rtn", 12)) Builder->BuildSinks = true;
break;
case 13:
if (!memcmp(s, "__assert_fail", 13)) Builder->BuildSinks = true;
break;
// NOTE(review): a 'case 14:' label is missing before this comparison.
if (!memcmp(s, "dtrace_assfail", 14) ||
!memcmp(s, "yy_fatal_error", 14))
Builder->BuildSinks = true;
// NOTE(review): a 'case 26:' label and the statement body are missing
// around this condition.
if (!memcmp(s, "_XCAssertionFailureHandler", 26) ||
!memcmp(s, "_DTAssertionFailureHandler", 26))
}
}
}
Ted Kremenek
committed
// Evaluate the call.
Zhongxing Xu
committed
if (isa<loc::FuncVal>(L)) {
Zhongxing Xu
committed
IdentifierInfo* Info = cast<loc::FuncVal>(L).getDecl()->getIdentifier();
// NOTE(review): 'id' is used but never declared here; the Builtin-ID
// lookup from 'Info' appears to be missing.
switch (id) {
case Builtin::BI__builtin_expect: {
// For __builtin_expect, just return the value of the subexpression.
assert (CE->arg_begin() != CE->arg_end());
Zhongxing Xu
committed
SVal X = GetSVal(St, *(CE->arg_begin()));
continue;
}
case Builtin::BI__builtin_alloca: {
// FIXME: Handle size.
// FIXME: Refactor into StoreManager itself?
MemRegionManager& RM = getStateManager().getRegionManager();
const MemRegion* R =
RM.getAllocaRegion(CE, Builder->getCurrentBlockCount());
MakeNode(Dst, CE, *DI, BindExpr(St, CE, loc::MemRegionVal(R)));
continue;
}
default:
break;
}
}
// Check any arguments passed-by-value against being undefined.
bool badArg = false;
for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
I != E; ++I) {
Zhongxing Xu
committed
if (GetSVal(GetState(*DI), *I).isUndef()) {
NodeTy* N = Builder->generateNode(CE, GetState(*DI), *DI);
if (N) {
N->markAsSink();
UndefArgs[N] = *I;
}
// NOTE(review): 'badArg' is tested but never set; the assignment and
// enclosing braces appear to be missing from this copy.
if (badArg)
continue;
// Dispatch to the plug-in transfer function.
unsigned size = Dst.size();
SaveOr OldHasGen(Builder->HasGeneratedNode);
EvalCall(Dst, CE, L, *DI);
// Handle the case where no nodes where generated. Auto-generate that
// contains the updated state if we aren't generating sinks.
if (!Builder->BuildSinks && Dst.size() == size &&
!Builder->HasGeneratedNode)
MakeNode(Dst, CE, *DI, St);
}
}
//===----------------------------------------------------------------------===//
// Transfer function: Objective-C ivar references.
//===----------------------------------------------------------------------===//
// NOTE(review): this block is corrupted in this copy -- the asLValue branch
// (the 'if' matching the dangling 'else' below) is missing, and a run of
// bare line numbers from the scraped view is embedded before the closing
// brace. Recover from upstream history.
//
// VisitObjCIvarRefExpr - Evaluate an Objective-C ivar reference: evaluate
// the base expression, form the ivar lvalue from the base value, then
// either bind the lvalue (asLValue) or load from it.
void GRExprEngine::VisitObjCIvarRefExpr(ObjCIvarRefExpr* Ex,
NodeTy* Pred, NodeSet& Dst,
bool asLValue) {
Expr* Base = cast<Expr>(Ex->getBase());
NodeSet Tmp;
Visit(Base, Pred, Tmp);
for (NodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
const GRState* St = GetState(*I);
Zhongxing Xu
committed
SVal BaseVal = GetSVal(St, Base);
SVal location = StateMgr.GetLValue(St, Ex->getDecl(), BaseVal);
else
EvalLoad(Dst, Ex, *I, St, location);
}
Ted Kremenek
committed
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
}
//===----------------------------------------------------------------------===//
// Transfer function: Objective-C fast enumeration 'for' statements.
//===----------------------------------------------------------------------===//
// NOTE(review): VCS blame text ("Ted Kremenek"/"committed") is interleaved
// with this definition in this copy; the code itself appears complete.
void GRExprEngine::VisitObjCForCollectionStmt(ObjCForCollectionStmt* S,
NodeTy* Pred, NodeSet& Dst) {
// ObjCForCollectionStmts are processed in two places. This method
// handles the case where an ObjCForCollectionStmt* occurs as one of the
// statements within a basic block. This transfer function does two things:
//
// (1) binds the next container value to 'element'. This creates a new
// node in the ExplodedGraph.
//
// (2) binds the value 0/1 to the ObjCForCollectionStmt* itself, indicating
// whether or not the container has any more elements. This value
// will be tested in ProcessBranch. We need to explicitly bind
// this value because a container can contain nil elements.
//
// FIXME: Eventually this logic should actually do dispatches to
// 'countByEnumeratingWithState:objects:count:' (NSFastEnumeration).
// This will require simulating a temporary NSFastEnumerationState, either
// through an SVal or through the use of MemRegions. This value can
// be affixed to the ObjCForCollectionStmt* instead of 0/1; when the loop
// terminates we reclaim the temporary (it goes out of scope) and we
// we can test if the SVal is 0 or if the MemRegion is null (depending
// on what approach we take).
//
// For now: simulate (1) by assigning either a symbol or nil if the
// container is empty. Thus this transfer function will by default
// result in state splitting.
Stmt* elem = S->getElement();
SVal ElementV;
Ted Kremenek
committed
// Fast path: 'for (Type x in container)' declares the element inline, so
// the element lvalue comes straight from the declaration.
if (DeclStmt* DS = dyn_cast<DeclStmt>(elem)) {
VarDecl* ElemD = cast<VarDecl>(DS->getSolitaryDecl());
Ted Kremenek
committed
assert (ElemD->getInit() == 0);
ElementV = getStateManager().GetLValue(GetState(Pred), ElemD);
VisitObjCForCollectionStmtAux(S, Pred, Dst, ElementV);
return;
Ted Kremenek
committed
}
// Otherwise the element is an arbitrary lvalue expression; evaluate it
// first (this can split the path).
NodeSet Tmp;
VisitLValue(cast<Expr>(elem), Pred, Tmp);
Ted Kremenek
committed
for (NodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I!=E; ++I) {
const GRState* state = GetState(*I);
VisitObjCForCollectionStmtAux(S, *I, Dst, GetSVal(state, elem));
}
}
// NOTE(review): blame text is interleaved and the closing brace of this
// definition is missing in this copy.
//
// VisitObjCForCollectionStmtAux - Given the element lvalue, split the state
// into "container has more elements" (bind 1 to the statement, conjure a
// symbol for the element) and "container exhausted" (bind 0, bind nil).
void GRExprEngine::VisitObjCForCollectionStmtAux(ObjCForCollectionStmt* S,
NodeTy* Pred, NodeSet& Dst,
SVal ElementV) {
Ted Kremenek
committed
// Get the current state. Use 'EvalLocation' to determine if it is a null
// pointer, etc.
Stmt* elem = S->getElement();
GRStateRef state = GRStateRef(EvalLocation(elem, Pred, GetState(Pred),
ElementV, false),
getStateManager());
Ted Kremenek
committed
// EvalLocation returns NULL when it generated a sink (e.g. null element
// lvalue); nothing more to do on this path.
if (!state)
return;
Ted Kremenek
committed
// Handle the case where the container still has elements.
QualType IntTy = getContext().IntTy;
Ted Kremenek
committed
SVal TrueV = NonLoc::MakeVal(getBasicVals(), 1, IntTy);
GRStateRef hasElems = state.BindExpr(S, TrueV);
// Handle the case where the container has no elements.
SVal FalseV = NonLoc::MakeVal(getBasicVals(), 0, IntTy);
GRStateRef noElems = state.BindExpr(S, FalseV);
if (loc::MemRegionVal* MV = dyn_cast<loc::MemRegionVal>(&ElementV))
if (const TypedRegion* R = dyn_cast<TypedRegion>(MV->getRegion())) {
// FIXME: The proper thing to do is to really iterate over the
// container. We will do this with dispatch logic to the store.
// For now, just 'conjure' up a symbolic value.
QualType T = R->getType(getContext());
assert (Loc::IsLocType(T));
unsigned Count = Builder->getCurrentBlockCount();
loc::SymbolVal SymV(SymMgr.getConjuredSymbol(elem, T, Count));
hasElems = hasElems.BindLoc(ElementV, SymV);
// Bind the location to 'nil' on the false branch.
SVal nilV = loc::ConcreteInt(getBasicVals().getValue(0, T));
noElems = noElems.BindLoc(ElementV, nilV);
}
// Create the new nodes.
MakeNode(Dst, S, Pred, hasElems);
MakeNode(Dst, S, Pred, noElems);
//===----------------------------------------------------------------------===//
// Transfer function: Objective-C message expressions.
//===----------------------------------------------------------------------===//
// VisitObjCMessageExpr - Entry point for Objective-C message sends: hand
// the full argument range to the recursive helper, which evaluates each
// argument and finally dispatches the message itself.
void GRExprEngine::VisitObjCMessageExpr(ObjCMessageExpr* ME, NodeTy* Pred,
                                        NodeSet& Dst) {
  ObjCMessageExpr::arg_iterator Begin = ME->arg_begin();
  ObjCMessageExpr::arg_iterator End = ME->arg_end();
  VisitObjCMessageExprArgHelper(ME, Begin, End, Pred, Dst);
}
// NOTE(review): a run of bare line numbers from the scraped view is embedded
// between the signature and the body in this copy; the code itself appears
// complete.
//
// VisitObjCMessageExprArgHelper - Recursively evaluate message arguments
// left-to-right; once all arguments are done, evaluate the receiver (if
// any) and dispatch the message.
void GRExprEngine::VisitObjCMessageExprArgHelper(ObjCMessageExpr* ME,
ObjCMessageExpr::arg_iterator AI,
ObjCMessageExpr::arg_iterator AE,
NodeTy* Pred, NodeSet& Dst) {
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
// Base case: all arguments evaluated.
if (AI == AE) {
// Process the receiver.
if (Expr* Receiver = ME->getReceiver()) {
NodeSet Tmp;
Visit(Receiver, Pred, Tmp);
for (NodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI != NE; ++NI)
VisitObjCMessageExprDispatchHelper(ME, *NI, Dst);
return;
}
// Class message (no receiver expression): dispatch directly.
VisitObjCMessageExprDispatchHelper(ME, Pred, Dst);
return;
}
// Evaluate the next argument, then recurse from each resulting node.
NodeSet Tmp;
Visit(*AI, Pred, Tmp);
++AI;
for (NodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI != NE; ++NI)
VisitObjCMessageExprArgHelper(ME, AI, AE, *NI, Dst);
}
// NOTE(review): this block is corrupted in this copy -- the declaration of
// 'St' is missing (replaced by blame text), a run of bare line numbers is
// embedded in the receiver branch, and the closing brace is absent. Recover
// from upstream history.
//
// VisitObjCMessageExprDispatchHelper - Final step of message-send
// evaluation: screen for undefined receivers/arguments (sinks), detect
// exception-raising selectors (treated as sinks for now), then dispatch to
// the plug-in transfer function.
void GRExprEngine::VisitObjCMessageExprDispatchHelper(ObjCMessageExpr* ME,
NodeTy* Pred,
NodeSet& Dst) {
// FIXME: More logic for the processing the method call.
Ted Kremenek
committed
bool RaisesException = false;
if (Expr* Receiver = ME->getReceiver()) {
Zhongxing Xu
committed
SVal L = GetSVal(St, Receiver);
// Check for undefined control-flow or calls to NULL.
if (L.isUndef()) {
NodeTy* N = Builder->generateNode(ME, St, Pred);
if (N) {
N->markAsSink();
UndefReceivers.insert(N);
}
return;
}
Ted Kremenek
committed
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
// Check if the "raise" message was sent.
if (ME->getSelector() == RaiseSel)
RaisesException = true;
}
else {
IdentifierInfo* ClsName = ME->getClassName();
Selector S = ME->getSelector();
// Check for special instance methods.
if (!NSExceptionII) {
ASTContext& Ctx = getContext();
NSExceptionII = &Ctx.Idents.get("NSException");
}
if (ClsName == NSExceptionII) {
enum { NUM_RAISE_SELECTORS = 2 };
// Lazily create a cache of the selectors.
if (!NSExceptionInstanceRaiseSelectors) {
ASTContext& Ctx = getContext();
NSExceptionInstanceRaiseSelectors = new Selector[NUM_RAISE_SELECTORS];
llvm::SmallVector<IdentifierInfo*, NUM_RAISE_SELECTORS> II;
unsigned idx = 0;
// raise:format:
II.push_back(&Ctx.Idents.get("raise"));
II.push_back(&Ctx.Idents.get("format"));
Ted Kremenek
committed
NSExceptionInstanceRaiseSelectors[idx++] =
Ctx.Selectors.getSelector(II.size(), &II[0]);
// raise:format::arguments:
II.push_back(&Ctx.Idents.get("arguments"));
Ted Kremenek
committed
NSExceptionInstanceRaiseSelectors[idx++] =
Ctx.Selectors.getSelector(II.size(), &II[0]);
}
for (unsigned i = 0; i < NUM_RAISE_SELECTORS; ++i)
if (S == NSExceptionInstanceRaiseSelectors[i]) {
RaisesException = true; break;
}
}
}
// Check for any arguments that are uninitialized/undefined.
for (ObjCMessageExpr::arg_iterator I = ME->arg_begin(), E = ME->arg_end();
I != E; ++I) {
Zhongxing Xu
committed
if (GetSVal(St, *I).isUndef()) {
// Generate an error node for passing an uninitialized/undefined value
// as an argument to a message expression. This node is a sink.
NodeTy* N = Builder->generateNode(ME, St, Pred);
if (N) {
N->markAsSink();
MsgExprUndefArgs[N] = *I;
}
return;
}
Ted Kremenek
committed
}
// Check if we raise an exception. For now treat these as sinks. Eventually
// we will want to handle exceptions properly.
SaveAndRestore<bool> OldSink(Builder->BuildSinks);
if (RaisesException)
Builder->BuildSinks = true;
// Dispatch to plug-in transfer function.
unsigned size = Dst.size();
SaveOr OldHasGen(Builder->HasGeneratedNode);
EvalObjCMessageExpr(Dst, ME, Pred);
// Handle the case where no nodes where generated. Auto-generate that
// contains the updated state if we aren't generating sinks.
if (!Builder->BuildSinks && Dst.size() == size && !Builder->HasGeneratedNode)
MakeNode(Dst, ME, Pred, St);
//===----------------------------------------------------------------------===//
// Transfer functions: Miscellaneous statements.
//===----------------------------------------------------------------------===//
// NOTE(review): this block is corrupted in this copy -- several statements
// (the 'St' declaration, the MakeNode calls in the undef/const-cast/
// pointer-int branches) are replaced by VCS blame text, and the closing
// brace is missing. Recover from upstream history.
//
// VisitCast - Evaluate a cast expression: evaluate the operand (as an
// lvalue for array/function/reference types), then translate its value
// according to the kind of cast (void, const, ptr<->int, array decay,
// region cast), falling through to EvalCast for everything else.
void GRExprEngine::VisitCast(Expr* CastE, Expr* Ex, NodeTy* Pred, NodeSet& Dst){
NodeSet S1;
QualType T = CastE->getType();
QualType ExTy = Ex->getType();
// Prefer the type as written for explicit casts.
if (const ExplicitCastExpr *ExCast=dyn_cast_or_null<ExplicitCastExpr>(CastE))
T = ExCast->getTypeAsWritten();
if (ExTy->isArrayType() || ExTy->isFunctionType() || T->isReferenceType())
Zhongxing Xu
committed
VisitLValue(Ex, Pred, S1);
else
Visit(Ex, Pred, S1);
Ted Kremenek
committed
// Check for casting to "void".
if (T->isVoidType()) {
for (NodeSet::iterator I1 = S1.begin(), E1 = S1.end(); I1 != E1; ++I1)
Dst.Add(*I1);
return;
}
Ted Kremenek
committed
// FIXME: The rest of this should probably just go into EvalCall, and
// let the transfer function object be responsible for constructing
// nodes.
for (NodeSet::iterator I1 = S1.begin(), E1 = S1.end(); I1 != E1; ++I1) {
NodeTy* N = *I1;
Zhongxing Xu
committed
SVal V = GetSVal(St, Ex);
Ted Kremenek
committed
// Unknown?
if (V.isUnknown()) {
Dst.Add(N);
continue;
}
// Undefined?
if (V.isUndef()) {
Ted Kremenek
committed
continue;
}
Ted Kremenek
committed
// For const casts, just propagate the value.
ASTContext& C = getContext();
if (C.getCanonicalType(T).getUnqualifiedType() ==
C.getCanonicalType(ExTy).getUnqualifiedType()) {
Ted Kremenek
committed
continue;
}
Ted Kremenek
committed
// Check for casts from pointers to integers.
Zhongxing Xu
committed
if (T->isIntegerType() && Loc::IsLocType(ExTy)) {
Ted Kremenek
committed
unsigned bits = getContext().getTypeSize(ExTy);
// FIXME: Determine if the number of bits of the target type is
// equal or exceeds the number of bits to store the pointer value.
// If not, flag an error.
Zhongxing Xu
committed
V = nonloc::LocAsInteger::Make(getBasicVals(), cast<Loc>(V), bits);
Ted Kremenek
committed
continue;
}
// Check for casts from integers to pointers.
Zhongxing Xu
committed
if (Loc::IsLocType(T) && ExTy->isIntegerType())
if (nonloc::LocAsInteger *LV = dyn_cast<nonloc::LocAsInteger>(&V)) {
Ted Kremenek
committed
// Just unpackage the lval and return it.
Zhongxing Xu
committed
V = LV->getLoc();
Ted Kremenek
committed
continue;
}
Zhongxing Xu
committed
Zhongxing Xu
committed
// Array-to-pointer decay.
if (ExTy->isArrayType()) {
Zhongxing Xu
committed
V = StateMgr.ArrayToPointer(V);
Zhongxing Xu
committed
continue;
}
// Check for casts from AllocaRegion pointer to typed pointer.
if (isa<loc::MemRegionVal>(V)) {
assert(Loc::IsLocType(T));
assert(Loc::IsLocType(ExTy));
// Delegate to store manager.
Zhongxing Xu
committed
std::pair<const GRState*, SVal> Res =
getStoreManager().CastRegion(St, V, T, CastE);
const GRState* NewSt = Res.first;
SVal NewPtr = Res.second;
// If no new region is created, fall through to the default case.
if (NewSt != St) {
Zhongxing Xu
committed
MakeNode(Dst, CastE, N, BindExpr(NewSt, CastE, NewPtr));
continue;
}
}
Ted Kremenek
committed
// All other cases.
MakeNode(Dst, CastE, N, BindExpr(St, CastE, EvalCast(V, CastE->getType())));
// NOTE(review): VCS blame text is interleaved with this definition in this
// copy; the code itself appears complete.
//
// VisitCompoundLiteralExpr - Evaluate a compound literal: evaluate its
// initializer list, bind the resulting value to the literal's region, then
// bind either the region lvalue (asLValue) or the value itself to the
// expression.
void GRExprEngine::VisitCompoundLiteralExpr(CompoundLiteralExpr* CL,
Zhongxing Xu
committed
NodeTy* Pred, NodeSet& Dst,
bool asLValue) {
InitListExpr* ILE = cast<InitListExpr>(CL->getInitializer()->IgnoreParens());
NodeSet Tmp;
Visit(ILE, Pred, Tmp);
for (NodeSet::iterator I = Tmp.begin(), EI = Tmp.end(); I!=EI; ++I) {
Zhongxing Xu
committed
const GRState* St = GetState(*I);
SVal ILV = GetSVal(St, ILE);
St = StateMgr.BindCompoundLiteral(St, CL, ILV);
Zhongxing Xu
committed
if (asLValue)
MakeNode(Dst, CL, *I, BindExpr(St, CL, StateMgr.GetLValue(St, CL)));
else
MakeNode(Dst, CL, *I, BindExpr(St, CL, ILV));
}
}
// NOTE(review): this block is corrupted in this copy -- the condition
// guarding the early 'return;' below is missing (as written, everything
// after it is unreachable), and the per-iteration 'const GRState* St =
// GetState(*I);' plus the final MakeNode appear to have been replaced by
// blame text. Recover from upstream history.
//
// VisitDeclStmt - Evaluate a variable declaration: evaluate the
// initializer (if any), conjure a symbol for unknown scalar initializer
// values to retain path sensitivity, and bind the value to the decl.
void GRExprEngine::VisitDeclStmt(DeclStmt* DS, NodeTy* Pred, NodeSet& Dst) {
Ted Kremenek
committed
// The CFG has one DeclStmt per Decl.
ScopedDecl* D = *DS->decl_begin();
return;
const VarDecl* VD = dyn_cast<VarDecl>(D);
Ted Kremenek
committed
Expr* InitEx = const_cast<Expr*>(VD->getInit());
// FIXME: static variables may have an initializer, but the second
// time a function is called those values may not be current.
NodeSet Tmp;
Ted Kremenek
committed
if (InitEx)
Visit(InitEx, Pred, Tmp);
if (Tmp.empty())
Tmp.Add(Pred);
for (NodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
Ted Kremenek
committed
unsigned Count = Builder->getCurrentBlockCount();
if (InitEx) {
SVal InitVal = GetSVal(St, InitEx);
QualType T = VD->getType();
// Recover some path-sensitivity if a scalar value evaluated to
// UnknownVal.
if (InitVal.isUnknown()) {
if (Loc::IsLocType(T)) {
SymbolID Sym = SymMgr.getConjuredSymbol(InitEx, Count);
InitVal = loc::SymbolVal(Sym);
}
Ted Kremenek
committed
else if (T->isIntegerType() && T->isScalarType()) {
Ted Kremenek
committed
SymbolID Sym = SymMgr.getConjuredSymbol(InitEx, Count);
InitVal = nonloc::SymbolVal(Sym);
}
}
St = StateMgr.BindDecl(St, VD, &InitVal, Count);
}
else
St = StateMgr.BindDecl(St, VD, 0, Count);
}
}
namespace {
/// InitListWLItem - Worklist entry used by VisitInitListExpr: pairs an
/// exploded-graph node with the initializer values accumulated so far and
/// the position of the next initializer to evaluate.
class VISIBILITY_HIDDEN InitListWLItem {
public:
  llvm::ImmutableList<SVal> Vals;      // Values gathered so far.
  GRExprEngine::NodeTy* N;             // Predecessor node for this entry.
  InitListExpr::reverse_iterator Itr;  // Next initializer to process.

  InitListWLItem(GRExprEngine::NodeTy* node, llvm::ImmutableList<SVal> vals,
                 InitListExpr::reverse_iterator pos)
    : Vals(vals), N(node), Itr(pos) {}
};
} // end anonymous namespace
Zhongxing Xu
committed
// NOTE(review): blame text is interleaved with this definition, and the
// MakeNode call in the scalar (Loc/integer) branch appears to have been
// replaced by blame text in this copy.
//
// VisitInitListExpr - Evaluate an initializer list. Aggregates
// (arrays/structs) are processed with an explicit worklist that folds all
// element values into a CompoundVal; scalars evaluate their single
// initializer; unions/vectors are not yet modeled.
void GRExprEngine::VisitInitListExpr(InitListExpr* E, NodeTy* Pred,
NodeSet& Dst) {
Ted Kremenek
committed
const GRState* state = GetState(Pred);
QualType T = getContext().getCanonicalType(E->getType());
unsigned NumInitElements = E->getNumInits();
Zhongxing Xu
committed
if (T->isArrayType() || T->isStructureType()) {
Zhongxing Xu
committed
Ted Kremenek
committed
llvm::ImmutableList<SVal> StartVals = getBasicVals().getEmptySValList();
Ted Kremenek
committed
// Handle base case where the initializer has no elements.
// e.g: static int* myArray[] = {};
if (NumInitElements == 0) {
SVal V = NonLoc::MakeCompoundVal(T, StartVals, getBasicVals());
MakeNode(Dst, E, Pred, BindExpr(state, E, V));
return;
}
// Create a worklist to process the initializers.
llvm::SmallVector<InitListWLItem, 10> WorkList;
WorkList.reserve(NumInitElements);
// Initializers are consumed in reverse so that consVals builds the list
// in source order.
WorkList.push_back(InitListWLItem(Pred, StartVals, E->rbegin()));
InitListExpr::reverse_iterator ItrEnd = E->rend();
Ted Kremenek
committed
// Process the worklist until it is empty.
while (!WorkList.empty()) {
InitListWLItem X = WorkList.back();
WorkList.pop_back();
NodeSet Tmp;
Visit(*X.Itr, X.N, Tmp);
InitListExpr::reverse_iterator NewItr = X.Itr + 1;
Zhongxing Xu
committed
for (NodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
// Get the last initializer value.
state = GetState(*NI);
SVal InitV = GetSVal(state, cast<Expr>(*X.Itr));
// Construct the new list of values by prepending the new value to
// the already constructed list.
llvm::ImmutableList<SVal> NewVals =
getBasicVals().consVals(InitV, X.Vals);
if (NewItr == ItrEnd) {
// Now we have a list holding all init values. Make CompoundValData.
SVal V = NonLoc::MakeCompoundVal(T, NewVals, getBasicVals());
Zhongxing Xu
committed
// Make final state and node.
MakeNode(Dst, E, *NI, BindExpr(state, E, V));
}
else {
// Still some initializer values to go. Push them onto the worklist.
WorkList.push_back(InitListWLItem(*NI, NewVals, NewItr));
}
}
}
Zhongxing Xu
committed
}
Ted Kremenek
committed
if (T->isUnionType() || T->isVectorType()) {
// FIXME: to be implemented.
// Note: That vectors can return true for T->isIntegerType()
MakeNode(Dst, E, Pred, state);
return;
}
Zhongxing Xu
committed
if (Loc::IsLocType(T) || T->isIntegerType()) {
assert (E->getNumInits() == 1);
NodeSet Tmp;
Expr* Init = E->getInit(0);
Visit(Init, Pred, Tmp);
for (NodeSet::iterator I = Tmp.begin(), EI = Tmp.end(); I != EI; ++I) {
state = GetState(*I);
Zhongxing Xu
committed
}
return;
}
// Unhandled initializer type: dump and abort (debug aid).
printf("InitListExpr type = %s\n", T.getAsString().c_str());
assert(0 && "unprocessed InitListExpr type");
}
Sebastian Redl
committed
// NOTE(review): the final statement of this block is truncated in this copy
// -- the 'MakeNode(Dst, Ex, Pred,' prefix wrapping the BindExpr call is
// missing, leaving unbalanced parentheses. Recover from upstream history.
/// VisitSizeOfAlignOfExpr - Transfer function for sizeof(type).
void GRExprEngine::VisitSizeOfAlignOfExpr(SizeOfAlignOfExpr* Ex,
NodeTy* Pred,
NodeSet& Dst) {
QualType T = Ex->getTypeOfArgument();
uint64_t amt;
if (Ex->isSizeOf()) {
// FIXME: Add support for VLAs.
if (!T.getTypePtr()->isConstantSizeType())
return;
Ted Kremenek
committed
// Some code tries to take the sizeof an ObjCInterfaceType, relying that
// the compiler has laid out its representation. Just report Unknown
// for these.
if (T->isObjCInterfaceType())
return;
amt = 1; // Handle sizeof(void)
if (T != getContext().VoidTy)
amt = getContext().getTypeSize(T) / 8;
}
else // Get alignment of the type.
amt = getContext().getTypeAlign(T) / 8;
BindExpr(GetState(Pred), Ex,
NonLoc::MakeVal(getBasicVals(), amt, Ex->getType())));
Ted Kremenek
committed
}
void GRExprEngine::VisitUnaryOperator(UnaryOperator* U, NodeTy* Pred,
Zhongxing Xu
committed
NodeSet& Dst, bool asLValue) {
switch (U->getOpcode()) {
default:
break;
Ted Kremenek
committed
case UnaryOperator::Deref: {
Expr* Ex = U->getSubExpr()->IgnoreParens();
NodeSet Tmp;
Visit(Ex, Pred, Tmp);
for (NodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
Zhongxing Xu
committed
SVal location = GetSVal(St, Ex);
Zhongxing Xu
committed
if (asLValue)
Ted Kremenek
committed
EvalLoad(Dst, U, *I, St, location);
}
Ted Kremenek
committed
case UnaryOperator::Real: {
Expr* Ex = U->getSubExpr()->IgnoreParens();
NodeSet Tmp;
Visit(Ex, Pred, Tmp);
for (NodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
Zhongxing Xu
committed
// FIXME: We don't have complex SValues yet.
Ted Kremenek
committed
if (Ex->getType()->isAnyComplexType()) {
// Just report "Unknown."
Dst.Add(*I);
continue;
}
// For all other types, UnaryOperator::Real is an identity operation.
assert (U->getType() == Ex->getType());
Ted Kremenek
committed
}
return;
}
case UnaryOperator::Imag: {
Expr* Ex = U->getSubExpr()->IgnoreParens();
NodeSet Tmp;
Visit(Ex, Pred, Tmp);
for (NodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
Zhongxing Xu
committed
// FIXME: We don't have complex SValues yet.
Ted Kremenek
committed
if (Ex->getType()->isAnyComplexType()) {
// Just report "Unknown."
Dst.Add(*I);
continue;
}
// For all other types, UnaryOperator::Float returns 0.
assert (Ex->getType()->isIntegerType());
Zhongxing Xu
committed
SVal X = NonLoc::MakeVal(getBasicVals(), 0, Ex->getType());