6 changes: 3 additions & 3 deletions lib/checkunusedfunctions.cpp
@@ -417,15 +417,15 @@ void CheckUnusedFunctions::unusedFunctionError(ErrorLogger& errorLogger,
}

CheckUnusedFunctions::FunctionDecl::FunctionDecl(const Function *f)
- : functionName(f->name()), fileName(f->token->fileName()), lineNumber(f->token->linenr())
+ : functionName(f->name()), fileIndex(f->token->fileIndex()), lineNumber(f->token->linenr())
{}

- std::string CheckUnusedFunctions::analyzerInfo() const
+ std::string CheckUnusedFunctions::analyzerInfo(const Tokenizer &tokenizer) const
{
std::ostringstream ret;
for (const FunctionDecl &functionDecl : mFunctionDecl) {
ret << " <functiondecl"
- << " file=\"" << ErrorLogger::toxml(functionDecl.fileName) << '\"'
+ << " file=\"" << ErrorLogger::toxml(tokenizer.list.getFiles()[functionDecl.fileIndex]) << '\"'
<< " functionName=\"" << ErrorLogger::toxml(functionDecl.functionName) << '\"'
<< " lineNumber=\"" << functionDecl.lineNumber << "\"/>\n";
}
4 changes: 2 additions & 2 deletions lib/checkunusedfunctions.h
@@ -52,7 +52,7 @@ class CPPCHECKLIB CheckUnusedFunctions {
// * What functions are declared
void parseTokens(const Tokenizer &tokenizer, const Settings &settings);

- std::string analyzerInfo() const;
+ std::string analyzerInfo(const Tokenizer &tokenizer) const;

static void analyseWholeProgram(const Settings &settings, ErrorLogger& errorLogger, const std::string &buildDir);

@@ -86,7 +86,7 @@ class CPPCHECKLIB CheckUnusedFunctions {
public:
explicit FunctionDecl(const Function *f);
std::string functionName;
- std::string fileName;
+ nonneg int fileIndex;
unsigned int lineNumber;
};
std::list<FunctionDecl> mFunctionDecl;
4 changes: 2 additions & 2 deletions lib/cppcheck.cpp
@@ -931,7 +931,7 @@ unsigned int CppCheck::checkFile(const FileWithDetails& file, const std::string

std::list<ErrorMessage> errors;
analyzerInformation->analyzeFile(mSettings.buildDir, file.spath(), cfgname, hash, errors);
- analyzerInformation->setFileInfo("CheckUnusedFunctions", mUnusedFunctionsCheck->analyzerInfo());
+ analyzerInformation->setFileInfo("CheckUnusedFunctions", mUnusedFunctionsCheck->analyzerInfo(tokenizer));
analyzerInformation->close();
}
}
@@ -1389,7 +1389,7 @@ void CppCheck::checkNormalTokens(const Tokenizer &tokenizer, AnalyzerInformation
}

if (mSettings.checks.isEnabled(Checks::unusedFunction) && analyzerInformation) {
- analyzerInformation->setFileInfo("CheckUnusedFunctions", unusedFunctionsChecker.analyzerInfo());
+ analyzerInformation->setFileInfo("CheckUnusedFunctions", unusedFunctionsChecker.analyzerInfo(tokenizer));
}

#ifdef HAVE_RULES
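With this change the unused-function check records only a file index per declaration, and the caller passes the tokenizer into analyzerInfo() so that index can be resolved to a file path when the analyzer info is written. A minimal sketch of the updated call pattern (the surrounding setup, i.e. tokenizer, settings and analyzerInformation, is assumed context and not part of this diff):

    // Sketch only: mirrors the call sites changed above, with simplified setup.
    CheckUnusedFunctions check;
    check.parseTokens(tokenizer, settings);   // records a fileIndex per FunctionDecl
    analyzerInformation->setFileInfo("CheckUnusedFunctions",
                                     check.analyzerInfo(tokenizer));  // file paths resolved via tokenizer.list.getFiles()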
23 changes: 10 additions & 13 deletions lib/token.cpp
@@ -60,16 +60,17 @@ namespace {
const std::list<ValueFlow::Value> TokenImpl::mEmptyValueList;
const std::string Token::mEmptyString;

- Token::Token(TokensFrontBack &tokensFrontBack)
- : mTokensFrontBack(tokensFrontBack)
- , mIsC(mTokensFrontBack.list.isC())
- , mIsCpp(mTokensFrontBack.list.isCPP())
+ Token::Token(const TokenList& tokenlist, TokensFrontBack &tokensFrontBack)
+ : mList(tokenlist)
+ , mTokensFrontBack(tokensFrontBack)
+ , mIsC(mList.isC())
+ , mIsCpp(mList.isCPP())
{
mImpl = new TokenImpl();
}

Token::Token(const Token* tok)
- : Token(const_cast<Token*>(tok)->mTokensFrontBack)
+ : Token(tok->mList, const_cast<Token*>(tok)->mTokensFrontBack)
{
fileIndex(tok->fileIndex());
linenr(tok->linenr());
@@ -127,7 +128,7 @@ void Token::update_property_info()
else if (std::isalpha(static_cast<unsigned char>(mStr[0])) || mStr[0] == '_' || mStr[0] == '$') { // Name
if (mImpl->mVarId)
tokType(eVariable);
- else if (mTokensFrontBack.list.isKeyword(mStr)) {
+ else if (mList.isKeyword(mStr)) {
tokType(eKeyword);
update_property_isStandardType();
if (mTokType != eType) // cannot be a control-flow keyword when it is a type
@@ -1058,7 +1059,7 @@ Token* Token::insertToken(const std::string& tokenStr, const std::string& origin
if (mStr.empty())
newToken = this;
else
- newToken = new Token(mTokensFrontBack);
+ newToken = new Token(mList, mTokensFrontBack);
newToken->str(tokenStr);
if (!originalNameStr.empty())
newToken->originalName(originalNameStr);
@@ -1731,7 +1732,7 @@ std::string Token::astStringZ3() const
return "(" + str() + " " + astOperand1()->astStringZ3() + " " + astOperand2()->astStringZ3() + ")";
}

- void Token::printValueFlow(bool xml, std::ostream &out) const
+ void Token::printValueFlow(const std::vector<std::string>& files, bool xml, std::ostream &out) const
{
std::string outs;

@@ -1758,7 +1759,7 @@ void Token::printValueFlow(bool xml, std::ostream &out) const
else {
if (fileIndex != tok->fileIndex()) {
outs += "File ";
- outs += tok->mTokensFrontBack.list.getFiles()[tok->fileIndex()];
+ outs += files[tok->fileIndex()];
outs += '\n';
line = 0;
}
@@ -2713,7 +2714,3 @@ Token* findLambdaEndScope(Token* tok)
const Token* findLambdaEndScope(const Token* tok) {
return findLambdaEndScope(const_cast<Token*>(tok));
}
-
- const std::string& Token::fileName() const {
- return mTokensFrontBack.list.getFiles()[mImpl->mFileIndex];
- }
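Token::fileName() is removed, so code that needs a token's file path now resolves it through the owning TokenList's file table, as the changed call sites above do. A minimal sketch, assuming a tokenizer (or any TokenList) is in scope:

    // Before this change: const std::string& path = tok->fileName();
    // After (sketch): look the token's file index up in the TokenList's file table.
    const std::string& path = tokenizer.list.getFiles()[tok->fileIndex()];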
8 changes: 4 additions & 4 deletions lib/token.h
@@ -52,6 +52,7 @@ class Variable;
class ConstTokenRange;
class Token;
struct TokensFrontBack;
+ class TokenList;

struct ScopeInfo2 {
ScopeInfo2(std::string name_, const Token *bodyEnd_, std::set<std::string> usingNamespaces_ = std::set<std::string>()) : name(std::move(name_)), bodyEnd(bodyEnd_), usingNamespaces(std::move(usingNamespaces_)) {}
@@ -165,6 +166,7 @@ class CPPCHECKLIB Token {
friend class TestToken;

private:
+ const TokenList& mList;
TokensFrontBack& mTokensFrontBack;

static const std::string mEmptyString;
@@ -184,7 +186,7 @@
eNone
};

- explicit Token(TokensFrontBack &tokensFrontBack);
+ Token(const TokenList& tokenlist, TokensFrontBack &tokensFrontBack);
// for usage in CheckIO::ArgumentInfo only
explicit Token(const Token *tok);
~Token();
@@ -885,8 +887,6 @@ class CPPCHECKLIB Token {
static int multiCompare(const Token *tok, const char *haystack, nonneg int varid);

public:
- const std::string& fileName() const;
-
nonneg int fileIndex() const {
return mImpl->mFileIndex;
}
@@ -1554,7 +1554,7 @@

void printAst(bool verbose, bool xml, const std::vector<std::string> &fileNames, std::ostream &out) const;

- void printValueFlow(bool xml, std::ostream &out) const;
+ void printValueFlow(const std::vector<std::string>& files, bool xml, std::ostream &out) const;

void scopeInfo(std::shared_ptr<ScopeInfo2> newScopeInfo);
std::shared_ptr<ScopeInfo2> scopeInfo() const;
4 changes: 2 additions & 2 deletions lib/tokenize.cpp
@@ -5909,7 +5909,7 @@ void Tokenizer::printDebugOutput(int simplification, std::ostream &out) const
if (mSettings.verbose)
list.front()->printAst(mSettings.verbose, xml, list.getFiles(), out);

- list.front()->printValueFlow(xml, out);
+ list.front()->printValueFlow(list.getFiles(), xml, out);

if (xml)
out << "</debug>" << std::endl;
@@ -6169,7 +6169,7 @@ void Tokenizer::dump(std::ostream &out) const
}

if (list.front())
- list.front()->printValueFlow(true, out);
+ list.front()->printValueFlow(list.getFiles(), true, out);

outs += dumpTypedefInfo();

14 changes: 7 additions & 7 deletions lib/tokenlist.cpp
@@ -61,7 +61,7 @@ static constexpr int AST_MAX_DEPTH = 150;


TokenList::TokenList(const Settings* settings)
- : mTokensFrontBack(*this)
+ : mTokensFrontBack()
, mSettings(settings)
{
if (mSettings && (mSettings->enforcedLang != Standards::Language::None)) {
@@ -172,7 +172,7 @@ void TokenList::addtoken(const std::string& str, const nonneg int lineno, const
if (mTokensFrontBack.back) {
mTokensFrontBack.back->insertToken(str);
} else {
- mTokensFrontBack.front = new Token(mTokensFrontBack);
+ mTokensFrontBack.front = new Token(*this, mTokensFrontBack);
mTokensFrontBack.back = mTokensFrontBack.front;
mTokensFrontBack.back->str(str);
}
@@ -190,7 +190,7 @@ void TokenList::addtoken(const std::string& str, const Token *locationTok)
if (mTokensFrontBack.back) {
mTokensFrontBack.back->insertToken(str);
} else {
- mTokensFrontBack.front = new Token(mTokensFrontBack);
+ mTokensFrontBack.front = new Token(*this, mTokensFrontBack);
mTokensFrontBack.back = mTokensFrontBack.front;
mTokensFrontBack.back->str(str);
}
@@ -208,7 +208,7 @@ void TokenList::addtoken(const Token * tok, const nonneg int lineno, const nonne
if (mTokensFrontBack.back) {
mTokensFrontBack.back->insertToken(tok->str(), tok->originalName());
} else {
- mTokensFrontBack.front = new Token(mTokensFrontBack);
+ mTokensFrontBack.front = new Token(*this, mTokensFrontBack);
mTokensFrontBack.back = mTokensFrontBack.front;
mTokensFrontBack.back->str(tok->str());
if (!tok->originalName().empty())
@@ -229,7 +229,7 @@ void TokenList::addtoken(const Token *tok, const Token *locationTok)
if (mTokensFrontBack.back) {
mTokensFrontBack.back->insertToken(tok->str(), tok->originalName());
} else {
- mTokensFrontBack.front = new Token(mTokensFrontBack);
+ mTokensFrontBack.front = new Token(*this, mTokensFrontBack);
mTokensFrontBack.back = mTokensFrontBack.front;
mTokensFrontBack.back->str(tok->str());
if (!tok->originalName().empty())
@@ -250,7 +250,7 @@ void TokenList::addtoken(const Token *tok)
if (mTokensFrontBack.back) {
mTokensFrontBack.back->insertToken(tok->str(), tok->originalName(), tok->getMacroName());
} else {
- mTokensFrontBack.front = new Token(mTokensFrontBack);
+ mTokensFrontBack.front = new Token(*this, mTokensFrontBack);
mTokensFrontBack.back = mTokensFrontBack.front;
mTokensFrontBack.back->str(tok->str());
if (!tok->originalName().empty())
@@ -409,7 +409,7 @@ void TokenList::createTokens(simplecpp::TokenList&& tokenList)
if (mTokensFrontBack.back) {
mTokensFrontBack.back->insertToken(str);
} else {
- mTokensFrontBack.front = new Token(mTokensFrontBack);
+ mTokensFrontBack.front = new Token(*this, mTokensFrontBack);
mTokensFrontBack.back = mTokensFrontBack.front;
mTokensFrontBack.back->str(str);
}
2 changes: 0 additions & 2 deletions lib/tokenlist.h
@@ -44,10 +44,8 @@ namespace simplecpp {
* @brief This struct stores pointers to the front and back tokens of the list this token is in.
*/
struct TokensFrontBack {
- explicit TokensFrontBack(const TokenList& list) : list(list) {}
Token *front{};
Token* back{};
- const TokenList& list;
};

class CPPCHECKLIB TokenList {
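TokensFrontBack is now just the front/back pointer pair, and the TokenList is handed to the Token constructor directly instead of being reached through the struct. A minimal sketch of the new construction pattern, mirroring the updated TestMathLib fixtures below:

    TokenList list{&settingsDefault};        // settingsDefault as used by the test fixtures
    list.appendFileIfNew("test.c");
    TokensFrontBack tokensFrontBack;         // no longer takes a TokenList
    auto *tok = new Token(list, tokensFrontBack);
    tok->str("42");                          // lifetime management omitted in this sketch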
12 changes: 6 additions & 6 deletions test/testmathlib.cpp
@@ -410,8 +410,8 @@ class TestMathLib : public TestFixture {
{
TokenList list{&settingsDefault};
list.appendFileIfNew("test.c");
- TokensFrontBack tokensFrontBack(list);
- auto *tok = new Token(tokensFrontBack);
+ TokensFrontBack tokensFrontBack;
+ auto *tok = new Token(list, tokensFrontBack);
tok->str("invalid");
ASSERT_THROW_INTERNAL_EQUALS(MathLib::toBigNumber(tok), INTERNAL, "Internal Error. MathLib::toBigNumber: invalid_argument: invalid");
ASSERT_THROW_INTERNAL_EQUALS(MathLib::toBigNumber("invalid", tok), INTERNAL, "Internal Error. MathLib::toBigNumber: invalid_argument: invalid");
@@ -586,8 +586,8 @@ class TestMathLib : public TestFixture {
{
TokenList list{&settingsDefault};
list.appendFileIfNew("test.c");
- TokensFrontBack tokensFrontBack(list);
- auto *tok = new Token(tokensFrontBack);
+ TokensFrontBack tokensFrontBack;
+ auto *tok = new Token(list, tokensFrontBack);
tok->str("invalid");
ASSERT_THROW_INTERNAL_EQUALS(MathLib::toBigUNumber(tok), INTERNAL, "Internal Error. MathLib::toBigUNumber: invalid_argument: invalid");
ASSERT_THROW_INTERNAL_EQUALS(MathLib::toBigUNumber("invalid", tok), INTERNAL, "Internal Error. MathLib::toBigUNumber: invalid_argument: invalid");
@@ -716,8 +716,8 @@ class TestMathLib : public TestFixture {
{
TokenList list{&settingsDefault};
list.appendFileIfNew("test.c");
- TokensFrontBack tokensFrontBack(list);
- auto *tok = new Token(tokensFrontBack);
+ TokensFrontBack tokensFrontBack;
+ auto *tok = new Token(list, tokensFrontBack);
tok->str("invalid");
ASSERT_THROW_INTERNAL_EQUALS(MathLib::toDoubleNumber(tok), INTERNAL, "Internal Error. MathLib::toDoubleNumber: conversion failed: invalid");
ASSERT_THROW_INTERNAL_EQUALS(MathLib::toDoubleNumber("invalid", tok), INTERNAL, "Internal Error. MathLib::toDoubleNumber: conversion failed: invalid");