From 50cfc27c780c57b21d8e18cff6148d15a201c2e8 Mon Sep 17 00:00:00 2001
From: firewave
Date: Fri, 11 Apr 2025 10:22:48 +0200
Subject: [PATCH 1/3] moved `TokenList` reference out of `TokenFrontBack`

---
 lib/token.cpp        | 19 +-
 lib/token.h          |  4 +-
 lib/tokenlist.cpp    | 14 +-
 lib/tokenlist.h      |  2 -
 test/testmathlib.cpp | 12 +-
 test/testtoken.cpp   | 268 +++++++++++++++++++++----------------
 6 files changed, 160 insertions(+), 159 deletions(-)

diff --git a/lib/token.cpp b/lib/token.cpp
index 1bb62dd8c5d..ca71e969e54 100644
--- a/lib/token.cpp
+++ b/lib/token.cpp
@@ -60,16 +60,17 @@ namespace {
 const std::list<ValueFlow::Value> TokenImpl::mEmptyValueList;
 const std::string Token::mEmptyString;
 
-Token::Token(TokensFrontBack &tokensFrontBack)
-    : mTokensFrontBack(tokensFrontBack)
-    , mIsC(mTokensFrontBack.list.isC())
-    , mIsCpp(mTokensFrontBack.list.isCPP())
+Token::Token(const TokenList& tokenlist, TokensFrontBack &tokensFrontBack)
+    : mList(tokenlist)
+    , mTokensFrontBack(tokensFrontBack)
+    , mIsC(mList.isC())
+    , mIsCpp(mList.isCPP())
 {
     mImpl = new TokenImpl();
 }
 
 Token::Token(const Token* tok)
-    : Token(const_cast<Token*>(tok)->mTokensFrontBack)
+    : Token(tok->mList, const_cast<Token*>(tok)->mTokensFrontBack)
 {
     fileIndex(tok->fileIndex());
     linenr(tok->linenr());
@@ -127,7 +128,7 @@ void Token::update_property_info()
     else if (std::isalpha(static_cast<unsigned char>(mStr[0])) || mStr[0] == '_' || mStr[0] == '$') { // Name
         if (mImpl->mVarId)
             tokType(eVariable);
-        else if (mTokensFrontBack.list.isKeyword(mStr)) {
+        else if (mList.isKeyword(mStr)) {
             tokType(eKeyword);
             update_property_isStandardType();
             if (mTokType != eType) // cannot be a control-flow keyword when it is a type
@@ -1058,7 +1059,7 @@ Token* Token::insertToken(const std::string& tokenStr, const std::string& origin
     if (mStr.empty())
         newToken = this;
     else
-        newToken = new Token(mTokensFrontBack);
+        newToken = new Token(mList, mTokensFrontBack);
     newToken->str(tokenStr);
     if (!originalNameStr.empty())
         newToken->originalName(originalNameStr);
@@ -1758,7 +1759,7 @@ void Token::printValueFlow(bool xml, std::ostream &out) const
         else {
             if (fileIndex != tok->fileIndex()) {
                 outs += "File ";
-                outs += tok->mTokensFrontBack.list.getFiles()[tok->fileIndex()];
+                outs += mList.getFiles()[tok->fileIndex()];
                 outs += '\n';
                 line = 0;
             }
@@ -2715,5 +2716,5 @@ const Token* findLambdaEndScope(const Token* tok) {
 }
 
 const std::string& Token::fileName() const {
-    return mTokensFrontBack.list.getFiles()[mImpl->mFileIndex];
+    return mList.getFiles()[mImpl->mFileIndex];
 }
diff --git a/lib/token.h b/lib/token.h
index ef5cc39a325..7965a96f8b5 100644
--- a/lib/token.h
+++ b/lib/token.h
@@ -52,6 +52,7 @@ class Variable;
 class ConstTokenRange;
 class Token;
 struct TokensFrontBack;
+class TokenList;
 
 struct ScopeInfo2 {
     ScopeInfo2(std::string name_, const Token *bodyEnd_, std::set<std::string> usingNamespaces_ = std::set<std::string>()) : name(std::move(name_)), bodyEnd(bodyEnd_), usingNamespaces(std::move(usingNamespaces_)) {}
@@ -165,6 +166,7 @@ class CPPCHECKLIB Token {
     friend class TestToken;
 
 private:
+    const TokenList& mList;
     TokensFrontBack& mTokensFrontBack;
 
     static const std::string mEmptyString;
@@ -184,7 +186,7 @@ class CPPCHECKLIB Token {
         eNone
     };
 
-    explicit Token(TokensFrontBack &tokensFrontBack);
+    Token(const TokenList& tokenlist, TokensFrontBack &tokensFrontBack);
     // for usage in CheckIO::ArgumentInfo only
     explicit Token(const Token *tok);
     ~Token();
diff --git a/lib/tokenlist.cpp b/lib/tokenlist.cpp
index 399d0e5c4d7..34247111eb0 100644
--- a/lib/tokenlist.cpp
+++ b/lib/tokenlist.cpp
@@ -61,7 +61,7 @@ static constexpr int
AST_MAX_DEPTH = 150; TokenList::TokenList(const Settings* settings) - : mTokensFrontBack(*this) + : mTokensFrontBack() , mSettings(settings) { if (mSettings && (mSettings->enforcedLang != Standards::Language::None)) { @@ -172,7 +172,7 @@ void TokenList::addtoken(const std::string& str, const nonneg int lineno, const if (mTokensFrontBack.back) { mTokensFrontBack.back->insertToken(str); } else { - mTokensFrontBack.front = new Token(mTokensFrontBack); + mTokensFrontBack.front = new Token(*this, mTokensFrontBack); mTokensFrontBack.back = mTokensFrontBack.front; mTokensFrontBack.back->str(str); } @@ -190,7 +190,7 @@ void TokenList::addtoken(const std::string& str, const Token *locationTok) if (mTokensFrontBack.back) { mTokensFrontBack.back->insertToken(str); } else { - mTokensFrontBack.front = new Token(mTokensFrontBack); + mTokensFrontBack.front = new Token(*this, mTokensFrontBack); mTokensFrontBack.back = mTokensFrontBack.front; mTokensFrontBack.back->str(str); } @@ -208,7 +208,7 @@ void TokenList::addtoken(const Token * tok, const nonneg int lineno, const nonne if (mTokensFrontBack.back) { mTokensFrontBack.back->insertToken(tok->str(), tok->originalName()); } else { - mTokensFrontBack.front = new Token(mTokensFrontBack); + mTokensFrontBack.front = new Token(*this, mTokensFrontBack); mTokensFrontBack.back = mTokensFrontBack.front; mTokensFrontBack.back->str(tok->str()); if (!tok->originalName().empty()) @@ -229,7 +229,7 @@ void TokenList::addtoken(const Token *tok, const Token *locationTok) if (mTokensFrontBack.back) { mTokensFrontBack.back->insertToken(tok->str(), tok->originalName()); } else { - mTokensFrontBack.front = new Token(mTokensFrontBack); + mTokensFrontBack.front = new Token(*this, mTokensFrontBack); mTokensFrontBack.back = mTokensFrontBack.front; mTokensFrontBack.back->str(tok->str()); if (!tok->originalName().empty()) @@ -250,7 +250,7 @@ void TokenList::addtoken(const Token *tok) if (mTokensFrontBack.back) { mTokensFrontBack.back->insertToken(tok->str(), tok->originalName(), tok->getMacroName()); } else { - mTokensFrontBack.front = new Token(mTokensFrontBack); + mTokensFrontBack.front = new Token(*this, mTokensFrontBack); mTokensFrontBack.back = mTokensFrontBack.front; mTokensFrontBack.back->str(tok->str()); if (!tok->originalName().empty()) @@ -409,7 +409,7 @@ void TokenList::createTokens(simplecpp::TokenList&& tokenList) if (mTokensFrontBack.back) { mTokensFrontBack.back->insertToken(str); } else { - mTokensFrontBack.front = new Token(mTokensFrontBack); + mTokensFrontBack.front = new Token(*this, mTokensFrontBack); mTokensFrontBack.back = mTokensFrontBack.front; mTokensFrontBack.back->str(str); } diff --git a/lib/tokenlist.h b/lib/tokenlist.h index ca39b581a72..a5c5508a6dc 100644 --- a/lib/tokenlist.h +++ b/lib/tokenlist.h @@ -44,10 +44,8 @@ namespace simplecpp { * @brief This struct stores pointers to the front and back tokens of the list this token is in. 
*/ struct TokensFrontBack { - explicit TokensFrontBack(const TokenList& list) : list(list) {} Token *front{}; Token* back{}; - const TokenList& list; }; class CPPCHECKLIB TokenList { diff --git a/test/testmathlib.cpp b/test/testmathlib.cpp index ee38deb84e3..92bf63b6c71 100644 --- a/test/testmathlib.cpp +++ b/test/testmathlib.cpp @@ -410,8 +410,8 @@ class TestMathLib : public TestFixture { { TokenList list{&settingsDefault}; list.appendFileIfNew("test.c"); - TokensFrontBack tokensFrontBack(list); - auto *tok = new Token(tokensFrontBack); + TokensFrontBack tokensFrontBack; + auto *tok = new Token(list, tokensFrontBack); tok->str("invalid"); ASSERT_THROW_INTERNAL_EQUALS(MathLib::toBigNumber(tok), INTERNAL, "Internal Error. MathLib::toBigNumber: invalid_argument: invalid"); ASSERT_THROW_INTERNAL_EQUALS(MathLib::toBigNumber("invalid", tok), INTERNAL, "Internal Error. MathLib::toBigNumber: invalid_argument: invalid"); @@ -586,8 +586,8 @@ class TestMathLib : public TestFixture { { TokenList list{&settingsDefault}; list.appendFileIfNew("test.c"); - TokensFrontBack tokensFrontBack(list); - auto *tok = new Token(tokensFrontBack); + TokensFrontBack tokensFrontBack; + auto *tok = new Token(list, tokensFrontBack); tok->str("invalid"); ASSERT_THROW_INTERNAL_EQUALS(MathLib::toBigUNumber(tok), INTERNAL, "Internal Error. MathLib::toBigUNumber: invalid_argument: invalid"); ASSERT_THROW_INTERNAL_EQUALS(MathLib::toBigUNumber("invalid", tok), INTERNAL, "Internal Error. MathLib::toBigUNumber: invalid_argument: invalid"); @@ -716,8 +716,8 @@ class TestMathLib : public TestFixture { { TokenList list{&settingsDefault}; list.appendFileIfNew("test.c"); - TokensFrontBack tokensFrontBack(list); - auto *tok = new Token(tokensFrontBack); + TokensFrontBack tokensFrontBack; + auto *tok = new Token(list, tokensFrontBack); tok->str("invalid"); ASSERT_THROW_INTERNAL_EQUALS(MathLib::toDoubleNumber(tok), INTERNAL, "Internal Error. MathLib::toDoubleNumber: conversion failed: invalid"); ASSERT_THROW_INTERNAL_EQUALS(MathLib::toDoubleNumber("invalid", tok), INTERNAL, "Internal Error. 
MathLib::toDoubleNumber: conversion failed: invalid");
diff --git a/test/testtoken.cpp b/test/testtoken.cpp
index 6f88bef266e..e627ae564e4 100644
--- a/test/testtoken.cpp
+++ b/test/testtoken.cpp
@@ -130,8 +130,8 @@ class TestToken : public TestFixture {
     }
 
     void nextprevious() const {
-        TokensFrontBack tokensFrontBack(list);
-        auto *token = new Token(tokensFrontBack);
+        TokensFrontBack tokensFrontBack;
+        auto *token = new Token(list, tokensFrontBack);
         token->str("1");
         (void)token->insertToken("2");
         (void)token->next()->insertToken("3");
@@ -164,15 +164,15 @@ class TestToken : public TestFixture {
     void multiCompare() const {
         // Test for found
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token one(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token one(list, tokensFrontBack);
             one.str("one");
             ASSERT_EQUALS(1, Token::multiCompare(&one, "one|two", 0));
         }
 
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token two(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token two(list, tokensFrontBack);
             two.str("two");
             ASSERT_EQUALS(1, Token::multiCompare(&two, "one|two", 0));
             ASSERT_EQUALS(1, Token::multiCompare(&two, "verybig|two|", 0));
@@ -180,8 +180,8 @@ class TestToken : public TestFixture {
         // Test for empty string found
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token notfound(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token notfound(list, tokensFrontBack);
             notfound.str("notfound");
 
             ASSERT_EQUALS(0, Token::multiCompare(&notfound, "one|two|", 0));
@@ -190,51 +190,51 @@ class TestToken : public TestFixture {
         }
 
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token s(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token s(list, tokensFrontBack);
             s.str("s");
             ASSERT_EQUALS(-1, Token::multiCompare(&s, "verybig|two", 0));
         }
 
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token ne(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token ne(list, tokensFrontBack);
             ne.str("ne");
             ASSERT_EQUALS(-1, Token::multiCompare(&ne, "one|two", 0));
         }
 
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token a(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token a(list, tokensFrontBack);
             a.str("a");
             ASSERT_EQUALS(-1, Token::multiCompare(&a, "abc|def", 0));
         }
 
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token abcd(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token abcd(list, tokensFrontBack);
             abcd.str("abcd");
             ASSERT_EQUALS(-1, Token::multiCompare(&abcd, "abc|def", 0));
         }
 
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token def(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token def(list, tokensFrontBack);
             def.str("default");
             ASSERT_EQUALS(-1, Token::multiCompare(&def, "abc|def", 0));
         }
 
         // %op%
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token plus(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token plus(list, tokensFrontBack);
             plus.str("+");
             ASSERT_EQUALS(1, Token::multiCompare(&plus, "one|%op%", 0));
             ASSERT_EQUALS(1, Token::multiCompare(&plus, "%op%|two", 0));
         }
 
         {
-            TokensFrontBack tokensFrontBack(list);
-            Token x(tokensFrontBack);
+            TokensFrontBack tokensFrontBack;
+            Token x(list, tokensFrontBack);
             x.str("x");
             ASSERT_EQUALS(-1, Token::multiCompare(&x, "one|%op%", 0));
             ASSERT_EQUALS(-1, Token::multiCompare(&x, "%op%|two", 0));
@@ -312,15 +312,15 @@ class TestToken : public TestFixture {
     }
 
     void multiCompare5() const {
-        TokensFrontBack tokensFrontBack(list);
-        Token tok(tokensFrontBack);
+        TokensFrontBack tokensFrontBack;
+        Token tok(list, tokensFrontBack);
         tok.str("||");
         ASSERT_EQUALS(true, Token::multiCompare(&tok, "+|%or%|%oror%", 0) >= 0);
     }
 
     void charTypes()
const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("'a'"); ASSERT_EQUALS(true, tok.isCChar()); @@ -396,8 +396,8 @@ class TestToken : public TestFixture { } void stringTypes() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("\"a\""); ASSERT_EQUALS(true, tok.isCChar()); @@ -441,8 +441,8 @@ class TestToken : public TestFixture { } void getStrLength() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("\"\""); ASSERT_EQUALS(0, Token::getStrLength(&tok)); @@ -470,8 +470,8 @@ class TestToken : public TestFixture { } void getStrSize() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("\"\""); ASSERT_EQUALS(sizeof(""), Token::getStrSize(&tok, settingsDefault)); @@ -487,8 +487,8 @@ class TestToken : public TestFixture { } void strValue() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("\"\""); ASSERT_EQUALS("", tok.strValue()); @@ -519,8 +519,8 @@ class TestToken : public TestFixture { } void concatStr() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("\"\""); tok.concatStr("\"\""); @@ -559,9 +559,9 @@ class TestToken : public TestFixture { } void deleteLast() const { - TokensFrontBack listEnds(list); + TokensFrontBack listEnds; Token ** const tokensBack = &(listEnds.back); - Token tok(listEnds); + Token tok(list, listEnds); (void)tok.insertToken("aba"); ASSERT_EQUALS(true, *tokensBack == tok.next()); tok.deleteNext(); @@ -569,9 +569,9 @@ class TestToken : public TestFixture { } void deleteFirst() const { - TokensFrontBack listEnds(list); + TokensFrontBack listEnds; Token ** const tokensFront = &(listEnds.front); - Token tok(listEnds); + Token tok(list, listEnds); (void)tok.insertToken("aba"); @@ -615,8 +615,8 @@ class TestToken : public TestFixture { ASSERT_EQUALS(true, Token::Match(singleChar.front(), "[a|bc]")); ASSERT_EQUALS(false, Token::Match(singleChar.front(), "[d|ef]")); - TokensFrontBack tokensFrontBack(list); - Token multiChar(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token multiChar(list, tokensFrontBack); multiChar.str("[ab"); ASSERT_EQUALS(false, Token::Match(&multiChar, "[ab|def]")); } @@ -857,8 +857,8 @@ class TestToken : public TestFixture { void isArithmeticalOp() const { for (auto test_op = arithmeticalOps.cbegin(); test_op != arithmeticalOps.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); ASSERT_EQUALS(true, tok.isArithmeticalOp()); } @@ -872,8 +872,8 @@ class TestToken : public TestFixture { append_vector(other_ops, assignmentOps); for (auto other_op = other_ops.cbegin(); other_op != other_ops.cend(); ++other_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*other_op); ASSERT_EQUALS_MSG(false, tok.isArithmeticalOp(), "Failing arithmetical operator: " + *other_op); } @@ -888,8 +888,8 @@ 
class TestToken : public TestFixture { append_vector(test_ops, assignmentOps); for (auto test_op = test_ops.cbegin(); test_op != test_ops.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); ASSERT_EQUALS(true, tok.isOp()); } @@ -899,8 +899,8 @@ class TestToken : public TestFixture { append_vector(other_ops, extendedOps); for (auto other_op = other_ops.cbegin(); other_op != other_ops.cend(); ++other_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*other_op); ASSERT_EQUALS_MSG(false, tok.isOp(), "Failing normal operator: " + *other_op); } @@ -914,8 +914,8 @@ class TestToken : public TestFixture { append_vector(test_ops, logicalOps); for (auto test_op = test_ops.cbegin(); test_op != test_ops.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); ASSERT_EQUALS(true, tok.isConstOp()); } @@ -926,8 +926,8 @@ class TestToken : public TestFixture { append_vector(other_ops, assignmentOps); for (auto other_op = other_ops.cbegin(); other_op != other_ops.cend(); ++other_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*other_op); ASSERT_EQUALS_MSG(false, tok.isConstOp(), "Failing normal operator: " + *other_op); } @@ -942,16 +942,16 @@ class TestToken : public TestFixture { append_vector(test_ops, extendedOps); for (auto test_op = test_ops.cbegin(); test_op != test_ops.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); ASSERT_EQUALS(true, tok.isExtendedOp()); } // Negative test against assignment operators for (auto other_op = assignmentOps.cbegin(); other_op != assignmentOps.cend(); ++other_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*other_op); ASSERT_EQUALS_MSG(false, tok.isExtendedOp(), "Failing assignment operator: " + *other_op); } @@ -959,8 +959,8 @@ class TestToken : public TestFixture { void isAssignmentOp() const { for (auto test_op = assignmentOps.cbegin(); test_op != assignmentOps.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); ASSERT_EQUALS(true, tok.isAssignmentOp()); } @@ -974,8 +974,8 @@ class TestToken : public TestFixture { append_vector(other_ops, extendedOps); for (auto other_op = other_ops.cbegin(); other_op != other_ops.cend(); ++other_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*other_op); ASSERT_EQUALS_MSG(false, tok.isAssignmentOp(), "Failing assignment operator: " + *other_op); } @@ -983,31 +983,31 @@ class TestToken : public TestFixture { void operators() const { for (auto test_op = extendedOps.cbegin(); test_op != extendedOps.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); 
ASSERT_EQUALS(Token::eExtendedOp, tok.tokType()); } for (auto test_op = logicalOps.cbegin(); test_op != logicalOps.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); ASSERT_EQUALS(Token::eLogicalOp, tok.tokType()); } for (auto test_op = bitOps.cbegin(); test_op != bitOps.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); ASSERT_EQUALS(Token::eBitOp, tok.tokType()); } for (auto test_op = comparisonOps.cbegin(); test_op != comparisonOps.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); ASSERT_EQUALS(Token::eComparisonOp, tok.tokType()); } - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("++"); ASSERT_EQUALS(Token::eIncDecOp, tok.tokType()); tok.str("--"); @@ -1015,8 +1015,8 @@ class TestToken : public TestFixture { } void literals() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("\"foo\""); ASSERT(tok.tokType() == Token::eString); @@ -1046,15 +1046,15 @@ class TestToken : public TestFixture { standard_types.emplace_back("size_t"); for (auto test_op = standard_types.cbegin(); test_op != standard_types.cend(); ++test_op) { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(*test_op); ASSERT_EQUALS_MSG(true, tok.isStandardType(), "Failing standard type: " + *test_op); } // Negative test - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("string"); ASSERT_EQUALS(false, tok.isStandardType()); @@ -1070,8 +1070,8 @@ class TestToken : public TestFixture { } void updateProperties() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("foobar"); ASSERT_EQUALS(true, tok.isName()); @@ -1084,45 +1084,45 @@ class TestToken : public TestFixture { } void isNameGuarantees1() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("Name"); ASSERT_EQUALS(true, tok.isName()); } void isNameGuarantees2() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("_name"); ASSERT_EQUALS(true, tok.isName()); } void isNameGuarantees3() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("_123"); ASSERT_EQUALS(true, tok.isName()); } void isNameGuarantees4() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("123456"); ASSERT_EQUALS(false, tok.isName()); ASSERT_EQUALS(true, tok.isNumber()); } void isNameGuarantees5() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack 
tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("a123456"); ASSERT_EQUALS(true, tok.isName()); ASSERT_EQUALS(false, tok.isNumber()); } void isNameGuarantees6() const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("$f"); ASSERT_EQUALS(true, tok.isName()); } @@ -1231,8 +1231,8 @@ class TestToken : public TestFixture { v2.valueType = ValueFlow::Value::ValueType::BUFFER_SIZE; v2.setKnown(); - TokensFrontBack tokensFrontBack(list); - Token token(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token token(list, tokensFrontBack); ASSERT_EQUALS(true, token.addValue(v1)); ASSERT_EQUALS(true, token.addValue(v2)); ASSERT_EQUALS(false, token.hasKnownIntValue()); @@ -1250,8 +1250,8 @@ class TestToken : public TestFixture { void _assert_tok(const char* file, int line, const std::string& s, Token::Type t, bool l = false, bool std = false, bool ctrl = false) const { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str(s); _assert_tok(file, line, &tok, t, l, std, ctrl); } @@ -1338,8 +1338,8 @@ class TestToken : public TestFixture { void update_property_info_evariable() const { { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("var1"); tok.varId(17); assert_tok(&tok, Token::Type::eVariable); @@ -1352,24 +1352,24 @@ class TestToken : public TestFixture { const Settings s = settingsBuilder().c(Standards::cstd_t::C89).build(); TokenList list_c{&s}; list_c.setLang(Standards::Language::C); - TokensFrontBack tokensFrontBack(list_c); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_c, tokensFrontBack); tok.str("alignas"); // not a C89 keyword assert_tok(&tok, Token::Type::eName); } { TokenList list_c{&settingsDefault}; list_c.setLang(Standards::Language::C); - TokensFrontBack tokensFrontBack(list_c); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_c, tokensFrontBack); tok.str("alignas"); // a C23 keyword assert_tok(&tok, Token::Type::eKeyword); } { TokenList list_c{&settingsDefault}; list_c.setLang(Standards::Language::C); - TokensFrontBack tokensFrontBack(list_c); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_c, tokensFrontBack); tok.str("and_eq"); // a C++ keyword assert_tok(&tok, Token::Type::eName); } @@ -1381,24 +1381,24 @@ class TestToken : public TestFixture { const Settings s = settingsBuilder().cpp(Standards::cppstd_t::CPP03).build(); TokenList list_cpp{&s}; list_cpp.setLang(Standards::Language::CPP); - TokensFrontBack tokensFrontBack(list_cpp); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_cpp, tokensFrontBack); tok.str("consteval"); // not a C++03 keyword assert_tok(&tok, Token::Type::eName); } { TokenList list_cpp{&settingsDefault}; list_cpp.setLang(Standards::Language::CPP); - TokensFrontBack tokensFrontBack(list_cpp); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_cpp, tokensFrontBack); tok.str("consteval"); // a C++20 keyword assert_tok(&tok, Token::Type::eKeyword); } { TokenList list_cpp{&settingsDefault}; list_cpp.setLang(Standards::Language::CPP); - TokensFrontBack tokensFrontBack(list_cpp); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_cpp, tokensFrontBack); 
tok.str("typeof_unqual"); // a C keyword assert_tok(&tok, Token::Type::eName); } @@ -1407,20 +1407,20 @@ class TestToken : public TestFixture { void update_property_info_ebracket_link() const { { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("<"); - Token tok2(tokensFrontBack); + Token tok2(list, tokensFrontBack); tok.link(&tok2); assert_tok(&tok, Token::Type::eBracket); } { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); - Token tok2(tokensFrontBack); + Token tok2(list, tokensFrontBack); tok.link(&tok2); tok.str("<"); @@ -1431,20 +1431,20 @@ class TestToken : public TestFixture { void update_property_info_ecomparisonop_link() const { { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("=="); - Token tok2(tokensFrontBack); + Token tok2(list, tokensFrontBack); tok.link(&tok2); // TODO: does not (and probably should not) update assert_tok(&tok, Token::Type::eComparisonOp); } { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); - Token tok2(tokensFrontBack); + Token tok2(list, tokensFrontBack); tok.link(&tok2); tok.str("=="); @@ -1457,16 +1457,16 @@ class TestToken : public TestFixture { { TokenList list_c{&settingsDefault}; list_c.setLang(Standards::Language::C); - TokensFrontBack tokensFrontBack(list_c); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_c, tokensFrontBack); tok.str("char"); // not treated as keyword in TokenList::isKeyword() assert_tok(&tok, Token::Type::eType, /*l=*/ false, /*std=*/ true); } { TokenList list_c{&settingsDefault}; list_c.setLang(Standards::Language::C); - TokensFrontBack tokensFrontBack(list_c); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_c, tokensFrontBack); tok.str("size_t"); // not treated as keyword in TokenList::isKeyword() assert_tok(&tok, Token::Type::eType, /*l=*/ false, /*std=*/ true); } @@ -1477,16 +1477,16 @@ class TestToken : public TestFixture { { TokenList list_cpp{&settingsDefault}; list_cpp.setLang(Standards::Language::CPP); - TokensFrontBack tokensFrontBack(list_cpp); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_cpp, tokensFrontBack); tok.str("bool"); // not treated as keyword in TokenList::isKeyword() assert_tok(&tok, Token::Type::eType, /*l=*/ false, /*std=*/ true); } { TokenList list_cpp{&settingsDefault}; list_cpp.setLang(Standards::Language::CPP); - TokensFrontBack tokensFrontBack(list_cpp); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list_cpp, tokensFrontBack); tok.str("size_t"); assert_tok(&tok, Token::Type::eType, /*l=*/ false, /*std=*/ true); } @@ -1494,8 +1494,8 @@ class TestToken : public TestFixture { void update_property_info_replace() const // #13743 { - TokensFrontBack tokensFrontBack(list); - Token tok(tokensFrontBack); + TokensFrontBack tokensFrontBack; + Token tok(list, tokensFrontBack); tok.str("size_t"); assert_tok(&tok, Token::Type::eType, false, true); tok.str("long"); @@ -1506,8 +1506,8 @@ class TestToken : public TestFixture { { TokenList list_c{&settingsDefault}; list_c.setLang(Standards::Language::C); - TokensFrontBack tokensFrontBack(list_c); - Token tok(tokensFrontBack); + TokensFrontBack 
tokensFrontBack;
+        Token tok(list_c, tokensFrontBack);
         tok.str("int"); // not treated as keyword in TokenList::isKeyword()
         assert_tok(&tok, Token::Type::eType, /*l=*/ false, /*std=*/ true);
         tok.varId(0);

From 3fbeed433f505f3db9b34f8514dae2c2b8ded7d7 Mon Sep 17 00:00:00 2001
From: firewave
Date: Fri, 11 Apr 2025 10:32:09 +0200
Subject: [PATCH 2/3] provide filelist to `Token::printValueFlow()`

---
 lib/token.cpp    | 4 ++--
 lib/token.h      | 2 +-
 lib/tokenize.cpp | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/lib/token.cpp b/lib/token.cpp
index ca71e969e54..291d027a359 100644
--- a/lib/token.cpp
+++ b/lib/token.cpp
@@ -1732,7 +1732,7 @@ std::string Token::astStringZ3() const
     return "(" + str() + " " + astOperand1()->astStringZ3() + " " + astOperand2()->astStringZ3() + ")";
 }
 
-void Token::printValueFlow(bool xml, std::ostream &out) const
+void Token::printValueFlow(const std::vector<std::string>& files, bool xml, std::ostream &out) const
 {
     std::string outs;
 
@@ -1759,7 +1759,7 @@ void Token::printValueFlow(bool xml, std::ostream &out) const
         else {
             if (fileIndex != tok->fileIndex()) {
                 outs += "File ";
-                outs += mList.getFiles()[tok->fileIndex()];
+                outs += files[tok->fileIndex()];
                 outs += '\n';
                 line = 0;
             }
diff --git a/lib/token.h b/lib/token.h
index 7965a96f8b5..45fda386d98 100644
--- a/lib/token.h
+++ b/lib/token.h
@@ -1556,7 +1556,7 @@ class CPPCHECKLIB Token {
     void printAst(bool verbose, bool xml, const std::vector<std::string> &fileNames, std::ostream &out) const;
 
-    void printValueFlow(bool xml, std::ostream &out) const;
+    void printValueFlow(const std::vector<std::string>& files, bool xml, std::ostream &out) const;
 
     void scopeInfo(std::shared_ptr<ScopeInfo2> newScopeInfo);
     std::shared_ptr<ScopeInfo2> scopeInfo() const;
diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index ecfa8aa4390..1f94136d4bb 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -5909,7 +5909,7 @@ void Tokenizer::printDebugOutput(int simplification, std::ostream &out) const
     if (mSettings.verbose)
         list.front()->printAst(mSettings.verbose, xml, list.getFiles(), out);
 
-    list.front()->printValueFlow(xml, out);
+    list.front()->printValueFlow(list.getFiles(), xml, out);
 
     if (xml)
         out << "" << std::endl;
@@ -6169,7 +6169,7 @@ void Tokenizer::dump(std::ostream &out) const
     }
 
     if (list.front())
-        list.front()->printValueFlow(true, out);
+        list.front()->printValueFlow(list.getFiles(), true, out);
 
     outs += dumpTypedefInfo();

From d15e353a6dd58780030f9e6da6c4b081b9392176 Mon Sep 17 00:00:00 2001
From: firewave
Date: Fri, 11 Apr 2025 10:33:26 +0200
Subject: [PATCH 3/3] removed `Token::fileName()`

---
 lib/checkunusedfunctions.cpp | 6 +++---
 lib/checkunusedfunctions.h   | 4 ++--
 lib/cppcheck.cpp             | 4 ++--
 lib/token.cpp                | 4 ----
 lib/token.h                  | 2 --
 5 files changed, 7 insertions(+), 13 deletions(-)

diff --git a/lib/checkunusedfunctions.cpp b/lib/checkunusedfunctions.cpp
index f363524f03d..f6a07b8e01f 100644
--- a/lib/checkunusedfunctions.cpp
+++ b/lib/checkunusedfunctions.cpp
@@ -417,15 +417,15 @@ void CheckUnusedFunctions::unusedFunctionError(ErrorLogger& errorLogger,
 }
 
 CheckUnusedFunctions::FunctionDecl::FunctionDecl(const Function *f)
-    : functionName(f->name()), fileName(f->token->fileName()), lineNumber(f->token->linenr())
+    : functionName(f->name()), fileIndex(f->token->fileIndex()), lineNumber(f->token->linenr())
 {}
 
-std::string CheckUnusedFunctions::analyzerInfo() const
+std::string CheckUnusedFunctions::analyzerInfo(const Tokenizer &tokenizer) const
 {
     std::ostringstream ret;
     for (const FunctionDecl &functionDecl : mFunctionDecl) {
         ret << " \n";
     }
diff --git a/lib/checkunusedfunctions.h b/lib/checkunusedfunctions.h
index a3c325958eb..69b7cf2fc88 100644
--- a/lib/checkunusedfunctions.h
+++ b/lib/checkunusedfunctions.h
@@ -52,7 +52,7 @@ class CPPCHECKLIB CheckUnusedFunctions {
     // * What functions are declared
     void parseTokens(const Tokenizer &tokenizer, const Settings &settings);
 
-    std::string analyzerInfo() const;
+    std::string analyzerInfo(const Tokenizer &tokenizer) const;
 
     static void analyseWholeProgram(const Settings &settings, ErrorLogger& errorLogger, const std::string &buildDir);
@@ -86,7 +86,7 @@ class CPPCHECKLIB CheckUnusedFunctions {
     public:
         explicit FunctionDecl(const Function *f);
         std::string functionName;
-        std::string fileName;
+        nonneg int fileIndex;
         unsigned int lineNumber;
     };
     std::list<FunctionDecl> mFunctionDecl;
diff --git a/lib/cppcheck.cpp b/lib/cppcheck.cpp
index 38c8848133b..c0e56162489 100644
--- a/lib/cppcheck.cpp
+++ b/lib/cppcheck.cpp
@@ -931,7 +931,7 @@ unsigned int CppCheck::checkFile(const FileWithDetails& file, const std::string
 
             std::list<ErrorMessage> errors;
             analyzerInformation->analyzeFile(mSettings.buildDir, file.spath(), cfgname, hash, errors);
-            analyzerInformation->setFileInfo("CheckUnusedFunctions", mUnusedFunctionsCheck->analyzerInfo());
+            analyzerInformation->setFileInfo("CheckUnusedFunctions", mUnusedFunctionsCheck->analyzerInfo(tokenizer));
             analyzerInformation->close();
         }
     }
@@ -1389,7 +1389,7 @@ void CppCheck::checkNormalTokens(const Tokenizer &tokenizer, AnalyzerInformation
     }
 
     if (mSettings.checks.isEnabled(Checks::unusedFunction) && analyzerInformation) {
-        analyzerInformation->setFileInfo("CheckUnusedFunctions", unusedFunctionsChecker.analyzerInfo());
+        analyzerInformation->setFileInfo("CheckUnusedFunctions", unusedFunctionsChecker.analyzerInfo(tokenizer));
     }
 
 #ifdef HAVE_RULES
diff --git a/lib/token.cpp b/lib/token.cpp
index 291d027a359..9beae94f398 100644
--- a/lib/token.cpp
+++ b/lib/token.cpp
@@ -2714,7 +2714,3 @@ Token* findLambdaEndScope(Token* tok)
 const Token* findLambdaEndScope(const Token* tok) {
     return findLambdaEndScope(const_cast<Token*>(tok));
 }
-
-const std::string& Token::fileName() const {
-    return mList.getFiles()[mImpl->mFileIndex];
-}
diff --git a/lib/token.h b/lib/token.h
index 45fda386d98..7e7c2ce624c 100644
--- a/lib/token.h
+++ b/lib/token.h
@@ -887,8 +887,6 @@ class CPPCHECKLIB Token {
     static int multiCompare(const Token *tok, const char *haystack, nonneg int varid);
 
 public:
-    const std::string& fileName() const;
-
     nonneg int fileIndex() const {
         return mImpl->mFileIndex;
     }
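
Taken together, the three patches make the owning `TokenList` an explicit dependency: `Token` keeps its own `const TokenList&` instead of reaching the list through `TokensFrontBack`, `Token::printValueFlow()` receives the file list from its caller, and `Token::fileName()` is removed, so file names are resolved from `Token::fileIndex()` via `TokenList::getFiles()`. Below is a minimal consumer-side sketch of that lookup pattern, assuming the post-series cppcheck lib headers and API; the helper `printTokenLocation` and the `test.c` file name are illustrative only and not part of this series.

#include <iostream>

#include "settings.h"
#include "token.h"
#include "tokenlist.h"

// Resolve a token's file name through the list that owns it,
// which is the pattern callers use now that Token::fileName() is gone.
static void printTokenLocation(const TokenList &list, const Token *tok)
{
    // fileIndex() is an index into TokenList::getFiles()
    std::cout << list.getFiles()[tok->fileIndex()] << ':' << tok->linenr() << '\n';
}

int main()
{
    const Settings settings;
    TokenList list(&settings);
    list.appendFileIfNew("test.c");
    list.addtoken("x", 1, 0); // one token on line 1 of file index 0
    printTokenLocation(list, list.front());
}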