From 19f048813568faf71cd1943550ae90a7a409c64f Mon Sep 17 00:00:00 2001
From: chrchr-github <78114321+chrchr-github@users.noreply.github.com>
Date: Wed, 14 Jan 2026 16:32:55 +0100
Subject: [PATCH 1/3] Update tokenize.cpp

---
 lib/tokenize.cpp | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 96c69bb4dd9..0aced8ac0d2 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -8830,17 +8830,17 @@ void Tokenizer::findGarbageCode() const
             continue;
         // count number of semicolons
         int semicolons = 0, colons = 0;
-        const Token* const startTok = tok;
-        tok = tok->linkAt(1)->previous(); // find ")" of the for-loop
-        // walk backwards until we find the beginning (startTok) of the for() again
-        for (; tok != startTok; tok = tok->previous()) {
+        const Token* const endTok = tok->linkAt(1);
+        for (tok = tok->tokAt(2); tok != endTok; tok = tok->next()) {
+            if (const Token* lam = findLambdaEndTokenWithoutAST(tok)) {
+                tok = lam;
+                continue;
+            }
             if (tok->str() == ";") { // do the counting
                 semicolons++;
             } else if (tok->str() == ":") {
-                if (tok->strAt(-1) == ",")
-                    syntaxError(tok);
                 colons++;
-            } else if (tok->str() == ")") { // skip pairs of ( )
+            } else if (tok->str() == "(") { // skip pairs of ( )
                 tok = tok->link();
             }
         }

From 4910a03503c9b8730fab747c80470aeb1e075126 Mon Sep 17 00:00:00 2001
From: chrchr-github <78114321+chrchr-github@users.noreply.github.com>
Date: Wed, 14 Jan 2026 16:34:31 +0100
Subject: [PATCH 2/3] Update testtokenize.cpp

---
 test/testtokenize.cpp | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index dcbbd0201af..3baa9f4a269 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -7683,6 +7683,10 @@ class TestTokenizer : public TestFixture {
 
         ASSERT_NO_THROW(tokenizeAndStringify("struct S { unsigned u:2, :30; };")); // #14393
 
+        ASSERT_NO_THROW(tokenizeAndStringify("void f() {\n" // #14395
+                                             "    for (int i : [](int a, int b) { ++a; ++b; return std::vector{a, b}; }(1, 2)) {}\n"
+                                             "}\n"));
+
         ignore_errout();
     }

From df9c4de4c1b4a1c98faaec042608308d7d00feb4 Mon Sep 17 00:00:00 2001
From: chrchr-github <78114321+chrchr-github@users.noreply.github.com>
Date: Wed, 14 Jan 2026 16:36:16 +0100
Subject: [PATCH 3/3] Update tokenize.cpp

---
 lib/tokenize.cpp | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 0aced8ac0d2..bc7931d1c70 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -8839,6 +8839,8 @@ void Tokenizer::findGarbageCode() const
             if (tok->str() == ";") { // do the counting
                 semicolons++;
             } else if (tok->str() == ":") {
+                if (tok->strAt(-1) == ",")
+                    syntaxError(tok);
                 colons++;
             } else if (tok->str() == "(") { // skip pairs of ( )
                 tok = tok->link();