From 836bcc87b84388420ce6d0892d161ee778ed1c91 Mon Sep 17 00:00:00 2001 From: firewave Date: Mon, 30 Mar 2026 12:55:27 +0200 Subject: [PATCH 1/3] remove --- .github/workflows/CI-cygwin.yml | 62 -- .github/workflows/CI-mingw.yml | 75 -- .github/workflows/CI-unixish-docker.yml | 150 ---- .github/workflows/CI-unixish.yml | 734 -------------------- .github/workflows/CI-windows.yml | 338 --------- .github/workflows/asan.yml | 143 ---- .github/workflows/buildman.yml | 65 -- .github/workflows/cifuzz.yml | 34 - .github/workflows/clang-tidy.yml | 93 --- .github/workflows/codeql-analysis.yml | 52 -- .github/workflows/coverage.yml | 71 -- .github/workflows/coverity.yml | 41 -- .github/workflows/cppcheck-premium.yml | 72 -- .github/workflows/format.yml | 55 -- .github/workflows/iwyu.yml | 267 ------- .github/workflows/release-windows-mingw.yml | 69 -- .github/workflows/release-windows.yml | 215 ------ .github/workflows/selfcheck.yml | 204 ------ .github/workflows/tsan.yml | 146 ---- .github/workflows/ubsan.yml | 140 ---- .github/workflows/valgrind.yml | 65 -- 21 files changed, 3091 deletions(-) delete mode 100644 .github/workflows/CI-cygwin.yml delete mode 100644 .github/workflows/CI-mingw.yml delete mode 100644 .github/workflows/CI-unixish-docker.yml delete mode 100644 .github/workflows/CI-unixish.yml delete mode 100644 .github/workflows/CI-windows.yml delete mode 100644 .github/workflows/asan.yml delete mode 100644 .github/workflows/buildman.yml delete mode 100644 .github/workflows/cifuzz.yml delete mode 100644 .github/workflows/clang-tidy.yml delete mode 100644 .github/workflows/codeql-analysis.yml delete mode 100644 .github/workflows/coverage.yml delete mode 100644 .github/workflows/coverity.yml delete mode 100644 .github/workflows/cppcheck-premium.yml delete mode 100644 .github/workflows/format.yml delete mode 100644 .github/workflows/iwyu.yml delete mode 100644 .github/workflows/release-windows-mingw.yml delete mode 100644 .github/workflows/release-windows.yml delete mode 
100644 .github/workflows/selfcheck.yml delete mode 100644 .github/workflows/tsan.yml delete mode 100644 .github/workflows/ubsan.yml delete mode 100644 .github/workflows/valgrind.yml diff --git a/.github/workflows/CI-cygwin.yml b/.github/workflows/CI-cygwin.yml deleted file mode 100644 index 0eee9446869..00000000000 --- a/.github/workflows/CI-cygwin.yml +++ /dev/null @@ -1,62 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: CI-cygwin - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -defaults: - run: - shell: cmd - -jobs: - # TODO: add CMake build - build_cygwin: - strategy: - matrix: - # only use the latest windows-* as the installed toolchain is identical - os: [windows-2025] - platform: [x86_64] - include: - - platform: 'x86_64' - packages: | - gcc-g++ - python3 - fail-fast: false - - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Set up Cygwin - uses: cygwin/cygwin-install-action@master - with: - site: https://mirrors.cicku.me/cygwin/ - platform: ${{ matrix.platform }} - packages: ${{ matrix.packages }} - - # Cygwin will always link the binaries even if they already exist. The linking is also extremely slow. So just run the "check" target which includes all the binaries. 
- - name: Build all and run test - run: | - C:\cygwin\bin\bash.exe -l -c cd %GITHUB_WORKSPACE% && make VERBOSE=1 -j%NUMBER_OF_PROCESSORS% CXXOPTS="-Werror" test - - - name: Extra test for misra - run: | - cd %GITHUB_WORKSPACE%\addons\test - ..\..\cppcheck.exe --dump -DDUMMY --suppress=uninitvar --inline-suppr misra\misra-test.c --std=c89 --platform=unix64 - python3 ..\misra.py -verify misra\misra-test.c.dump - ..\..\cppcheck.exe --addon=misra --enable=style --inline-suppr --enable=information --error-exitcode=1 misra\misra-ctu-1-test.c misra\misra-ctu-2-test.c - diff --git a/.github/workflows/CI-mingw.yml b/.github/workflows/CI-mingw.yml deleted file mode 100644 index 1b0cf3e5672..00000000000 --- a/.github/workflows/CI-mingw.yml +++ /dev/null @@ -1,75 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: CI-mingw - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -defaults: - run: - shell: msys2 {0} - -jobs: - # TODO: add CMake build - build_mingw: - strategy: - matrix: - # only use the latest windows-* as the installed toolchain is identical - os: [windows-2025] - fail-fast: false - - runs-on: ${{ matrix.os }} - - timeout-minutes: 19 # max + 3*std of the last 7K runs - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Set up MSYS2 - uses: msys2/setup-msys2@v2 - with: - release: false # use pre-installed - # TODO: install mingw-w64-x86_64-make and use mingw32.make instead - currently fails with "Windows Subsystem for Linux has no installed distributions." 
- install: >- - mingw-w64-x86_64-lld - mingw-w64-x86_64-ccache - make - mingw-w64-x86_64-gcc - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - - name: Build cppcheck - run: | - export PATH="/mingw64/lib/ccache/bin:$PATH" - # set RDYNAMIC to work around broken MinGW detection - # use lld for faster linking - make VERBOSE=1 RDYNAMIC=-lshlwapi LDOPTS=-fuse-ld=lld -j$(nproc) CXXOPTS="-Werror" cppcheck - - - name: Build test - run: | - export PATH="/mingw64/lib/ccache/bin:$PATH" - # set RDYNAMIC to work around broken MinGW detection - # use lld for faster linking - make VERBOSE=1 RDYNAMIC=-lshlwapi LDOPTS=-fuse-ld=lld -j$(nproc) CXXOPTS="-Werror" testrunner - - - name: Run test - run: | - export PATH="/mingw64/lib/ccache/bin:$PATH" - # set RDYNAMIC to work around broken MinGW detection - # use lld for faster linking - make VERBOSE=1 RDYNAMIC=-lshlwapi LDOPTS=-fuse-ld=lld -j$(nproc) CXXOPTS="-Werror" test diff --git a/.github/workflows/CI-unixish-docker.yml b/.github/workflows/CI-unixish-docker.yml deleted file mode 100644 index a38feb452f0..00000000000 --- a/.github/workflows/CI-unixish-docker.yml +++ /dev/null @@ -1,150 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: CI-unixish-docker - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - build_cmake: - - strategy: - matrix: - include: - - image: "ubuntu:24.04" - with_gui: true - full_build: true - - image: "ubuntu:25.10" - with_gui: true - full_build: true - - image: "alpine:3.23" - with_gui: false # it appears FindQt6.cmake is not provided by any package - full_build: false # FIXME: test-signalhandler.cpp fails to build since feenableexcept() is missing - fail-fast: 
false # Prefer quick result - - runs-on: ubuntu-22.04 - - # TODO: is this actually applied to the guest? - env: - # TODO: figure out why there are cache misses with PCH enabled - CCACHE_SLOPPINESS: pch_defines,time_macros - - container: - image: ${{ matrix.image }} - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install missing software on ubuntu - if: contains(matrix.image, 'ubuntu') - run: | - apt-get update - apt-get install -y cmake g++ make libxml2-utils libpcre3-dev - - - name: Install missing software (gui) on latest ubuntu - if: contains(matrix.image, 'ubuntu') - run: | - apt-get install -y qt6-base-dev qt6-charts-dev qt6-tools-dev - - - name: Install missing software on Alpine - if: contains(matrix.image, 'alpine') - run: | - apk add cmake make g++ pcre-dev - - # needs to be called after the package installation since - # - it doesn't call "apt-get update" - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ matrix.image }} - - - name: Run CMake - run: | - cmake -S . 
-B cmake.output -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=${{ matrix.with_gui }} -DWITH_QCHART=On -DBUILD_TRIAGE=${{ matrix.with_gui }} -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache - - - name: CMake build - if: matrix.full_build - run: | - cmake --build cmake.output -- -j$(nproc) - - - name: Run CMake test - run: | - cmake --build cmake.output --target check -- -j$(nproc) - - build_make: - - strategy: - matrix: - image: ["ubuntu:24.04", "ubuntu:25.10", "alpine:3.23"] - fail-fast: false # Prefer quick result - - runs-on: ubuntu-22.04 - - container: - image: ${{ matrix.image }} - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install missing software on ubuntu - if: contains(matrix.image, 'ubuntu') - run: | - apt-get update - apt-get install -y g++ make python3 libxml2-utils libpcre3-dev - - - name: Install missing software on Alpine - if: contains(matrix.image, 'alpine') - run: | - apk add make g++ pcre-dev bash python3 libxml2-utils - - # needs to be called after the package installation since - # - it doesn't call "apt-get update" - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ matrix.image }} - - # /usr/lib/ccache/bin - Alpine Linux - - - name: Build cppcheck - run: | - export PATH="/usr/lib/ccache/bin:/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - make -j$(nproc) HAVE_RULES=yes CXXOPTS="-Werror" - - - name: Build test - run: | - export PATH="/usr/lib/ccache/bin:/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - make -j$(nproc) HAVE_RULES=yes CXXOPTS="-Werror" testrunner - - - name: Run test - run: | - make -j$(nproc) HAVE_RULES=yes test - - # requires python3 - - name: Run extra tests - run: | - test/scripts/generate_and_run_more_tests.sh - - # requires which - - name: Validate - run: | - make -j$(nproc) checkCWEEntries validateXML - - - name: Test addons - run: | - ./cppcheck 
--addon=threadsafety addons/test/threadsafety - ./cppcheck --addon=threadsafety --std=c++03 addons/test/threadsafety diff --git a/.github/workflows/CI-unixish.yml b/.github/workflows/CI-unixish.yml deleted file mode 100644 index 8f355644bc2..00000000000 --- a/.github/workflows/CI-unixish.yml +++ /dev/null @@ -1,734 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: CI-unixish - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - build_cmake_tinyxml2: - - strategy: - matrix: - os: [ubuntu-22.04, macos-15] - fail-fast: false # Prefer quick result - - runs-on: ${{ matrix.os }} - - env: - # TODO: figure out why there are cache misses with PCH enabled - CCACHE_SLOPPINESS: pch_defines,time_macros - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - - name: Install missing software on ubuntu - if: contains(matrix.os, 'ubuntu') - run: | - sudo apt-get update - sudo apt-get install libxml2-utils libtinyxml2-dev - # qt6-tools-dev-tools for lprodump - # qt6-l10n-tools for lupdate - sudo apt-get install qt6-base-dev libqt6charts6-dev qt6-tools-dev qt6-tools-dev-tools qt6-l10n-tools libglx-dev libgl1-mesa-dev - - # coreutils contains "nproc" - - name: Install missing software on macos - if: contains(matrix.os, 'macos') - run: | - # pcre was removed from runner images in November 2022 - brew install coreutils qt@6 tinyxml2 pcre - - - name: CMake build on ubuntu (with GUI / system tinyxml2) - if: contains(matrix.os, 'ubuntu') - run: | - cmake -S . 
-B cmake.output.tinyxml2 -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DUSE_BUNDLED_TINYXML2=Off -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache - cmake --build cmake.output.tinyxml2 -- -j$(nproc) - - - name: CMake build on macos (with GUI / system tinyxml2) - if: contains(matrix.os, 'macos') - run: | - cmake -S . -B cmake.output.tinyxml2 -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DUSE_BUNDLED_TINYXML2=Off -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DQt6_DIR=$(brew --prefix qt@6)/lib/cmake/Qt6 - cmake --build cmake.output.tinyxml2 -- -j$(nproc) - - - name: Run CMake test (system tinyxml2) - run: | - cmake --build cmake.output.tinyxml2 --target check -- -j$(nproc) - - build_cmake: - - strategy: - matrix: - os: [ubuntu-22.04, macos-15] - fail-fast: false # Prefer quick result - - runs-on: ${{ matrix.os }} - - env: - # TODO: figure out why there are cache misses with PCH enabled - CCACHE_SLOPPINESS: pch_defines,time_macros - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - # TODO: move latest compiler to separate step - # TODO: bail out on warnings with latest GCC - - name: Set up GCC - uses: egor-tensin/setup-gcc@v1 - if: false # matrix.os == 'ubuntu-22.04' - with: - version: 13 - platform: x64 - - - name: Select compiler - if: false # matrix.os == 'ubuntu-22.04' - run: | - echo "CXX=g++-13" >> $GITHUB_ENV - - - name: Install missing software on ubuntu - if: contains(matrix.os, 'ubuntu') - run: | - sudo apt-get update - sudo apt-get install libxml2-utils - # qt6-tools-dev-tools for lprodump - # 
qt6-l10n-tools for lupdate - sudo apt-get install qt6-base-dev libqt6charts6-dev qt6-tools-dev qt6-tools-dev-tools qt6-l10n-tools libglx-dev libgl1-mesa-dev - - # coreutils contains "nproc" - - name: Install missing software on macos - if: contains(matrix.os, 'macos') - run: | - # pcre was removed from runner images in November 2022 - brew install coreutils qt@6 pcre - - - name: Run CMake on ubuntu (with GUI) - if: contains(matrix.os, 'ubuntu') - run: | - cmake -S . -B cmake.output -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_INSTALL_PREFIX=cppcheck-cmake-install - - - name: Run CMake on macos (with GUI) - if: contains(matrix.os, 'macos') - run: | - cmake -S . -B cmake.output -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_INSTALL_PREFIX=cppcheck-cmake-install -DQt6_DIR=$(brew --prefix qt@6)/lib/cmake/Qt6 - - - name: Run CMake build - run: | - cmake --build cmake.output -- -j$(nproc) - - - name: Run CMake test - run: | - cmake --build cmake.output --target check -- -j$(nproc) - - - name: Run CTest - run: | - pushd cmake.output - ctest --output-on-failure -j$(nproc) - - - name: Run CMake install - run: | - cmake --build cmake.output --target install - - - name: Run CMake on ubuntu (no CLI) - if: matrix.os == 'ubuntu-22.04' - run: | - cmake -S . -B cmake.output_nocli -Werror=dev -DBUILD_TESTING=Off -DBUILD_CLI=Off - - - name: Run CMake on ubuntu (no CLI / with tests) - if: matrix.os == 'ubuntu-22.04' - run: | - # the test and CLI code are too intertwined so for now we need to reject that - if cmake -S . 
-B cmake.output_nocli_tests -Werror=dev -DBUILD_TESTING=On -DBUILD_CLI=Off; then - exit 1 - else - exit 0 - fi - - - name: Run CMake on ubuntu (no CLI / with GUI) - if: matrix.os == 'ubuntu-22.04' - run: | - cmake -S . -B cmake.output_nocli_gui -Werror=dev -DBUILD_TESTING=Off -DBUILD_CLI=Off -DBUILD_GUI=On - - - name: Run CMake on ubuntu (no GUI) - if: matrix.os == 'ubuntu-22.04' - run: | - cmake -S . -B cmake.output_nogui -Werror=dev -DBUILD_TESTING=Off -DBUILD_GUI=Off - - - name: Run CMake on ubuntu (no GUI / with triage) - if: matrix.os == 'ubuntu-22.04' - run: | - # cannot build triage without GUI - if cmake -S . -B cmake.output_nogui_triage -Werror=dev -DBUILD_TESTING=Off -DBUILD_GUI=Off -DBUILD_TRIAGE=On; then - exit 1 - else - exit 0 - fi - - - name: Run CMake on ubuntu (no CLI / no GUI) - if: matrix.os == 'ubuntu-22.04' - run: | - cmake -S . -B cmake.output_nocli_nogui -Werror=dev -DBUILD_TESTING=Off -DBUILD_GUI=Off - - build_cmake_cxxstd: - - strategy: - matrix: - os: [ubuntu-22.04, macos-15] - cxxstd: [14, 17, 20] - # FIXME: macos-15 fails to compile with C++20 - # - # /Users/runner/work/cppcheck/cppcheck/cmake.output/gui/test/projectfile/moc_testprojectfile.cpp:84:1: error: 'constinit' specifier is incompatible with C++ standards before C++20 [-Werror,-Wc++20-compat] - # 84 | Q_CONSTINIT const QMetaObject TestProjectFile::staticMetaObject = { { - # | ^ - # /opt/homebrew/opt/qt/lib/QtCore.framework/Headers/qcompilerdetection.h:1409:23: note: expanded from macro 'Q_CONSTINIT' - exclude: - - os: macos-15 - cxxstd: 20 - fail-fast: false # Prefer quick result - - runs-on: ${{ matrix.os }} - - env: - # TODO: figure out why there are cache misses with PCH enabled - CCACHE_SLOPPINESS: pch_defines,time_macros - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }}-${{ matrix.cxxstd }} - - - name: Install missing 
software on ubuntu - if: contains(matrix.os, 'ubuntu') - run: | - sudo apt-get update - sudo apt-get install libxml2-utils - # qt6-tools-dev-tools for lprodump - # qt6-l10n-tools for lupdate - sudo apt-get install qt6-base-dev libqt6charts6-dev qt6-tools-dev qt6-tools-dev-tools qt6-l10n-tools libglx-dev libgl1-mesa-dev - - # coreutils contains "nproc" - - name: Install missing software on macos - if: contains(matrix.os, 'macos') - run: | - # pcre was removed from runner images in November 2022 - brew install coreutils qt@6 pcre - - - name: Run CMake on ubuntu (with GUI) - if: contains(matrix.os, 'ubuntu') - run: | - cmake -S . -B cmake.output -Werror=dev -DCMAKE_CXX_STANDARD=${{ matrix.cxxstd }} -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache - - - name: Run CMake on macos (with GUI) - if: contains(matrix.os, 'macos') - run: | - cmake -S . 
-B cmake.output -Werror=dev -DCMAKE_CXX_STANDARD=${{ matrix.cxxstd }} -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DQt6_DIR=$(brew --prefix qt@6)/lib/cmake/Qt6 - - - name: Run CMake build - run: | - cmake --build cmake.output -- -j$(nproc) - - build_uchar: - - strategy: - matrix: - os: [ubuntu-22.04, macos-15] - fail-fast: false # Prefer quick result - - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - # coreutils contains "nproc" - - name: Install missing software on macos - if: contains(matrix.os, 'macos') - run: | - brew install coreutils - - - name: Build with Unsigned char - run: | - export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - make -j$(nproc) CXXOPTS="-Werror -funsigned-char" testrunner - - - name: Test with Unsigned char - run: | - ./testrunner - - build_mathlib: - - strategy: - matrix: - os: [ubuntu-22.04, macos-15] - fail-fast: false # Prefer quick result - - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - # coreutils contains "nproc" - - name: Install missing software on macos - if: contains(matrix.os, 'macos') - run: | - brew install coreutils - - - name: Build with TEST_MATHLIB_VALUE - run: | - export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - make -j$(nproc) CXXOPTS="-Werror" CPPOPTS=-DTEST_MATHLIB_VALUE all - - - name: Test with TEST_MATHLIB_VALUE - run: | - make -j$(nproc) test - - check_nonneg: - - strategy: - matrix: - os: [ubuntu-22.04, macos-15] - 
fail-fast: false # Prefer quick result - - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - # coreutils contains "g++" (default is "c++") and "nproc" - - name: Install missing software on macos - if: contains(matrix.os, 'macos') - run: | - brew install coreutils - - - name: Check syntax with NONNEG - run: | - make check-nonneg CXXOPTS="-Werror" - - build_cmake_boost: - - strategy: - matrix: - os: [macos-15] # non-macos platforms are already built with Boost in other contexts - fail-fast: false # Prefer quick result - - runs-on: ${{ matrix.os }} - - env: - # TODO: figure out why there are cache misses with PCH enabled - CCACHE_SLOPPINESS: pch_defines,time_macros - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - - name: Run CMake on macOS (force Boost) - run: | - # make sure we fail when Boost is requested and not available. - # will fail because no package configuration is available. - if cmake -S . -B cmake.output.boost-force-noavail -Werror=dev -DBUILD_TESTING=Off -DUSE_BOOST=On; then - exit 1 - else - exit 0 - fi - - # coreutils contains "nproc" - - name: Install missing software on macOS - run: | - brew install coreutils boost - - - name: Run CMake on macOS (force Boost) - run: | - cmake -S . -B cmake.output.boost-force -Werror=dev -DBUILD_TESTING=Off -DUSE_BOOST=On - - - name: Run CMake on macOS (no Boost) - run: | - # make sure Boost is not used when disabled even though it is available - cmake -S . -B cmake.output.boost-no -Werror=dev -DBUILD_TESTING=Off -DUSE_BOOST=Off - if grep -q '\-DHAVE_BOOST' ./cmake.output.boost-no/compile_commands.json; then - exit 1 - else - exit 0 - fi - - - name: Run CMake on macOS (with Boost) - run: | - cmake -S . 
-B cmake.output.boost -Werror=dev -DBUILD_TESTING=On -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache - grep -q '\-DHAVE_BOOST' ./cmake.output.boost/compile_commands.json - - - name: Build with CMake on macOS (with Boost) - run: | - cmake --build cmake.output.boost -- -j$(nproc) - - build_cmake_minimum: # TODO: move to docker workflow? - - runs-on: ubuntu-22.04 # use the oldest available runner - - env: - CMAKE_VERSION: 3.22 - CMAKE_VERSION_FULL: 3.22.6 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install missing software - run: | - sudo apt-get update - sudo apt-get install libxml2-utils - # qt6-tools-dev-tools for lprodump - # qt6-l10n-tools for lupdate - sudo apt-get install qt6-base-dev libqt6charts6-dev qt6-tools-dev qt6-tools-dev-tools qt6-l10n-tools libglx-dev libgl1-mesa-dev - - - name: Install CMake - run: | - wget https://cmake.org/files/v${{ env.CMAKE_VERSION }}/cmake-${{ env.CMAKE_VERSION_FULL }}-linux-x86_64.tar.gz - tar xf cmake-${{ env.CMAKE_VERSION_FULL }}-linux-x86_64.tar.gz - - - name: Run CMake (without GUI) - run: | - export PATH=cmake-${{ env.CMAKE_VERSION_FULL }}-linux-x86_64/bin:$PATH - cmake -S . -B cmake.output -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On - - - name: Run CMake (with GUI) - run: | - export PATH=cmake-${{ env.CMAKE_VERSION_FULL }}-linux-x86_64/bin:$PATH - cmake -S . 
-B cmake.output.gui -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On - - build: - - strategy: - matrix: - os: [ubuntu-22.04, macos-15] - include: - - xdist_n: auto - # FIXME: test_color_tty fails with xdist - see #13278 - - os: macos-15 - xdist_n: '1' - fail-fast: false # Prefer quick result - - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - - name: Install missing software on ubuntu - if: contains(matrix.os, 'ubuntu') - run: | - sudo apt-get update - sudo apt-get install libxml2-utils - - # packages for strict cfg checks - - name: Install missing software on ubuntu 22.04 (cfg) - if: matrix.os == 'ubuntu-22.04' - run: | - sudo apt-get install libcairo2-dev libcurl4-openssl-dev liblua5.3-dev libssl-dev libsqlite3-dev libcppunit-dev libsigc++-2.0-dev libgtk-3-dev libboost-all-dev libselinux-dev libwxgtk3.0-gtk3-dev xmlstarlet qtbase5-dev - - # coreutils contains "nproc" - - name: Install missing software on macos - if: contains(matrix.os, 'macos') - run: | - # pcre was removed from runner images in November 2022 - brew install coreutils pcre gnu-sed - - - name: Install missing Python packages on ubuntu - if: contains(matrix.os, 'ubuntu') - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pytest - python3 -m pip install pytest-timeout - python3 -m pip install pytest-xdist - python3 -m pip install psutil - - # we need to use -break-system-packages --user because the common approaches do not work. - # using pip works but it appears to install the packages into a different Python installation so they are not found later on. - # using python3 -m pip without the additional flags fails since the packages are being managed by a different tool (brew) and that lacks some of the packages. 
- # using pipx also does not work. - - name: Install missing Python packages on macos - if: contains(matrix.os, 'macos') - run: | - python3 -m pip install --break-system-packages --user pip --upgrade - python3 -m pip install --break-system-packages --user pytest - python3 -m pip install --break-system-packages --user pytest-timeout - python3 -m pip install --break-system-packages --user pytest-xdist - python3 -m pip install --break-system-packages --user psutil - - - name: Build cppcheck - run: | - export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - make -j$(nproc) CXXOPTS="-Werror" HAVE_RULES=yes - - - name: Build test - run: | - export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - make -j$(nproc) CXXOPTS="-Werror" HAVE_RULES=yes testrunner - - - name: Run test - run: | - make -j$(nproc) HAVE_RULES=yes test - - # requires "gnu-sed" installed on macos - - name: Run extra tests - run: | - test/scripts/generate_and_run_more_tests.sh - - - name: Run test/cli - run: | - python3 -m pytest -Werror --strict-markers -vv -n ${{ matrix.xdist_n }} test/cli - - # TODO: use the step below instead - # do not use pushd in this step since we go below the working directory - - name: Run test/cli (symlink) - run: | - cd .. - ln -s cppcheck 'cpp check' - cd 'cpp check/test/cli' - python3 -m pytest -Werror --strict-markers -vv -n ${{ matrix.xdist_n }} - - # FIXME: proj2_test.py fails because of the relative path cleanups in ImportProject::setRelativePaths() - # It fails because the application path used as base path has its symlink resolved by getcwd(). - - name: Run test/cli (symlink) - if: false - run: | - ln -s . 
'cpp check' - python3 -m pytest -Werror --strict-markers -vv -n ${{ matrix.xdist_n }} 'cpp check/test/cli' - - - name: Run test/cli (-j2) - run: | - python3 -m pytest -Werror --strict-markers -vv -n ${{ matrix.xdist_n }} test/cli - env: - TEST_CPPCHECK_INJECT_J: 2 - - - name: Run test/cli (--clang) - if: false - run: | - python3 -m pytest -Werror --strict-markers -vv -n ${{ matrix.xdist_n }} test/cli - env: - TEST_CPPCHECK_INJECT_CLANG: clang - - - name: Run test/cli (--cppcheck-build-dir) - run: | - python3 -m pytest -Werror --strict-markers -vv -n ${{ matrix.xdist_n }} test/cli - env: - TEST_CPPCHECK_INJECT_BUILDDIR: injected - - - name: Run cfg tests - if: matrix.os != 'ubuntu-22.04' - run: | - make -j$(nproc) checkcfg - - - name: Run cfg tests (strict) - if: matrix.os == 'ubuntu-22.04' - run: | - make -j$(nproc) checkcfg - env: - STRICT: 1 - - - name: Run --dump test - run: | - ./cppcheck test/testpreprocessor.cpp --dump - xmllint --noout test/testpreprocessor.cpp.dump - - - name: Validate - run: | - make -j$(nproc) checkCWEEntries validateXML - - - name: Test install - run: | - # this is only to test the "install" target - since we did not build with FILESDIR it would not work as intended - make DESTDIR=cppcheck-make-install FILESDIR=/share/Cppcheck install - rm -rf cppcheck-make-install - - - name: Test Signalhandler - run: | - cmake -S . -B build.cmake.signal -Werror=dev -DBUILD_TESTING=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On - cmake --build build.cmake.signal --target test-signalhandler -- -j$(nproc) - # TODO: how to run this without copying the file? - cp build.cmake.signal/bin/test-s* . - python3 -m pytest -Werror --strict-markers -vv test/signal/test-signalhandler.py - rm test-signalhandler - - # no unix backtrace support on MacOs - - name: Test Stacktrace - if: contains(matrix.os, 'ubuntu') - run: | - cmake -S . 
-B build.cmake.stack -Werror=dev -DBUILD_TESTING=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On - cmake --build build.cmake.stack --target test-stacktrace -- -j$(nproc) - # TODO: how to run this without copying the file? - cp build.cmake.stack/bin/test-s* . - python3 -m pytest -Werror --strict-markers -vv test/signal/test-stacktrace.py - rm test-stacktrace - - # TODO: move to scriptcheck.yml so these are tested with all Python versions? - - name: Test addons - run: | - set -x - ./cppcheck --error-exitcode=1 --inline-suppr --addon=threadsafety addons/test/threadsafety - ./cppcheck --error-exitcode=1 --inline-suppr --addon=threadsafety --std=c++03 addons/test/threadsafety - ./cppcheck --error-exitcode=1 --inline-suppr --addon=misra addons/test/misra/crash*.c - ./cppcheck --error-exitcode=1 --inline-suppr --addon=misra --enable=information addons/test/misra/config*.c - - ./cppcheck --addon=misra --enable=style --inline-suppr --enable=information --error-exitcode=1 addons/test/misra/misra-ctu-*-test.c - pushd addons/test - # We'll force C89 standard to enable an additional verification for - # rules 5.4 and 5.5 which have standard-dependent options. - ../../cppcheck --dump -DDUMMY --suppress=uninitvar --inline-suppr misra/misra-test.c --std=c89 --platform=unix64 - python3 ../misra.py -verify misra/misra-test.c.dump - # Test slight MISRA differences in C11 standard - ../../cppcheck --dump -DDUMMY --suppress=uninitvar --inline-suppr misra/misra-test-c11.c --std=c11 --platform=unix64 - python3 ../misra.py -verify misra/misra-test-c11.c.dump - # TODO: do we need to verify something here? 
- ../../cppcheck --dump -DDUMMY --suppress=uninitvar --suppress=uninitStructMember --std=c89 misra/misra-test.h - ../../cppcheck --dump misra/misra-test.cpp - python3 ../misra.py -verify misra/misra-test.cpp.dump - python3 ../misra.py --rule-texts=misra/misra2012_rules_dummy_ascii.txt -verify misra/misra-test.cpp.dump - python3 ../misra.py --rule-texts=misra/misra2012_rules_dummy_utf8.txt -verify misra/misra-test.cpp.dump - python3 ../misra.py --rule-texts=misra/misra2012_rules_dummy_windows1250.txt -verify misra/misra-test.cpp.dump - ../../cppcheck --addon=misra --enable=style --platform=avr8 --error-exitcode=1 misra/misra-test-avr8.c - ../../cppcheck --dump misc-test.cpp - python3 ../misc.py -verify misc-test.cpp.dump - ../../cppcheck --dump naming_test.c - python3 ../naming.py --var='[a-z].*' --function='[a-z].*' naming_test.c.dump - ../../cppcheck --dump naming_test.cpp - python3 ../naming.py --var='[a-z].*' --function='[a-z].*' naming_test.cpp.dump - - # TODO: run with "-n auto" when misra_test.py can be run in parallel - - name: test addons (Python) - if: matrix.os != 'ubuntu-22.04' - run: | - python3 -m pytest -Werror --strict-markers -vv -n 1 addons/test - env: - PYTHONPATH: ./addons - - # TODO: run with "-n auto" when misra_test.py can be run in parallel - # we cannot specify -Werror since xml/etree/ElementTree.py in Python 3.10 contains an unclosed file - - name: test addons (Python) - if: matrix.os == 'ubuntu-22.04' - run: | - python3 -m pytest --strict-markers -vv -n 1 addons/test - env: - PYTHONPATH: ./addons - - - name: Build democlient - if: matrix.os == 'ubuntu-22.04' - run: | - warnings="-pedantic -Wall -Wextra -Wcast-qual -Wno-deprecated-declarations -Wfloat-equal -Wmissing-declarations -Wmissing-format-attribute -Wno-long-long -Wpacked -Wredundant-decls -Wundef -Wno-shadow -Wno-missing-field-initializers -Wno-missing-braces -Wno-sign-compare -Wno-multichar" - g++ $warnings -c -Ilib -Iexternals/tinyxml2 democlient/democlient.cpp - - - name: Test 
disabled executors - if: matrix.os == 'ubuntu-22.04' - run: | - g++ -Ilib -c cli/threadexecutor.cpp -DDISALLOW_THREAD_EXECUTOR - test -z "$(nm threadexecutor.o)" - g++ -Ilib -c cli/processexecutor.cpp -DDISALLOW_PROCESS_EXECUTOR - test -z "$(nm processexecutor.o)" - # TODO: test NO_* defines - - - name: Show all ignored files - if: false # TODO: currently lists all the contents of ignored folders - we only need what actually matched - run: | - git ls-files --others --ignored --exclude-standard - - - name: Check for changed and unversioned files - run: | - # TODO: how to do this with a single command? - git status --ignored=no - git status --ignored=no | grep -q 'working tree clean' - - selfcheck: - needs: build # wait for all tests to be successful first - - runs-on: ubuntu-22.04 # run on the latest image only - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - - name: Install missing software on ubuntu - run: | - sudo apt-get update - # qt6-tools-dev-tools for lprodump - # qt6-l10n-tools for lupdate - sudo apt-get install qt6-base-dev libqt6charts6-dev qt6-tools-dev qt6-tools-dev-tools qt6-l10n-tools libglx-dev libgl1-mesa-dev - sudo apt-get install libboost-container-dev - - - name: Self check (build) - run: | - export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - # compile with verification and ast matchers - make -j$(nproc) CXXOPTS="-Werror -g -O2" CPPOPTS="-DCHECK_INTERNAL -DHAVE_BOOST" MATCHCOMPILER=yes VERIFY=1 - - - name: CMake - run: | - cmake -S . 
-B cmake.output -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DUSE_MATCHCOMPILER=Verify -DENABLE_CHECK_INTERNAL=On -DCPPCHK_GLIBCXX_DEBUG=Off -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On - - - name: Generate dependencies - run: | - # make sure auto-generated GUI files exist - make -C cmake.output autogen - make -C cmake.output gui-build-deps triage-build-ui-deps - - - name: Self check - run: | - ./selfcheck.sh diff --git a/.github/workflows/CI-windows.yml b/.github/workflows/CI-windows.yml deleted file mode 100644 index c993c57753a..00000000000 --- a/.github/workflows/CI-windows.yml +++ /dev/null @@ -1,338 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: CI-windows - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -defaults: - run: - shell: cmd - -jobs: - - build_qt: - strategy: - matrix: - os: [windows-2022, windows-2025] - qt_ver: [6.10.0] - fail-fast: false - - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Set up Visual Studio environment - uses: ilammy/msvc-dev-cmd@v1 - with: - arch: x64 - - - name: Install Qt ${{ matrix.qt_ver }} - uses: jurplel/install-qt-action@v4 - with: - version: ${{ matrix.qt_ver }} - modules: 'qtcharts' - setup-python: 'false' - cache: true - aqtversion: '==3.1.*' # TODO: remove when aqtinstall 3.2.2 is available - - - name: Run CMake - run: | - rem TODO: enable rules? - rem specify Release build so matchcompiler is used - cmake -S . 
-B build -Werror=dev -DCMAKE_BUILD_TYPE=Release -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DBUILD_TESTING=Off -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DBUILD_ONLINE_HELP=On -DCMAKE_INSTALL_PREFIX=cppcheck-cmake-install -DCMAKE_COMPILE_WARNING_AS_ERROR=On || exit /b !errorlevel! - - - name: Build GUI release - run: | - cmake --build build --target cppcheck-gui --config Release || exit /b !errorlevel! - - - name: Deploy GUI - run: | - windeployqt build\bin\Release || exit /b !errorlevel! - del build\bin\Release\cppcheck-gui.ilk || exit /b !errorlevel! - del build\bin\Release\cppcheck-gui.pdb || exit /b !errorlevel! - - # TODO: run GUI tests - - - name: Run CMake install - run: | - cmake --build build --target install - - build_cmake_cxxstd: - strategy: - matrix: - os: [windows-2022, windows-2025] - cxxstd: [14, 17, 20] - fail-fast: false - - runs-on: ${{ matrix.os }} - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Set up Visual Studio environment - uses: ilammy/msvc-dev-cmd@v1 - with: - arch: x64 - - - name: Run CMake - run: | - cmake -S . -B build.cxxstd -Werror=dev -G "Visual Studio 17 2022" -A x64 -DCMAKE_CXX_STANDARD=${{ matrix.cxxstd }} -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTING=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On || exit /b !errorlevel! - - - name: Build - run: | - cmake --build build.cxxstd --config Debug || exit /b !errorlevel! - - build_cmake_minimum: - - runs-on: windows-2022 # use the oldest available runner - - env: - CMAKE_VERSION: 3.22 - CMAKE_VERSION_FULL: 3.22.6 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install CMake - run: | - curl -fsSL https://cmake.org/files/v${{ env.CMAKE_VERSION }}/cmake-${{ env.CMAKE_VERSION_FULL }}-windows-x86_64.zip -o cmake.zip || exit /b !errorlevel! - 7z x cmake.zip || exit /b !errorlevel! 
- - - name: Set up Visual Studio environment - uses: ilammy/msvc-dev-cmd@v1 - with: - arch: x64 - - - name: Install Qt - uses: jurplel/install-qt-action@v4 - with: - version: 6.10.0 - modules: 'qtcharts' - setup-python: 'false' - cache: true - aqtversion: '==3.1.*' # TODO: remove when aqtinstall 3.2.2 is available - - - name: Run CMake (without GUI) - run: | - :: TODO: enable DHAVE_RULES? - cmake-${{ env.CMAKE_VERSION_FULL }}-windows-x86_64\bin\cmake.exe -S . -B cmake.output -G "Visual Studio 17 2022" -A x64 -DHAVE_RULES=Off -DBUILD_TESTING=On - - - name: Run CMake (with GUI) - run: | - :: TODO: enable DHAVE_RULES? - cmake-${{ env.CMAKE_VERSION_FULL }}-windows-x86_64\bin\cmake.exe -S . -B cmake.output.gui -G "Visual Studio 17 2022" -A x64 -DHAVE_RULES=Off -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On - - build: - strategy: - matrix: - os: [windows-2022, windows-2025] - config: [debug, release] - fail-fast: false - - runs-on: ${{ matrix.os }} - - env: - # see https://www.pcre.org/original/changelog.txt - PCRE_VERSION: 8.45 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Set up Python - if: matrix.config == 'release' - uses: actions/setup-python@v5 - with: - python-version: '3.14' - check-latest: true - - - name: Set up Visual Studio environment - uses: ilammy/msvc-dev-cmd@v1 - with: - arch: x64 - - - name: Cache PCRE - id: cache-pcre - uses: actions/cache@v4 - with: - path: | - externals\pcre.h - externals\pcre.lib - externals\pcre64.lib - key: pcre-${{ env.PCRE_VERSION }}-x64-bin-win - - - name: Download PCRE - if: steps.cache-pcre.outputs.cache-hit != 'true' - run: | - curl -fsSL https://github.com/pfultz2/pcre/archive/refs/tags/%PCRE_VERSION%.zip -o pcre-%PCRE_VERSION%.zip || exit /b !errorlevel! - - - name: Install PCRE - if: steps.cache-pcre.outputs.cache-hit != 'true' - run: | - @echo on - 7z x pcre-%PCRE_VERSION%.zip || exit /b !errorlevel! - cd pcre-%PCRE_VERSION% || exit /b !errorlevel! 
- git apply --ignore-space-change ..\externals\pcre.patch || exit /b !errorlevel! - cmake . -G "NMake Makefiles" -DCMAKE_BUILD_TYPE=Release -DPCRE_BUILD_PCRECPP=Off -DPCRE_BUILD_TESTS=Off -DPCRE_BUILD_PCREGREP=Off -DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DCMAKE_COMPILE_WARNING_AS_ERROR=On || exit /b !errorlevel! - nmake || exit /b !errorlevel! - copy pcre.h ..\externals || exit /b !errorlevel! - copy pcre.lib ..\externals\pcre64.lib || exit /b !errorlevel! - env: - CL: /MP - - - name: Install missing Python packages - if: matrix.config == 'release' - run: | - python -m pip install pip --upgrade || exit /b !errorlevel! - python -m pip install pytest || exit /b !errorlevel! - python -m pip install pytest-custom_exit_code || exit /b !errorlevel! - python -m pip install pytest-timeout || exit /b !errorlevel! - python -m pip install pytest-xdist || exit /b !errorlevel! - python -m pip install psutil || exit /b !errorlevel! - - - name: Build CLI debug configuration using MSBuild - if: matrix.config == 'debug' - run: | - :: cmake --build build --target check --config Debug || exit /b !errorlevel! - msbuild -m cppcheck.sln /p:Configuration=Debug-PCRE;Platform=x64 -maxcpucount || exit /b !errorlevel! - env: - _CL_: /WX - - - name: Run Debug test - if: matrix.config == 'debug' - run: .\bin\debug\testrunner.exe || exit /b !errorlevel! - - - name: Build CLI release configuration using MSBuild - if: matrix.config == 'release' - run: | - :: cmake --build build --target check --config Release || exit /b !errorlevel! - msbuild -m cppcheck.sln /p:Configuration=Release-PCRE;Platform=x64 -maxcpucount || exit /b !errorlevel! - env: - _CL_: /WX - - - name: Run Release test - if: matrix.config == 'release' - run: .\bin\testrunner.exe || exit /b !errorlevel! - - - name: Prepare test/cli - if: matrix.config == 'release' - run: | - :: since FILESDIR is not set copy the binary to the root so the addons are found - :: copy .\build\bin\Release\cppcheck.exe .\cppcheck.exe || exit /b !errorlevel! 
- copy .\bin\cppcheck.exe .\cppcheck.exe || exit /b !errorlevel! - copy .\bin\cppcheck-core.dll .\cppcheck-core.dll || exit /b !errorlevel! - - - name: Run test/cli - if: matrix.config == 'release' - run: | - python -m pytest -Werror --strict-markers -vv -n auto test/cli || exit /b !errorlevel! - - - name: Run test/cli (-j2) - if: matrix.config == 'release' - run: | - python -m pytest -Werror --strict-markers -vv -n auto test/cli || exit /b !errorlevel! - env: - TEST_CPPCHECK_INJECT_J: 2 - - # TODO: install clang - - name: Run test/cli (--clang) - if: false # matrix.config == 'release' - run: | - python -m pytest -Werror --strict-markers -vv -n auto test/cli || exit /b !errorlevel! - env: - TEST_CPPCHECK_INJECT_CLANG: clang - - - name: Run test/cli (--cppcheck-build-dir) - if: matrix.config == 'release' - run: | - python -m pytest -Werror --strict-markers -vv -n auto test/cli || exit /b !errorlevel! - env: - TEST_CPPCHECK_INJECT_BUILDDIR: injected - - # TODO: test with Release configuration? - - name: Test SEH wrapper - if: matrix.config == 'release' - run: | - cmake -S . -B build.cmake.seh -Werror=dev -DBUILD_TESTING=On -DCMAKE_COMPILE_WARNING_AS_ERROR=On || exit /b !errorlevel! - cmake --build build.cmake.seh --target test-sehwrapper || exit /b !errorlevel! - :: TODO: how to run this without copying the file? - copy build.cmake.seh\bin\Debug\test-sehwrapper.exe . || exit /b !errorlevel! - python3 -m pytest -Werror --strict-markers -vv test/seh/test-sehwrapper.py || exit /b !errorlevel! - del test-sehwrapper.exe || exit /b !errorlevel! - - - name: Test addons - if: matrix.config == 'release' - run: | - echo on - .\cppcheck --addon=threadsafety addons\test\threadsafety || exit /b !errorlevel! - .\cppcheck --addon=threadsafety --std=c++03 addons\test\threadsafety || exit /b !errorlevel! - .\cppcheck --addon=misra --enable=style --inline-suppr --enable=information --error-exitcode=1 addons\test\misra\misra-ctu-*-test.c || exit /b !errorlevel! 
- cd addons\test - rem We'll force C89 standard to enable an additional verification for - rem rules 5.4 and 5.5 which have standard-dependent options. - ..\..\cppcheck --dump -DDUMMY --suppress=uninitvar --inline-suppr misra\misra-test.c --std=c89 --platform=unix64 || exit /b !errorlevel! - python3 ..\misra.py -verify misra\misra-test.c.dump || exit /b !errorlevel! - rem Test slight MISRA differences in C11 standard - ..\..\cppcheck --dump -DDUMMY --suppress=uninitvar --inline-suppr misra\misra-test-c11.c --std=c11 --platform=unix64 || exit /b !errorlevel! - python3 ..\misra.py -verify misra\misra-test-c11.c.dump || exit /b !errorlevel! - rem TODO: do we need to verify something here? - ..\..\cppcheck --dump -DDUMMY --suppress=uninitvar --suppress=uninitStructMember --std=c89 misra\misra-test.h || exit /b !errorlevel! - ..\..\cppcheck --dump misra\misra-test.cpp || exit /b !errorlevel! - python3 ..\misra.py -verify misra\misra-test.cpp.dump || exit /b !errorlevel! - python3 ..\misra.py --rule-texts=misra\misra2012_rules_dummy_ascii.txt -verify misra\misra-test.cpp.dump || exit /b !errorlevel! - python3 ..\misra.py --rule-texts=misra\misra2012_rules_dummy_utf8.txt -verify misra\misra-test.cpp.dump || exit /b !errorlevel! - python3 ..\misra.py --rule-texts=misra\misra2012_rules_dummy_windows1250.txt -verify misra\misra-test.cpp.dump || exit /b !errorlevel! - ..\..\cppcheck --addon=misra --enable=style --platform=avr8 --error-exitcode=1 misra\misra-test-avr8.c || exit /b !errorlevel! - ..\..\cppcheck --dump misc-test.cpp || exit /b !errorlevel! - python3 ..\misc.py -verify misc-test.cpp.dump || exit /b !errorlevel! - ..\..\cppcheck --dump naming_test.c || exit /b !errorlevel! - rem TODO: fix this - does not fail on Linux - rem python3 ..\naming.py --var='[a-z].*' --function='[a-z].*' naming_test.c.dump || exit /b !errorlevel! - ..\..\cppcheck --dump naming_test.cpp || exit /b !errorlevel! 
- python3 ..\naming.py --var='[a-z].*' --function='[a-z].*' naming_test.cpp.dump || exit /b !errorlevel! - - # TODO: run with "-n auto" when misra_test.py can be run in parallel - - name: test addons (Python) - if: matrix.config == 'release' - run: | - python -m pytest -Werror --strict-markers -vv -n 1 addons/test || exit /b !errorlevel! - env: - PYTHONPATH: ./addons - - - name: Check Windows test syntax - if: matrix.config == 'debug' - run: | - cd test\cfg - cl.exe windows.cpp -DUNICODE=1 -D_UNICODE=1 /Zs || exit /b !errorlevel! - cl.exe mfc.cpp /EHsc /Zs || exit /b !errorlevel! - - - name: Show all ignored files - if: false # TODO: currently lists all the contents of ignored folders - we only need what actually matched - run: | - git ls-files --others --ignored --exclude-standard || exit /b !errorlevel! - - - name: Check for changed and unversioned files - run: | - :: TODO: how to do this with a single command? - git status --ignored=no - :: TODO: make this work - :: git status --ignored=no | grep -q 'working tree clean' diff --git a/.github/workflows/asan.yml b/.github/workflows/asan.yml deleted file mode 100644 index 4609aa88621..00000000000 --- a/.github/workflows/asan.yml +++ /dev/null @@ -1,143 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: address sanitizer - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - build: - - runs-on: ubuntu-22.04 - - env: - QT_VERSION: 6.10.0 - ASAN_OPTIONS: detect_stack_use_after_return=1 - # TODO: figure out why there are cache misses with PCH enabled - CCACHE_SLOPPINESS: pch_defines,time_macros - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ 
github.workflow }}-${{ github.job }}-${{ matrix.os }} - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.14' - check-latest: true - - - name: Install missing software on ubuntu - run: | - sudo apt-get update - sudo apt-get install -y cmake make libpcre3-dev libboost-container-dev libxml2-utils - sudo apt-get install -y libcups2-dev # required for Qt6PrintSupport in CMake since Qt 6.7.3 - - - name: Install clang - run: | - sudo apt-get purge --auto-remove llvm python3-lldb-14 llvm-14 - wget https://apt.llvm.org/llvm.sh - chmod +x llvm.sh - sudo ./llvm.sh 22 - - - name: Install Qt ${{ env.QT_VERSION }} - uses: jurplel/install-qt-action@v4 - with: - version: ${{ env.QT_VERSION }} - modules: 'qtcharts' - setup-python: 'false' - cache: true - - - name: Install missing Python packages - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pytest - python3 -m pip install pytest-timeout - python3 -m pip install pytest-xdist - python3 -m pip install psutil - - - name: CMake - run: | - cmake -S . 
-B cmake.output -Werror=dev -DCMAKE_BUILD_TYPE=RelWithDebInfo -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DUSE_MATCHCOMPILER=Verify -DANALYZE_ADDRESS=On -DENABLE_CHECK_INTERNAL=On -DUSE_BOOST=On -DCPPCHK_GLIBCXX_DEBUG=Off -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On -DFILESDIR= -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache - env: - CC: clang-22 - CXX: clang++-22 - - - name: Build cppcheck - run: | - cmake --build cmake.output --target cppcheck -- -j $(nproc) - - - name: Build test - run: | - cmake --build cmake.output --target testrunner -- -j $(nproc) - - - name: Build GUI tests - run: | - cmake --build cmake.output --target gui-tests -- -j $(nproc) - - - name: Run tests - run: ./cmake.output/bin/testrunner - - - name: Run cfg tests - run: | - cmake --build cmake.output --target checkcfg -- -j $(nproc) - - - name: Run CTest - run: | - ctest --test-dir cmake.output --output-on-failure -j$(nproc) - - - name: Run test/cli - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - - - name: Run test/cli (-j2) - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_J: 2 - - - name: Run test/cli (--clang) - if: false - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_CLANG: clang - - - name: Run test/cli (--cppcheck-build-dir) - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_BUILDDIR: injected - - - name: Generate dependencies - if: false - run: | - # make sure auto-generated GUI files exist - make -C 
cmake.output autogen - make -C cmake.output gui-build-deps triage-build-ui-deps - - # TODO: this is currently way too slow (~60 minutes) to enable it - # TODO: only fail the step on sanitizer issues - since we use processes it will only fail the underlying process which will result in an cppcheckError - - name: Self check - if: false - run: | - ./selfcheck_san.sh ./cmake.output diff --git a/.github/workflows/buildman.yml b/.github/workflows/buildman.yml deleted file mode 100644 index b0b399dd851..00000000000 --- a/.github/workflows/buildman.yml +++ /dev/null @@ -1,65 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: Build manual - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - convert_via_pandoc: - runs-on: ubuntu-24.04 - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - run: | - mkdir output - - - uses: docker://pandoc/latex:3.6.3 - with: - args: --output=output/manual.html man/manual.md - - - uses: docker://pandoc/latex:3.6.3 - with: - args: --output=output/manual.pdf man/manual.md - - - uses: docker://pandoc/latex:3.6.3 - with: - args: --output=output/manual-premium.pdf man/manual-premium.md - - - uses: actions/upload-artifact@v4 - with: - name: output - path: output - - manpage: - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install missing software on ubuntu - run: | - sudo apt-get update - sudo apt-get install -y xsltproc docbook-xsl - - - name: build manpage - run: | - make man - - - uses: actions/upload-artifact@v4 - with: - name: cppcheck.1 - path: cppcheck.1 diff --git a/.github/workflows/cifuzz.yml b/.github/workflows/cifuzz.yml deleted file mode 100644 index 7b462c688f0..00000000000 --- 
a/.github/workflows/cifuzz.yml +++ /dev/null @@ -1,34 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: CIFuzz - -on: [pull_request] - -permissions: - contents: read - -jobs: - Fuzzing: - runs-on: ubuntu-latest - if: ${{ github.repository_owner == 'danmar' }} - steps: - - name: Build Fuzzers - id: build - uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master - with: - oss-fuzz-project-name: 'cppcheck' - dry-run: false - language: c++ - - name: Run Fuzzers - uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master - with: - oss-fuzz-project-name: 'cppcheck' - fuzz-seconds: 300 - dry-run: false - language: c++ - - name: Upload Crash - uses: actions/upload-artifact@v4 - if: failure() && steps.build.outcome == 'success' - with: - name: artifacts - path: ./out/artifacts diff --git a/.github/workflows/clang-tidy.yml b/.github/workflows/clang-tidy.yml deleted file mode 100644 index c4f8cc0cf6b..00000000000 --- a/.github/workflows/clang-tidy.yml +++ /dev/null @@ -1,93 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: clang-tidy - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - schedule: - - cron: '0 0 * * 0' - workflow_dispatch: - -permissions: - contents: read - -jobs: - build: - - runs-on: ubuntu-22.04 - - env: - QT_VERSION: 6.10.0 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install missing software - run: | - sudo apt-get update - sudo apt-get install -y cmake make - sudo apt-get install -y libpcre3-dev - sudo apt-get install -y libgl-dev # fixes missing dependency for Qt in CMake - - - name: Install 
clang - run: | - sudo apt-get purge --auto-remove llvm python3-lldb-14 llvm-14 - wget https://apt.llvm.org/llvm.sh - chmod +x llvm.sh - sudo ./llvm.sh 22 - sudo apt-get install -y clang-tidy-22 - - - name: Install Qt ${{ env.QT_VERSION }} - uses: jurplel/install-qt-action@v4 - with: - version: ${{ env.QT_VERSION }} - modules: 'qtcharts' - setup-python: 'false' - install-deps: false - cache: true - - - name: Verify clang-tidy configuration - run: | - clang-tidy-22 --verify-config - - - name: Prepare CMake - run: | - cmake -S . -B cmake.output -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DENABLE_CHECK_INTERNAL=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On -DCPPCHK_GLIBCXX_DEBUG=Off -DCMAKE_COMPILE_WARNING_AS_ERROR=On - env: - CC: clang-22 - CXX: clang++-22 - - - name: Prepare CMake dependencies - run: | - # make sure the auto-generated GUI sources exist - make -C cmake.output autogen - # make sure the precompiled headers exist - make -C cmake.output/cli cmake_pch.hxx.pch - make -C cmake.output/gui cmake_pch.hxx.pch - make -C cmake.output/lib cmake_pch.hxx.pch - make -C cmake.output/test cmake_pch.hxx.pch - - - name: Clang-Tidy - if: ${{ github.event.schedule == '' && github.event_name != 'workflow_dispatch' }} - run: | - cmake --build cmake.output --target run-clang-tidy 2> /dev/null - - - name: Clang Static Analyzer - if: ${{ github.event.schedule != '' || github.event_name == 'workflow_dispatch' }} - run: | - cmake --build cmake.output --target run-clang-tidy-csa 2> /dev/null - - - uses: actions/upload-artifact@v4 - if: success() || failure() - with: - name: Compilation Database - path: ./cmake.output/compile_commands.json diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 12e758d2c9e..00000000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,52 +0,0 @@ -# Syntax reference 
https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: "CodeQL" - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-22.04 - permissions: - security-events: write - - strategy: - fail-fast: false - matrix: - # Override automatic language detection by changing the below list - # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] - language: ['cpp', 'python'] - # Learn more... - # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - persist-credentials: false - - # Initializes the CodeQL tools for scanning. 
- - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: ${{ matrix.language }} - - - name: Build cppcheck - if: matrix.language == 'cpp' - run: | - make -j$(nproc) CXXOPTS="-Werror" HAVE_RULES=yes CPPCHK_GLIBCXX_DEBUG= cppcheck - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml deleted file mode 100644 index 13f56172a80..00000000000 --- a/.github/workflows/coverage.yml +++ /dev/null @@ -1,71 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: Coverage - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - build: - - runs-on: ubuntu-22.04 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ runner.os }} - - - name: Install missing software on ubuntu - run: | - sudo apt-get update - sudo apt-get install libxml2-utils lcov - - - name: Install missing Python packages on ubuntu - run: | - python -m pip install pip --upgrade - python -m pip install lcov_cobertura - - - name: Compile instrumented - run: | - export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - make -j$(nproc) CXXOPTS="-Werror -g -fprofile-arcs -ftest-coverage" HAVE_RULES=yes CPPCHK_GLIBCXX_DEBUG= all - - - name: Run instrumented tests - run: | - ./testrunner - test/cfg/runtests.sh - - - name: Generate coverage report - run: | - gcov lib/*.cpp -o lib/ - lcov --directory ./ --capture --output-file lcov_tmp.info -b ./ - lcov --extract lcov_tmp.info "$(pwd)/*" --output-file lcov.info - genhtml lcov.info -o coverage_report --frame --legend --demangle-cpp - - - uses: 
actions/upload-artifact@v4 - with: - name: Coverage results - path: coverage_report - - - uses: codecov/codecov-action@v4 - with: - token: ${{ secrets.CODECOV_TOKEN }} - # file: ./coverage.xml # optional - flags: unittests # optional - name: ${{ github.repository }} # optional - fail_ci_if_error: true # optional (default = false): diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml deleted file mode 100644 index 3c07b61d7c7..00000000000 --- a/.github/workflows/coverity.yml +++ /dev/null @@ -1,41 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: Coverity - -on: - schedule: - - cron: "0 0 * * *" - -permissions: - contents: read - -jobs: - scan: - runs-on: ubuntu-latest - if: ${{ github.repository_owner == 'danmar' }} - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: Install missing software on ubuntu - run: | - sudo apt-get update - sudo apt-get install qtbase5-dev qttools5-dev libqt5charts5-dev libboost-container-dev - - name: Download Coverity build tool - run: | - wget -c -N https://scan.coverity.com/download/linux64 --post-data "token=${{ secrets.COVERITY_SCAN_TOKEN }}&project=cppcheck" -O coverity_tool.tar.gz - mkdir coverity_tool - tar xzf coverity_tool.tar.gz --strip 1 -C coverity_tool - - name: Build with Coverity build tool - run: | - export PATH=`pwd`/coverity_tool/bin:$PATH - cov-build --dir cov-int make CPPCHK_GLIBCXX_DEBUG= - - name: Submit build result to Coverity Scan - run: | - tar czvf cov.tar.gz cov-int - curl --form token=${{ secrets.COVERITY_SCAN_TOKEN }} \ - --form email=daniel.marjamaki@gmail.com \ - --form file=@cov.tar.gz \ - --form version="Commit $GITHUB_SHA" \ - --form description="Development" \ - https://scan.coverity.com/builds?project=cppcheck diff --git a/.github/workflows/cppcheck-premium.yml 
b/.github/workflows/cppcheck-premium.yml deleted file mode 100644 index 5cb63ca4d5e..00000000000 --- a/.github/workflows/cppcheck-premium.yml +++ /dev/null @@ -1,72 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: cppcheck-premium - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - workflow_dispatch: - inputs: - premium_version: - description: 'Cppcheck Premium version' - -permissions: - contents: read - security-events: write - -jobs: - - build: - runs-on: ubuntu-24.04 # run on the latest image only - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Download cppcheckpremium release - run: | - premium_version=${{ inputs.premium_version }} - if [ -z $premium_version ]; then - premium_version=25.8.3 - #wget https://files.cppchecksolutions.com/devdrop/cppcheckpremium-$premium_version-amd64.tar.gz -O cppcheckpremium.tar.gz - wget https://files.cppchecksolutions.com/$premium_version/ubuntu-24.04/cppcheckpremium-$premium_version-amd64.tar.gz -O cppcheckpremium.tar.gz - else - wget https://files.cppchecksolutions.com/$premium_version/ubuntu-24.04/cppcheckpremium-$premium_version-amd64.tar.gz -O cppcheckpremium.tar.gz - fi - tar xzf cppcheckpremium.tar.gz - mv cppcheckpremium-$premium_version cppcheckpremium - - - name: Generate a license file - run: | - echo cppcheck > cppcheck.lic - echo 261231 >> cppcheck.lic - echo 80000 >> cppcheck.lic - echo 4b64673f03fb6230 >> cppcheck.lic - echo path:lib >> cppcheck.lic - - - name: Check - run: | - cppcheckpremium/premiumaddon --check-loc-license cppcheck.lic > cppcheck-premium-loc - cppcheckpremium/cppcheck --premium=safety-off -j$(nproc) -D__GNUC__ -D__CPPCHECK__ --suppressions-list=cppcheckpremium-suppressions --platform=unix64 --enable=style 
--premium=misra-c++-2023 --premium=cert-c++-2016 --inline-suppr lib --error-exitcode=0 --output-format=sarif 2> results.sarif - - - name: Cat results - run: | - #sed -i 's|"security-severity":.*||' results.sarif - cat results.sarif - - - uses: actions/upload-artifact@v4 - with: - name: results - path: results.sarif - - - name: Upload report - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: results.sarif - category: cppcheckpremium diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml deleted file mode 100644 index fd491c0ec0e..00000000000 --- a/.github/workflows/format.yml +++ /dev/null @@ -1,55 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: format - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - build: - - runs-on: ubuntu-22.04 - - env: - UNCRUSTIFY_VERSION: 0.80.1 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Cache uncrustify - uses: actions/cache@v4 - id: cache-uncrustify - with: - path: | - ~/uncrustify - key: ${{ runner.os }}-uncrustify-${{ env.UNCRUSTIFY_VERSION }} - - - name: build uncrustify - if: steps.cache-uncrustify.outputs.cache-hit != 'true' - run: | - set -x - wget https://github.com/uncrustify/uncrustify/archive/refs/tags/uncrustify-${{ env.UNCRUSTIFY_VERSION }}.tar.gz - tar xzvf uncrustify-${{ env.UNCRUSTIFY_VERSION }}.tar.gz - cd uncrustify-uncrustify-${{ env.UNCRUSTIFY_VERSION }} - cmake -S . 
-B build -DCMAKE_BUILD_TYPE=Release - cmake --build build -- -j$(nproc) -s - mkdir ~/uncrustify - cp build/uncrustify ~/uncrustify/ - - - name: Uncrustify check - run: | - UNCRUSTIFY=~/uncrustify/uncrustify ./runformat - git diff - git diff | diff - /dev/null &> /dev/null diff --git a/.github/workflows/iwyu.yml b/.github/workflows/iwyu.yml deleted file mode 100644 index ed6971128f5..00000000000 --- a/.github/workflows/iwyu.yml +++ /dev/null @@ -1,267 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: include-what-you-use - -on: - schedule: - - cron: '0 0 * * 0' - workflow_dispatch: - -permissions: - contents: read - -jobs: - iwyu: - - strategy: - matrix: - # "opensuse/tumbleweed:latest" / "fedora:rawhide" / "debian:unstable" / "archlinux:latest" - include: - - os: ubuntu-22.04 - image: "fedora:rawhide" - stdlib: libstdc++ - - os: ubuntu-22.04 - image: "fedora:rawhide" - stdlib: libc++ - - os: macos-26 - image: "" - stdlib: libc++ # no libstdc++ on macOS - mapping_file_opt: '-Xiwyu --mapping_file=$(realpath ./macos.imp)' - fail-fast: false - - runs-on: ${{ matrix.os }} - if: ${{ github.repository_owner == 'danmar' }} - - container: - image: ${{ matrix.image }} - - env: - QT_VERSION: 6.10.0 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install missing software on debian/ubuntu - if: contains(matrix.image, 'debian') - run: | - apt-get update - apt-get install -y cmake clang make libpcre3-dev - apt-get install -y libgl-dev # fixes missing dependency for Qt in CMake - apt-get install -y iwyu - - - name: Install missing software on archlinux - if: contains(matrix.image, 'archlinux') - run: | - set -x - pacman -Sy - pacman -S cmake make clang pcre --noconfirm - pacman -S libglvnd --noconfirm # fixes missing dependency for Qt in CMake - pacman-key 
--init - pacman-key --recv-key 3056513887B78AEB --keyserver keyserver.ubuntu.com - pacman-key --lsign-key 3056513887B78AEB - pacman -U 'https://cdn-mirror.chaotic.cx/chaotic-aur/chaotic-keyring.pkg.tar.zst' 'https://cdn-mirror.chaotic.cx/chaotic-aur/chaotic-mirrorlist.pkg.tar.zst' --noconfirm - echo "[chaotic-aur]" >> /etc/pacman.conf - echo "Include = /etc/pacman.d/chaotic-mirrorlist" >> /etc/pacman.conf - pacman -Sy - pacman -S include-what-you-use --noconfirm - ln -s iwyu-tool /usr/sbin/iwyu_tool - - - name: Install missing software on Fedora - if: contains(matrix.image, 'fedora') - run: | - dnf install -y cmake clang pcre-devel - dnf install -y libglvnd-devel # fixes missing dependency for Qt in CMake - dnf install -y p7zip-plugins # required as fallback for py7zr in Qt installation - dnf install -y python3-pip # fixes missing pip module in jurplel/install-qt-action - dnf install -y python3-devel # fixes building of wheels for jurplel/install-qt-action - dnf install -y cairo-devel gtk3-devel libcurl-devel lua-devel openssl-devel python3-devel sqlite-devel boost-devel cppunit-devel libsigc++20-devel # for strict cfg checks - dnf install -y iwyu - ln -s iwyu_tool.py /usr/bin/iwyu_tool - - - name: Install missing software on Fedora (libc++) - if: contains(matrix.image, 'fedora') && matrix.stdlib == 'libc++' - run: | - dnf install -y libcxx-devel - - - name: Install missing software on OpenSUSE - if: contains(matrix.image, 'opensuse') - run: | - zypper install -y cmake clang pcre-devel - zypper install -y include-what-you-use-tools - ln -s iwyu_tool.py /usr/bin/iwyu_tool - - # coreutils contains "nproc" - - name: Install missing software on macOS - if: contains(matrix.os, 'macos') - run: | - brew install include-what-you-use pcre coreutils - # on Apple Silicon files are symlinked under /opt/homebrew/bin - ln -s /opt/homebrew/bin/iwyu_tool.py /usr/local/bin/iwyu_tool - - # Fails on OpenSUSE: - # Warning: Failed to restore: Tar failed with error: Unable to locate 
executable file: tar. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable. - # Also the shell is broken afterwards: - # OCI runtime exec failed: exec failed: unable to start container process: exec: "sh": executable file not found in $PATH: unknown - # - # On macos-26 we need to perform the Python setup because the default installation is managed externally managed - - name: Install Qt ${{ env.QT_VERSION }} - uses: jurplel/install-qt-action@v4 - with: - version: ${{ env.QT_VERSION }} - modules: 'qtcharts' - setup-python: ${{ contains(matrix.os, 'macos') }} - install-deps: false - cache: true - - - name: Generate macOS mappings - if: contains(matrix.os, 'macos') - run: | - set -x - - wget https://raw.githubusercontent.com/include-what-you-use/include-what-you-use/master/mapgen/iwyu-mapgen-apple-libc.py - python3 iwyu-mapgen-apple-libc.py $(xcrun --show-sdk-path)/usr/include > macos.imp - - - name: Prepare CMake - run: | - cmake -S . 
-B cmake.output -Werror=dev -DCMAKE_BUILD_TYPE=Release -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DENABLE_CHECK_INTERNAL=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCPPCHK_GLIBCXX_DEBUG=Off -DUSE_MATCHCOMPILER=Off -DEXTERNALS_AS_SYSTEM=On -DUSE_LIBCXX=${{ matrix.stdlib == 'libc++' }} - env: - CC: clang - CXX: clang++ - - # Fails on Debian: - # /__w/cppcheck/Qt/6.7.0/gcc_64/libexec/rcc: error while loading shared libraries: libglib-2.0.so.0: cannot open shared object file: No such file or directory - - name: Prepare CMake dependencies - run: | - # make sure the auto-generated GUI sources exist - make -C cmake.output autogen - # make sure the precompiled headers exist - #make -C cmake.output/cli cmake_pch.hxx.pch - #make -C cmake.output/gui cmake_pch.hxx.pch - #make -C cmake.output/lib cmake_pch.hxx.pch - #make -C cmake.output/test cmake_pch.hxx.pch - # make sure the auto-generated GUI dependencies exist - make -C cmake.output gui-build-deps - make -C cmake.output triage-build-ui-deps - - - name: iwyu_tool - run: | - iwyu_tool -p cmake.output -j $(nproc) -- -w -Xiwyu --max_line_length=1024 -Xiwyu --comment_style=long -Xiwyu --quoted_includes_first -Xiwyu --update_comments ${{ matrix.mapping_file_opt }} ${{ matrix.clang_inc }} > iwyu.log - - # TODO: run with all configurations - - name: test/cfg - if: matrix.stdlib == 'libstdc++' - run: | - # TODO: redirect to log - ./test/cfg/runtests.sh - env: - IWYU: include-what-you-use - IWYU_CLANG_INC: ${{ matrix.clang_inc }} - - - uses: actions/upload-artifact@v4 - if: success() || failure() - with: - name: Compilation Database (include-what-you-use - ${{ matrix.os }} ${{ matrix.stdlib }}) - path: ./cmake.output/compile_commands.json - - - uses: actions/upload-artifact@v4 - if: ${{ contains(matrix.os, 'macos') && (success() || failure()) }} - with: - name: macOS Mappings - path: | - ./iwyu-mapgen-apple-libc.py - ./macos.imp - - - uses: 
actions/upload-artifact@v4 - if: success() || failure() - with: - name: Logs (include-what-you-use - ${{ matrix.os }} ${{ matrix.stdlib }}) - path: ./*.log - - clang-include-cleaner: - - strategy: - matrix: - stdlib: [libstdc++, libc++] - include: - - stdlib: libstdc++ - use_libcxx: Off - - stdlib: libc++ - use_libcxx: On - fail-fast: false - - runs-on: ubuntu-22.04 - if: ${{ github.repository_owner == 'danmar' }} - - env: - QT_VERSION: 6.10.0 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install missing software - run: | - sudo apt-get update - sudo apt-get install -y cmake make libpcre3-dev - sudo apt-get install -y libgl-dev # missing dependency for using Qt in CMake - - - name: Install clang - run: | - sudo apt-get purge --auto-remove llvm python3-lldb-14 llvm-14 - wget https://apt.llvm.org/llvm.sh - chmod +x llvm.sh - sudo ./llvm.sh 22 - sudo apt-get install -y clang-tools-22 - - - name: Install libc++ - if: matrix.stdlib == 'libc++' - run: | - sudo apt-get install -y libc++-22-dev - - - name: Install Qt ${{ env.QT_VERSION }} - uses: jurplel/install-qt-action@v4 - with: - version: ${{ env.QT_VERSION }} - modules: 'qtcharts' - setup-python: 'false' - install-deps: false - cache: true - - - name: Prepare CMake - run: | - cmake -S . 
-B cmake.output -Werror=dev -DCMAKE_BUILD_TYPE=Release -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DENABLE_CHECK_INTERNAL=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCPPCHK_GLIBCXX_DEBUG=Off -DUSE_MATCHCOMPILER=Off -DEXTERNALS_AS_SYSTEM=On -DUSE_LIBCXX=${{ matrix.use_libcxx }} - env: - CC: clang-22 - CXX: clang++-22 - - - name: Prepare CMake dependencies - run: | - # make sure the auto-generated GUI sources exist - make -C cmake.output autogen - # make sure the precompiled headers exist - #make -C cmake.output/cli cmake_pch.hxx.pch - #make -C cmake.output/gui cmake_pch.hxx.pch - #make -C cmake.output/lib cmake_pch.hxx.pch - #make -C cmake.output/test cmake_pch.hxx.pch - # make sure the auto-generated GUI dependencies exist - make -C cmake.output gui-build-deps - - - name: clang-include-cleaner - run: | - # TODO: run multi-threaded - find $PWD/cli $PWD/lib $PWD/test $PWD/gui -maxdepth 1 -name "*.cpp" | xargs -t -n 1 clang-include-cleaner-22 --print=changes --extra-arg=-w --extra-arg=-stdlib=${{ matrix.stdlib }} -p cmake.output > clang-include-cleaner.log 2>&1 - - - uses: actions/upload-artifact@v4 - if: success() || failure() - with: - name: Compilation Database (clang-include-cleaner - ${{ matrix.stdlib }}) - path: ./cmake.output/compile_commands.json - - - uses: actions/upload-artifact@v4 - if: success() || failure() - with: - name: Logs (clang-include-cleaner - ${{ matrix.stdlib }}) - path: ./*.log diff --git a/.github/workflows/release-windows-mingw.yml b/.github/workflows/release-windows-mingw.yml deleted file mode 100644 index 3b9b836347f..00000000000 --- a/.github/workflows/release-windows-mingw.yml +++ /dev/null @@ -1,69 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: 
release-windows-mingw - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -defaults: - run: - shell: msys2 {0} - -jobs: - # TODO: add CMake build - build_mingw: - strategy: - matrix: - # only use the latest windows-* as the installed toolchain is identical - os: [windows-2025] - fail-fast: false - - runs-on: ${{ matrix.os }} - - timeout-minutes: 19 # max + 3*std of the last 7K runs - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Set up MSYS2 - uses: msys2/setup-msys2@v2 - with: - release: false # use pre-installed - # TODO: install mingw-w64-x86_64-make and use mingw32.make instead - currently fails with "Windows Subsystem for Linux has no installed distributions." - install: >- - mingw-w64-x86_64-lld - make - mingw-w64-x86_64-gcc - python - - - name: Build cppcheck - run: | - export PATH="/mingw64/lib/ccache/bin:$PATH" - # set RDYNAMIC to work around broken MinGW detection - make VERBOSE=1 RDYNAMIC=-lshlwapi -j$(nproc) CXXFLAGS=-O2 MATCHCOMPILER=yes cppcheck - - - name: Package - run: | - mkdir cppcheck-mingw - cp cppcheck.exe cppcheck-mingw/ - cp -R cfg platforms cppcheck-mingw/ - cp /mingw64/bin/libgcc_s_seh-1.dll cppcheck-mingw/ - cp /mingw64/bin/libstdc*.dll cppcheck-mingw/ - cp /mingw64/bin/libwinpthread-1.dll cppcheck-mingw/ - - - uses: actions/upload-artifact@v4 - with: - name: cppcheck-mingw - path: cppcheck-mingw diff --git a/.github/workflows/release-windows.yml b/.github/workflows/release-windows.yml deleted file mode 100644 index 607c8434baa..00000000000 --- a/.github/workflows/release-windows.yml +++ /dev/null @@ -1,215 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: release-windows - -on: - push: - tags: - - '2.*' - schedule: - - cron: '0 0 * * *' 
- workflow_dispatch: - -permissions: - contents: read - -defaults: - run: - shell: cmd - -jobs: - - build: - - runs-on: windows-2025 - if: ${{ github.repository_owner == 'danmar' }} - - env: - PYTHON_VERSION: 3.14 - # see https://www.pcre.org/original/changelog.txt - PCRE_VERSION: 8.45 - QT_VERSION: 6.10.0 - BOOST_MINOR_VERSION: 89 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - check-latest: true - - - name: Set up Visual Studio environment - uses: ilammy/msvc-dev-cmd@v1 - - - name: Download PCRE - run: | - curl -fsSL https://github.com/pfultz2/pcre/archive/refs/tags/%PCRE_VERSION%.zip -o pcre-%PCRE_VERSION%.zip || exit /b !errorlevel! - - - name: Install PCRE - run: | - @echo on - 7z x pcre-%PCRE_VERSION%.zip || exit /b !errorlevel! - cd pcre-%PCRE_VERSION% || exit /b !errorlevel! - git apply --ignore-space-change ..\externals\pcre.patch || exit /b !errorlevel! - cmake . -G "Visual Studio 17 2022" -A x64 -DPCRE_BUILD_PCRECPP=OFF -DPCRE_BUILD_PCREGREP=OFF -DPCRE_BUILD_TESTS=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DCMAKE_COMPILE_WARNING_AS_ERROR=On || exit /b !errorlevel! - msbuild -m PCRE.sln -p:Configuration=Release -p:Platform=x64 || exit /b !errorlevel! - copy pcre.h ..\externals || exit /b !errorlevel! - copy Release\pcre.lib ..\externals\pcre64.lib || exit /b !errorlevel! - - - name: Download Boost - run: | - curl -fsSL https://archives.boost.io/release/1.%BOOST_MINOR_VERSION%.0/source/boost_1_%BOOST_MINOR_VERSION%_0.7z -o boost.zip || exit /b !errorlevel! - - - name: Install Boost - run: | - @echo on - 7z x boost.zip boost_1_%BOOST_MINOR_VERSION%_0/boost || exit /b !errorlevel! - ren boost_1_%BOOST_MINOR_VERSION%_0 boost || exit /b !errorlevel! 
- - # available modules: https://github.com/miurahr/aqtinstall/blob/master/docs/getting_started.rst#installing-modules - # available tools: https://github.com/miurahr/aqtinstall/blob/master/docs/getting_started.rst#installing-tools - - name: Install Qt - uses: jurplel/install-qt-action@v4 - with: - version: ${{ env.QT_VERSION }} - modules: 'qtcharts' - setup-python: 'false' - tools: 'tools_opensslv3_x64' - - # TODO: build with multiple threads - - name: Build x64 release GUI - run: | - :: TODO: enable rules? - :: specify Release build so matchcompiler is used - cmake -S . -B build -Werror=dev -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=Off -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_ONLINE_HELP=On -DUSE_BOOST=ON -DBOOST_INCLUDEDIR=%GITHUB_WORKSPACE%\boost -DCMAKE_COMPILE_WARNING_AS_ERROR=On || exit /b !errorlevel! - cmake --build build --target cppcheck-gui --config Release || exit /b !errorlevel! - - # TODO: package PDBs - - name: Deploy app - run: | - windeployqt build\bin\Release || exit /b !errorlevel! - del build\bin\Release\cppcheck-gui.ilk || exit /b !errorlevel! - del build\bin\Release\cppcheck-gui.pdb || exit /b !errorlevel! - - - uses: actions/upload-artifact@v4 - with: - name: deploy - path: build\bin\Release - - - name: Matchcompiler - run: python tools\matchcompiler.py --write-dir lib || exit /b !errorlevel! - - # TODO: build with multiple threads - - name: Build CLI x64 release configuration using MSBuild - run: msbuild -m cppcheck.sln -t:cli -p:Configuration=Release-PCRE -p:Platform=x64 -p:HaveBoost=HAVE_BOOST -p:BoostInclude=%GITHUB_WORKSPACE%\boost || exit /b !errorlevel! - env: - _CL_: /WX - - - uses: actions/upload-artifact@v4 - with: - name: bin - path: bin - - - name: Install missing Python packages - run: | - pip install -U pyinstaller || exit /b !errorlevel! - - # TODO: include in installer? - - name: Compile misra.py executable - run: | - cd addons || exit /b !errorlevel! 
- pyinstaller --hidden-import xml --hidden-import xml.etree --hidden-import xml.etree.ElementTree misra.py || exit /b !errorlevel! - del *.spec || exit /b !errorlevel! - - # TODO: include in installer? - - name: Compile cppcheck-htmlreport executable - run: | - cd htmlreport || exit /b !errorlevel! - pyinstaller cppcheck-htmlreport || exit /b !errorlevel! - del *.spec || exit /b !errorlevel! - - # TODO: test the compiled Python files - - - name: Collect files - run: | - @echo on - move build\bin\Release win_installer\files || exit /b !errorlevel! - copy AUTHORS win_installer\files\authors.txt || exit /b !errorlevel! - copy win_installer\GPLv3.txt win_installer\files\ || exit /b !errorlevel! - copy externals\picojson\LICENSE win_installer\files\picojson-license.txt || exit /b !errorlevel! - copy externals\simplecpp\LICENSE win_installer\files\simplecpp-license.txt || exit /b !errorlevel! - copy externals\tinyxml2\LICENSE win_installer\files\tinyxml2-license.txt || exit /b !errorlevel! - copy addons\dist\misra\*.* win_installer\files\addons || exit /b !errorlevel! - copy bin\cppcheck.exe win_installer\files || exit /b !errorlevel! - copy bin\cppcheck-core.dll win_installer\files || exit /b !errorlevel! - :: mkdir win_installer\files\help || exit /b !errorlevel! - xcopy /s gui\help win_installer\files\help || exit /b !errorlevel! - copy gui\help\online-help.qhc win_installer\files\ || exit /b !errorlevel! - copy gui\help\online-help.qch win_installer\files\ || exit /b !errorlevel! - del win_installer\files\cfg\*.rng || exit /b !errorlevel! - del win_installer\files\platforms\*.rng || exit /b !errorlevel! - del win_installer\files\translations\*.qm || exit /b !errorlevel! - move build\gui\*.qm win_installer\files\translations || exit /b !errorlevel! - copy htmlreport\dist\cppcheck-htmlreport\*.* win_installer\files || exit /b !errorlevel! 
- :: copy libcrypto-3-x64.dll and libssl-3-x64.dll - copy %RUNNER_WORKSPACE%\Qt\Tools\OpenSSLv3\Win_x64\bin\lib*.dll win_installer\files || exit /b !errorlevel! - - - uses: actions/upload-artifact@v4 - with: - name: collect - path: win_installer\files - - - name: Build Installer - run: | - cd win_installer || exit /b !errorlevel! - :: Read ProductVersion - for /f "tokens=4 delims= " %%a in ('find "ProductVersion" productInfo.wxi') do set PRODUCTVER=%%a - :: Remove double quotes - set PRODUCTVER=%PRODUCTVER:"=% - @echo ProductVersion="%PRODUCTVER%" || exit /b !errorlevel! - msbuild -m cppcheck.wixproj -p:Platform=x64,ProductVersion=%PRODUCTVER%.${{ github.run_number }} || exit /b !errorlevel! - - - uses: actions/upload-artifact@v4 - with: - name: installer - path: win_installer/Build/ - - - name: Clean up deploy - run: | - @echo on - :: del win_installer\files\addons\*.dll || exit /b !errorlevel! - del win_installer\files\addons\*.doxyfile || exit /b !errorlevel! - del win_installer\files\addons\*.md || exit /b !errorlevel! - :: del win_installer\files\addons\*.pyd || exit /b !errorlevel! - :: del win_installer\files\addons\base_library.zip || exit /b !errorlevel! - rmdir /s /q win_installer\files\addons\test || exit /b !errorlevel! - rmdir /s /q win_installer\files\addons\doc || exit /b !errorlevel! - :: rmdir /s /q win_installer\files\bearer || exit /b !errorlevel! - rmdir /s /q win_installer\files\generic || exit /b !errorlevel! - rmdir /s /q win_installer\files\help || exit /b !errorlevel! - rmdir /s /q win_installer\files\iconengines || exit /b !errorlevel! - rmdir /s /q win_installer\files\imageformats || exit /b !errorlevel! - rmdir /s /q win_installer\files\networkinformation || exit /b !errorlevel! - :: rmdir /s /q win_installer\files\printsupport || exit /b !errorlevel! - rmdir /s /q win_installer\files\sqldrivers || exit /b !errorlevel! - rmdir /s /q win_installer\files\tls || exit /b !errorlevel! 
- ren win_installer\files\translations lang || exit /b !errorlevel! - del win_installer\files\d3dcompiler_47.dll || exit /b !errorlevel! - del win_installer\files\dxcompiler.dll || exit /b !errorlevel! - del win_installer\files\dxil.dll || exit /b !errorlevel! - del win_installer\files\dmake.exe || exit /b !errorlevel! - del win_installer\files\dmake.pdb || exit /b !errorlevel! - :: del win_installer\files\libEGL.dll || exit /b !errorlevel! - :: del win_installer\files\libGLESv2.dll || exit /b !errorlevel! - del win_installer\files\opengl32sw.dll || exit /b !errorlevel! - del win_installer\files\Qt6Svg.dll || exit /b !errorlevel! - del win_installer\files\vc_redist.x64.exe || exit /b !errorlevel! - - - uses: actions/upload-artifact@v4 - with: - name: portable - path: win_installer\files diff --git a/.github/workflows/selfcheck.yml b/.github/workflows/selfcheck.yml deleted file mode 100644 index ec52b15f939..00000000000 --- a/.github/workflows/selfcheck.yml +++ /dev/null @@ -1,204 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: selfcheck - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - build: - - runs-on: ubuntu-22.04 - - env: - QT_VERSION: 6.10.0 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ runner.os }} - - - name: Install missing software - run: | - sudo apt-get update - sudo apt-get install clang-14 - sudo apt-get install libboost-container-dev - sudo apt-get install valgrind - sudo apt-get install -y libgl-dev # fixes missing dependency for Qt in CMake - - - name: Install Qt ${{ env.QT_VERSION }} - uses: jurplel/install-qt-action@v4 - with: - version: ${{ 
env.QT_VERSION }} - modules: 'qtcharts' - setup-python: 'false' - install-deps: false - cache: true - - # TODO: cache this - perform same build as for the other self check - - name: Self check (build) - run: | - export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - # valgrind cannot handle DWARF 5 yet so force version 4 - # work around performance regression with -inline-deferral - make -j$(nproc) CXXOPTS="-Werror -O2 -gdwarf-4" CPPOPTS="-DHAVE_BOOST -mllvm -inline-deferral" MATCHCOMPILER=yes CPPCHK_GLIBCXX_DEBUG= - env: - CC: clang-14 - CXX: clang++-14 - - # unusedFunction - start - - name: CMake - run: | - cmake -S . -B cmake.output -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=ON -DWITH_QCHART=ON -DBUILD_TRIAGE=On -DENABLE_CHECK_INTERNAL=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On -DCPPCHK_GLIBCXX_DEBUG=Off - - - name: Generate dependencies - run: | - # make sure auto-generated GUI files exist - make -C cmake.output autogen - # make sure the precompiled headers exist - make -C cmake.output lib/CMakeFiles/cppcheck-core.dir/cmake_pch.hxx.cxx - make -C cmake.output test/CMakeFiles/testrunner.dir/cmake_pch.hxx.cxx - # make sure the auto-generated GUI dependencies exist - make -C cmake.output gui-build-deps - - - name: Self check (unusedFunction) - if: false # TODO: fails with preprocessorErrorDirective - see #10667 - run: | - ./cppcheck -q --template=selfcheck --error-exitcode=1 --library=cppcheck-lib --library=qt -D__CPPCHECK__ -D__GNUC__ -DQT_VERSION=0x060000 -DQ_MOC_OUTPUT_REVISION=69 -DQT_CHARTS_LIB -DQT_MOC_HAS_STRINGDATA --enable=unusedFunction,information --exception-handling -rp=. 
--project=cmake.output/compile_commands.json --suppressions-list=.selfcheck_unused_suppressions --inline-suppr - env: - DISABLE_VALUEFLOW: 1 - UNUSEDFUNCTION_ONLY: 1 - # unusedFunction - end - - # the following steps are duplicated from above since setting up the build node in a parallel step takes longer than the actual steps - - # unusedFunction notest - start - - name: CMake (no test) - run: | - cmake -S . -B cmake.output.notest -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=Off -DBUILD_GUI=ON -DBUILD_TRIAGE=On -DWITH_QCHART=ON -DENABLE_CHECK_INTERNAL=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On -DCPPCHK_GLIBCXX_DEBUG=Off - - - name: Generate dependencies (no test) - run: | - # make sure auto-generated GUI files exist - make -C cmake.output.notest autogen - # make sure the precompiled headers exist - make -C cmake.output.notest lib/CMakeFiles/cppcheck-core.dir/cmake_pch.hxx.cxx - # make sure the auto-generated GUI dependencies exist - make -C cmake.output.notest gui-build-deps - - - name: Self check (unusedFunction / no test) - run: | - ./cppcheck -q --template=selfcheck --error-exitcode=1 --library=cppcheck-lib --library=qt -D__CPPCHECK__ -D__GNUC__ -DQT_VERSION=0x060000 -DQ_MOC_OUTPUT_REVISION=69 -DQT_CHARTS_LIB -DQT_MOC_HAS_STRINGDATA --enable=unusedFunction,information --exception-handling -rp=. --project=cmake.output.notest/compile_commands.json --suppressions-list=.selfcheck_unused_suppressions --inline-suppr - env: - DISABLE_VALUEFLOW: 1 - UNUSEDFUNCTION_ONLY: 1 - # unusedFunction notest - end - - # unusedFunction notest nogui - start - - name: CMake (no test / no gui) - run: | - cmake -S . 
-B cmake.output.notest_nogui -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=Off -DENABLE_CHECK_INTERNAL=On -DCPPCHK_GLIBCXX_DEBUG=Off - - - name: Generate dependencies (no test / no gui) - run: | - # make sure the precompiled headers exist - make -C cmake.output.notest_nogui lib/CMakeFiles/cppcheck-core.dir/cmake_pch.hxx.cxx - - - name: Self check (unusedFunction / no test / no gui) - run: | - supprs="--suppress=unusedFunction:lib/errorlogger.h:197 --suppress=unusedFunction:lib/importproject.cpp:1531 --suppress=unusedFunction:lib/importproject.cpp:1555" - ./cppcheck -q --template=selfcheck --error-exitcode=1 --library=cppcheck-lib -D__CPPCHECK__ -D__GNUC__ --enable=unusedFunction,information --exception-handling -rp=. --project=cmake.output.notest_nogui/compile_commands.json --suppressions-list=.selfcheck_unused_suppressions --inline-suppr $supprs - env: - DISABLE_VALUEFLOW: 1 - UNUSEDFUNCTION_ONLY: 1 - # unusedFunction notest nogui - end - - # unusedFunction notest nocli - start - - name: CMake (no test / no cli) - run: | - cmake -S . 
-B cmake.output.notest_nocli -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=Off -DBUILD_CLI=Off -DBUILD_GUI=ON -DWITH_QCHART=ON -DBUILD_TRIAGE=On -DENABLE_CHECK_INTERNAL=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On -DCPPCHK_GLIBCXX_DEBUG=Off - - - name: Generate dependencies (no test / no cli) - run: | - # make sure auto-generated GUI files exist - make -C cmake.output.notest_nocli autogen - # make sure the precompiled headers exist - make -C cmake.output.notest_nocli lib/CMakeFiles/cppcheck-core.dir/cmake_pch.hxx.cxx - # make sure the auto-generated GUI dependencies exist - make -C cmake.output.notest_nocli gui-build-deps - - - name: Self check (unusedFunction / no test / no cli) - if: false # TODO: the findings are currently too intrusive - run: | - ./cppcheck -q --template=selfcheck --error-exitcode=1 --library=cppcheck-lib --library=qt -D__CPPCHECK__ -D__GNUC__ -DQT_VERSION=0x060000 -DQ_MOC_OUTPUT_REVISION=69 -DQT_CHARTS_LIB -DQT_MOC_HAS_STRINGDATA --enable=unusedFunction,information --exception-handling -rp=. --project=cmake.output.notest_nocli/compile_commands.json --suppressions-list=.selfcheck_unused_suppressions --inline-suppr - env: - DISABLE_VALUEFLOW: 1 - UNUSEDFUNCTION_ONLY: 1 - # unusedFunction notest nocli - end - - # unusedFunction notest nocli nogui - start - - name: CMake (no test / no cli / no gui) - run: | - cmake -S . 
-B cmake.output.notest_nocli_nogui -Werror=dev -DHAVE_RULES=On -DBUILD_TESTING=Off -DBUILD_CLI=Off -DBUILD_GUI=Off -DENABLE_CHECK_INTERNAL=On -DCPPCHK_GLIBCXX_DEBUG=Off - - - name: Generate dependencies (no test / no cli / no gui) - run: | - # make sure the precompiled headers exist - make -C cmake.output.notest_nocli_nogui lib/CMakeFiles/cppcheck-core.dir/cmake_pch.hxx.cxx - - - name: Self check (unusedFunction / no test / no cli / no gui) - if: false # TODO: the findings are currently too intrusive - run: | - ./cppcheck -q --template=selfcheck --error-exitcode=1 --library=cppcheck-lib --library=qt -D__CPPCHECK__ -D__GNUC__ --enable=unusedFunction,information --exception-handling -rp=. --project=cmake.output.notest_nocli_nogui/compile_commands.json --suppressions-list=.selfcheck_unused_suppressions --inline-suppr - env: - DISABLE_VALUEFLOW: 1 - UNUSEDFUNCTION_ONLY: 1 - # unusedFunction notest nocli nogui - end - - - name: Fetch corpus - run: | - wget https://github.com/danmar/cppcheck/archive/refs/tags/2.8.tar.gz - tar xvf 2.8.tar.gz - - - name: CMake (corpus / no test) - run: | - cmake -S cppcheck-2.8 -B cmake.output.corpus -DHAVE_RULES=On -DBUILD_TESTING=Off -DBUILD_GUI=ON -DUSE_QT6=On -DWITH_QCHART=ON -DENABLE_CHECK_INTERNAL=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On -DCPPCHK_GLIBCXX_DEBUG=Off -DCMAKE_POLICY_VERSION_MINIMUM=3.5 - - - name: Generate dependencies (corpus) - run: | - # make sure auto-generated GUI files exist - make -C cmake.output.corpus autogen - # make sure the precompiled headers exist - make -C cmake.output.corpus lib/CMakeFiles/lib_objs.dir/cmake_pch.hxx.cxx - # make sure the auto-generated GUI dependencies exist - make -C cmake.output.corpus gui-build-deps - - - name: Self check (unusedFunction / corpus / no test / callgrind) - run: | - # TODO: fix -rp so the suppressions actually work - valgrind --tool=callgrind ./cppcheck --template=selfcheck --error-exitcode=0 --library=cppcheck-lib --library=qt -D__GNUC__ 
-DQT_VERSION=0x060000 -DQ_MOC_OUTPUT_REVISION=69 -DQT_CHARTS_LIB -DQT_MOC_HAS_STRINGDATA --enable=unusedFunction,information --exception-handling -rp=. --project=cmake.output.corpus/compile_commands.json --suppressions-list=.selfcheck_unused_suppressions --inline-suppr 2>callgrind.log || (cat callgrind.log && false) - cat callgrind.log - callgrind_annotate --auto=no > callgrind.annotated.log - head -50 callgrind.annotated.log - env: - DISABLE_VALUEFLOW: 1 - - - uses: actions/upload-artifact@v4 - with: - name: Callgrind Output - path: ./callgrind.* diff --git a/.github/workflows/tsan.yml b/.github/workflows/tsan.yml deleted file mode 100644 index 72b1764d11d..00000000000 --- a/.github/workflows/tsan.yml +++ /dev/null @@ -1,146 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: thread sanitizer - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - build: - - runs-on: ubuntu-22.04 - - env: - QT_VERSION: 6.10.0 - TSAN_OPTIONS: halt_on_error=1 - # TODO: figure out why there are cache misses with PCH enabled - CCACHE_SLOPPINESS: pch_defines,time_macros - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.14' - check-latest: true - - - name: Install missing software on ubuntu - run: | - sudo apt-get update - sudo apt-get install -y cmake make libpcre3-dev libboost-container-dev libxml2-utils - sudo apt-get install -y libcups2-dev # required for Qt6PrintSupport in CMake since Qt 6.7.3 - - - name: Install clang - run: | - sudo apt-get purge --auto-remove llvm 
python3-lldb-14 llvm-14 - wget https://apt.llvm.org/llvm.sh - chmod +x llvm.sh - sudo ./llvm.sh 22 - - - name: Install Qt ${{ env.QT_VERSION }} - uses: jurplel/install-qt-action@v4 - with: - version: ${{ env.QT_VERSION }} - modules: 'qtcharts' - setup-python: 'false' - cache: true - - - name: Install missing Python packages - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pytest - python3 -m pip install pytest-timeout - python3 -m pip install pytest-xdist - python3 -m pip install psutil - - - name: CMake - run: | - cmake -S . -B cmake.output -Werror=dev -DCMAKE_BUILD_TYPE=RelWithDebInfo -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DUSE_MATCHCOMPILER=Verify -DANALYZE_THREAD=On -DENABLE_CHECK_INTERNAL=On -DUSE_BOOST=On -DCPPCHK_GLIBCXX_DEBUG=Off -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=Off -DDISABLE_DMAKE=On -DFILESDIR= -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache - env: - CC: clang-22 - CXX: clang++-22 - - - name: Build cppcheck - run: | - cmake --build cmake.output --target cppcheck -- -j $(nproc) - - - name: Build test - run: | - cmake --build cmake.output --target testrunner -- -j $(nproc) - - - name: Build GUI tests - run: | - cmake --build cmake.output --target gui-tests -- -j $(nproc) - - - name: Run tests - run: ./cmake.output/bin/testrunner - - - name: Run cfg tests - run: | - cmake --build cmake.output --target checkcfg -- -j $(nproc) - - - name: Run CTest - if: false # TODO: test-filelist fails with data race in pthread_cond_destroy - run: | - ctest --test-dir cmake.output --output-on-failure -j$(nproc) - - - name: Run test/cli - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_EXECUTOR: thread - - - name: Run test/cli (-j2) - run: | - pwd=$(pwd) - 
TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_J: 2 - - - name: Run test/cli (--clang) - if: false - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_CLANG: clang - - - name: Run test/cli (--cppcheck-build-dir) - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_BUILDDIR: injected - - - name: Generate dependencies - if: false - run: | - # make sure auto-generated GUI files exist - make -C cmake.output autogen - make -C cmake.output gui-build-deps triage-build-ui-deps - - # TODO: disabled for now as it takes around 40 minutes to finish - # set --error-exitcode=0 so we only fail on sanitizer issues - since it uses threads for execution it will exit the whole process on the first issue - - name: Self check - if: false - run: | - ./selfcheck_san.sh ./cmake.output "--executor=thread --error-exitcode=0" diff --git a/.github/workflows/ubsan.yml b/.github/workflows/ubsan.yml deleted file mode 100644 index 5afc5feb1f9..00000000000 --- a/.github/workflows/ubsan.yml +++ /dev/null @@ -1,140 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: undefined behaviour sanitizers - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - build: - - runs-on: ubuntu-22.04 - - env: - QT_VERSION: 6.10.0 - UBSAN_OPTIONS: print_stacktrace=1:halt_on_error=1:report_error_type=1 - # TODO: figure out why there are cache misses with PCH enabled - CCACHE_SLOPPINESS: pch_defines,time_macros - - steps: - - uses: 
actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ github.job }}-${{ matrix.os }} - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.14' - check-latest: true - - - name: Install missing software on ubuntu - run: | - sudo apt-get update - sudo apt-get install -y cmake make libpcre3-dev libboost-container-dev libxml2-utils - sudo apt-get install -y libcups2-dev # required for Qt6PrintSupport in CMake since Qt 6.7.3 - - - name: Install clang - run: | - sudo apt-get purge --auto-remove llvm python3-lldb-14 llvm-14 - wget https://apt.llvm.org/llvm.sh - chmod +x llvm.sh - sudo ./llvm.sh 22 - - - name: Install Qt ${{ env.QT_VERSION }} - uses: jurplel/install-qt-action@v4 - with: - version: ${{ env.QT_VERSION }} - modules: 'qtcharts' - setup-python: 'false' - cache: true - - - name: Install missing Python packages - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pytest - python3 -m pip install pytest-timeout - python3 -m pip install pytest-xdist - python3 -m pip install psutil - - - name: CMake - run: | - cmake -S . 
-B cmake.output -Werror=dev -DCMAKE_BUILD_TYPE=RelWithDebInfo -DHAVE_RULES=On -DBUILD_TESTING=On -DBUILD_GUI=On -DWITH_QCHART=On -DBUILD_TRIAGE=On -DUSE_MATCHCOMPILER=Verify -DANALYZE_UNDEFINED=On -DENABLE_CHECK_INTERNAL=On -DUSE_BOOST=On -DCPPCHK_GLIBCXX_DEBUG=Off -DCMAKE_DISABLE_PRECOMPILE_HEADERS=On -DCMAKE_GLOBAL_AUTOGEN_TARGET=On -DDISABLE_DMAKE=On -DFILESDIR= -DCMAKE_COMPILE_WARNING_AS_ERROR=On -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache - env: - CC: clang-22 - CXX: clang++-22 - - - name: Build cppcheck - run: | - cmake --build cmake.output --target cppcheck -- -j $(nproc) - - - name: Build test - run: | - cmake --build cmake.output --target testrunner -- -j $(nproc) - - - name: Build GUI tests - run: | - cmake --build cmake.output --target gui-tests -- -j $(nproc) - - - name: Run tests - run: ./cmake.output/bin/testrunner - - - name: Run cfg tests - run: | - cmake --build cmake.output --target checkcfg -- -j $(nproc) - - - name: Run CTest - run: | - ctest --test-dir cmake.output --output-on-failure -j$(nproc) - - - name: Run test/cli - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - - - name: Run test/cli (-j2) - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_J: 2 - - - name: Run test/cli (--clang) - if: false - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_CLANG: clang - - - name: Run test/cli (--cppcheck-build-dir) - run: | - pwd=$(pwd) - TEST_CPPCHECK_EXE_LOOKUP_PATH="$pwd/cmake.output" python3 -m pytest -Werror --strict-markers -vv -n auto test/cli - env: - TEST_CPPCHECK_INJECT_BUILDDIR: injected - - - name: Generate dependencies - run: | - # make sure auto-generated GUI files exist - make -C 
cmake.output autogen - make -C cmake.output gui-build-deps triage-build-ui-deps - - # TODO: only fail the step on sanitizer issues - since we use processes it will only fail the underlying process which will result in an cppcheckError - - name: Self check - run: | - ./selfcheck_san.sh ./cmake.output diff --git a/.github/workflows/valgrind.yml b/.github/workflows/valgrind.yml deleted file mode 100644 index e0192351906..00000000000 --- a/.github/workflows/valgrind.yml +++ /dev/null @@ -1,65 +0,0 @@ -# Syntax reference https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions -# Environment reference https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners -name: valgrind - -on: - push: - branches: - - 'main' - - 'releases/**' - - '2.*' - tags: - - '2.*' - pull_request: - -permissions: - contents: read - -jobs: - build: - - runs-on: ubuntu-22.04 - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ${{ github.workflow }}-${{ runner.os }} - - - name: Install missing software - run: | - sudo apt-get update - sudo apt-get install libxml2-utils - sudo apt-get install valgrind - sudo apt-get install libboost-container-dev - sudo apt-get install debuginfod - - - name: Build cppcheck - run: | - export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - make -j$(nproc) CXXOPTS="-Werror -O1 -g" CPPOPTS="-DHAVE_BOOST" HAVE_RULES=yes MATCHCOMPILER=yes CPPCHK_GLIBCXX_DEBUG= - - - name: Build test - run: | - export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" - make -j$(nproc) CXXOPTS="-Werror -O1 -g" CPPOPTS="-DHAVE_BOOST" HAVE_RULES=yes MATCHCOMPILER=yes CPPCHK_GLIBCXX_DEBUG= testrunner - - - name: Run valgrind - run: | - ec=0 - valgrind --error-limit=yes --leak-check=full --num-callers=50 --show-reachable=yes --track-origins=yes --suppressions=valgrind/testrunner.supp --gen-suppressions=all -s --log-fd=9 
--error-exitcode=42 ./testrunner TestGarbage TestOther TestSimplifyTemplate TestRegEx 9>memcheck.log || ec=1 - cat memcheck.log - exit $ec - # TODO: debuginfod.ubuntu.com is currently not responding to any requests causing it to run into a 40(!) minute timeout - #env: - # DEBUGINFOD_URLS: https://debuginfod.ubuntu.com - - - uses: actions/upload-artifact@v4 - if: success() || failure() - with: - name: Logs - path: ./*.log From b78e3e6f1e5d6a8186aaf93d489ff38008bb0f65 Mon Sep 17 00:00:00 2001 From: firewave Date: Mon, 30 Mar 2026 12:59:39 +0200 Subject: [PATCH 2/3] scriptcheck: run `pyupgrade` [skip ci] MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Tomasz Kłoczko --- .github/workflows/scriptcheck.yml | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/.github/workflows/scriptcheck.yml b/.github/workflows/scriptcheck.yml index 844b1d5c2f3..046612fb292 100644 --- a/.github/workflows/scriptcheck.yml +++ b/.github/workflows/scriptcheck.yml @@ -80,14 +80,9 @@ jobs: - name: Install missing software on ubuntu (Python 3) run: | - # shellcheck cannot be installed via pip - # ERROR: Could not find a version that satisfies the requirement shellcheck (from versions: none) - # ERROR: No matching distribution found for shellcheck - sudo apt-get install shellcheck python -m pip install pip --upgrade python -m pip install natsort python -m pip install pexpect - python -m pip install 'pylint<4.1.0' python -m pip install unittest2 python -m pip install pytest python -m pip install pytest-xdist @@ -96,6 +91,16 @@ jobs: python -m pip install psutil python -m pip install setuptools + - name: Install missing software on ubuntu (Python 3 / latest) + if: matrix.python-latest + run: | + # shellcheck cannot be installed via pip + # ERROR: Could not find a version that satisfies the requirement shellcheck (from versions: none) + # ERROR: No matching distribution found for shellcheck + sudo apt-get 
install shellcheck + python -m pip install 'pylint<4.1.0' + python -m pip install pyupgrade + - name: run Shellcheck if: matrix.python-latest run: | @@ -107,6 +112,12 @@ jobs: shopt -s globstar pylint --jobs $(nproc) --py-version 3.7 addons/**/*.py htmlreport/cppcheck-htmlreport htmlreport/**/*.py test/**/*.py tools/**/*.py + - name: run pyupgrade + if: matrix.python-latest + run: | + pyupgrade --py38-plus --exit-zero-even-if-changed addons/**/*.py htmlreport/cppcheck-htmlreport htmlreport/**/*.py test/**/*.py tools/**/*.py + git diff --exit-code + - name: check .json files if: matrix.python-latest run: | From c139cd71173be628919378e79ee02f75d1e01cde Mon Sep 17 00:00:00 2001 From: firewave Date: Mon, 30 Mar 2026 13:06:35 +0200 Subject: [PATCH 3/3] pyugrade [skip ci] --- addons/cppcheck.py | 3 +- addons/cppcheckdata.py | 38 +- addons/misra.py | 31 +- addons/misra_9.py | 2 +- addons/naming.py | 4 +- addons/namingng.py | 4 +- addons/threadsafety.py | 2 +- addons/y2038.py | 3 +- htmlreport/cppcheck-htmlreport | 35 +- test/cli/clang-import_test.py | 23 +- test/cli/dumpfile_test.py | 14 +- test/cli/helloworld_test.py | 11 +- test/cli/inline-suppress-polyspace_test.py | 9 +- test/cli/inline-suppress_test.py | 63 +- test/cli/lookup_test.py | 348 +++++----- test/cli/metrics_test.py | 2 +- test/cli/more-projects_test.py | 230 +++---- test/cli/other_test.py | 721 ++++++++++----------- test/cli/performance_test.py | 23 +- test/cli/premium_test.py | 29 +- test/cli/proj2_test.py | 3 +- test/cli/project_test.py | 8 +- test/cli/qml_test.py | 9 +- test/cli/rules_test.py | 50 +- test/cli/sarif_test.py | 6 +- test/cli/suppress-syntaxError_test.py | 1 - test/cli/testutils.py | 4 +- test/cli/unused_function_test.py | 35 +- test/cli/whole-program_test.py | 62 +- test/scripts/extracttests.py | 4 +- test/seh/test-sehwrapper.py | 2 +- test/signal/test-signalhandler.py | 2 +- test/signal/test-stacktrace.py | 2 +- tools/MT-Unsafe.py | 4 +- tools/bisect/bisect_common.py | 6 +- 
tools/bisect/bisect_hang.py | 8 +- tools/bisect/bisect_res.py | 2 +- tools/compare-valueflow-options.py | 10 +- tools/compare_ast_symdb.py | 14 +- tools/creduce.py | 8 +- tools/daca2-download.py | 4 +- tools/daca2-getpackages.py | 2 +- tools/donate-cpu-server.py | 128 ++-- tools/donate-cpu.py | 32 +- tools/donate_cpu_lib.py | 52 +- tools/get_checkers.py | 12 +- tools/listErrorsWithoutCWE.py | 1 - tools/matchcompiler.py | 8 +- tools/parse-glibc.py | 6 +- tools/reduce.py | 8 +- tools/test-my-pr.py | 8 +- tools/trac-keywords.py | 1 - tools/triage_py/triage_version.py | 24 +- 53 files changed, 1052 insertions(+), 1069 deletions(-) diff --git a/addons/cppcheck.py b/addons/cppcheck.py index 735269d32c5..ed780d24d48 100644 --- a/addons/cppcheck.py +++ b/addons/cppcheck.py @@ -1,4 +1,3 @@ - import cppcheckdata import sys import os @@ -35,7 +34,7 @@ def runcheckers(): for cfg in data.iterconfigurations(): if not args.quiet: - print('Checking %s, config %s...' % (dumpfile, cfg.name)) + print('Checking {}, config {}...'.format(dumpfile, cfg.name)) for c in __checkers__: __errorid__ = c.__name__ c(cfg, data) diff --git a/addons/cppcheckdata.py b/addons/cppcheckdata.py index 06028e2005c..22ffcab5e8a 100755 --- a/addons/cppcheckdata.py +++ b/addons/cppcheckdata.py @@ -28,7 +28,7 @@ 'extra': ''} sys.stdout.write(json.dumps(msg) + '\n') else: - sys.stderr.write('%s [%s]\n' % (message, error_id)) + sys.stderr.write('{} [{}]\n'.format(message, error_id)) sys.exit(1) from xml.etree import ElementTree @@ -91,7 +91,7 @@ def __repr__(self): attrs = ["str", "file", "linenr"] return "{}({})".format( "Directive", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) class MacroUsage: @@ -127,7 +127,7 @@ def __repr__(self): attrs = ["name", "file", "linenr", "column", "usefile", "useline", "usecolumn", "isKnownValue"] return "{}({})".format( "MacroUsage", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) 
for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @@ -156,7 +156,7 @@ def __repr__(self): attrs = ["file", "linenr", "column", "E", "result"] return "{}({})".format( "PreprocessorIfCondition", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) class ValueType: @@ -203,7 +203,7 @@ def __repr__(self): "constness", "pointer"] return "{}({})".format( "ValueType", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @@ -450,7 +450,7 @@ def __repr__(self): "linenr", "column"] return "{}({})".format( "Token", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) def setId(self, IdMap): @@ -608,7 +608,7 @@ def __repr__(self): "nestedInId", "nestedIn", "type", "definedType", "isExecutable", "functions"] return "{}({})".format( "Scope", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) def setId(self, IdMap): @@ -688,7 +688,7 @@ def __repr__(self): "isAttributeNoreturn", "overriddenFunction", "nestedIn", "argumentId"] return "{}({})".format( "Function", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) def setId(self, IdMap): @@ -783,7 +783,7 @@ def __repr__(self): "isReference", "isStatic", "isVolatile", "constness"] return "{}({})".format( "Variable", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) def setId(self, IdMap): @@ -911,7 +911,7 @@ def __repr__(self): "bufferSize", "containerSize", "condition", "valueKind"] return "{}({})".format( "Value", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", 
".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @@ -938,7 +938,7 @@ def __repr__(self): attrs = ["Id", "values"] return "{}({})".format( "ValueFlow", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @@ -978,7 +978,7 @@ def __repr__(self): attrs = ["errorId", "fileName", "lineNumber", "symbolName", "lineBegin", "lineEnd","suppressionType"] return "{}({})".format( "Suppression", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) def isMatch(self, file, line, message, errorId): @@ -1144,7 +1144,7 @@ def __repr__(self): "long_bit", "long_long_bit", "pointer_bit"] return "{}({})".format( "Platform", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @@ -1176,7 +1176,7 @@ def __repr__(self): attrs = ["c", "cpp", "posix"] return "{}({})".format( "Standards", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @@ -1421,7 +1421,7 @@ def __repr__(self): attrs = ["configurations", "platform"] return "{}({})".format( "CppcheckData", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @@ -1522,7 +1522,7 @@ def get_files(args): """Return dump_files, ctu_info_files""" all_files = args.dumpfile if args.file_list: - with open(args.file_list, 'rt') as f: + with open(args.file_list) as f: for line in f.readlines(): all_files.append(line.rstrip()) dump_files = [] @@ -1686,12 +1686,12 @@ def reportError(location, severity, message, addon, errorId, extra='', columnOve 'extra': extra} sys.stdout.write(json.dumps(msg) + '\n') else: - if is_suppressed(location, message, '%s-%s' % (addon, errorId)): + if is_suppressed(location, message, '{}-{}'.format(addon, errorId)): 
return loc = '[%s:%i]' % (location.file, location.linenr) if len(extra) > 0: message += ' (' + extra + ')' - sys.stderr.write('%s (%s) %s [%s-%s]\n' % (loc, severity, message, addon, errorId)) + sys.stderr.write('{} ({}) {} [{}-{}]\n'.format(loc, severity, message, addon, errorId)) global EXIT_CODE EXIT_CODE = 1 @@ -1702,7 +1702,7 @@ def reportSummary(dumpfile, summary_type, summary_data): else: # dumpfile ends with ".dump" ctu_info_file = dumpfile[:-4] + "ctu-info" - with open(ctu_info_file, 'at') as f: + with open(ctu_info_file, 'a') as f: f.write(json.dumps(msg) + '\n') diff --git a/addons/misra.py b/addons/misra.py index 45332d58681..25ed36ee96d 100755 --- a/addons/misra.py +++ b/addons/misra.py @@ -13,7 +13,6 @@ # # Total number of rules: 143 -from __future__ import print_function import cppcheckdata import itertools @@ -694,9 +693,9 @@ def get_essential_type_from_value(value, is_signed): range_max = (1 << bits) - 1 sign = 'signed' if is_signed else 'unsigned' if is_signed and value < 0 and value >= range_min: - return '%s %s' % (sign, t) + return '{} {}'.format(sign, t) if value >= 0 and value <= range_max: - return '%s %s' % (sign, t) + return '{} {}'.format(sign, t) return None def getEssentialType(expr): @@ -707,7 +706,7 @@ def getEssentialType(expr): if expr.str[0] == "'" and expr.str[-1] == "'": if len(expr.str) == 3 or (len(expr.str) == 4 and expr.str[1] == '\\'): return 'char' - return '%s %s' % (expr.valueType.sign, expr.valueType.type) + return '{} {}'.format(expr.valueType.sign, expr.valueType.type) if expr.variable or isCast(expr): typeToken = expr.variable.typeStartToken if expr.variable else expr.next @@ -723,7 +722,7 @@ def getEssentialType(expr): if expr.valueType.isIntegral(): if (expr.valueType.sign is None) and expr.valueType.type == 'char': return 'char' - return '%s %s' % (expr.valueType.sign, expr.valueType.type) + return '{} {}'.format(expr.valueType.sign, expr.valueType.type) elif expr.isNumber: # Appendix D, D.6 The essential type of 
literal constants @@ -734,7 +733,7 @@ def getEssentialType(expr): return expr.valueType.type if expr.valueType.isIntegral(): if expr.valueType.type != 'int': - return '%s %s' % (expr.valueType.sign, expr.valueType.type) + return '{} {}'.format(expr.valueType.sign, expr.valueType.type) return get_essential_type_from_value(expr.getKnownIntValue(), expr.valueType.sign == 'signed') elif expr.str in ('<', '<=', '>=', '>', '==', '!=', '&&', '||', '!'): @@ -1334,7 +1333,7 @@ def __repr__(self): attrs = ["name", "args", "expansionList"] return "{}({})".format( "Define", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @@ -1511,7 +1510,7 @@ def __repr__(self): attrs = ["verify", "quiet", "show_summary", "verify"] return "{}({})".format( "MisraSettings", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @@ -1573,7 +1572,7 @@ def __repr__(self): "suppressionStats", "stdversion", "severity"] return "{}({})".format( "MisraChecker", - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) def get_num_significant_naming_chars(self, cfg): @@ -3518,7 +3517,7 @@ def misra_config(self, data): has_var = True continue unknown_constant = True - self.report_config_error(tok, 'Unknown constant {}, please review configuration'.format(t.str)) + self.report_config_error(tok, f'Unknown constant {t.str}, please review configuration') if t.isArithmeticalOp: tokens += [t.astOperand1, t.astOperand2] if not unknown_constant and not has_var: @@ -4505,7 +4504,7 @@ def reportError(self, location, num1, num2): if self.severity: cppcheck_severity = self.severity - this_violation = '{}-{}-{}-{}'.format(location.file, location.linenr, location.column, ruleNum) + this_violation = f'{location.file}-{location.linenr}-{location.column}-{ruleNum}' # If this is new 
violation then record it and show it. If not then # skip it since it has already been displayed. @@ -4531,7 +4530,7 @@ def loadRuleTexts(self, filename): encodings = ['ascii', 'utf-8', 'windows-1250', 'windows-1252'] for e in encodings: try: - file_stream = open(filename, 'r', encoding=e) + file_stream = open(filename, encoding=e) file_stream.readlines() file_stream.seek(0) except UnicodeDecodeError: @@ -4544,10 +4543,10 @@ def loadRuleTexts(self, filename): print('If you know the codec please report it to the developers so the list can be enhanced.') print('Trying with default codec now and ignoring errors if possible ...') try: - file_stream = open(filename, 'rt', errors='ignore') + file_stream = open(filename, errors='ignore') except TypeError: # Python 2 does not support the errors parameter - file_stream = open(filename, 'rt') + file_stream = open(filename) rule = None rule_line_number = 0 @@ -4697,7 +4696,7 @@ def fillVerifyExpected(verify_expected, tok): for cfgNumber, cfg in enumerate(data.iterconfigurations()): if not self.settings.quiet: - self.printStatus('Checking %s, config %s...' 
% (dumpfile, cfg.name)) + self.printStatus('Checking {}, config {}...'.format(dumpfile, cfg.name)) self.executeCheck(102, self.misra_1_2, cfg) if not path_premium_addon: @@ -4880,7 +4879,7 @@ def is_different_file(loc1, loc2): try: for filename in ctu_info_files: - for line in open(filename, 'rt'): + for line in open(filename): s = self.read_ctu_info_line(line) if s is None: continue diff --git a/addons/misra_9.py b/addons/misra_9.py index 1ca1b7ce3e0..4bbd8d52746 100644 --- a/addons/misra_9.py +++ b/addons/misra_9.py @@ -33,7 +33,7 @@ def __repr__(self): "ElementDef", self.getLongName(), inits, - ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) + ", ".join(f"{a}={repr(getattr(self, a))}" for a in attrs) ) @property diff --git a/addons/naming.py b/addons/naming.py index 2893779f2e0..c11d524f356 100755 --- a/addons/naming.py +++ b/addons/naming.py @@ -16,7 +16,7 @@ def validate_regex(expr): try: re.compile(expr) except re.error: - print('Error: "{}" is not a valid regular expression.'.format(expr)) + print(f'Error: "{expr}" is not a valid regular expression.') sys.exit(1) @@ -51,7 +51,7 @@ def reportError(token, severity, msg, errorId): data = cppcheckdata.CppcheckData(arg) for cfg in data.iterconfigurations(): - print('Checking %s, config %s...' 
% (arg, cfg.name)) + print('Checking {}, config {}...'.format(arg, cfg.name)) if RE_VARNAME: for var in cfg.variables: if var.access == 'Private': diff --git a/addons/namingng.py b/addons/namingng.py index 9da177321ef..d0e050cc9b4 100755 --- a/addons/namingng.py +++ b/addons/namingng.py @@ -317,7 +317,7 @@ def check_variable_naming(conf,cfg,debugprint): print("Sign: " + str(var.nameToken.valueType.sign)) print("variable type: " + varType) print("\n") - print("\t-- {} {}".format(varType, str(var.nameToken.str))) + print(f"\t-- {varType} {str(var.nameToken.str)}") if conf.skip_one_char_variables and len(var.nameToken.str) == 1: continue @@ -355,7 +355,7 @@ def check_function_naming(conf,cfg,debugprint): prev = prev.previous retval = prev.str + retval if debugprint: - print("\t:: {} {}".format(retval, token.function.name)) + print(f"\t:: {retval} {token.function.name}") if retval and retval in conf.function_prefixes: if not token.function.name.startswith(conf.function_prefixes[retval]): diff --git a/addons/threadsafety.py b/addons/threadsafety.py index 9475e1a5b0d..10686822260 100755 --- a/addons/threadsafety.py +++ b/addons/threadsafety.py @@ -343,7 +343,7 @@ def check_MTunsafe(cfg): for cfg in data.iterconfigurations(): if not args.quiet: srcfile = data.files[0] - print('Checking %s, config %s...' % (srcfile, cfg.name)) + print('Checking {}, config {}...'.format(srcfile, cfg.name)) check_MTunsafe(cfg) checkstatic(cfg) diff --git a/addons/y2038.py b/addons/y2038.py index b17d41dd330..a2feae5ca08 100755 --- a/addons/y2038.py +++ b/addons/y2038.py @@ -31,7 +31,6 @@ # $ cppcheck --dump file.c && python3 y2038.py file.c.dump # -from __future__ import print_function import cppcheckdata import sys @@ -342,7 +341,7 @@ def check_y2038_safe(dumpfile, quiet=False): for cfg in data.iterconfigurations(): if not quiet: - print('Checking %s, config %s...' 
% (srcfile, cfg.name)) + print('Checking {}, config {}...'.format(srcfile, cfg.name)) safe_ranges = [] safe = -1 time_bits_defined = False diff --git a/htmlreport/cppcheck-htmlreport b/htmlreport/cppcheck-htmlreport index 5044443a0a9..f1086543761 100755 --- a/htmlreport/cppcheck-htmlreport +++ b/htmlreport/cppcheck-htmlreport @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -from __future__ import unicode_literals from datetime import date import argparse @@ -487,7 +486,7 @@ def git_blame(errors, path, file, blame_options): if last_line == 0: return {} - first_line = next((error for error in errors if error['line'] > 0))['line'] + first_line = next(error for error in errors if error['line'] > 0)['line'] full_path = os.path.join(path, file) path, filename = os.path.split(full_path) @@ -562,29 +561,29 @@ def tr_str(td_th, line, id, cwe, severity, classification, guideline, message, t else: items.insert(0,line) for item in items: - ret += '<%s>%s' % (td_th, item, td_th) + ret += '<{}>{}'.format(td_th, item, td_th) if message_class: message_attribute = ' class="%s"' % message_class else: message_attribute = '' - ret += '<%s%s>%s' % (td_th, message_attribute, html_escape(message), td_th) + ret += '<{}{}>{}'.format(td_th, message_attribute, html_escape(message), td_th) if timestamp: - ret += '<%s>%s' % (td_th, timestamp, td_th) + ret += '<{}>{}'.format(td_th, timestamp, td_th) for field in add_author: if field == 'name': - ret += '<%s>%s' % (td_th, html_escape(author), td_th) + ret += '<{}>{}'.format(td_th, html_escape(author), td_th) elif field == 'email': - ret += '<%s>%s' % (td_th, html_escape(author_mail), td_th) + ret += '<{}>{}'.format(td_th, html_escape(author_mail), td_th) elif field == 'date': - ret += '<%s>%s' % (td_th, date, td_th) + ret += '<{}>{}'.format(td_th, date, td_th) if tr_class: tr_attributes = ' class="%s"' % tr_class else: tr_attributes = '' - return '%s' % (tr_attributes, ret) + return '{}'.format(tr_attributes, ret) def to_css_selector(tag): @@ -875,9 
+874,9 @@ def main() -> None: source_filename = os.path.join(source_dir, filename) try: - with io.open(source_filename, 'r', encoding=options.source_encoding) as input_file: + with open(source_filename, encoding=options.source_encoding) as input_file: content = input_file.read() - except IOError: + except OSError: if error['id'] != 'unmatchedSuppression': sys.stderr.write("ERROR: Source file '%s' not found.\n" % source_filename) @@ -895,7 +894,7 @@ def main() -> None: encoding=options.source_encoding) htmlFormatter.errors = errors - with io.open(os.path.join(options.report_dir, htmlfile), 'w', encoding='utf-8') as output_file: + with open(os.path.join(options.report_dir, htmlfile), 'w', encoding='utf-8') as output_file: output_file.write(HTML_HEAD % (options.title, htmlFormatter.get_style_defs('.highlight'), @@ -934,7 +933,7 @@ def main() -> None: # all the errors created. print('Creating index.html') - with io.open(os.path.join(options.report_dir, 'index.html'), + with open(os.path.join(options.report_dir, 'index.html'), 'w') as output_file: stats_count = 0 @@ -1006,7 +1005,7 @@ def main() -> None: for filename, data in sorted(files.items()): file_error = filename in decode_errors or filename.endswith('*') is_file = filename != '' and not file_error - row_content = filename if file_error else "%s" % (data['htmlfile'], filename) + row_content = filename if file_error else "{}".format(data['htmlfile'], filename) htmlfile = data.get('htmlfile') if is_file else None output_file.write("\n ") @@ -1064,7 +1063,7 @@ def main() -> None: sys.stderr.write("\nConsider changing source-encoding (for example: \"htmlreport ... 
--source-encoding=\"iso8859-1\"\"\n") print('Creating style.css file') - with io.open(os.path.join(options.report_dir, 'style.css'), 'w') as css_file: + with open(os.path.join(options.report_dir, 'style.css'), 'w') as css_file: css_file.write(STYLE_FILE) print("Creating stats.html (statistics)\n") @@ -1082,7 +1081,7 @@ def main() -> None: # get top ten for each severity SEVERITIES = "error", "warning", "portability", "performance", "style", "unusedFunction", "information", "missingInclude", "internal" - with io.open(os.path.join(options.report_dir, 'stats.html'), 'w') as stats_file: + with open(os.path.join(options.report_dir, 'stats.html'), 'w') as stats_file: stats_file.write(HTML_HEAD % (options.title, '', options.title, ': Statistics')) stats_file.write(HTML_HEAD_END) @@ -1133,7 +1132,7 @@ def main() -> None: if options.checkers_report: print("Creating checkers.html (checkers report)\n") - with io.open(os.path.join(options.report_dir, 'checkers.html'), 'w') as checkers_file: + with open(os.path.join(options.report_dir, 'checkers.html'), 'w') as checkers_file: checkers_file.write(HTML_HEAD % (options.title, '', options.title, ': Checkers')) checkers_file.write(HTML_HEAD_END) @@ -1141,7 +1140,7 @@ def main() -> None: checkers_file.write(HTML_MENU.replace('id="menu"', 'id="menu_index"', 1).replace("Defects:", "Back to summary", 1) % ('')) checkers_file.write(HTML_MENU_END.replace("content", "content_index", 1)) - with io.open(options.checkers_report, 'r', encoding=options.source_encoding) as checkers_report: + with open(options.checkers_report, encoding=options.source_encoding) as checkers_report: content = checkers_report.read() checkers_file.write("
\n")
diff --git a/test/cli/clang-import_test.py b/test/cli/clang-import_test.py
index e4ec19a5ada..5b44e239be4 100644
--- a/test/cli/clang-import_test.py
+++ b/test/cli/clang-import_test.py
@@ -1,4 +1,3 @@
-
 # python -m pytest test-clang-import.py
 
 import os
@@ -117,12 +116,12 @@ def test_ast(tmpdir):
 
 def test_log(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     args = ['--clang', test_file]
     out_lines = [
-        'Checking {} ...'.format(test_file).replace('\\', '/'),
+        f'Checking {test_file} ...'.replace('\\', '/'),
     ]
 
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines)
@@ -130,7 +129,7 @@ def test_log(tmpdir):
 
 def test_warning(tmpdir):  # #12424
     test_file = os.path.join(tmpdir, 'test_2')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('''void f() {}''')
 
     exitcode, stdout, stderr = cppcheck(['-q', '--enable=warning', '--clang', test_file])
@@ -141,7 +140,7 @@ def test_warning(tmpdir):  # #12424
 
 def __test_cmd(tmp_path, file_name, extra_args, stdout_exp_1, content=''):
     test_file = tmp_path / file_name
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(content)
 
     args = [
@@ -160,8 +159,8 @@ def __test_cmd(tmp_path, file_name, extra_args, stdout_exp_1, content=''):
     assert exitcode == 0, stderr if not stdout else stdout
     assert stderr == ''
     assert stdout.splitlines() == [
-        'Checking {} ...'.format(file_name),
-        'clang -fsyntax-only -Xclang -ast-dump -fno-color-diagnostics {}{}'.format(stdout_exp_1, file_name)
+        f'Checking {file_name} ...',
+        f'clang -fsyntax-only -Xclang -ast-dump -fno-color-diagnostics {stdout_exp_1}{file_name}'
     ]
 
 
@@ -199,7 +198,7 @@ def test_cmd_def(tmp_path):
 
 def test_cmd_include(tmp_path):
     inc_file = tmp_path / 'inc.h'
-    with open(inc_file, 'wt'):
+    with open(inc_file, 'w'):
         pass
     __test_cmd(tmp_path, 'test.cpp',['--include=inc.h'], '-x c++ --include inc.h')
 
@@ -220,7 +219,7 @@ def test_cmd_std_c(tmp_path):  # #13129
 def test_cmd_std_c_builddir(tmp_path):  # #13129
     build_dir = tmp_path / 'b1'
     os.makedirs(build_dir)
-    __test_cmd(tmp_path, 'test.cpp',['--std=c89', '--std=c++14', '--cppcheck-build-dir={}'.format(build_dir)], '-x c++ -std=c++14')
+    __test_cmd(tmp_path, 'test.cpp',['--std=c89', '--std=c++14', f'--cppcheck-build-dir={build_dir}'], '-x c++ -std=c++14')
 
 
 def test_cmd_std_cpp(tmp_path):  # #13129
@@ -249,7 +248,7 @@ def test_cmd_std_cpp_enforce_alias(tmp_path):  # #13128/#13129/#13130
 
 def test_debug_clang_output(tmp_path):
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """
 void f() {}
@@ -279,7 +278,7 @@ def test_debug_clang_output_failure_exitcode(tmp_path):
     # TranslationUnitDecl 0x6127d5d9d4e8 <> 
     # ...
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """void f()
 {
@@ -300,6 +299,6 @@ def test_debug_clang_output_failure_exitcode(tmp_path):
     stderr_lines = stderr.splitlines()
     assert len(stderr_lines) > 5, stderr_lines
     assert (stderr_lines[0] ==
-            "Failed to execute 'clang -fsyntax-only -Xclang -ast-dump -fno-color-diagnostics -x c {} 2>&1' - (exitcode: 1 / output: {}:3:12: error: indirection requires pointer operand ('int' invalid)".format(test_file, test_file))
+            f"Failed to execute 'clang -fsyntax-only -Xclang -ast-dump -fno-color-diagnostics -x c {test_file} 2>&1' - (exitcode: 1 / output: {test_file}:3:12: error: indirection requires pointer operand ('int' invalid)")
     assert stdout.find('TranslationUnitDecl') != -1, stdout
     assert stdout.find(str(test_file)) != -1, stdout
\ No newline at end of file
diff --git a/test/cli/dumpfile_test.py b/test/cli/dumpfile_test.py
index 932c4363a30..97386350a16 100644
--- a/test/cli/dumpfile_test.py
+++ b/test/cli/dumpfile_test.py
@@ -10,7 +10,7 @@
 
 def test_libraries(tmpdir):  #13701
     test_file = str(tmpdir / 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('x=1;\n')
 
     args = ['--library=posix', '--dump', test_file]
@@ -18,7 +18,7 @@ def test_libraries(tmpdir):  #13701
 
     dumpfile = test_file + '.dump'
     assert os.path.isfile(dumpfile)
-    with open(dumpfile, 'rt') as f:
+    with open(dumpfile) as f:
         dump = f.read()
     assert '' in dump
     assert dump.find('') < dump.find('' in dump
diff --git a/test/cli/helloworld_test.py b/test/cli/helloworld_test.py
index 7ccc113af43..916dfefaf30 100644
--- a/test/cli/helloworld_test.py
+++ b/test/cli/helloworld_test.py
@@ -1,4 +1,3 @@
-
 # python -m pytest test-helloworld.py
 
 import os
@@ -118,7 +117,7 @@ def test_addon_with_gui_project(tmp_path):
     args = [
         '--template=cppcheck1',
         '--enable=style',
-        '--project={}'.format(project_file)
+        f'--project={project_file}'
     ]
     ret, stdout, stderr = cppcheck(args, cwd=tmp_path)
     filename = os.path.join('helloworld', 'main.c')
@@ -273,7 +272,7 @@ def test_suppress_project_relative(tmp_path):
                             suppressions=[{'fileName':'main.c', 'id':'zerodiv'}])
 
     args = [
-        '--project={}'.format(project_file)
+        f'--project={project_file}'
     ]
 
     ret, stdout, stderr = cppcheck(args, cwd=tmp_path)
@@ -289,7 +288,7 @@ def test_suppress_project_absolute(tmp_path):
                             suppressions=[{'fileName':'main.c', 'id':'zerodiv'}])
 
     args = [
-        '--project={}'.format(project_file)
+        f'--project={project_file}'
     ]
 
     ret, stdout, stderr = cppcheck(args)
@@ -336,7 +335,7 @@ def test_checkers_report(tmpdir):
 
     cppcheck(args, cwd=__script_dir)
 
-    with open(filename, 'rt') as f:
+    with open(filename) as f:
         data = f.read().splitlines()
         assert 'No   CheckAutoVariables::assignFunctionArg                     require:style,warning' in data, json.dumps(data, indent=4)
         assert 'Yes  CheckAutoVariables::autoVariables' in data, json.dumps(data, indent=4)
@@ -345,7 +344,7 @@ def test_checkers_report(tmpdir):
         '--enable=style'
     ]
     cppcheck(args, cwd=__script_dir)
-    with open(filename, 'rt') as f:
+    with open(filename) as f:
         data = f.read().splitlines()
         # checker has been activated by --enable=style
         assert 'Yes  CheckAutoVariables::assignFunctionArg' in data, json.dumps(data, indent=4)
diff --git a/test/cli/inline-suppress-polyspace_test.py b/test/cli/inline-suppress-polyspace_test.py
index 889bf231ef3..46ce704e306 100644
--- a/test/cli/inline-suppress-polyspace_test.py
+++ b/test/cli/inline-suppress-polyspace_test.py
@@ -1,4 +1,3 @@
-
 # python -m pytest inline-suppress-polyspace_test.py
 
 import os
@@ -9,7 +8,7 @@
 
 def test_unmatched_polyspace_suppression(tmp_path):
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('int f(void); /* polyspace MISRA2012:8.2 */\n')
 
     args = ['--addon=misra', '--template=simple', '--enable=style,information', '--inline-suppr', 'test.c']
@@ -22,7 +21,7 @@ def test_unmatched_polyspace_suppression(tmp_path):
 
 def test_1(tmp_path):
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('int f(); /* polyspace MISRA2012:8.2 */\n')
 
     args = ['--addon=misra', '--template=simple', '--enable=style,information', '--inline-suppr', 'test.c']
@@ -35,7 +34,7 @@ def test_1(tmp_path):
 
 def test_block(tmp_path):
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('/* polyspace +1 MISRA2012:8.2 */\n'
                 'int f();\n' # <- suppression applies to this line
                 'int g();\n') # <- suppression does not apply to this line
@@ -50,7 +49,7 @@ def test_block(tmp_path):
 
 def test_begin_end(tmp_path):
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('/* polyspace-begin MISRA2012:8.2 */\n'
                 'int f();\n'
                 '/* polyspace-end MISRA2012:8.2 */\n')
diff --git a/test/cli/inline-suppress_test.py b/test/cli/inline-suppress_test.py
index 7b8be839df4..7156ae688e1 100644
--- a/test/cli/inline-suppress_test.py
+++ b/test/cli/inline-suppress_test.py
@@ -1,4 +1,3 @@
-
 # python -m pytest inline-suppress_test.py
 
 import json
@@ -23,7 +22,7 @@ def __create_unused_function_compile_commands(tmpdir):
     compdb_path = os.path.join(tmpdir, 'proj-inline-suppress-unusedFunction')
     os.makedirs(compdb_path)
     compile_commands = os.path.join(compdb_path, 'compile_commands.json')
-    with open(compile_commands, 'wt') as f:
+    with open(compile_commands, 'w') as f:
         f.write(json.dumps(j, indent=4))
     return compile_commands
 
@@ -50,7 +49,7 @@ def test_2():
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
     assert lines == [
-        '{}3.cpp:4:19: error: Division by zero. [zerodiv]'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}3.cpp:4:19: error: Division by zero. [zerodiv]'
     ]
     assert stdout == ''
     assert ret == 0, stdout
@@ -77,12 +76,12 @@ def test_unmatched_suppression():
         '--inline-suppr',
         '--enable=information',
         '--error-exitcode=1',
-        '{}2.c'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}2.c'
     ]
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
     assert lines == [
-        '{}2.c:2:0: information: Unmatched suppression: some_warning_id [unmatchedSuppression]'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}2.c:2:0: information: Unmatched suppression: some_warning_id [unmatchedSuppression]'
     ]
     assert stdout == ''
     assert ret == 1, stdout
@@ -95,12 +94,12 @@ def test_unmatched_suppression_path_with_extra_stuff():
         '--inline-suppr',
         '--enable=information',
         '--error-exitcode=1',
-        '{}2.c'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}2.c'
     ]
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
     assert lines == [
-        '{}2.c:2:0: information: Unmatched suppression: some_warning_id [unmatchedSuppression]'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}2.c:2:0: information: Unmatched suppression: some_warning_id [unmatchedSuppression]'
     ]
     assert stdout == ''
     assert ret == 1, stdout
@@ -110,12 +109,12 @@ def test_backwards_compatibility():
     args = [
         '-q',
         '--template=simple',
-        '{}3.cpp'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}3.cpp'
     ]
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
     assert lines == [
-        '{}3.cpp:4:19: error: Division by zero. [zerodiv]'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}3.cpp:4:19: error: Division by zero. [zerodiv]'
     ]
     assert stdout == ''
     assert ret == 0, stdout
@@ -124,7 +123,7 @@ def test_backwards_compatibility():
         '-q',
         '--template=simple',
         '--inline-suppr',
-        '{}3.cpp'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}3.cpp'
     ]
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
@@ -140,7 +139,7 @@ def __test_compile_commands_unused_function(tmpdir, use_j):
         '--template=simple',
         '--enable=all',
         '--error-exitcode=1',
-        '--project={}'.format(compdb_file)
+        f'--project={compdb_file}'
     ]
     if use_j:
         args.append('-j2')
@@ -150,7 +149,7 @@ def __test_compile_commands_unused_function(tmpdir, use_j):
     proj_path_sep = os.path.join(__script_dir, 'proj-inline-suppress-unusedFunction') + os.path.sep
     lines = stderr.splitlines()
     assert lines == [
-        "{}B.cpp:6:9: style: The function 'unusedFunctionTest' is never used. [unusedFunction]".format(proj_path_sep)
+        f"{proj_path_sep}B.cpp:6:9: style: The function 'unusedFunctionTest' is never used. [unusedFunction]"
     ]
     assert stdout == ''
     assert ret == 1, stdout
@@ -173,7 +172,7 @@ def __test_compile_commands_unused_function_suppression(tmpdir, use_j):
         '--enable=all',
         '--inline-suppr',
         '--error-exitcode=1',
-        '--project={}'.format(compdb_file)
+        f'--project={compdb_file}'
     ]
     if use_j:
         args.append('-j2')
@@ -230,10 +229,10 @@ def test_build_dir(tmpdir):
     args = [
         '-q',
         '--template=simple',
-        '--cppcheck-build-dir={}'.format(tmpdir),
+        f'--cppcheck-build-dir={tmpdir}',
         '--enable=all',
         '--inline-suppr',
-        '{}4.c'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}4.c'
     ]
 
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
@@ -252,7 +251,7 @@ def test_build_dir_jobs_suppressions(tmpdir): #14064
     args = [
         '-q',
         '--template=simple',
-        '--cppcheck-build-dir={}'.format(tmpdir),
+        f'--cppcheck-build-dir={tmpdir}',
         '--enable=style',
         '--inline-suppr',
         '-j4',
@@ -282,10 +281,10 @@ def __test_build_dir_unused_template(tmpdir, extra_args):
     args = [
         '-q',
         '--template=simple',
-        '--cppcheck-build-dir={}'.format(tmpdir),
+        f'--cppcheck-build-dir={tmpdir}',
         '--enable=all',
         '--inline-suppr',
-        '{}template.cpp'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}template.cpp'
     ]
 
     args = args + extra_args
@@ -317,7 +316,7 @@ def test_suppress_unmatched_inline_suppression():  # 11172
         '--enable=information',
         '--suppress=unmatchedSuppression',
         '--inline-suppr',
-        '{}2.c'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}2.c'
     ]
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
@@ -381,7 +380,7 @@ def test_duplicate_cmd_j(tmp_path):
 # no error as inline suppressions are handled separately
 def __test_duplicate_file(tmp_path, extra_args):
     suppr_file =  tmp_path / 'suppressions'
-    with open(suppr_file, 'wt') as f:
+    with open(suppr_file, 'w') as f:
         f.write('unreadVariable')
 
     args = [
@@ -389,7 +388,7 @@ def __test_duplicate_file(tmp_path, extra_args):
         '--template=simple',
         '--enable=all',
         '--inline-suppr',
-        '--suppressions-list={}'.format(suppr_file),
+        f'--suppressions-list={suppr_file}',
         'proj-inline-suppress/4.c'
     ]
 
@@ -431,8 +430,8 @@ def __test_unused_function_unmatched(tmpdir, extra_args):
     lines = stderr.splitlines()
     lines.sort()
     assert lines == [
-        '{}unusedFunctionUnmatched.cpp:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]'.format(__proj_inline_suppres_path),
-        '{}unusedFunctionUnmatched.cpp:5:0: information: Unmatched suppression: unusedFunction [unmatchedSuppression]'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}unusedFunctionUnmatched.cpp:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]',
+        f'{__proj_inline_suppres_path}unusedFunctionUnmatched.cpp:5:0: information: Unmatched suppression: unusedFunction [unmatchedSuppression]'
     ]
     assert stdout == ''
     assert ret == 0, stdout
@@ -450,20 +449,20 @@ def test_unused_function_unmatched_j(tmpdir):
 def test_unused_function_unmatched_builddir(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_function_unmatched(tmpdir, ['-j1', '--cppcheck-build-dir={}'.format(build_dir)])
+    __test_unused_function_unmatched(tmpdir, ['-j1', f'--cppcheck-build-dir={build_dir}'])
 
 
 def test_unused_function_unmatched_builddir_j_thread(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_function_unmatched(tmpdir, ['-j2', '--cppcheck-build-dir={}'.format(build_dir), '--executor=thread'])
+    __test_unused_function_unmatched(tmpdir, ['-j2', f'--cppcheck-build-dir={build_dir}', '--executor=thread'])
 
 
 @pytest.mark.skipif(sys.platform == 'win32', reason='ProcessExecutor not available on Windows')
 def test_unused_function_unmatched_builddir_j_process(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_function_unmatched(tmpdir, ['-j2', '--cppcheck-build-dir={}'.format(build_dir), '--executor=process'])
+    __test_unused_function_unmatched(tmpdir, ['-j2', f'--cppcheck-build-dir={build_dir}', '--executor=process'])
 
 
 # do not report unmatched unusedFunction inline suppressions when unusedFunction check is disabled
@@ -478,7 +477,7 @@ def test_unused_function_disabled_unmatched():
 
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     assert stderr.splitlines() == [
-        '{}unusedFunctionUnmatched.cpp:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}unusedFunctionUnmatched.cpp:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]'
     ]
     assert stdout == ''
     assert ret == 0, stdout
@@ -496,8 +495,8 @@ def test_unmatched_cfg():
 
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     assert stderr.splitlines() == [
-        '{}cfg.c:5:0: information: Unmatched suppression: id [unmatchedSuppression]'.format(__proj_inline_suppres_path),
-        '{}cfg.c:9:0: information: Unmatched suppression: id [unmatchedSuppression]'.format(__proj_inline_suppres_path),
+        f'{__proj_inline_suppres_path}cfg.c:5:0: information: Unmatched suppression: id [unmatchedSuppression]',
+        f'{__proj_inline_suppres_path}cfg.c:9:0: information: Unmatched suppression: id [unmatchedSuppression]',
     ]
     assert stdout == ''
     assert ret == 0, stdout
@@ -518,7 +517,7 @@ def test_unused_function_disabled_unmatched_j():
 
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     assert stderr.splitlines() == [
-        '{}unusedFunctionUnmatched.cpp:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}unusedFunctionUnmatched.cpp:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]'
     ]
     assert stdout == ''
     assert ret == 0, stdout
@@ -536,7 +535,7 @@ def test_misra_disabled_unmatched():  #14232
 
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     assert stderr.splitlines() == [
-        '{}misraUnmatched.c:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}misraUnmatched.c:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]'
     ]
     assert stdout == ''
     assert ret == 0, stdout
@@ -554,7 +553,7 @@ def test_premium_disabled_unmatched():  #13663
 
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     assert stderr.splitlines() == [
-        '{}premiumUnmatched.cpp:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]'.format(__proj_inline_suppres_path)
+        f'{__proj_inline_suppres_path}premiumUnmatched.cpp:5:0: information: Unmatched suppression: uninitvar [unmatchedSuppression]'
     ]
     assert stdout == ''
     assert ret == 0, stdout
\ No newline at end of file
diff --git a/test/cli/lookup_test.py b/test/cli/lookup_test.py
index cab96a7f6bb..edbfd29c3e5 100644
--- a/test/cli/lookup_test.py
+++ b/test/cli/lookup_test.py
@@ -8,19 +8,19 @@
 
 def __remove_std_lookup_log(l : list, exepath):
     l.remove("looking for library 'std.cfg'")
-    l.remove("looking for library '{}/std.cfg'".format(exepath))
-    l.remove("looking for library '{}/cfg/std.cfg'".format(exepath))
+    l.remove(f"looking for library '{exepath}/std.cfg'")
+    l.remove(f"looking for library '{exepath}/cfg/std.cfg'")
     return l
 
 
 def __create_gui_project(tmpdir):
     file_name = 'test.c'
     test_file = os.path.join(tmpdir, file_name)
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     project_file = os.path.join(tmpdir, 'project.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
 """
 
@@ -36,20 +36,20 @@ def __create_gui_project(tmpdir):
 def __create_compdb(tmpdir):
     file_name = 'test.c'
     test_file = os.path.join(tmpdir, file_name)
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     compilation_db = [
         {
             "directory": str(tmpdir),
-            "command": "c++ -o {}.o -c {}".format(os.path.basename(file_name), file_name),
+            "command": f"c++ -o {os.path.basename(file_name)}.o -c {file_name}",
             "file": file_name,
-            "output": "{}.o".format(os.path.basename(file_name))
+            "output": f"{os.path.basename(file_name)}.o"
         }
     ]
 
     compile_commands = os.path.join(tmpdir, 'compile_commands.json')
-    with open(compile_commands, 'wt') as f:
+    with open(compile_commands, 'w') as f:
         f.write(json.dumps(compilation_db))
 
     return compile_commands, test_file
@@ -57,7 +57,7 @@ def __create_compdb(tmpdir):
 
 def test_lib_lookup(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=library', '--library=gnu', test_file])
@@ -68,15 +68,15 @@ def test_lib_lookup(tmpdir):
     lines = __remove_std_lookup_log(stdout.splitlines(), exepath)
     assert lines == [
         "looking for library 'gnu.cfg'",
-        "looking for library '{}/gnu.cfg'".format(exepath),
-        "looking for library '{}/cfg/gnu.cfg'".format(exepath),
-        'Checking {} ...'.format(test_file)
+        f"looking for library '{exepath}/gnu.cfg'",
+        f"looking for library '{exepath}/cfg/gnu.cfg'",
+        f'Checking {test_file} ...'
     ]
 
 
 def test_lib_lookup_ext(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=library', '--library=gnu.cfg', test_file])
@@ -87,15 +87,15 @@ def test_lib_lookup_ext(tmpdir):
     lines = __remove_std_lookup_log(stdout.splitlines(), exepath)
     assert lines == [
         "looking for library 'gnu.cfg'",
-        "looking for library '{}/gnu.cfg'".format(exepath),
-        "looking for library '{}/cfg/gnu.cfg'".format(exepath),
-        'Checking {} ...'.format(test_file)
+        f"looking for library '{exepath}/gnu.cfg'",
+        f"looking for library '{exepath}/cfg/gnu.cfg'",
+        f'Checking {test_file} ...'
     ]
 
 
 def test_lib_lookup_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=library', '--library=none', test_file])
@@ -107,8 +107,8 @@ def test_lib_lookup_notfound(tmpdir):
     assert lines == [
         # TODO: specify which folder is actually used for lookup here
         "looking for library 'none.cfg'",
-        "looking for library '{}/none.cfg'".format(exepath),
-        "looking for library '{}/cfg/none.cfg'".format(exepath),
+        f"looking for library '{exepath}/none.cfg'",
+        f"looking for library '{exepath}/cfg/none.cfg'",
         "library not found: 'none'",
         "cppcheck: Failed to load library configuration file 'none'. File not found"
     ]
@@ -117,7 +117,7 @@ def test_lib_lookup_notfound(tmpdir):
 def test_lib_lookup_notfound_project(tmpdir):  # #13938
     project_file, _ = __create_gui_project(tmpdir)
 
-    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=library', '--library=none', '--project={}'.format(project_file)])
+    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=library', '--library=none', f'--project={project_file}'])
     exepath = os.path.dirname(exe)
     if sys.platform == 'win32':
         exepath = exepath.replace('\\', '/')
@@ -127,8 +127,8 @@ def test_lib_lookup_notfound_project(tmpdir):  # #13938
         # TODO: needs to look relative to the project first
         # TODO: specify which folder is actually used for lookup here
         "looking for library 'none.cfg'",
-        "looking for library '{}/none.cfg'".format(exepath),
-        "looking for library '{}/cfg/none.cfg'".format(exepath),
+        f"looking for library '{exepath}/none.cfg'",
+        f"looking for library '{exepath}/cfg/none.cfg'",
         "library not found: 'none'",
         "cppcheck: Failed to load library configuration file 'none'. File not found"
     ]
@@ -137,7 +137,7 @@ def test_lib_lookup_notfound_project(tmpdir):  # #13938
 def test_lib_lookup_notfound_compdb(tmpdir):
     compdb_file, _ = __create_compdb(tmpdir)
 
-    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=library', '--library=none', '--project={}'.format(compdb_file)])
+    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=library', '--library=none', f'--project={compdb_file}'])
     exepath = os.path.dirname(exe)
     if sys.platform == 'win32':
         exepath = exepath.replace('\\', '/')
@@ -146,8 +146,8 @@ def test_lib_lookup_notfound_compdb(tmpdir):
     assert lines == [
         # TODO: specify which folder is actually used for lookup here
         "looking for library 'none.cfg'",
-        "looking for library '{}/none.cfg'".format(exepath),
-        "looking for library '{}/cfg/none.cfg'".format(exepath),
+        f"looking for library '{exepath}/none.cfg'",
+        f"looking for library '{exepath}/cfg/none.cfg'",
         "library not found: 'none'",
         "cppcheck: Failed to load library configuration file 'none'. File not found"
     ]
@@ -155,7 +155,7 @@ def test_lib_lookup_notfound_compdb(tmpdir):
 
 def test_lib_lookup_ext_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=library', '--library=none.cfg', test_file])
@@ -166,8 +166,8 @@ def test_lib_lookup_ext_notfound(tmpdir):
     lines = __remove_std_lookup_log(stdout.splitlines(), exepath)
     assert lines == [
         "looking for library 'none.cfg'",
-        "looking for library '{}/none.cfg'".format(exepath),
-        "looking for library '{}/cfg/none.cfg'".format(exepath),
+        f"looking for library '{exepath}/none.cfg'",
+        f"looking for library '{exepath}/cfg/none.cfg'",
         "library not found: 'none.cfg'",
         "cppcheck: Failed to load library configuration file 'none.cfg'. File not found"
     ]
@@ -175,7 +175,7 @@ def test_lib_lookup_ext_notfound(tmpdir):
 
 def test_lib_lookup_relative_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=library', '--library=config/gnu.xml', test_file])
@@ -186,8 +186,8 @@ def test_lib_lookup_relative_notfound(tmpdir):
     lines = __remove_std_lookup_log(stdout.splitlines(), exepath)
     assert lines == [
         "looking for library 'config/gnu.xml'",
-        "looking for library '{}/config/gnu.xml'".format(exepath),
-        "looking for library '{}/cfg/config/gnu.xml'".format(exepath),
+        f"looking for library '{exepath}/config/gnu.xml'",
+        f"looking for library '{exepath}/cfg/config/gnu.xml'",
         "library not found: 'config/gnu.xml'",
         "cppcheck: Failed to load library configuration file 'config/gnu.xml'. File not found"
     ]
@@ -195,7 +195,7 @@ def test_lib_lookup_relative_notfound(tmpdir):
 
 def test_lib_lookup_relative_noext_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=library', '--library=config/gnu', test_file])
@@ -206,8 +206,8 @@ def test_lib_lookup_relative_noext_notfound(tmpdir):
     lines = __remove_std_lookup_log(stdout.splitlines(), exepath)
     assert lines == [
         "looking for library 'config/gnu.cfg'",
-        "looking for library '{}/config/gnu.cfg'".format(exepath),
-        "looking for library '{}/cfg/config/gnu.cfg'".format(exepath),
+        f"looking for library '{exepath}/config/gnu.cfg'",
+        f"looking for library '{exepath}/cfg/config/gnu.cfg'",
         "library not found: 'config/gnu'",
         "cppcheck: Failed to load library configuration file 'config/gnu'. File not found"
     ]
@@ -215,52 +215,52 @@ def test_lib_lookup_relative_noext_notfound(tmpdir):
 
 def test_lib_lookup_absolute(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     cfg_file = os.path.join(tmpdir, 'test.cfg')
-    with open(cfg_file, 'wt') as f:
+    with open(cfg_file, 'w') as f:
         f.write('''
 
 
 
         ''')
 
-    exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=library', '--library={}'.format(cfg_file), test_file])
+    exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=library', f'--library={cfg_file}', test_file])
     exepath = os.path.dirname(exe)
     if sys.platform == 'win32':
         exepath = exepath.replace('\\', '/')
     assert exitcode == 0, stdout if stdout else stderr
     lines = __remove_std_lookup_log(stdout.splitlines(), exepath)
     assert lines == [
-        "looking for library '{}'".format(cfg_file),
-        'Checking {} ...'.format(test_file)
+        f"looking for library '{cfg_file}'",
+        f'Checking {test_file} ...'
     ]
 
 
 def test_lib_lookup_absolute_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     cfg_file = os.path.join(tmpdir, 'test.cfg')
 
-    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=library', '--library={}'.format(cfg_file), test_file])
+    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=library', f'--library={cfg_file}', test_file])
     exepath = os.path.dirname(exe)
     if sys.platform == 'win32':
         exepath = exepath.replace('\\', '/')
     assert exitcode == 1, stdout
     lines = __remove_std_lookup_log(stdout.splitlines(), exepath)
     assert lines == [
-        "looking for library '{}'".format(cfg_file),
-        "library not found: '{}'".format(cfg_file),
-        "cppcheck: Failed to load library configuration file '{}'. File not found".format(cfg_file)
+        f"looking for library '{cfg_file}'",
+        f"library not found: '{cfg_file}'",
+        f"cppcheck: Failed to load library configuration file '{cfg_file}'. File not found"
     ]
 
 
 def test_lib_lookup_nofile(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     # make sure we do not produce an error when the attempted lookup path is a directory and not a file
@@ -275,20 +275,20 @@ def test_lib_lookup_nofile(tmpdir):
     lines = __remove_std_lookup_log(stdout.splitlines(), exepath)
     assert lines == [
         "looking for library 'gtk.cfg'",
-        "looking for library '{}/gtk.cfg'".format(exepath),
-        "looking for library '{}/cfg/gtk.cfg'".format(exepath),
-        'Checking {} ...'.format(test_file)
+        f"looking for library '{exepath}/gtk.cfg'",
+        f"looking for library '{exepath}/cfg/gtk.cfg'",
+        f'Checking {test_file} ...'
     ]
 
 
 # make sure we bail out when we encounter an invalid file
 def test_lib_lookup_invalid(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     gnu_cfg_file = os.path.join(tmpdir, 'gnu.cfg')
-    with open(gnu_cfg_file, 'wt') as f:
+    with open(gnu_cfg_file, 'w') as f:
         f.write('''{}''')
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=library', '--library=gnu', test_file], cwd=tmpdir)
@@ -307,7 +307,7 @@ def test_lib_lookup_invalid(tmpdir):
 
 def test_lib_lookup_multi(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=library', '--library=posix,gnu', test_file])
@@ -318,18 +318,18 @@ def test_lib_lookup_multi(tmpdir):
     lines = __remove_std_lookup_log(stdout.splitlines(), exepath)
     assert lines == [
         "looking for library 'posix.cfg'",
-        "looking for library '{}/posix.cfg'".format(exepath),
-        "looking for library '{}/cfg/posix.cfg'".format(exepath),
+        f"looking for library '{exepath}/posix.cfg'",
+        f"looking for library '{exepath}/cfg/posix.cfg'",
         "looking for library 'gnu.cfg'",
-        "looking for library '{}/gnu.cfg'".format(exepath),
-        "looking for library '{}/cfg/gnu.cfg'".format(exepath),
-        'Checking {} ...'.format(test_file)
+        f"looking for library '{exepath}/gnu.cfg'",
+        f"looking for library '{exepath}/cfg/gnu.cfg'",
+        f'Checking {test_file} ...'
     ]
 
 
 def test_platform_lookup_builtin(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr = cppcheck(['--debug-lookup=platform', '--platform=unix64', test_file])
@@ -337,14 +337,14 @@ def test_platform_lookup_builtin(tmpdir):
     lines = stdout.splitlines()
     # built-in platform are not being looked up
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
 
 
 @pytest.mark.skip  # TODO: fails when not run from the root folder
 def test_platform_lookup(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr = cppcheck(['--debug-lookup=platform', '--platform=avr8', test_file])
@@ -355,16 +355,16 @@ def test_platform_lookup(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'avr8'",
-        "try to load platform file '{}/avr8.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/avr8.xml".format(cwd, cwd),
-        "try to load platform file '{}/platforms/avr8.xml' ... Success".format(cwd),
-        'Checking {} ...'.format(test_file)
+        f"try to load platform file '{cwd}/avr8.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/avr8.xml",
+        f"try to load platform file '{cwd}/platforms/avr8.xml' ... Success",
+        f'Checking {test_file} ...'
     ]
 
 
 @pytest.mark.skip  # TODO: fails when not run from the root folder
 def test_platform_lookup_ext(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr = cppcheck(['--debug-lookup=platform', '--platform=avr8.xml', test_file])
@@ -375,15 +375,15 @@ def test_platform_lookup_ext(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'avr8.xml'",
-        "try to load platform file '{}/avr8.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/avr8.xml".format(cwd, cwd),
-        "try to load platform file '{}/platforms/avr8.xml' ... Success".format(cwd),
-        'Checking {} ...'.format(test_file)
+        f"try to load platform file '{cwd}/avr8.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/avr8.xml",
+        f"try to load platform file '{cwd}/platforms/avr8.xml' ... Success",
+        f'Checking {test_file} ...'
     ]
 
 
 def test_platform_lookup_path(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     cppcheck = 'cppcheck' # No path
@@ -396,10 +396,10 @@ def format_path(p):
         return p.replace('\\', '/').replace('"', '\'')
     def try_fail(f):
         f = format_path(f)
-        return "try to load platform file '{}' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}".format(f, f)
+        return f"try to load platform file '{f}' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={f}"
     def try_success(f):
         f = format_path(f)
-        return "try to load platform file '{}' ... Success".format(f)
+        return f"try to load platform file '{f}' ... Success"
     lines = stdout.replace('\\', '/').replace('"', '\'').splitlines()
     assert lines == [
         "looking for platform 'avr8.xml'",
@@ -407,13 +407,13 @@ def try_success(f):
         try_fail(os.path.join(tmpdir, 'platforms', 'avr8.xml')),
         try_fail(os.path.join(path, 'avr8.xml')),
         try_success(os.path.join(path, 'platforms', 'avr8.xml')),
-        'Checking {} ...'.format(format_path(test_file))
+        f'Checking {format_path(test_file)} ...'
     ]
 
 
 def test_platform_lookup_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=platform', '--platform=none', test_file])
@@ -426,10 +426,10 @@ def test_platform_lookup_notfound(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'none'",
-        "try to load platform file '{}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/none.xml".format(exepath, exepath),
-        "try to load platform file '{}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/none.xml".format(exepath, exepath),
+        f"try to load platform file '{cwd}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/none.xml",
+        f"try to load platform file '{cwd}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/platforms/none.xml",
+        f"try to load platform file '{exepath}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/none.xml",
+        f"try to load platform file '{exepath}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/platforms/none.xml",
         "cppcheck: error: unrecognized platform: 'none'."
     ]
 
@@ -440,7 +440,7 @@ def test_platform_lookup_notfound_project(tmpdir):  # #13939
     project_file, _ = __create_gui_project(tmpdir)
     project_path = os.path.dirname(project_file)
 
-    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=platform', '--platform=none', '--project={}'.format(project_file)])
+    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=platform', '--platform=none', f'--project={project_file}'])
     cwd = os.getcwd()
     exepath = os.path.dirname(exe)
     if sys.platform == 'win32':
@@ -451,13 +451,13 @@ def test_platform_lookup_notfound_project(tmpdir):  # #13939
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'none'",
-        "try to load platform file '{}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/none.xml".format(project_path, project_path),
-        "try to load platform file '{}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/none.xml".format(project_path, project_path),
+        f"try to load platform file '{project_path}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={project_path}/none.xml",
+        f"try to load platform file '{project_path}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={project_path}/platforms/none.xml",
         # TODO: the following lookups are in CWD - is this intended?
-        "try to load platform file '{}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/none.xml".format(exepath, exepath),
-        "try to load platform file '{}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/none.xml".format(exepath, exepath),
+        f"try to load platform file '{cwd}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/none.xml",
+        f"try to load platform file '{cwd}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/platforms/none.xml",
+        f"try to load platform file '{exepath}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/none.xml",
+        f"try to load platform file '{exepath}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/platforms/none.xml",
         "cppcheck: error: unrecognized platform: 'none'."
     ]
 
@@ -465,7 +465,7 @@ def test_platform_lookup_notfound_project(tmpdir):  # #13939
 def test_platform_lookup_notfound_compdb(tmpdir):
     compdb_file, _ = __create_compdb(tmpdir)
 
-    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=platform', '--platform=none', '--project={}'.format(compdb_file)])
+    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=platform', '--platform=none', f'--project={compdb_file}'])
     cwd = os.getcwd()
     exepath = os.path.dirname(exe)
     if sys.platform == 'win32':
@@ -475,17 +475,17 @@ def test_platform_lookup_notfound_compdb(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'none'",
-        "try to load platform file '{}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/none.xml".format(exepath, exepath),
-        "try to load platform file '{}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/none.xml".format(exepath, exepath),
+        f"try to load platform file '{cwd}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/none.xml",
+        f"try to load platform file '{cwd}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/platforms/none.xml",
+        f"try to load platform file '{exepath}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/none.xml",
+        f"try to load platform file '{exepath}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/platforms/none.xml",
         "cppcheck: error: unrecognized platform: 'none'."
     ]
 
 
 def test_platform_lookup_ext_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=platform', '--platform=none.xml', test_file])
@@ -498,17 +498,17 @@ def test_platform_lookup_ext_notfound(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'none.xml'",
-        "try to load platform file '{}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/none.xml".format(exepath, exepath),
-        "try to load platform file '{}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/none.xml".format(exepath, exepath),
+        f"try to load platform file '{cwd}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/none.xml",
+        f"try to load platform file '{cwd}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/platforms/none.xml",
+        f"try to load platform file '{exepath}/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/none.xml",
+        f"try to load platform file '{exepath}/platforms/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/platforms/none.xml",
         "cppcheck: error: unrecognized platform: 'none.xml'."
     ]
 
 
 def test_platform_lookup_relative_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=platform', '--platform=platform/none.xml', test_file])
@@ -521,17 +521,17 @@ def test_platform_lookup_relative_notfound(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'platform/none.xml'",
-        "try to load platform file '{}/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platform/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/platforms/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/platform/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platform/none.xml".format(exepath, exepath),
-        "try to load platform file '{}/platforms/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/platform/none.xml".format(exepath, exepath),
+        f"try to load platform file '{cwd}/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/platform/none.xml",
+        f"try to load platform file '{cwd}/platforms/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/platforms/platform/none.xml",
+        f"try to load platform file '{exepath}/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/platform/none.xml",
+        f"try to load platform file '{exepath}/platforms/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/platforms/platform/none.xml",
         "cppcheck: error: unrecognized platform: 'platform/none.xml'."
     ]
 
 
 def test_platform_lookup_relative_noext_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=platform', '--platform=platform/none', test_file])
@@ -544,56 +544,56 @@ def test_platform_lookup_relative_noext_notfound(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'platform/none'",
-        "try to load platform file '{}/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platform/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/platforms/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/platform/none.xml".format(cwd, cwd),
-        "try to load platform file '{}/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platform/none.xml".format(exepath, exepath),
-        "try to load platform file '{}/platforms/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/platforms/platform/none.xml".format(exepath, exepath),
+        f"try to load platform file '{cwd}/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/platform/none.xml",
+        f"try to load platform file '{cwd}/platforms/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/platforms/platform/none.xml",
+        f"try to load platform file '{exepath}/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/platform/none.xml",
+        f"try to load platform file '{exepath}/platforms/platform/none.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={exepath}/platforms/platform/none.xml",
         "cppcheck: error: unrecognized platform: 'platform/none'."
     ]
 
 
 def test_platform_lookup_absolute(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     platform_file = os.path.join(tmpdir, 'test.xml')
-    with open(platform_file, 'wt') as f:
+    with open(platform_file, 'w') as f:
         f.write('''
 
         ''')
 
-    exitcode, stdout, stderr = cppcheck(['--debug-lookup=platform', '--platform={}'.format(platform_file), test_file])
+    exitcode, stdout, stderr = cppcheck(['--debug-lookup=platform', f'--platform={platform_file}', test_file])
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        "looking for platform '{}'".format(platform_file),
-        "try to load platform file '{}' ... Success".format(platform_file),
-        'Checking {} ...'.format(test_file)
+        f"looking for platform '{platform_file}'",
+        f"try to load platform file '{platform_file}' ... Success",
+        f'Checking {test_file} ...'
     ]
 
 
 def test_platform_lookup_absolute_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     platform_file = os.path.join(tmpdir, 'test.xml')
 
-    exitcode, stdout, stderr = cppcheck(['--debug-lookup=platform', '--platform={}'.format(platform_file), test_file])
+    exitcode, stdout, stderr = cppcheck(['--debug-lookup=platform', f'--platform={platform_file}', test_file])
     assert exitcode == 1, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        "looking for platform '{}'".format(platform_file),
-        "try to load platform file '{}' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}".format(platform_file, platform_file),
-        "cppcheck: error: unrecognized platform: '{}'.".format(platform_file)
+        f"looking for platform '{platform_file}'",
+        f"try to load platform file '{platform_file}' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={platform_file}",
+        f"cppcheck: error: unrecognized platform: '{platform_file}'."
     ]
 
 
 @pytest.mark.skip  # TODO: fails when not run from the root folder
 def test_platform_lookup_nofile(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     # make sure we do not produce an error when the attempted lookup path is a directory and not a file
@@ -608,19 +608,19 @@ def test_platform_lookup_nofile(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'avr8'",
-        "try to load platform file '{}/avr8.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={}/avr8.xml".format(cwd, cwd),
-        "try to load platform file '{}/platforms/avr8.xml' ... Success".format(cwd),
-        'Checking {}1 ...'.format(test_file)
+        f"try to load platform file '{cwd}/avr8.xml' ... Error=XML_ERROR_FILE_NOT_FOUND ErrorID=3 (0x3) Line number=0: filename={cwd}/avr8.xml",
+        f"try to load platform file '{cwd}/platforms/avr8.xml' ... Success",
+        f'Checking {test_file}1 ...'
     ]
 
 
 def test_platform_lookup_invalid(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     avr8_file = os.path.join(tmpdir, 'avr8.xml')
-    with open(avr8_file, 'wt') as f:
+    with open(avr8_file, 'w') as f:
         f.write('''{}''')
 
     exitcode, stdout, stderr = cppcheck(['--debug-lookup=platform', '--platform=avr8', test_file], cwd=tmpdir)
@@ -631,14 +631,14 @@ def test_platform_lookup_invalid(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for platform 'avr8'",
-        "try to load platform file '{}/avr8.xml' ... Error=XML_ERROR_PARSING_TEXT ErrorID=8 (0x8) Line number=1".format(cwd),
+        f"try to load platform file '{cwd}/avr8.xml' ... Error=XML_ERROR_PARSING_TEXT ErrorID=8 (0x8) Line number=1",
         "cppcheck: error: unrecognized platform: 'avr8'."
     ]
 
 
 def test_addon_lookup(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=misra', test_file])
@@ -648,15 +648,15 @@ def test_addon_lookup(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for addon 'misra.py'",
-        "looking for addon '{}misra.py'".format(exepath_sep),
-        "looking for addon '{}addons/misra.py'".format(exepath_sep),  # TODO: mixed separators
-        'Checking {} ...'.format(test_file)
+        f"looking for addon '{exepath_sep}misra.py'",
+        f"looking for addon '{exepath_sep}addons/misra.py'",  # TODO: mixed separators
+        f'Checking {test_file} ...'
     ]
 
 
 def test_addon_lookup_ext(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=misra.py', test_file])
@@ -666,15 +666,15 @@ def test_addon_lookup_ext(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for addon 'misra.py'",
-        "looking for addon '{}misra.py'".format(exepath_sep),
-        "looking for addon '{}addons/misra.py'".format(exepath_sep),  # TODO: mixed separators
-        'Checking {} ...'.format(test_file)
+        f"looking for addon '{exepath_sep}misra.py'",
+        f"looking for addon '{exepath_sep}addons/misra.py'",  # TODO: mixed separators
+        f'Checking {test_file} ...'
     ]
 
 
 def test_addon_lookup_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=none', test_file])
@@ -684,8 +684,8 @@ def test_addon_lookup_notfound(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for addon 'none.py'",
-        "looking for addon '{}none.py'".format(exepath_sep),
-        "looking for addon '{}addons/none.py'".format(exepath_sep),  # TODO: mixed separators
+        f"looking for addon '{exepath_sep}none.py'",
+        f"looking for addon '{exepath_sep}addons/none.py'",  # TODO: mixed separators
         'Did not find addon none.py'
     ]
 
@@ -693,7 +693,7 @@ def test_addon_lookup_notfound(tmpdir):
 def test_addon_lookup_notfound_project(tmpdir):  # #13940 / #13941
     project_file, _ = __create_gui_project(tmpdir)
 
-    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=none', '--project={}'.format(project_file)])
+    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=none', f'--project={project_file}'])
     exepath = os.path.dirname(exe)
     exepath_sep = exepath + os.path.sep
     assert exitcode == 1, stdout
@@ -701,8 +701,8 @@ def test_addon_lookup_notfound_project(tmpdir):  # #13940 / #13941
     assert lines == [
         # TODO: needs to look relative to the project file first
         "looking for addon 'none.py'",
-        "looking for addon '{}none.py'".format(exepath_sep),
-        "looking for addon '{}addons/none.py'".format(exepath_sep),  # TODO: mixed separators
+        f"looking for addon '{exepath_sep}none.py'",
+        f"looking for addon '{exepath_sep}addons/none.py'",  # TODO: mixed separators
         'Did not find addon none.py'
     ]
 
@@ -710,22 +710,22 @@ def test_addon_lookup_notfound_project(tmpdir):  # #13940 / #13941
 def test_addon_lookup_notfound_compdb(tmpdir):
     compdb_file, _ = __create_compdb(tmpdir)
 
-    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=none', '--project={}'.format(compdb_file)])
+    exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=none', f'--project={compdb_file}'])
     exepath = os.path.dirname(exe)
     exepath_sep = exepath + os.path.sep
     assert exitcode == 1, stdout
     lines = stdout.splitlines()
     assert lines == [
         "looking for addon 'none.py'",
-        "looking for addon '{}none.py'".format(exepath_sep),
-        "looking for addon '{}addons/none.py'".format(exepath_sep),  # TODO: mixed separators
+        f"looking for addon '{exepath_sep}none.py'",
+        f"looking for addon '{exepath_sep}addons/none.py'",  # TODO: mixed separators
         'Did not find addon none.py'
     ]
 
 
 def test_addon_lookup_ext_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=none.py', test_file])
@@ -735,15 +735,15 @@ def test_addon_lookup_ext_notfound(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for addon 'none.py'",
-        "looking for addon '{}none.py'".format(exepath_sep),
-        "looking for addon '{}addons/none.py'".format(exepath_sep),  # TODO: mixed separators
+        f"looking for addon '{exepath_sep}none.py'",
+        f"looking for addon '{exepath_sep}addons/none.py'",  # TODO: mixed separators
         'Did not find addon none.py'
     ]
 
 
 def test_addon_lookup_relative_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=addon/misra.py', test_file])
@@ -753,15 +753,15 @@ def test_addon_lookup_relative_notfound(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for addon 'addon/misra.py'",
-        "looking for addon '{}addon/misra.py'".format(exepath_sep),
-        "looking for addon '{}addons/addon/misra.py'".format(exepath_sep),  # TODO: mixed separators
+        f"looking for addon '{exepath_sep}addon/misra.py'",
+        f"looking for addon '{exepath_sep}addons/addon/misra.py'",  # TODO: mixed separators
         'Did not find addon addon/misra.py'
     ]
 
 
 def test_addon_lookup_relative_noext_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, _, exe = cppcheck_ex(['--debug-lookup=addon', '--addon=addon/misra', test_file])
@@ -771,49 +771,49 @@ def test_addon_lookup_relative_noext_notfound(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for addon 'addon/misra.py'",
-        "looking for addon '{}addon/misra.py'".format(exepath_sep),
-        "looking for addon '{}addons/addon/misra.py'".format(exepath_sep),  # TODO: mixed separators
+        f"looking for addon '{exepath_sep}addon/misra.py'",
+        f"looking for addon '{exepath_sep}addons/addon/misra.py'",  # TODO: mixed separators
         'Did not find addon addon/misra.py'
     ]
 
 
 def test_addon_lookup_absolute(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     addon_file = os.path.join(tmpdir, 'test.py')
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write('''''')
 
-    exitcode, stdout, stderr = cppcheck(['--debug-lookup=addon', '--addon={}'.format(addon_file), test_file])
+    exitcode, stdout, stderr = cppcheck(['--debug-lookup=addon', f'--addon={addon_file}', test_file])
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        "looking for addon '{}'".format(addon_file),
-        'Checking {} ...'.format(test_file)
+        f"looking for addon '{addon_file}'",
+        f'Checking {test_file} ...'
     ]
 
 
 def test_addon_lookup_absolute_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     addon_file = os.path.join(tmpdir, 'test.py')
 
-    exitcode, stdout, stderr = cppcheck(['--debug-lookup=addon', '--addon={}'.format(addon_file), test_file])
+    exitcode, stdout, stderr = cppcheck(['--debug-lookup=addon', f'--addon={addon_file}', test_file])
     assert exitcode == 1, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        "looking for addon '{}'".format(addon_file),
-        'Did not find addon {}'.format(addon_file)
+        f"looking for addon '{addon_file}'",
+        f'Did not find addon {addon_file}'
     ]
 
 
 def test_addon_lookup_nofile(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     # make sure we do not produce an error when the attempted lookup path is a directory and not a file
@@ -829,20 +829,20 @@ def test_addon_lookup_nofile(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for addon 'misra.py'",
-        "looking for addon '{}misra.py'".format(exepath_sep),
-        "looking for addon '{}addons/misra.py'".format(exepath_sep),  # TODO: mixed separators
-        'Checking {} ...'.format(test_file)
+        f"looking for addon '{exepath_sep}misra.py'",
+        f"looking for addon '{exepath_sep}addons/misra.py'",  # TODO: mixed separators
+        f'Checking {test_file} ...'
     ]
 
 
 # make sure we bail out when we encounter an invalid file
 def test_addon_lookup_invalid(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     misra_py_file = os.path.join(tmpdir, 'misra.py')
-    with open(misra_py_file, 'wt') as f:
+    with open(misra_py_file, 'w') as f:
         f.write('''''')
 
     exitcode, stdout, stderr = cppcheck(['--debug-lookup=addon', '--addon=misra', test_file], cwd=tmpdir)
@@ -850,7 +850,7 @@ def test_addon_lookup_invalid(tmpdir):
     lines = stdout.splitlines()
     assert lines == [
         "looking for addon 'misra.py'",
-        'Checking {} ...'.format(test_file)  # TODO: should bail out
+        f'Checking {test_file} ...'  # TODO: should bail out
     ]
 
 
@@ -863,11 +863,11 @@ def test_config_lookup(tmpdir):
     shutil.copytree(os.path.join(bin_dir, 'cfg'), os.path.join(tmpdir, 'cfg'))
 
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     config_file = os.path.join(tmpdir, 'cppcheck.cfg')
-    with open(config_file, 'wt') as f:
+    with open(config_file, 'w') as f:
         f.write('{}')
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=config', test_file], cwd=tmpdir, cppcheck_exe=tmp_cppcheck_exe)
@@ -877,14 +877,14 @@ def test_config_lookup(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        "looking for '{}cppcheck.cfg'".format(exepath_sep),
-        'Checking {} ...'.format(test_file)
+        f"looking for '{exepath_sep}cppcheck.cfg'",
+        f'Checking {test_file} ...'
     ]
 
 
 def test_config_lookup_notfound(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=config', test_file])
@@ -894,9 +894,9 @@ def test_config_lookup_notfound(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        "looking for '{}cppcheck.cfg'".format(exepath_sep),
+        f"looking for '{exepath_sep}cppcheck.cfg'",
         'no configuration found',
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
 
 
@@ -909,11 +909,11 @@ def test_config_invalid(tmpdir):
     shutil.copytree(os.path.join(bin_dir, 'cfg'), os.path.join(tmpdir, 'cfg'))
 
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     config_file = os.path.join(tmpdir, 'cppcheck.cfg')
-    with open(config_file, 'wt'):
+    with open(config_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex(['--debug-lookup=config', test_file], cwd=tmpdir, cppcheck_exe=tmp_cppcheck_exe)
@@ -923,7 +923,7 @@ def test_config_invalid(tmpdir):
     assert exitcode == 1, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        "looking for '{}cppcheck.cfg'".format(exepath_sep),
+        f"looking for '{exepath_sep}cppcheck.cfg'",
         'cppcheck: error: could not load cppcheck.cfg - not a valid JSON - syntax error at line 1 near: '
     ]
 
@@ -932,7 +932,7 @@ def test_config_invalid(tmpdir):
 @pytest.mark.parametrize("type,file", [("addon", "misra.py"), ("config", "cppcheck.cfg"), ("library", "gnu.cfg"), ("platform", "avr8.xml")])
 def test_lookup_path(tmpdir, type, file):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     cppcheck = 'cppcheck' # No path
@@ -940,7 +940,7 @@ def test_lookup_path(tmpdir, type, file):
     env = os.environ.copy()
     env['PATH'] = path + (';' if sys.platform == 'win32' else ':') + env.get('PATH', '')
     if type == 'config':
-        with open(os.path.join(path, "cppcheck.cfg"), 'wt') as f:
+        with open(os.path.join(path, "cppcheck.cfg"), 'w') as f:
             f.write('{}')
         exitcode, stdout, stderr, _ = cppcheck_ex(args=[f'--debug-lookup={type}', test_file], cppcheck_exe=cppcheck, cwd=str(tmpdir), env=env)
         os.remove(os.path.join(path, "cppcheck.cfg")) # clean up otherwise other tests may fail
diff --git a/test/cli/metrics_test.py b/test/cli/metrics_test.py
index 6f5c7041dab..e8c444115e8 100644
--- a/test/cli/metrics_test.py
+++ b/test/cli/metrics_test.py
@@ -54,7 +54,7 @@ def test_dummy_metrics_xml_report(tmpdir):
     assert stderr == ''
     assert stdout == f'Checking {source_path} ...\n'
 
-    with open(output_file, 'r') as file:
+    with open(output_file) as file:
         xml = file.read()
 
     for expected in __expected_xml:
diff --git a/test/cli/more-projects_test.py b/test/cli/more-projects_test.py
index 73d738d9cfb..3d509de46f7 100644
--- a/test/cli/more-projects_test.py
+++ b/test/cli/more-projects_test.py
@@ -10,7 +10,7 @@
 def test_project_force_U(tmpdir):
     # 10018
     # -U does not work with compile_commands.json
-    with open(os.path.join(tmpdir, 'bug1.cpp'), 'wt') as f:
+    with open(os.path.join(tmpdir, 'bug1.cpp'), 'w') as f:
         f.write("""
                 int x = 123 / 0;
                 #ifdef MACRO1
@@ -27,7 +27,7 @@ def test_project_force_U(tmpdir):
          "output": "bug1.o"}
     ]
 
-    with open(compile_commands, 'wt') as f:
+    with open(compile_commands, 'w') as f:
         f.write(json.dumps(compilation_db))
 
     # Without -U => both bugs are found
@@ -46,11 +46,11 @@ def __write_cppcheck_project_file(tmpdir, platform=None, importproject=None):
     project_file = os.path.join(tmpdir, 'Project.cppcheck')
 
     if platform is not None:
-        platform = '{}'.format(platform)
+        platform = f'{platform}'
     if importproject is not None:
-        platform = '{}'.format(importproject)
+        platform = f'{importproject}'
 
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
 """
 
@@ -71,10 +71,10 @@ def test_project_custom_platform(tmpdir):
     """
     project_file = __write_cppcheck_project_file(tmpdir, platform='p1.xml')
 
-    with open(os.path.join(tmpdir, 'p1.xml'), 'wt') as f:
+    with open(os.path.join(tmpdir, 'p1.xml'), 'w') as f:
         f.write('\n')
 
-    with open(os.path.join(tmpdir, '1.c'), 'wt') as f:
+    with open(os.path.join(tmpdir, '1.c'), 'w') as f:
         f.write("int x;")
 
     ret, stdout, stderr = cppcheck(['--project=' + project_file, '--template=cppcheck1', '-q'])
@@ -89,7 +89,7 @@ def test_project_empty_platform(tmpdir):
     """
     project_file = __write_cppcheck_project_file(tmpdir, platform='')
 
-    with open(os.path.join(tmpdir, '1.c'), 'wt') as f:
+    with open(os.path.join(tmpdir, '1.c'), 'w') as f:
         f.write("int x;")
 
     ret, stdout, stderr = cppcheck(['--project=' + project_file, '--template=cppcheck1', '-q'])
@@ -104,7 +104,7 @@ def test_project_unspecified_platform(tmpdir):
     """
     project_file = __write_cppcheck_project_file(tmpdir, platform='Unspecified')
 
-    with open(os.path.join(tmpdir, '1.c'), 'wt') as f:
+    with open(os.path.join(tmpdir, '1.c'), 'w') as f:
         f.write("int x;")
 
     ret, stdout, stderr = cppcheck(['--project=' + project_file, '--template=cppcheck1', '-q'])
@@ -119,7 +119,7 @@ def test_project_unknown_platform(tmpdir):
     """
     project_file = __write_cppcheck_project_file(tmpdir, platform='dummy')
 
-    with open(os.path.join(tmpdir, '1.c'), 'wt') as f:
+    with open(os.path.join(tmpdir, '1.c'), 'w') as f:
         f.write("int x;")
 
     ret, stdout, stderr = cppcheck(['--project=' + project_file, '--template=cppcheck1'])
@@ -134,7 +134,7 @@ def test_project_empty_fields(tmpdir):
     """
     project_file = os.path.join(tmpdir, 'Project.cppcheck')
 
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
 """
 
@@ -231,7 +231,7 @@ def test_project_missing_subproject(tmpdir):
 
 
 def test_project_std(tmpdir):
-    with open(os.path.join(tmpdir, 'bug1.cpp'), 'wt') as f:
+    with open(os.path.join(tmpdir, 'bug1.cpp'), 'w') as f:
         f.write("""
                 #if __cplusplus == 201402L
                 int x = 123 / 0;
@@ -249,7 +249,7 @@ def test_project_std(tmpdir):
         }
     ]
 
-    with open(compile_commands, 'wt') as f:
+    with open(compile_commands, 'w') as f:
         f.write(json.dumps(compilation_db))
 
     ret, stdout, stderr = cppcheck(['--project=' + compile_commands, '--enable=all', '-rp=' + str(tmpdir), '--template=cppcheck1'])
@@ -261,7 +261,7 @@ def test_project_std(tmpdir):
 @pytest.mark.skip() # clang-tidy is not available in all cases
 def test_clang_tidy(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
                 int main(int argc)
                 {
@@ -270,7 +270,7 @@ def test_clang_tidy(tmpdir):
                 """)
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -282,7 +282,7 @@ def test_clang_tidy(tmpdir):
   
 """.format(test_file))
 
-    args = ['--project={}'.format(project_file)]
+    args = [f'--project={project_file}']
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0, stdout
@@ -290,7 +290,7 @@ def test_clang_tidy(tmpdir):
     # TODO: should detect clang-tidy issue
     assert len(lines) == 1
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
     assert stderr == ''
 
@@ -303,11 +303,11 @@ def test_clang_tidy(tmpdir):
 ])
 def test_project_file_filter(tmpdir, file_filter):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -316,9 +316,9 @@ def test_project_file_filter(tmpdir, file_filter):
     
 """.format(test_file))
 
-    args = file_filter + ['--project={}'.format(project_file)]
+    args = file_filter + [f'--project={project_file}']
     out_lines = [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
 
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines)
@@ -330,14 +330,14 @@ def test_project_file_filter(tmpdir, file_filter):
 ])
 def test_project_file_filter_cpp(tmpdir, file_filter):
     test_file_1 = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file_1, 'wt') as f:
+    with open(test_file_1, 'w') as f:
         pass
     test_file_2 = os.path.join(tmpdir, 'test.c')
-    with open(test_file_2, 'wt') as f:
+    with open(test_file_2, 'w') as f:
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -347,9 +347,9 @@ def test_project_file_filter_cpp(tmpdir, file_filter):
     
 """.format(test_file_1, test_file_2))
 
-    args = file_filter + ['--project={}'.format(project_file)]
+    args = file_filter + [f'--project={project_file}']
     out_lines = [
-        'Checking {} ...'.format(test_file_1)
+        f'Checking {test_file_1} ...'
     ]
 
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines)
@@ -361,14 +361,14 @@ def test_project_file_filter_cpp(tmpdir, file_filter):
 ])
 def test_project_file_filter_c(tmpdir, file_filter):
     test_file_1 = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file_1, 'wt') as f:
+    with open(test_file_1, 'w') as f:
         pass
     test_file_2 = os.path.join(tmpdir, 'test.c')
-    with open(test_file_2, 'wt') as f:
+    with open(test_file_2, 'w') as f:
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -378,9 +378,9 @@ def test_project_file_filter_c(tmpdir, file_filter):
     
 """.format(test_file_1, test_file_2))
 
-    args = file_filter + ['--project={}'.format(project_file)]
+    args = file_filter + [f'--project={project_file}']
     out_lines = [
-        'Checking {} ...'.format(test_file_2)
+        f'Checking {test_file_2} ...'
     ]
 
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines)
@@ -391,14 +391,14 @@ def test_project_relpath_file_filter_abspath(tmpdir):
     relative paths in project file, absolute path in file filter
     """
     test_file_cpp = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file_cpp, 'wt') as f:
+    with open(test_file_cpp, 'w') as f:
         pass
     test_file_c = os.path.join(tmpdir, 'test.c')
-    with open(test_file_c, 'wt') as f:
+    with open(test_file_c, 'w') as f:
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -412,7 +412,7 @@ def test_project_relpath_file_filter_abspath(tmpdir):
         'Checking test.c ...'
     ]
 
-    args = ['--file-filter={}'.format(test_file_c), '--project=test.cppcheck']
+    args = [f'--file-filter={test_file_c}', '--project=test.cppcheck']
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines, cwd=tmpdir)
 
 
@@ -421,14 +421,14 @@ def test_project_abspath_file_filter_relpath(tmpdir):
     absolute paths in project file, relative path in file filter
     """
     test_file_cpp = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file_cpp, 'wt') as f:
+    with open(test_file_cpp, 'w') as f:
         pass
     test_file_c = os.path.join(tmpdir, 'test.c')
-    with open(test_file_c, 'wt') as f:
+    with open(test_file_c, 'w') as f:
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -439,7 +439,7 @@ def test_project_abspath_file_filter_relpath(tmpdir):
 """.format(test_file_c, test_file_cpp))
 
     out_lines = [
-        'Checking {} ...'.format(test_file_c)
+        f'Checking {test_file_c} ...'
     ]
 
     args = ['--file-filter=test.c', '--project=test.cppcheck']
@@ -462,31 +462,31 @@ def test_project_pathmatch_other_cwd(tmpdir):
     os.mkdir(test_dir_3)
 
     test_file_1 = os.path.join(test_dir_1, 'a-abs.c')
-    with open(test_file_1, 'wt') as f:
+    with open(test_file_1, 'w') as f:
         pass
 
     test_file_2 = os.path.join(test_dir_1, 'a-rel.c')
-    with open(test_file_2, 'wt') as f:
+    with open(test_file_2, 'w') as f:
         pass
 
     test_file_3 = os.path.join(test_dir_2, 'b-abs.c')
-    with open(test_file_3, 'wt') as f:
+    with open(test_file_3, 'w') as f:
         pass
 
     test_file_4 = os.path.join(test_dir_2, 'b-rel.c')
-    with open(test_file_4, 'wt') as f:
+    with open(test_file_4, 'w') as f:
         pass
 
     test_file_5 = os.path.join(test_dir_3, 'b-abs.c')
-    with open(test_file_5, 'wt') as f:
+    with open(test_file_5, 'w') as f:
         pass
 
     test_file_6 = os.path.join(test_dir_3, 'b-rel.c')
-    with open(test_file_6, 'wt') as f:
+    with open(test_file_6, 'w') as f:
         pass
 
     project_file = os.path.join(test_root, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -504,11 +504,11 @@ def test_project_pathmatch_other_cwd(tmpdir):
 """.format(test_file_1, test_file_3, test_file_5))
 
     out_lines = [
-        'Checking {} ...'.format(test_file_5),
+        f'Checking {test_file_5} ...',
         'Checking {} ...'.format(os.path.join("..", "cwd", "b", "b-rel.c")),
     ]
 
-    args = ['--file-filter={}/*/?/**.c*'.format(test_root), '--project=../test.cppcheck']
+    args = [f'--file-filter={test_root}/*/?/**.c*', '--project=../test.cppcheck']
     exitcode, stdout, stderr = cppcheck(args, cwd=test_cwd)
     stdout_lines = stdout.splitlines()
     assert 0 == exitcode
@@ -519,11 +519,11 @@ def test_project_pathmatch_other_cwd(tmpdir):
 
 def test_project_file_filter_no_match(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -532,7 +532,7 @@ def test_project_file_filter_no_match(tmpdir):
     
 """.format(test_file))
 
-    args = ['--file-filter=*.c', '--project={}'.format(project_file)]
+    args = ['--file-filter=*.c', f'--project={project_file}']
     out_lines = [
         'cppcheck: error: could not find any files matching the filter:*.c'
     ]
@@ -542,20 +542,20 @@ def test_project_file_filter_no_match(tmpdir):
 
 def test_project_file_order(tmpdir):
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
     test_file_b = os.path.join(tmpdir, 'b.c')
-    with open(test_file_b, 'wt'):
+    with open(test_file_b, 'w'):
         pass
     test_file_c = os.path.join(tmpdir, 'c.c')
-    with open(test_file_c, 'wt'):
+    with open(test_file_c, 'w'):
         pass
     test_file_d = os.path.join(tmpdir, 'd.c')
-    with open(test_file_d, 'wt'):
+    with open(test_file_d, 'w'):
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -567,19 +567,19 @@ def test_project_file_order(tmpdir):
     
 """.format(test_file_c, test_file_d, test_file_b, test_file_a))
 
-    args = ['--project={}'.format(project_file), '-j1']
+    args = [f'--project={project_file}', '-j1']
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file_c),
+        f'Checking {test_file_c} ...',
         '1/4 files checked 25% done',
-        'Checking {} ...'.format(test_file_d),
+        f'Checking {test_file_d} ...',
         '2/4 files checked 50% done',
-        'Checking {} ...'.format(test_file_b),
+        f'Checking {test_file_b} ...',
         '3/4 files checked 75% done',
-        'Checking {} ...'.format(test_file_a),
+        f'Checking {test_file_a} ...',
         '4/4 files checked 100% done'
     ]
     assert stderr == ''
@@ -587,11 +587,11 @@ def test_project_file_order(tmpdir):
 
 def test_project_file_duplicate(tmpdir):
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -602,30 +602,30 @@ def test_project_file_duplicate(tmpdir):
     
 """.format(test_file_a, test_file_a, tmpdir))
 
-    args = ['--project={}'.format(project_file)]
+    args = [f'--project={project_file}']
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file_a)
+        f'Checking {test_file_a} ...'
     ]
     assert stderr == ''
 
 
 def test_project_file_duplicate_2(tmpdir):
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
     test_file_b = os.path.join(tmpdir, 'b.c')
-    with open(test_file_b, 'wt'):
+    with open(test_file_b, 'w'):
         pass
     test_file_c = os.path.join(tmpdir, 'c.c')
-    with open(test_file_c, 'wt'):
+    with open(test_file_c, 'w'):
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -641,17 +641,17 @@ def test_project_file_duplicate_2(tmpdir):
     
 """.format(test_file_c, test_file_a, test_file_b, tmpdir, test_file_b, test_file_c, test_file_a, tmpdir))
 
-    args = ['--project={}'.format(project_file), '-j1']
+    args = [f'--project={project_file}', '-j1']
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file_c),
+        f'Checking {test_file_c} ...',
         '1/3 files checked 33% done',
-        'Checking {} ...'.format(test_file_a),
+        f'Checking {test_file_a} ...',
         '2/3 files checked 66% done',
-        'Checking {} ...'.format(test_file_b),
+        f'Checking {test_file_b} ...',
         '3/3 files checked 100% done'
     ]
     assert stderr == ''
@@ -659,7 +659,7 @@ def test_project_file_duplicate_2(tmpdir):
 
 def test_project_file_duplicate_3(tmpdir):  # #12834
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
 
     # multiple ways to specify the same file
@@ -671,7 +671,7 @@ def test_project_file_duplicate_3(tmpdir):  # #12834
     in_file_f = os.path.join(tmpdir, 'dummy', '..', 'a.c')
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -686,13 +686,13 @@ def test_project_file_duplicate_3(tmpdir):  # #12834
     
 """.format(in_file_a, in_file_b, in_file_c, in_file_d, in_file_e, in_file_f, tmpdir))
 
-    args = ['--project={}'.format(project_file)]
+    args = [f'--project={project_file}']
 
     exitcode, stdout, stderr = cppcheck(args, cwd=tmpdir)
     assert exitcode == 0
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file_a)
+        f'Checking {test_file_a} ...'
     ]
     assert stderr == ''
 
@@ -700,7 +700,7 @@ def test_project_file_duplicate_3(tmpdir):  # #12834
 @pytest.mark.skipif(sys.platform != 'win32', reason="requires Windows")
 def test_project_file_duplicate_4(tmpdir):  # #12834
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
 
     # multiple ways to specify the same file
@@ -717,7 +717,7 @@ def test_project_file_duplicate_4(tmpdir):  # #12834
         args2.append(a.replace('\\', '/'))
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -740,23 +740,23 @@ def test_project_file_duplicate_4(tmpdir):  # #12834
 """.format(in_file_a, in_file_b, in_file_c, in_file_d, in_file_e, in_file_f, tmpdir,
                      args2[0], args2[1], args2[2], args2[3], args2[4], args2[5], args2[6]))
 
-    args = ['--project={}'.format(project_file)]
+    args = [f'--project={project_file}']
 
     exitcode, stdout, stderr = cppcheck(args, cwd=tmpdir)
     assert exitcode == 0
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file_a)
+        f'Checking {test_file_a} ...'
     ]
     assert stderr == ''
 
 def test_project_file_ignore(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -765,7 +765,7 @@ def test_project_file_ignore(tmpdir):
     
 """.format(test_file))
 
-    args = ['-itest.cpp', '--project={}'.format(project_file)]
+    args = ['-itest.cpp', f'--project={project_file}']
     out_lines = [
         'cppcheck: error: could not find or open any of the paths given.',
         'cppcheck: Maybe all paths were ignored?'
@@ -776,11 +776,11 @@ def test_project_file_ignore(tmpdir):
 
 def test_project_file_ignore_2(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -792,7 +792,7 @@ def test_project_file_ignore_2(tmpdir):
     
 """.format(test_file))
 
-    args = ['--project={}'.format(project_file)]
+    args = [f'--project={project_file}']
     out_lines = [
         'cppcheck: error: could not find or open any of the paths given.',
         'cppcheck: Maybe all paths were ignored?'
@@ -803,11 +803,11 @@ def test_project_file_ignore_2(tmpdir):
 
 def test_project_file_ignore_3(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         pass
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -819,7 +819,7 @@ def test_project_file_ignore_3(tmpdir):
     
 """.format(test_file))
 
-    args = ['--project={}'.format(project_file)]
+    args = [f'--project={project_file}']
     out_lines = [
         'cppcheck: error: could not find or open any of the paths given.',
         'cppcheck: Maybe all paths were ignored?'
@@ -830,7 +830,7 @@ def test_project_file_ignore_3(tmpdir):
 
 def test_json_file_ignore(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         pass
 
     compilation_db = [
@@ -841,10 +841,10 @@ def test_json_file_ignore(tmpdir):
     ]
 
     project_file = os.path.join(tmpdir, 'test.json')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(json.dumps(compilation_db))
 
-    args = ['-itest.cpp', '--project={}'.format(project_file)]
+    args = ['-itest.cpp', f'--project={project_file}']
     out_lines = [
         'cppcheck: error: no C or C++ source files found.',
         'cppcheck: all paths were ignored'
@@ -855,7 +855,7 @@ def test_json_file_ignore(tmpdir):
 
 def test_json_file_ignore_2(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         pass
 
     compilation_db = [
@@ -866,10 +866,10 @@ def test_json_file_ignore_2(tmpdir):
     ]
 
     project_file = os.path.join(tmpdir, 'test.json')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(json.dumps(compilation_db))
 
-    args = ['-i{}'.format(test_file), '--project={}'.format(project_file)]
+    args = [f'-i{test_file}', f'--project={project_file}']
     out_lines = [
         'cppcheck: error: no C or C++ source files found.',
         'cppcheck: all paths were ignored'
@@ -881,7 +881,7 @@ def test_json_file_ignore_2(tmpdir):
 @pytest.mark.xfail(strict=True)
 def test_project_D(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 #ifndef __GNUC__
 #error "requirement not met"
@@ -889,7 +889,7 @@ def test_project_D(tmpdir):
                 """)
 
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
             """
 
@@ -907,8 +907,8 @@ def test_project_D(tmpdir):
     arg_D = ['-D__GNUC__']
 
     out_expected = [
-        'Checking {} ...'.format(test_file),
-        'Checking {}: __GNUC__=1...'.format(test_file)
+        f'Checking {test_file} ...',
+        f'Checking {test_file}: __GNUC__=1...'
     ]
 
     args1 = args + arg_D
@@ -927,7 +927,7 @@ def test_project_D(tmpdir):
 
 def test_compdb_D(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 #ifndef __GNUC__
 #error "requirement not met"
@@ -941,7 +941,7 @@ def test_compdb_D(tmpdir):
          "file": "test.cpp",
          "output": "test.o"}
     ]
-    with open(compile_commands, 'wt') as f:
+    with open(compile_commands, 'w') as f:
         f.write(json.dumps(compilation_db))
 
     args = [
@@ -951,8 +951,8 @@ def test_compdb_D(tmpdir):
     arg_D = ['-D__GNUC__']
 
     out_expected = [
-        'Checking {} ...'.format(test_file),
-        'Checking {}: __GNUC__=1;...'.format(test_file)  # TODO: get rid of extra ;
+        f'Checking {test_file} ...',
+        f'Checking {test_file}: __GNUC__=1;...'  # TODO: get rid of extra ;
     ]
 
     args1 = args + arg_D
@@ -973,7 +973,7 @@ def test_shared_items_project():
 
     args = [
         '--platform=win64',
-        '--project={}'.format(solution_file),
+        f'--project={solution_file}',
         '--project-configuration=Release|x64'
     ]
 
@@ -987,11 +987,11 @@ def test_shared_items_project():
 
 def test_project_file_nested(tmp_path):
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     level3_file = tmp_path / 'level3.cppcheck'
-    with open(level3_file, 'wt') as f:
+    with open(level3_file, 'w') as f:
         f.write(
 """
     
@@ -1000,20 +1000,20 @@ def test_project_file_nested(tmp_path):
 """.format(test_file))
 
     level2_file = tmp_path / 'level2.cppcheck'
-    with open(level2_file, 'wt') as f:
+    with open(level2_file, 'w') as f:
         f.write(
 """
     level3.cppcheck
 """)
 
     level1_file = tmp_path / 'level1.cppcheck'
-    with open(level1_file, 'wt') as f:
+    with open(level1_file, 'w') as f:
         f.write(
 """
     level2.cppcheck
 """)
 
-    args = ['--project={}'.format(level1_file)]
+    args = [f'--project={level1_file}']
     out_lines = [
         'cppcheck: error: nested Cppcheck GUI projects are not supported.'
     ]
@@ -1023,11 +1023,11 @@ def test_project_file_nested(tmp_path):
 
 def test_project_file_no_analyze_all_vs_configs(tmp_path):
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     project_path = tmp_path / 'project.cppcheck'
-    with open(project_path, 'wt') as f:
+    with open(project_path, 'w') as f:
         f.write(
 """
     false
@@ -1051,9 +1051,9 @@ def test_project_progress(tmp_path, j, executor):
         pytest.skip("process executor not supported on Windows")
 
     code = 'x = 1;'
-    with open(tmp_path / 'test1.c', 'wt') as f:
+    with open(tmp_path / 'test1.c', 'w') as f:
         f.write(code)
-    with open(tmp_path / 'test2.c', 'wt') as f:
+    with open(tmp_path / 'test2.c', 'w') as f:
         f.write(code)
 
     compilation_db = [
@@ -1069,7 +1069,7 @@ def test_project_progress(tmp_path, j, executor):
 
     project_file = tmp_path / 'compile_commands.json'
 
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(json.dumps(compilation_db))
 
     _, stdout, _ = cppcheck([f'--project={project_file}', f'-j{j}', f'--executor={executor}'])
diff --git a/test/cli/other_test.py b/test/cli/other_test.py
index 26dcca81ec1..0c312b67d5d 100644
--- a/test/cli/other_test.py
+++ b/test/cli/other_test.py
@@ -1,4 +1,3 @@
-
 # python -m pytest test-other.py
 
 import os
@@ -23,7 +22,7 @@ def __remove_verbose_log(l : list):
 
 def test_missing_include(tmpdir):  # #11283
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 #include "test.h"
 """)
@@ -31,18 +30,18 @@ def test_missing_include(tmpdir):  # #11283
     args = ['--enable=missingInclude', '--template=simple', test_file]
 
     _, _, stderr = cppcheck(args)
-    assert stderr == '{}:2:2: information: Include file: "test.h" not found. [missingInclude]\n'.format(test_file)
+    assert stderr == f'{test_file}:2:2: information: Include file: "test.h" not found. [missingInclude]\n'
 
 
 def __test_missing_include_check_config(tmpdir, use_j):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
                 #include "test.h"
                 """)
 
     # TODO: -rp is not working requiring the full path in the assert
-    args = '--check-config -rp={} {}'.format(tmpdir, test_file)
+    args = f'--check-config -rp={tmpdir} {test_file}'
     if use_j:
         args = '-j2 ' + args
 
@@ -60,7 +59,7 @@ def test_missing_include_check_config_j(tmpdir):
 
 def test_missing_include_inline_suppr(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
                 // cppcheck-suppress missingInclude
                 #include "missing.h"
@@ -76,7 +75,7 @@ def test_missing_include_inline_suppr(tmpdir):
 
 def test_preprocessor_error(tmpdir):
     test_file = os.path.join(tmpdir, '10866.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('#error test\nx=1;\n')
     exitcode, _, stderr = cppcheck(['--error-exitcode=1', test_file])
     assert 'preprocessorErrorDirective' in stderr
@@ -92,7 +91,7 @@ def test_preprocessor_error(tmpdir):
 @pytest.mark.parametrize("env,color_expected", [({"CLICOLOR_FORCE":"1"}, True), ({"NO_COLOR": "1", "CLICOLOR_FORCE":"1"}, False)])
 def test_color_non_tty(tmpdir, env, color_expected):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('#error test\nx=1;\n')
     exitcode, stdout, stderr = cppcheck([test_file], env=env)
 
@@ -108,7 +107,7 @@ def test_color_non_tty(tmpdir, env, color_expected):
 @pytest.mark.parametrize("env,color_expected", [({}, True), ({"NO_COLOR": "1"}, False)])
 def test_color_tty(tmpdir, env, color_expected):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('#error test\nx=1;\n')
     exitcode, stdout, stderr = cppcheck([test_file], env=env, tty=True)
 
@@ -132,20 +131,20 @@ def test_invalid_library(tmpdir):
 
 def test_message_j(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("")
 
     args = ['-j2', test_file]
 
     _, stdout, _ = cppcheck(args)
-    assert stdout == "Checking {} ...\n".format(test_file) # we were adding stray \0 characters at the end
+    assert stdout == f"Checking {test_file} ...\n" # we were adding stray \0 characters at the end
 
 # TODO: test missing std.cfg
 
 
 def test_progress(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
                 int main(int argc)
                 {
@@ -159,7 +158,7 @@ def test_progress(tmpdir):
     pos = stdout.find('\n')
     assert(pos != -1)
     pos += 1
-    assert stdout[:pos] == "Checking {} ...\n".format(test_file)
+    assert stdout[:pos] == f"Checking {test_file} ...\n"
     assert (stdout[pos:] ==
             "progress: Tokenize (typedef) 0%\n"
             "progress: Tokenize (typedef) 12%\n"
@@ -236,7 +235,7 @@ def test_progress(tmpdir):
 
 def test_progress_j(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
                 int main(int argc)
                 {
@@ -247,13 +246,13 @@ def test_progress_j(tmpdir):
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0, stdout if stdout else stderr
-    assert stdout == "Checking {} ...\n".format(test_file)
+    assert stdout == f"Checking {test_file} ...\n"
     assert stderr == ""
 
 
 def test_execute_addon_failure_py_auto(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
                 void f();
                 """)
@@ -263,12 +262,12 @@ def test_execute_addon_failure_py_auto(tmpdir):
     # provide empty PATH environment variable so python is not found and execution of addon fails
     env = {'PATH': ''}
     _, _, stderr = cppcheck(args, env)
-    assert stderr == '{}:0:0: error: Bailing out from analysis: Checking file failed: Failed to auto detect python [internalError]\n\n^\n'.format(test_file)
+    assert stderr == f'{test_file}:0:0: error: Bailing out from analysis: Checking file failed: Failed to auto detect python [internalError]\n\n^\n'
 
 
 def test_execute_addon_failure_py_notexist(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
                 void f();
                 """)
@@ -278,29 +277,29 @@ def test_execute_addon_failure_py_notexist(tmpdir):
 
     _, _, stderr = cppcheck(args)
     ec = 1 if os.name == 'nt' else 127
-    assert stderr == "{}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'naming' - exitcode is {} [internalError]\n\n^\n".format(test_file, ec)
+    assert stderr == f"{test_file}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'naming' - exitcode is {ec} [internalError]\n\n^\n"
 
 
 def test_execute_addon_failure_json_notexist(tmpdir):
     # specify non-existent python executable so execution of addon fails
     addon_json = os.path.join(tmpdir, 'addon.json')
-    with open(addon_json, 'wt') as f:
+    with open(addon_json, 'w') as f:
         f.write(json.dumps({'executable': 'notexist'}))
 
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
                 void f();
                 """)
 
     args = [
-        '--addon={}'.format(addon_json),
+        f'--addon={addon_json}',
         test_file
     ]
 
     _, _, stderr = cppcheck(args)
     ec = 1 if os.name == 'nt' else 127
-    assert stderr == "{}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon.json' - exitcode is {} [internalError]\n\n^\n".format(test_file, ec)
+    assert stderr == f"{test_file}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon.json' - exitcode is {ec} [internalError]\n\n^\n"
 
 
 @pytest.mark.skipif(sys.platform != "win32", reason="Windows specific issue")
@@ -309,20 +308,20 @@ def test_execute_addon_path_with_spaces(tmpdir):
     addon_dir = os.path.join(tmpdir, 'A Folder')
     addon_script = os.path.join(addon_dir, 'addon.bat')
 
-    with open(addon_json, 'wt') as f:
+    with open(addon_json, 'w') as f:
         f.write(json.dumps({'executable': addon_script }))
 
     os.makedirs(addon_dir, exist_ok=True)
 
-    with open(addon_script, 'wt') as f:
+    with open(addon_script, 'w') as f:
         f.write('@echo {"file":"1.c","linenr":1,"column":1,"severity":"error","message":"hello world","errorId":"hello","addon":"test"}')
 
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         pass
 
     args = [
-        '--addon={}'.format(addon_json),
+        f'--addon={addon_json}',
         test_file,
     ]
 
@@ -335,34 +334,34 @@ def test_execute_addon_path_with_spaces(tmpdir):
 def test_execute_addon_failure_json_ctu_notexist(tmpdir):
     # specify non-existent python executable so execution of addon fails
     addon_json = os.path.join(tmpdir, 'addon.json')
-    with open(addon_json, 'wt') as f:
+    with open(addon_json, 'w') as f:
         f.write(json.dumps({
             'executable': 'notexist',
             'ctu': True
         }))
 
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
                 void f(); """)
 
     args = [
         '--template=simple',
-        '--addon={}'.format(addon_json),
+        f'--addon={addon_json}',
         test_file
     ]
 
     _, _, stderr = cppcheck(args)
     ec = 1 if os.name == 'nt' else 127
     assert stderr.splitlines() == [
-        "{}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon.json' - exitcode is {} [internalError]".format(test_file, ec),
-        ":0:0: error: Bailing out from analysis: Whole program analysis failed: Failed to execute addon 'addon.json' - exitcode is {} [internalError]".format(ec)
+        f"{test_file}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon.json' - exitcode is {ec} [internalError]",
+        f":0:0: error: Bailing out from analysis: Whole program analysis failed: Failed to execute addon 'addon.json' - exitcode is {ec} [internalError]"
     ]
 
 
 def test_execute_addon_file0(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('void foo() {}\n')
 
     args = ['--xml', '--addon=misra', '--enable=style', test_file]
@@ -376,7 +375,7 @@ def test_execute_addon_file0(tmpdir):
 @pytest.mark.skip
 def test_internal_error(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 #include 
 
@@ -390,13 +389,13 @@ def test_internal_error(tmpdir):
     args = [test_file]
 
     _, _, stderr = cppcheck(args)
-    assert stderr == '{}:0:0: error: Bailing from out analysis: Checking file failed: converting \'1f\' to integer failed - not an integer [internalError]\n\n^\n'.format(test_file)
+    assert stderr == f'{test_file}:0:0: error: Bailing from out analysis: Checking file failed: converting \'1f\' to integer failed - not an integer [internalError]\n\n^\n'
 
 
 def test_addon_ctu_exitcode(tmpdir):
     """ #12440 - Misra ctu violations found => exit code should be non-zero """
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""typedef enum { BLOCK =  0x80U, } E;""")
     args = ['--addon=misra', '--enable=style', '--error-exitcode=1', test_file]
     exitcode, _, stderr = cppcheck(args)
@@ -407,7 +406,7 @@ def test_addon_ctu_exitcode(tmpdir):
 # TODO: test with -j2
 def test_addon_misra(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 typedef int MISRA_5_6_VIOLATION;
         """)
@@ -418,15 +417,15 @@ def test_addon_misra(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
-    assert stderr == '{}:2:1: style: misra violation (use --rule-texts= to get proper output) [misra-c2012-2.3]\ntypedef int MISRA_5_6_VIOLATION;\n^\n'.format(test_file)
+    assert stderr == f'{test_file}:2:1: style: misra violation (use --rule-texts= to get proper output) [misra-c2012-2.3]\ntypedef int MISRA_5_6_VIOLATION;\n^\n'
 
 
 def test_addon_y2038(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
     # TODO: trigger warning
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 extern void f()
 {
@@ -441,14 +440,14 @@ def test_addon_y2038(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
-    assert stderr == '{}:4:21: warning: time is Y2038-unsafe [y2038-unsafe-call]\n'.format(test_file)
+    assert stderr == f'{test_file}:4:21: warning: time is Y2038-unsafe [y2038-unsafe-call]\n'
 
 
 def test_addon_threadsafety(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 extern const char* f()
 {
@@ -462,15 +461,15 @@ def test_addon_threadsafety(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
-    assert stderr == '{}:4:12: warning: strerror is MT-unsafe [threadsafety-unsafe-call]\n'.format(test_file)
+    assert stderr == f'{test_file}:4:12: warning: strerror is MT-unsafe [threadsafety-unsafe-call]\n'
 
 
 def test_addon_naming(tmpdir):
     # the addon does nothing without a config
     addon_file = os.path.join(tmpdir, 'naming1.json')
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write("""
 {
     "script": "addons/naming.py",
@@ -481,26 +480,26 @@ def test_addon_naming(tmpdir):
                 """)
 
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 int Var;
         """)
 
-    args = ['--addon={}'.format(addon_file), '--enable=all', '--disable=unusedFunction', '--template=simple', test_file]
+    args = [f'--addon={addon_file}', '--enable=all', '--disable=unusedFunction', '--template=simple', test_file]
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
-    assert stderr == '{}:2:1: style: Variable Var violates naming convention [naming-varname]\n'.format(test_file)
+    assert stderr == f'{test_file}:2:1: style: Variable Var violates naming convention [naming-varname]\n'
 
 
 def test_addon_namingng(tmpdir):
     addon_file = os.path.join(tmpdir, 'namingng.json')
     addon_config_file = os.path.join(tmpdir, 'namingng.config.json')
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write("""
 {
     "script": "addons/namingng.py",
@@ -510,7 +509,7 @@ def test_addon_namingng(tmpdir):
 }
                 """%(addon_config_file).replace('\\','\\\\'))
 
-    with open(addon_config_file, 'wt') as f:
+    with open(addon_config_file, 'w') as f:
         f.write("""
 {
     "RE_FILE": [
@@ -544,14 +543,14 @@ def test_addon_namingng(tmpdir):
 
     test_unguarded_include_file_basename = 'test_unguarded.h'
     test_unguarded_include_file = os.path.join(tmpdir, test_unguarded_include_file_basename)
-    with open(test_unguarded_include_file, 'wt') as f:
+    with open(test_unguarded_include_file, 'w') as f:
         f.write("""
 void InvalidFunctionUnguarded();
 """)
 
     test_include_file_basename = '_test.h'
     test_include_file = os.path.join(tmpdir, test_include_file_basename)
-    with open(test_include_file, 'wt') as f:
+    with open(test_include_file, 'w') as f:
         f.write("""
 #ifndef TEST_H
 #define TEST_H
@@ -566,7 +565,7 @@ def test_addon_namingng(tmpdir):
 
     test_file_basename = 'test_.cpp'
     test_file = os.path.join(tmpdir, test_file_basename)
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 #include "%s"
 
@@ -603,69 +602,69 @@ class _clz {
     assert exitcode == 0, stdout if stdout else stderr
     lines = __remove_verbose_log(stdout.splitlines())
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
     lines = [line for line in stderr.splitlines() if line != '']
     expect = [
-        '{}:0:0: style: File name {} violates naming convention [namingng-namingConvention]'.format(test_include_file,test_include_file_basename),
+        f'{test_include_file}:0:0: style: File name {test_include_file_basename} violates naming convention [namingng-namingConvention]',
         '^',
-        '{}:2:9: style: include guard naming violation; TEST_H != _TEST_H [namingng-includeGuardName]'.format(test_include_file),
+        f'{test_include_file}:2:9: style: include guard naming violation; TEST_H != _TEST_H [namingng-includeGuardName]',
         '#ifndef TEST_H',
         '        ^',
-        '{}:5:6: style: Function InvalidFunction violates naming convention [namingng-namingConvention]'.format(test_include_file),
+        f'{test_include_file}:5:6: style: Function InvalidFunction violates naming convention [namingng-namingConvention]',
         'void InvalidFunction();',
         '     ^',
-        '{}:6:12: style: Global variable _invalid_extern_global violates naming convention [namingng-namingConvention]'.format(test_include_file),
+        f'{test_include_file}:6:12: style: Global variable _invalid_extern_global violates naming convention [namingng-namingConvention]',
         'extern int _invalid_extern_global;',
         '           ^',
 
-        '{}:0:0: style: File name {} violates naming convention [namingng-namingConvention]'.format(test_unguarded_include_file,test_unguarded_include_file_basename),
+        f'{test_unguarded_include_file}:0:0: style: File name {test_unguarded_include_file_basename} violates naming convention [namingng-namingConvention]',
         '^',
-        '{}:0:0: style: Missing include guard [namingng-includeGuardMissing]'.format(test_unguarded_include_file),
+        f'{test_unguarded_include_file}:0:0: style: Missing include guard [namingng-includeGuardMissing]',
         '^',
-        '{}:2:6: style: Function InvalidFunctionUnguarded violates naming convention [namingng-namingConvention]'.format(test_unguarded_include_file),
+        f'{test_unguarded_include_file}:2:6: style: Function InvalidFunctionUnguarded violates naming convention [namingng-namingConvention]',
         'void InvalidFunctionUnguarded();',
         '     ^',
 
-        '{}:0:0: style: File name {} violates naming convention [namingng-namingConvention]'.format(test_file,test_file_basename),
+        f'{test_file}:0:0: style: File name {test_file_basename} violates naming convention [namingng-namingConvention]',
         '^',
-        '{}:7:26: style: Variable _invalid_arg violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:7:26: style: Variable _invalid_arg violates naming convention [namingng-namingConvention]',
         'void valid_function2(int _invalid_arg);',
         '                         ^',
-        '{}:8:26: style: Variable invalid_arg_ violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:8:26: style: Variable invalid_arg_ violates naming convention [namingng-namingConvention]',
         'void valid_function3(int invalid_arg_);',
         '                         ^',
-        '{}:10:31: style: Variable invalid_arg32 violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:10:31: style: Variable invalid_arg32 violates naming convention [namingng-namingConvention]',
         'void valid_function5(uint32_t invalid_arg32);',
         '                              ^',
-        '{}:4:6: style: Function invalid_function_ violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:4:6: style: Function invalid_function_ violates naming convention [namingng-namingConvention]',
         'void invalid_function_();',
         '     ^',
-        '{}:5:6: style: Function _invalid_function violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:5:6: style: Function _invalid_function violates naming convention [namingng-namingConvention]',
         'void _invalid_function();',
         '     ^',
-        '{}:12:10: style: Function invalid_function7 violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:12:10: style: Function invalid_function7 violates naming convention [namingng-namingConvention]',
         'uint16_t invalid_function7(int valid_arg);',
         '         ^',
-        '{}:15:5: style: Global variable _invalid_global violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:15:5: style: Global variable _invalid_global violates naming convention [namingng-namingConvention]',
         'int _invalid_global;',
         '    ^',
-        '{}:16:12: style: Global variable _invalid_static_global violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:16:12: style: Global variable _invalid_static_global violates naming convention [namingng-namingConvention]',
         'static int _invalid_static_global;',
         '           ^',
-        '{}:20:5: style: Class Constructor _clz violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:20:5: style: Class Constructor _clz violates naming convention [namingng-namingConvention]',
         '    _clz() : _invalid_public(0), _invalid_private(0), priv_good(0), priv_bad_tmp(0) { }',
         '    ^',
-        '{}:21:9: style: Public member variable _invalid_public violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:21:9: style: Public member variable _invalid_public violates naming convention [namingng-namingConvention]',
         '    int _invalid_public;',
         '        ^',
-        '{}:23:10: style: Private member variable _invalid_private violates naming convention: required prefix priv_ missing [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:23:10: style: Private member variable _invalid_private violates naming convention: required prefix priv_ missing [namingng-namingConvention]',
         '    char _invalid_private;',
         '         ^',
-        '{}:25:9: style: Private member variable priv_bad_tmp violates naming convention: illegal suffix _tmp [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:25:9: style: Private member variable priv_bad_tmp violates naming convention: illegal suffix _tmp [namingng-namingConvention]',
         '    int priv_bad_tmp;',
         '        ^',
-        '{}:28:11: style: Namespace _invalid_namespace violates naming convention [namingng-namingConvention]'.format(test_file),
+        f'{test_file}:28:11: style: Namespace _invalid_namespace violates naming convention [namingng-namingConvention]',
         'namespace _invalid_namespace { }',
         '          ^',
     ]
@@ -679,7 +678,7 @@ class _clz {
 def test_addon_namingng_config(tmpdir):
     addon_file = os.path.join(tmpdir, 'namingng.json')
     addon_config_file = os.path.join(tmpdir, 'namingng.config.json')
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write("""
 {
     "script": "addons/namingng.py",
@@ -689,7 +688,7 @@ def test_addon_namingng_config(tmpdir):
 }
                 """%(addon_config_file).replace('\\','\\\\'))
 
-    with open(addon_config_file, 'wt') as f:
+    with open(addon_config_file, 'w') as f:
         f.write("""
 {
     "RE_FILE": "[^/]*[a-z][a-z0-9_]*[a-z0-9]\\.c\\Z",
@@ -732,7 +731,7 @@ def test_addon_namingng_config(tmpdir):
 
     lines = __remove_verbose_log(stdout.splitlines())
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
     lines = stderr.splitlines()
     # ignore the first line, stating that the addon failed to run properly
@@ -762,7 +761,7 @@ def test_addon_namingng_config(tmpdir):
 
 def test_addon_findcasts(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
         extern void f(char c)
         {
@@ -777,14 +776,14 @@ def test_addon_findcasts(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
-    assert stderr == '{}:4:21: information: found a cast [findcasts-cast]\n'.format(test_file)
+    assert stderr == f'{test_file}:4:21: information: found a cast [findcasts-cast]\n'
 
 
 def test_addon_misc(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 extern void f()
 {
@@ -798,77 +797,77 @@ def test_addon_misc(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
-    assert stderr == '{}:4:28: style: String concatenation in array initialization, missing comma? [misc-stringConcatInArrayInit]\n'.format(test_file)
+    assert stderr == f'{test_file}:4:28: style: String concatenation in array initialization, missing comma? [misc-stringConcatInArrayInit]\n'
 
 
 def test_invalid_addon_json(tmpdir):
     addon_file = os.path.join(tmpdir, 'addon1.json')
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write("""
                 """)
 
     test_file = os.path.join(tmpdir, 'file.cpp')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
-    args = ['--addon={}'.format(addon_file), test_file]
+    args = [f'--addon={addon_file}', test_file]
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 1
     lines = stdout.splitlines()
     assert lines == [
-        'Loading {} failed. syntax error at line 2 near: '.format(addon_file)
+        f'Loading {addon_file} failed. syntax error at line 2 near: '
     ]
     assert stderr == ''
 
 
 def test_invalid_addon_py(tmpdir):
     addon_file = os.path.join(tmpdir, 'addon1.py')
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write("""
 raise Exception()
                 """)
 
     test_file = os.path.join(tmpdir, 'file.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 typedef int MISRA_5_6_VIOLATION;
                 """)
 
-    args = ['--addon={}'.format(addon_file), '--enable=all', '--disable=unusedFunction', test_file]
+    args = [f'--addon={addon_file}', '--enable=all', '--disable=unusedFunction', test_file]
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0  # TODO: needs to be 1
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
-    assert stderr == "{}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon1' - exitcode is 1 [internalError]\n\n^\n".format(test_file)
+    assert stderr == f"{test_file}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon1' - exitcode is 1 [internalError]\n\n^\n"
 
 
 # TODO: test with -j2
 def test_invalid_addon_py_verbose(tmpdir):
     addon_file = os.path.join(tmpdir, 'addon1.py')
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write("""
 raise Exception()
                 """)
 
     test_file = os.path.join(tmpdir, 'file.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 typedef int MISRA_5_6_VIOLATION;
                 """)
 
-    args = ['--addon={}'.format(addon_file), '--enable=all', '--disable=unusedFunction', '--verbose', '-j1', test_file]
+    args = [f'--addon={addon_file}', '--enable=all', '--disable=unusedFunction', '--verbose', '-j1', test_file]
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0  # TODO: needs to be 1
     lines = __remove_verbose_log(stdout.splitlines())
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
     """
 /tmp/pytest-of-user/pytest-11/test_invalid_addon_py_20/file.cpp:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon1' - exitcode is 1: python3 /home/user/CLionProjects/cppcheck/addons/runaddon.py /tmp/pytest-of-user/pytest-11/test_invalid_addon_py_20/addon1.py --cli /tmp/pytest-of-user/pytest-11/test_invalid_addon_py_20/file.cpp.24762.dump
@@ -884,14 +883,14 @@ def test_invalid_addon_py_verbose(tmpdir):
 Exceptio [internalError]
     """
     # /tmp/pytest-of-user/pytest-10/test_invalid_addon_py_20/file.cpp:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon1' - exitcode is 256.: python3 /home/user/CLionProjects/cppcheck/addons/runaddon.py /tmp/pytest-of-user/pytest-10/test_invalid_addon_py_20/addon1.py --cli /tmp/pytest-of-user/pytest-10/test_invalid_addon_py_20/file.cpp.24637.dump
-    assert stderr.startswith("{}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon1' - exitcode is 1: ".format(test_file))
+    assert stderr.startswith(f"{test_file}:0:0: error: Bailing out from analysis: Checking file failed: Failed to execute addon 'addon1' - exitcode is 1: ")
     assert stderr.count('Output:\nTraceback')
     assert stderr.endswith('raise Exception()\nException [internalError]\n\n^\n')
 
 
 def test_addon_result(tmpdir):
     addon_file = os.path.join(tmpdir, 'addon1.py')
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write("""
 print("Checking ...")
 print("")
@@ -900,18 +899,18 @@ def test_addon_result(tmpdir):
                 """)
 
     test_file = os.path.join(tmpdir, 'file.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 typedef int MISRA_5_6_VIOLATION;
                 """)
 
-    args = ['--addon={}'.format(addon_file), '--enable=all', '--disable=unusedFunction', test_file]
+    args = [f'--addon={addon_file}', '--enable=all', '--disable=unusedFunction', test_file]
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0  # TODO: needs to be 1
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
     assert stderr == 'test.cpp:1:1: style: msg [addon1-id]\n\n^\n'
 
@@ -920,13 +919,13 @@ def test_addon_result(tmpdir):
 # #11483
 def __test_unused_function_include(tmpdir, extra_args):
     test_cpp_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_cpp_file, 'wt') as f:
+    with open(test_cpp_file, 'w') as f:
         f.write("""
                 #include "test.h"
                 """)
 
     test_h_file = os.path.join(tmpdir, 'test.h')
-    with open(test_h_file, 'wt') as f:
+    with open(test_h_file, 'w') as f:
         f.write("""
                 class A {
                 public:
@@ -947,7 +946,7 @@ class A {
     args += extra_args
 
     _, _, stderr = cppcheck(args)
-    assert stderr == "{}:4:26: style: The function 'f' is never used. [unusedFunction]\n".format(test_h_file)
+    assert stderr == f"{test_h_file}:4:26: style: The function 'f' is never used. [unusedFunction]\n"
 
 
 def test_unused_function_include(tmpdir):
@@ -960,7 +959,7 @@ def test_unused_function_include(tmpdir):
 # TODO: test with multiple files
 def __test_showtime(tmp_path, showtime, exp_res, exp_last, extra_args=None):
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """
 void f()
@@ -1045,7 +1044,7 @@ def test_showtime_file_total(tmp_path):
 
 def test_showtime_unique(tmp_path):
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """
 void f()
@@ -1091,12 +1090,12 @@ def test_missing_addon(tmpdir):
 
 def test_file_filter(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     args = ['--file-filter=*.cpp', test_file]
     out_lines = [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
 
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines)
@@ -1104,15 +1103,15 @@ def test_file_filter(tmpdir):
 
 def test_file_filter_2(tmpdir):
     test_file_1 = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file_1, 'wt'):
+    with open(test_file_1, 'w'):
         pass
     test_file_2 = os.path.join(tmpdir, 'test.c')
-    with open(test_file_2, 'wt'):
+    with open(test_file_2, 'w'):
         pass
 
     args = ['--file-filter=*.cpp', test_file_1, test_file_2]
     out_lines = [
-        'Checking {} ...'.format(test_file_1)
+        f'Checking {test_file_1} ...'
     ]
 
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines)
@@ -1120,15 +1119,15 @@ def test_file_filter_2(tmpdir):
 
 def test_file_filter_3(tmpdir):
     test_file_1 = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file_1, 'wt'):
+    with open(test_file_1, 'w'):
         pass
     test_file_2 = os.path.join(tmpdir, 'test.c')
-    with open(test_file_2, 'wt'):
+    with open(test_file_2, 'w'):
         pass
 
     args = ['--file-filter=*.c', test_file_1, test_file_2]
     out_lines = [
-        'Checking {} ...'.format(test_file_2)
+        f'Checking {test_file_2} ...'
     ]
 
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines)
@@ -1136,7 +1135,7 @@ def test_file_filter_3(tmpdir):
 
 def test_file_filter_no_match(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     args = ['--file-filter=*.c', test_file]
@@ -1149,16 +1148,16 @@ def test_file_filter_no_match(tmpdir):
 
 def test_file_order(tmpdir):
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
     test_file_b = os.path.join(tmpdir, 'b.c')
-    with open(test_file_b, 'wt'):
+    with open(test_file_b, 'w'):
         pass
     test_file_c = os.path.join(tmpdir, 'c.c')
-    with open(test_file_c, 'wt'):
+    with open(test_file_c, 'w'):
         pass
     test_file_d = os.path.join(tmpdir, 'd.c')
-    with open(test_file_d, 'wt'):
+    with open(test_file_d, 'w'):
         pass
 
     args = [test_file_c, test_file_d, test_file_b, test_file_a, '-j1']
@@ -1167,13 +1166,13 @@ def test_file_order(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file_c),
+        f'Checking {test_file_c} ...',
         '1/4 files checked 25% done',
-        'Checking {} ...'.format(test_file_d),
+        f'Checking {test_file_d} ...',
         '2/4 files checked 50% done',
-        'Checking {} ...'.format(test_file_b),
+        f'Checking {test_file_b} ...',
         '3/4 files checked 75% done',
-        'Checking {} ...'.format(test_file_a),
+        f'Checking {test_file_a} ...',
         '4/4 files checked 100% done'
     ]
     assert stderr == ''
@@ -1181,27 +1180,27 @@ def test_file_order(tmpdir):
 
 def test_markup(tmpdir):
     test_file_1 = os.path.join(tmpdir, 'test_1.qml')
-    with open(test_file_1, 'wt'):
+    with open(test_file_1, 'w'):
         pass
     test_file_2 = os.path.join(tmpdir, 'test_2.cpp')
-    with open(test_file_2, 'wt'):
+    with open(test_file_2, 'w'):
         pass
     test_file_3 = os.path.join(tmpdir, 'test_3.qml')
-    with open(test_file_3, 'wt'):
+    with open(test_file_3, 'w'):
         pass
     test_file_4 = os.path.join(tmpdir, 'test_4.cpp')
-    with open(test_file_4, 'wt'):
+    with open(test_file_4, 'w'):
         pass
 
     args = ['--library=qt', test_file_1, test_file_2, test_file_3, test_file_4, '-j1']
     out_lines = [
-        'Checking {} ...'.format(test_file_2),
+        f'Checking {test_file_2} ...',
         '1/4 files checked 25% done',
-        'Checking {} ...'.format(test_file_4),
+        f'Checking {test_file_4} ...',
         '2/4 files checked 50% done',
-        'Checking {} ...'.format(test_file_1),
+        f'Checking {test_file_1} ...',
         '3/4 files checked 75% done',
-        'Checking {} ...'.format(test_file_3),
+        f'Checking {test_file_3} ...',
         '4/4 files checked 100% done'
     ]
 
@@ -1210,16 +1209,16 @@ def test_markup(tmpdir):
 
 def test_markup_j(tmpdir):
     test_file_1 = os.path.join(tmpdir, 'test_1.qml')
-    with open(test_file_1, 'wt'):
+    with open(test_file_1, 'w'):
         pass
     test_file_2 = os.path.join(tmpdir, 'test_2.cpp')
-    with open(test_file_2, 'wt'):
+    with open(test_file_2, 'w'):
         pass
     test_file_3 = os.path.join(tmpdir, 'test_3.qml')
-    with open(test_file_3, 'wt'):
+    with open(test_file_3, 'w'):
         pass
     test_file_4 = os.path.join(tmpdir, 'test_4.cpp')
-    with open(test_file_4, 'wt'):
+    with open(test_file_4, 'w'):
         pass
 
     args = ['--library=qt', '-j2', test_file_1, test_file_2, test_file_3, test_file_4]
@@ -1233,17 +1232,17 @@ def test_markup_j(tmpdir):
         '2/4 files checked 50% done',
         '3/4 files checked 75% done',
         '4/4 files checked 100% done',
-        'Checking {} ...'.format(test_file_1),
-        'Checking {} ...'.format(test_file_2),
-        'Checking {} ...'.format(test_file_3),
-        'Checking {} ...'.format(test_file_4)
+        f'Checking {test_file_1} ...',
+        f'Checking {test_file_2} ...',
+        f'Checking {test_file_3} ...',
+        f'Checking {test_file_4} ...'
     ]
     assert stderr == ''
 
 
 def test_valueflow_debug(tmpdir):
     test_file_cpp = os.path.join(tmpdir, 'test_1.cpp')
-    with open(test_file_cpp, 'wt') as f:
+    with open(test_file_cpp, 'w') as f:
         f.write("""
 #include "test.h"
 
@@ -1254,7 +1253,7 @@ def test_valueflow_debug(tmpdir):
 """
                 )
     test_file_h = os.path.join(tmpdir, 'test.h')
-    with open(test_file_h, 'wt') as f:
+    with open(test_file_h, 'w') as f:
         f.write("""
 #include "test2.h"
 inline void f1()
@@ -1264,7 +1263,7 @@ def test_valueflow_debug(tmpdir):
 """
                 )
     test_file_h_2 = os.path.join(tmpdir, 'test2.h')
-    with open(test_file_h_2, 'wt') as f:
+    with open(test_file_h_2, 'w') as f:
         f.write("""
 inline void f2()
 {
@@ -1327,7 +1326,7 @@ def test_valueflow_debug(tmpdir):
 
 def test_file_duplicate(tmpdir):
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
 
     args = [test_file_a, test_file_a, str(tmpdir)]
@@ -1336,20 +1335,20 @@ def test_file_duplicate(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file_a)
+        f'Checking {test_file_a} ...'
     ]
     assert stderr == ''
 
 
 def test_file_duplicate_2(tmpdir):
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
     test_file_b = os.path.join(tmpdir, 'b.c')
-    with open(test_file_b, 'wt'):
+    with open(test_file_b, 'w'):
         pass
     test_file_c = os.path.join(tmpdir, 'c.c')
-    with open(test_file_c, 'wt'):
+    with open(test_file_c, 'w'):
         pass
 
     args = [test_file_c, test_file_a, test_file_b, str(tmpdir), test_file_b, test_file_c, test_file_a, str(tmpdir), '-j1']
@@ -1358,11 +1357,11 @@ def test_file_duplicate_2(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file_c),
+        f'Checking {test_file_c} ...',
         '1/3 files checked 33% done',
-        'Checking {} ...'.format(test_file_a),
+        f'Checking {test_file_a} ...',
         '2/3 files checked 66% done',
-        'Checking {} ...'.format(test_file_b),
+        f'Checking {test_file_b} ...',
         '3/3 files checked 100% done'
     ]
     assert stderr == ''
@@ -1370,7 +1369,7 @@ def test_file_duplicate_2(tmpdir):
 
 def test_file_duplicate_3(tmpdir):  # #12834
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
 
     # multiple ways to specify the same file
@@ -1395,7 +1394,7 @@ def test_file_duplicate_3(tmpdir):  # #12834
 @pytest.mark.skipif(sys.platform != 'win32', reason="requires Windows")
 def test_file_duplicate_4(tmpdir):  # #12834
     test_file_a = os.path.join(tmpdir, 'a.c')
-    with open(test_file_a, 'wt'):
+    with open(test_file_a, 'w'):
         pass
 
     # multiple ways to specify the same file
@@ -1423,7 +1422,7 @@ def test_file_duplicate_4(tmpdir):  # #12834
 
 def test_file_ignore(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     args = ['-itest.cpp', test_file]
@@ -1440,12 +1439,12 @@ def test_build_dir_j_memleak(tmpdir): #12111
     os.mkdir(build_dir)
 
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('int main() {}')
 
-    args = ['--cppcheck-build-dir={}'.format(build_dir), '-j2', test_file]
+    args = [f'--cppcheck-build-dir={build_dir}', '-j2', test_file]
     out_lines = [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
 
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines)
@@ -1453,21 +1452,21 @@ def test_build_dir_j_memleak(tmpdir): #12111
 
 def __test_addon_json_invalid(tmpdir, addon_json, expected):
     addon_file = os.path.join(tmpdir, 'invalid.json')
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write(addon_json)
 
     test_file = os.path.join(tmpdir, 'file.cpp')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
-    args = ['--addon={}'.format(addon_file), test_file]
+    args = [f'--addon={addon_file}', test_file]
 
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 1
     lines = stdout.splitlines()
     assert len(lines) == 1
     assert lines == [
-        'Loading {} failed. {}'.format(addon_file, expected)
+        f'Loading {addon_file} failed. {expected}'
     ]
     assert stderr == ''
 
@@ -1506,7 +1505,7 @@ def test_addon_json_invalid_script_2(tmpdir):
 
 def test_unknown_extension(tmpdir):
     test_file = os.path.join(tmpdir, 'test_2')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('''
 void f() { }
 ''')
@@ -1519,7 +1518,7 @@ def test_unknown_extension(tmpdir):
 
 def test_rule_file_define_multiple(tmpdir):
     rule_file = os.path.join(tmpdir, 'rule_file.xml')
-    with open(rule_file, 'wt') as f:
+    with open(rule_file, 'w') as f:
         f.write("""
 
     
@@ -1542,32 +1541,32 @@ def test_rule_file_define_multiple(tmpdir):
 """)
 
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('''
 #define DEF_1
 #define DEF_2
 void f() { }
 ''')
 
-    exitcode, stdout, stderr = cppcheck(['--template=simple', '--rule-file={}'.format(rule_file), '-DDEF_3', test_file])
+    exitcode, stdout, stderr = cppcheck(['--template=simple', f'--rule-file={rule_file}', '-DDEF_3', test_file])
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule: DEF_1',
         'Processing rule: DEF_2',
-        'Checking {}: DEF_3=1...'.format(test_file)
+        f'Checking {test_file}: DEF_3=1...'
     ]
     lines = stderr.splitlines()
     assert lines == [
-        "{}:2:0: error: found 'DEF_1' [ruleId1]".format(test_file),
-        "{}:3:0: error: define2 [ruleId2]".format(test_file)
+        f"{test_file}:2:0: error: found 'DEF_1' [ruleId1]",
+        f"{test_file}:3:0: error: define2 [ruleId2]"
     ]
 
 
 def test_rule_file_define(tmpdir):
     rule_file = os.path.join(tmpdir, 'rule_file.xml')
-    with open(rule_file, 'wt') as f:
+    with open(rule_file, 'w') as f:
         f.write("""
 
     define
@@ -1576,31 +1575,31 @@ def test_rule_file_define(tmpdir):
 """)
 
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('''
 #define DEF_1
 #define DEF_2
 void f() { }
 ''')
 
-    exitcode, stdout, stderr = cppcheck(['--template=simple', '--rule-file={}'.format(rule_file), '-DDEF_3', test_file])
+    exitcode, stdout, stderr = cppcheck(['--template=simple', f'--rule-file={rule_file}', '-DDEF_3', test_file])
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule: DEF_.',
-        'Checking {}: DEF_3=1...'.format(test_file)
+        f'Checking {test_file}: DEF_3=1...'
     ]
     lines = stderr.splitlines()
     assert lines == [
-        "{}:2:0: style: found 'DEF_1' [rule]".format(test_file),
-        "{}:3:0: style: found 'DEF_2' [rule]".format(test_file)
+        f"{test_file}:2:0: style: found 'DEF_1' [rule]",
+        f"{test_file}:3:0: style: found 'DEF_2' [rule]"
     ]
 
 
 def test_rule_file_normal(tmpdir):
     rule_file = os.path.join(tmpdir, 'rule_file.xml')
-    with open(rule_file, 'wt') as f:
+    with open(rule_file, 'w') as f:
         f.write("""
 
     int
@@ -1608,7 +1607,7 @@ def test_rule_file_normal(tmpdir):
 """)
 
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('''
 #define DEF_1
 #define DEF_2
@@ -1616,22 +1615,22 @@ def test_rule_file_normal(tmpdir):
 void f(i32) { }
 ''')
 
-    exitcode, stdout, stderr = cppcheck(['--template=simple', '--rule-file={}'.format(rule_file), test_file])
+    exitcode, stdout, stderr = cppcheck(['--template=simple', f'--rule-file={rule_file}', test_file])
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule: int',
     ]
     lines = stderr.splitlines()
     assert lines == [
-        "{}:5:0: style: found 'int' [rule]".format(test_file)
+        f"{test_file}:5:0: style: found 'int' [rule]"
     ]
 
 
 def test_rule_file_raw(tmpdir):
     rule_file = os.path.join(tmpdir, 'rule_file.xml')
-    with open(rule_file, 'wt') as f:
+    with open(rule_file, 'w') as f:
         f.write("""
 
     raw
@@ -1640,7 +1639,7 @@ def test_rule_file_raw(tmpdir):
 """)
 
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('''
 #define DEF_1
 #define DEF_2
@@ -1648,23 +1647,23 @@ def test_rule_file_raw(tmpdir):
 void f(i32) { }
 ''')
 
-    exitcode, stdout, stderr = cppcheck(['--template=simple', '--rule-file={}'.format(rule_file), test_file])
+    exitcode, stdout, stderr = cppcheck(['--template=simple', f'--rule-file={rule_file}', test_file])
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule: i32',
     ]
     lines = stderr.splitlines()
     assert lines == [
-        "{}:4:0: style: found 'i32' [rule]".format(test_file),
-        "{}:5:0: style: found 'i32' [rule]".format(test_file)
+        f"{test_file}:4:0: style: found 'i32' [rule]",
+        f"{test_file}:5:0: style: found 'i32' [rule]"
     ]
 
 
 def test_rule(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('''
 #define DEF_1
 #define DEF_2
@@ -1675,12 +1674,12 @@ def test_rule(tmpdir):
     assert exitcode == 0, stdout if stdout else stderr
     lines = stdout.splitlines()
     assert lines == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule: f',
     ]
     lines = stderr.splitlines()
     assert lines == [
-        "{}:4:0: style: found 'f' [rule]".format(test_file)
+        f"{test_file}:4:0: style: found 'f' [rule]"
     ]
 
 
@@ -1688,8 +1687,8 @@ def test_rule_multiple_files(tmpdir):
     stderr_exp = []
     for i in range(10):
         test_file = os.path.join(tmpdir, f'test_{i}.c')
-        stderr_exp.append("{}:4:0: style: found 'f' [rule]".format(test_file))
-        with open(test_file, 'wt') as f:
+        stderr_exp.append(f"{test_file}:4:0: style: found 'f' [rule]")
+        with open(test_file, 'w') as f:
             f.write('''
 #define DEF_1
 #define DEF_2
@@ -1709,44 +1708,44 @@ def test_filelist(tmpdir):
     list_dir = os.path.join(tmpdir, 'list-dir')
     os.mkdir(list_dir)
 
-    with open(os.path.join(list_dir, 'aaa.c'), 'wt'):
+    with open(os.path.join(list_dir, 'aaa.c'), 'w'):
         pass
-    with open(os.path.join(list_dir, 'zzz.c'), 'wt'):
+    with open(os.path.join(list_dir, 'zzz.c'), 'w'):
         pass
-    with open(os.path.join(list_dir, 'valueflow.cpp'), 'wt'):
+    with open(os.path.join(list_dir, 'valueflow.cpp'), 'w'):
         pass
-    with open(os.path.join(list_dir, 'vfvalue.cpp'), 'wt'):
+    with open(os.path.join(list_dir, 'vfvalue.cpp'), 'w'):
         pass
-    with open(os.path.join(list_dir, 'vf_enumvalue.cpp'), 'wt'):
+    with open(os.path.join(list_dir, 'vf_enumvalue.cpp'), 'w'):
         pass
-    with open(os.path.join(list_dir, 'vf_analyze.h'), 'wt'):
+    with open(os.path.join(list_dir, 'vf_analyze.h'), 'w'):
         pass
 
     sub_dir_1 = os.path.join(list_dir, 'valueflow')
     os.mkdir(sub_dir_1)
-    with open(os.path.join(sub_dir_1, 'file.cpp'), 'wt'):
+    with open(os.path.join(sub_dir_1, 'file.cpp'), 'w'):
         pass
-    with open(os.path.join(sub_dir_1, 'file.c'), 'wt'):
+    with open(os.path.join(sub_dir_1, 'file.c'), 'w'):
         pass
-    with open(os.path.join(sub_dir_1, 'file.h'), 'wt'):
+    with open(os.path.join(sub_dir_1, 'file.h'), 'w'):
         pass
 
     sub_dir_2 = os.path.join(list_dir, 'vfvalue')
     os.mkdir(sub_dir_2)
-    with open(os.path.join(sub_dir_2, 'file.cpp'), 'wt'):
+    with open(os.path.join(sub_dir_2, 'file.cpp'), 'w'):
         pass
-    with open(os.path.join(sub_dir_2, 'file.c'), 'wt'):
+    with open(os.path.join(sub_dir_2, 'file.c'), 'w'):
         pass
-    with open(os.path.join(sub_dir_2, 'file.h'), 'wt'):
+    with open(os.path.join(sub_dir_2, 'file.h'), 'w'):
         pass
 
     sub_dir_3 = os.path.join(list_dir, 'vf_enumvalue')
     os.mkdir(sub_dir_3)
-    with open(os.path.join(sub_dir_3, 'file.cpp'), 'wt'):
+    with open(os.path.join(sub_dir_3, 'file.cpp'), 'w'):
         pass
-    with open(os.path.join(sub_dir_3, 'file.c'), 'wt'):
+    with open(os.path.join(sub_dir_3, 'file.c'), 'w'):
         pass
-    with open(os.path.join(sub_dir_3, 'file.h'), 'wt'):
+    with open(os.path.join(sub_dir_3, 'file.h'), 'w'):
         pass
 
     # TODO: -rp is not applied to "Checking" messages
@@ -1771,16 +1770,16 @@ def test_filelist(tmpdir):
     ]
     assert len(expected), len(lines)
     for i in range(1, len(expected)+1):
-        lines.remove('{}/{} files checked {}% done'.format(i, len(expected), int(100 * i // len(expected))))
+        lines.remove(f'{i}/{len(expected)} files checked {int(100 * i // len(expected))}% done')
     assert lines == expected
 
 
 def test_markup_lang(tmpdir):
     test_file_1 = os.path.join(tmpdir, 'test_1.qml')
-    with open(test_file_1, 'wt'):
+    with open(test_file_1, 'w'):
         pass
     test_file_2 = os.path.join(tmpdir, 'test_2.cpp')
-    with open(test_file_2, 'wt'):
+    with open(test_file_2, 'w'):
         pass
 
     # do not assert processing markup file with enforced language
@@ -1799,7 +1798,7 @@ def test_markup_lang(tmpdir):
 
 def test_cpp_probe(tmpdir):
     test_file = os.path.join(tmpdir, 'test.h')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.writelines([
             'class A {};'
         ])
@@ -1813,13 +1812,13 @@ def test_cpp_probe(tmpdir):
     lines = stderr.splitlines()
     assert lines == [
         # TODO: fix that awkward format
-        "{}:1:1: error: Code 'classA{{' is invalid C code.: Use --std, -x or --language to enforce C++. Or --cpp-header-probe to identify C++ headers via the Emacs marker. [syntaxError]".format(test_file)
+        f"{test_file}:1:1: error: Code 'classA{{' is invalid C code.: Use --std, -x or --language to enforce C++. Or --cpp-header-probe to identify C++ headers via the Emacs marker. [syntaxError]"
     ]
 
 
 def test_cpp_probe_2(tmpdir):
     test_file = os.path.join(tmpdir, 'test.h')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.writelines([
             '// -*- C++ -*-',
             'class A {};'
@@ -1838,11 +1837,11 @@ def test_config_invalid(tmpdir):
     shutil.copytree(os.path.join(os.path.dirname(__lookup_cppcheck_exe()), 'cfg'), os.path.join(tmpdir, 'cfg'))
 
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     config_file = os.path.join(tmpdir, 'cppcheck.cfg')
-    with open(config_file, 'wt'):
+    with open(config_file, 'w'):
         pass
 
     exitcode, stdout, stderr, exe = cppcheck_ex([test_file], cwd=tmpdir, cppcheck_exe=exe)
@@ -1860,11 +1859,11 @@ def test_config_override(tmpdir):
     shutil.copytree(os.path.join(os.path.dirname(__lookup_cppcheck_exe()), 'cfg'), os.path.join(tmpdir, 'cfg'))
 
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     config_file = os.path.join(tmpdir, 'cppcheck.cfg')
-    with open(config_file, 'wt') as f:
+    with open(config_file, 'w') as f:
         f.write(json.dumps({
             'safety': False
         }))
@@ -1877,7 +1876,7 @@ def test_config_override(tmpdir):
 
 def test_checkers_report(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('x=1;')
     checkers_report = os.path.join(tmpdir, 'r.txt')
     exitcode, stdout, stderr = cppcheck(['--enable=all', '--checkers-report=' + checkers_report, test_file], remove_checkers_report=False)
@@ -1889,10 +1888,10 @@ def test_checkers_report(tmpdir):
 def test_checkers_report_misra_json(tmpdir):
     """check that misra checkers are reported properly when --addon=misra.json is used"""
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('x=1;')
     misra_json = os.path.join(tmpdir, 'misra.json')
-    with open(misra_json, 'wt') as f:
+    with open(misra_json, 'w') as f:
         f.write('{"script":"misra.py"}')
     exitcode, stdout, stderr = cppcheck('--enable=style --addon=misra.json --xml-version=3 test.c'.split(), cwd=tmpdir)
     assert exitcode == 0, stdout if stdout else stderr
@@ -1902,12 +1901,12 @@ def test_checkers_report_misra_json(tmpdir):
 def __test_ignore_file(tmpdir, ign, append=False, inject_path=False):
     os.mkdir(os.path.join(tmpdir, 'src'))
     test_file = os.path.join(tmpdir, 'src', 'test.cpp')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     # TODO: this should say that all paths are ignored
     lines_exp = [
-        'ignored path: {}'.format(test_file),
+        f'ignored path: {test_file}',
         'cppcheck: error: could not find or open any of the paths given.',
         'cppcheck: Maybe all paths were ignored?'
     ]
@@ -1921,9 +1920,9 @@ def __test_ignore_file(tmpdir, ign, append=False, inject_path=False):
         ign = ign.replace('$path', str(test_file))
 
     if append:
-        args += ['-i{}'.format(ign)]
+        args += [f'-i{ign}']
     else:
-        args = ['-i{}'.format(ign)] + args
+        args = [f'-i{ign}'] + args
 
     exitcode, stdout, stderr = cppcheck(args, cwd=tmpdir)
     assert exitcode == 1, stdout if stdout else stderr
@@ -1988,7 +1987,7 @@ def test_ignore_abspath(tmpdir):
 
 def __write_gui_project(tmpdir, test_file, ignore):
     project_file = os.path.join(tmpdir, 'test.cppcheck')
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(
         """
 
@@ -2006,12 +2005,12 @@ def __write_gui_project(tmpdir, test_file, ignore):
 def __test_ignore_project(tmpdir, ign_proj, ign_cli=None, append_cli=False, inject_path_proj=False):
     os.mkdir(os.path.join(tmpdir, 'src'))
     test_file = os.path.join(tmpdir, 'src', 'test.cpp')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     # TODO: this should say that all paths were ignored
     lines_exp = [
-        'ignored path: {}'.format(test_file),
+        f'ignored path: {test_file}',
         'cppcheck: error: could not find or open any of the paths given.',
         'cppcheck: Maybe all paths were ignored?'
     ]
@@ -2022,13 +2021,13 @@ def __test_ignore_project(tmpdir, ign_proj, ign_cli=None, append_cli=False, inje
     project_file = __write_gui_project(tmpdir, test_file, ign_proj)
     args = [
         '--debug-ignore',
-        '--project={}'.format(project_file)
+        f'--project={project_file}'
     ]
 
     if append_cli:
-        args += ['-i{}'.format(ign_cli)]
+        args += [f'-i{ign_cli}']
     else:
-        args = ['-i{}'.format(ign_cli)] + args
+        args = [f'-i{ign_cli}'] + args
 
     exitcode, stdout, _ = cppcheck(args, cwd=tmpdir)
     assert exitcode == 1, stdout
@@ -2085,10 +2084,10 @@ def __write_compdb(tmpdir, test_file):
         {
             'directory': os.path.dirname(test_file),
             'file': test_file,
-            'command': 'gcc -c {}'.format(test_file)
+            'command': f'gcc -c {test_file}'
         }
     ]
-    with open(compile_commands, 'wt') as f:
+    with open(compile_commands, 'w') as f:
         f.write(json.dumps(j))
     return compile_commands
 
@@ -2096,7 +2095,7 @@ def __write_compdb(tmpdir, test_file):
 def __test_ignore_project_2(tmpdir, extra_args, append=False, inject_path=False):
     os.mkdir(os.path.join(tmpdir, 'src'))
     test_file = os.path.join(tmpdir, 'src', 'test.cpp')
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     lines_exp = [
@@ -2107,7 +2106,7 @@ def __test_ignore_project_2(tmpdir, extra_args, append=False, inject_path=False)
     project_file = __write_compdb(tmpdir, test_file)
     args = [
         '--debug-ignore',
-        '--project={}'.format(project_file)
+        f'--project={project_file}'
     ]
 
     if inject_path:
@@ -2182,11 +2181,11 @@ def test_ignore_project_2_abspath(tmpdir):
 
 def test_dumpfile_platform(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('x=1;\n')
     cppcheck('--dump --platform=unix64 test.c'.split(), cwd=tmpdir)
     platform = ''
-    with open(test_file + '.dump', 'rt') as f:
+    with open(test_file + '.dump') as f:
         for line in f:
             if line.find(' to get proper output) [misra-c2012-2.3]'.format(test_file),
+        f'{test_file}:4:1: style: misra violation (use --rule-texts= to get proper output) [misra-c2012-2.3]',
     ]
 
 
@@ -2700,7 +2699,7 @@ def test_addon_suppr_cli_file_line(tmp_path):
 
 def test_addon_suppr_cli_absfile_line(tmp_path):
     test_file = tmp_path / 'test.c'
-    __test_addon_suppr(tmp_path, ['--suppress=misra-c2012-2.3:{}:3'.format(test_file)])
+    __test_addon_suppr(tmp_path, [f'--suppress=misra-c2012-2.3:{test_file}:3'])
 
 
 def test_ctu_path_builddir(tmp_path):  # #11883
@@ -2708,7 +2707,7 @@ def test_ctu_path_builddir(tmp_path):  # #11883
     os.mkdir(build_dir)
 
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 void f(int *p) { *p = 3; }
 int main() {
@@ -2721,22 +2720,22 @@ def test_ctu_path_builddir(tmp_path):  # #11883
         '-q',
         '--enable=style',
         '--suppress=nullPointer',  # we only care about the CTU findings
-        '--cppcheck-build-dir={}'.format(build_dir),
+        f'--cppcheck-build-dir={build_dir}',
         str(test_file)
     ]
 
     # the CTU path was not properly read leading to missing location information
     stderr_exp = [
-        '{}:2:19: error: Null pointer dereference: p [ctunullpointer]'.format(test_file),
+        f'{test_file}:2:19: error: Null pointer dereference: p [ctunullpointer]',
         'void f(int *p) { *p = 3; }',
         '                  ^',
-        "{}:4:14: note: Assignment 'p=0', assigned value is 0".format(test_file),
+        f"{test_file}:4:14: note: Assignment 'p=0', assigned value is 0",
         '    int *p = 0;',
         '             ^',
-        '{}:5:2: note: Calling function f, 1st argument is null'.format(test_file),
+        f'{test_file}:5:2: note: Calling function f, 1st argument is null',
         'f(p);',
         ' ^',
-        '{}:2:19: note: Dereferencing argument p that is null'.format(test_file),
+        f'{test_file}:2:19: note: Dereferencing argument p that is null',
         'void f(int *p) { *p = 3; }',
         '                  ^'
     ]
@@ -2758,7 +2757,7 @@ def test_ctu_builddir(tmp_path):  # #11883
     os.mkdir(build_dir)
 
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 void f(int *p) { *p = 3; }
 int main() {
@@ -2772,7 +2771,7 @@ def test_ctu_builddir(tmp_path):  # #11883
         '--template=simple',
         '--enable=style',
         '--suppress=nullPointer',  # we only care about the CTU findings
-        '--cppcheck-build-dir={}'.format(build_dir),
+        f'--cppcheck-build-dir={build_dir}',
         '-j1',
         '--emit-duplicates',
         str(test_file)
@@ -2783,7 +2782,7 @@ def test_ctu_builddir(tmp_path):  # #11883
     assert exitcode == 0, stdout
     assert stdout == ''
     assert stderr.splitlines() == [
-        '{}:2:19: error: Null pointer dereference: p [ctunullpointer]'.format(test_file)
+        f'{test_file}:2:19: error: Null pointer dereference: p [ctunullpointer]'
     ]
 
 
@@ -2973,7 +2972,7 @@ def __test_debug_template(tmp_path, verbose=False, debug=False):
     else:
         assert stdout.count('### Template Simplifier pass ') == 1
     assert stderr.splitlines() == [
-        '{}:4:14: error: Null pointer dereference: (int*)nullptr [nullPointer]'.format(test_file)
+        f'{test_file}:4:14: error: Null pointer dereference: (int*)nullptr [nullPointer]'
     ]
     return stdout
 
@@ -3005,7 +3004,7 @@ def test_file_ignore_2(tmp_path):  # #13570
     os.mkdir(lib_path)
 
     test_file_1 = lib_path / 'test_1.c'
-    with open(test_file_1, 'wt'):
+    with open(test_file_1, 'w'):
         pass
 
     args = [
@@ -3121,13 +3120,13 @@ def test_debug_valueflow_data_xml(tmp_path):  # #13606
 
 def test_dir_ignore(tmp_path):
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt'):
+    with open(test_file, 'w'):
         pass
 
     lib_dir = tmp_path / 'lib'
     os.mkdir(lib_dir)
     lib_test_file = lib_dir / 'test.cpp'
-    with open(lib_test_file, 'wt'):
+    with open(lib_test_file, 'w'):
         pass
 
     args = [
@@ -3137,8 +3136,8 @@ def test_dir_ignore(tmp_path):
     ]
     # make sure the whole directory is being ignored instead of each of its contents individually
     out_lines = [
-        'ignored path: {}'.format(lib_dir),
-        'Checking {} ...'.format(test_file)
+        f'ignored path: {lib_dir}',
+        f'Checking {test_file} ...'
     ]
 
     assert_cppcheck(args, ec_exp=0, err_exp=[], out_exp=out_lines, cwd=str(tmp_path))
@@ -3146,7 +3145,7 @@ def test_dir_ignore(tmp_path):
 
 def test_check_headers(tmp_path):
     test_file_h = tmp_path / 'test.h'
-    with open(test_file_h, 'wt') as f:
+    with open(test_file_h, 'w') as f:
         f.write(
 """
 inline void hdr()
@@ -3156,7 +3155,7 @@ def test_check_headers(tmp_path):
 """)
 
     test_file_c = tmp_path / 'test.c'
-    with open(test_file_c, 'wt') as f:
+    with open(test_file_c, 'w') as f:
         f.write(
 """
 #include "test.h"
@@ -3178,7 +3177,7 @@ def test_check_headers(tmp_path):
 
 def test_unique_error(tmp_path):  # #6366
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """void f()
 {
@@ -3198,14 +3197,14 @@ def test_unique_error(tmp_path):  # #6366
     assert exitcode == 0, stdout
     assert stdout.splitlines() == []
     assert stderr.splitlines() == [
-        "{}:4:13: error: Array 'm[9]' accessed at index 9, which is out of bounds. [arrayIndexOutOfBounds]".format(test_file),
-        "{}:4:21: error: Array 'm[9]' accessed at index 9, which is out of bounds. [arrayIndexOutOfBounds]".format(test_file)
+        f"{test_file}:4:13: error: Array 'm[9]' accessed at index 9, which is out of bounds. [arrayIndexOutOfBounds]",
+        f"{test_file}:4:21: error: Array 'm[9]' accessed at index 9, which is out of bounds. [arrayIndexOutOfBounds]"
     ]
 
 
 def test_check_unused_templates_class(tmp_path):
     test_file_h = tmp_path / 'test.h'
-    with open(test_file_h, 'wt') as f:
+    with open(test_file_h, 'w') as f:
         f.write(
 """template
 class HdrCl1
@@ -3245,7 +3244,7 @@ class HdrCl2
 """)
 
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """#include "test.h"
 
@@ -3302,7 +3301,7 @@ class Cl2
 
 def test_check_unused_templates_func(tmp_path):  # #13714
     test_file_h = tmp_path / 'test.h'
-    with open(test_file_h, 'wt') as f:
+    with open(test_file_h, 'w') as f:
         f.write(
 """template
 void f_t_hdr_1()
@@ -3318,7 +3317,7 @@ def test_check_unused_templates_func(tmp_path):  # #13714
 """)
 
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """#include "test.h"
 
@@ -3357,7 +3356,7 @@ def test_check_unused_templates_func(tmp_path):  # #13714
 
 def __test_clang_tidy(tmpdir, use_compdb):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """static void foo() // NOLINT(misc-use-anonymous-namespace)
 {
@@ -3372,7 +3371,7 @@ def __test_clang_tidy(tmpdir, use_compdb):
         '--clang-tidy'
     ]
     if project_file:
-        args += ['--project={}'.format(project_file)]
+        args += [f'--project={project_file}']
     else:
         args += [str(test_file)]
     exitcode, stdout, stderr = cppcheck(args)
@@ -3380,8 +3379,8 @@ def __test_clang_tidy(tmpdir, use_compdb):
     assert stdout.splitlines() == [
     ]
     assert stderr.splitlines() == [
-        '{}:3:14: error: Null pointer dereference: (int*)nullptr [nullPointer]'.format(test_file),
-        '{}:3:14: style: C-style casts are discouraged; use static_cast/const_cast/reinterpret_cast [clang-tidy-google-readability-casting]'.format(test_file)
+        f'{test_file}:3:14: error: Null pointer dereference: (int*)nullptr [nullPointer]',
+        f'{test_file}:3:14: style: C-style casts are discouraged; use static_cast/const_cast/reinterpret_cast [clang-tidy-google-readability-casting]'
     ]
 
 
@@ -3399,7 +3398,7 @@ def test_clang_tidy_project(tmpdir):
 @pytest.mark.skipif(not has_clang_tidy, reason='clang-tidy is not available')
 def test_clang_tidy_error_exit(tmp_path):  # #13828 / #13829
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """#include 
 #include 
@@ -3422,21 +3421,21 @@ def test_clang_tidy_error_exit(tmp_path):  # #13828 / #13829
         '--inline-suppr',
         '--std=c++11',
         '--clang-tidy',
-        '--project={}'.format(project_file)
+        f'--project={project_file}'
     ]
 
     exitcode, stdout, stderr = cppcheck(args)
     assert stdout.splitlines() == []
     assert stderr.splitlines() == [
-        "{}:10:12: warning: 'str' used after it was moved [clang-tidy-bugprone-use-after-move]".format(test_file),
-        "{}:10:12: style: 'str' used after it was moved [clang-tidy-hicpp-invalid-access-moved]".format(test_file)
+        f"{test_file}:10:12: warning: 'str' used after it was moved [clang-tidy-bugprone-use-after-move]",
+        f"{test_file}:10:12: style: 'str' used after it was moved [clang-tidy-hicpp-invalid-access-moved]"
     ]
     assert exitcode == 0, stdout
 
 
 def test_suppress_unmatched_wildcard(tmp_path):  # #13660
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """void f()
 {
@@ -3470,7 +3469,7 @@ def test_suppress_unmatched_wildcard(tmp_path):  # #13660
 
 def test_suppress_unmatched_wildcard_cached(tmp_path):  # #14585
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """void f()
 {
@@ -3486,7 +3485,7 @@ def test_suppress_unmatched_wildcard_cached(tmp_path):  # #14585
         '-q',
         '--template=simple',
         '--enable=information',
-        '--cppcheck-build-dir={}'.format(build_dir),
+        f'--cppcheck-build-dir={build_dir}',
         '--suppress=nullPointer:test*.c',
         'test.c'
     ]
@@ -3508,7 +3507,7 @@ def test_suppress_unmatched_wildcard_unchecked(tmp_path):
     # make sure that unmatched wildcards suppressions are reported if files matching the expressions were processesd
     # but isSuppressed() has never been called (i.e. no findings in file at all)
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""void f() {}""")
 
     # need to run in the temporary folder because the path of the suppression has to match
@@ -3535,7 +3534,7 @@ def test_suppress_unmatched_wildcard_unchecked(tmp_path):
 
 def test_preprocess_enforced_c(tmp_path):  # #10989
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """#ifdef __cplusplus
 #error "err"
@@ -3556,7 +3555,7 @@ def test_preprocess_enforced_c(tmp_path):  # #10989
 
 def test_preprocess_enforced_cpp(tmp_path):  # #10989
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(
 """#ifdef __cplusplus
 #error "err"
@@ -3573,7 +3572,7 @@ def test_preprocess_enforced_cpp(tmp_path):  # #10989
     assert exitcode == 0, stdout if stdout else stderr
     assert stdout.splitlines() == []
     assert stderr.splitlines() == [
-        '{}:2:2: error: #error "err" [preprocessorErrorDirective]'.format(test_file)
+        f'{test_file}:2:2: error: #error "err" [preprocessorErrorDirective]'
     ]
 
 
@@ -3583,7 +3582,7 @@ def test_preprocess_system_include(tmp_path): # #13928
         pytest.skip(' header file not found')
 
     test_file = tmp_path / 'test.c'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('#include \n'
                 ';\n')
 
@@ -3640,7 +3639,7 @@ def __test_debug_normal(tmp_path, verbose):
         assert stdout.find('##AST') == -1
     assert stdout.find('### Template Simplifier pass ') == -1
     assert stderr.splitlines() == [
-        '{}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'.format(test_file)
+        f'{test_file}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'
     ]
     return stdout
 
@@ -3684,7 +3683,7 @@ def __test_debug_simplified(tmp_path, verbose):
     assert stdout.find('##AST') == -1
     assert stdout.find('### Template Simplifier pass ') == -1
     assert stderr.splitlines() == [
-        '{}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'.format(test_file)
+        f'{test_file}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'
     ]
     return stdout
 
@@ -3727,7 +3726,7 @@ def __test_debug_symdb(tmp_path, verbose):
     assert stdout.find('##AST') == -1
     assert stdout.find('### Template Simplifier pass ') == -1
     assert stderr.splitlines() == [
-        '{}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'.format(test_file)
+        f'{test_file}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'
     ]
     return stdout
 
@@ -3771,7 +3770,7 @@ def __test_debug_ast(tmp_path, verbose):
     assert stdout.find('##AST') != -1
     assert stdout.find('### Template Simplifier pass ') == -1
     assert stderr.splitlines() == [
-        '{}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'.format(test_file)
+        f'{test_file}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'
     ]
     return stdout
 
@@ -3814,7 +3813,7 @@ def __test_debug_valueflow(tmp_path, verbose):
     assert stdout.find('##AST') == -1
     assert stdout.find('### Template Simplifier pass ') == -1
     assert stderr.splitlines() == [
-        '{}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'.format(test_file)
+        f'{test_file}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'
     ]
     return stdout
 
@@ -3855,7 +3854,7 @@ def test_debug_syntaxerror_c(tmp_path):
     assert stdout.find('##AST') == -1
     assert stdout.find('### Template Simplifier pass ') == -1
     assert stderr.splitlines() == [
-        "{}:2:1: error: Code 'template<...' is invalid C code. [syntaxError]".format(test_file)
+        f"{test_file}:2:1: error: Code 'template<...' is invalid C code. [syntaxError]"
     ]
 
 
@@ -3887,7 +3886,7 @@ def test_ast_max_depth(tmp_path):
     assert exitcode == 0, stdout
     assert stdout.splitlines() == []
     assert stderr.splitlines() == [
-        '{}:12:5: error: maximum AST depth exceeded [internalAstError]'.format(test_file)
+        f'{test_file}:12:5: error: maximum AST depth exceeded [internalAstError]'
     ]
 
 
@@ -4032,7 +4031,7 @@ def test_simplecpp_unhandled_char(tmp_path):
     assert exitcode == 0, stdout
     assert stdout.splitlines() == []
     assert stderr.splitlines() == [
-        '{}:2:5: error: The code contains unhandled character(s) (character code=228). Neither unicode nor extended ascii is supported. [unhandledChar]'.format(test_file)
+        f'{test_file}:2:5: error: The code contains unhandled character(s) (character code=228). Neither unicode nor extended ascii is supported. [unhandledChar]'
     ]
 
 
@@ -4048,7 +4047,7 @@ def test_simplecpp_include_nested_too_deeply(tmp_path):
     for i in range(400):
         test_h = tmp_path / f'test_{i}.h'
         with open(test_h, "w") as f:
-            f.write('#include "test_{}.h"'.format(i+1))
+            f.write(f'#include "test_{i+1}.h"')
 
     args = [
         '-q',
@@ -4063,8 +4062,8 @@ def test_simplecpp_include_nested_too_deeply(tmp_path):
     test_h = tmp_path / 'test_398.h'
     assert stderr.splitlines() == [
         # TODO: should only report the error once
-        '{}:1:2: error: #include nested too deeply [includeNestedTooDeeply]'.format(test_h),
-        '{}:1:2: error: #include nested too deeply [includeNestedTooDeeply]'.format(test_h)
+        f'{test_h}:1:2: error: #include nested too deeply [includeNestedTooDeeply]',
+        f'{test_h}:1:2: error: #include nested too deeply [includeNestedTooDeeply]'
     ]
 
 
@@ -4085,8 +4084,8 @@ def test_simplecpp_syntax_error(tmp_path):
     assert stdout.splitlines() == []
     assert stderr.splitlines() == [
         # TODO: should only report the error once
-        '{}:1:2: error: No header in #include [syntaxError]'.format(test_file),
-        '{}:1:2: error: No header in #include [syntaxError]'.format(test_file)
+        f'{test_file}:1:2: error: No header in #include [syntaxError]',
+        f'{test_file}:1:2: error: No header in #include [syntaxError]'
     ]
 
 
@@ -4153,16 +4152,16 @@ def test_no_valid_configuration(tmp_path):
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0, stdout
     assert stdout.splitlines() == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
     # TODO: this lacks context about the configuration which encounters these errors
     # TODO: add message when a configuration is dropped?
     assert stderr.splitlines() == [
         # TODO: should only report the error once
-        '{}:1:2: error: No header in #include [syntaxError]'.format(test_file),
-        '{}:1:2: error: No header in #include [syntaxError]'.format(test_file),
-        '{}:1:2: error: No header in #include [syntaxError]'.format(test_file),
-        '{}:0:0: information: This file is not analyzed. No working configuration could be extracted. Use -v for more details. [noValidConfiguration]'.format(test_file)
+        f'{test_file}:1:2: error: No header in #include [syntaxError]',
+        f'{test_file}:1:2: error: No header in #include [syntaxError]',
+        f'{test_file}:1:2: error: No header in #include [syntaxError]',
+        f'{test_file}:0:0: information: This file is not analyzed. No working configuration could be extracted. Use -v for more details. [noValidConfiguration]'
     ]
 
 
@@ -4188,13 +4187,13 @@ def test_no_valid_configuration_check_config(tmp_path):
     exitcode, stdout, stderr = cppcheck(args)
     assert exitcode == 0, stdout
     assert stdout.splitlines() == [
-        'Checking {} ...'.format(test_file)
+        f'Checking {test_file} ...'
     ]
     # TODO: this lacks context about the configuration which encounters these errors
     # TODO: add message when a configuration is dropped
     assert stderr.splitlines() == [
-        '{}:1:2: error: No header in #include [syntaxError]'.format(test_file),
-        '{}:1:2: error: No header in #include [syntaxError]'.format(test_file)
+        f'{test_file}:1:2: error: No header in #include [syntaxError]',
+        f'{test_file}:1:2: error: No header in #include [syntaxError]'
     ]
 
 
@@ -4218,7 +4217,7 @@ def __test_active_checkers(tmp_path, active_cnt, total_cnt, use_misra=False, use
     if use_misra:
         args += ['--addon=misra']
     if build_dir:
-        args += ['--cppcheck-build-dir={}'.format(build_dir)]
+        args += [f'--cppcheck-build-dir={build_dir}']
     else:
         args += ['--no-cppcheck-build-dir']
 
@@ -4236,7 +4235,7 @@ def __test_active_checkers(tmp_path, active_cnt, total_cnt, use_misra=False, use
 
     if build_dir:
         checkers_file = build_dir / 'checkers.txt'
-        with open(checkers_file, 'r') as f:
+        with open(checkers_file) as f:
             checkers = f.read().splitlines()
 
         assert checkers == checkers_exp
@@ -4284,14 +4283,14 @@ def test_analyzerinfo(tmp_path):
         '-q',
         '--debug-analyzerinfo',
         '--template=simple',
-        '--cppcheck-build-dir={}'.format(build_dir),
+        f'--cppcheck-build-dir={build_dir}',
         '--enable=all',
         str(test_file)
     ]
 
     stderr_exp = [
-        '{}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]'.format(test_file),
-        "{}:1:6: style: The function 'f' is never used. [unusedFunction]".format(test_file)
+        f'{test_file}:3:14: error: Null pointer dereference: (int*)0 [nullPointer]',
+        f"{test_file}:1:6: style: The function 'f' is never used. [unusedFunction]"
     ]
 
     def run_and_assert_cppcheck(stdout_exp):
@@ -4305,12 +4304,12 @@ def run_and_assert_cppcheck(stdout_exp):
 
     # no cached results
     run_and_assert_cppcheck([
-        "no cached result '{}' for '{}' found".format(test_a1_file_s, test_file_s)
+        f"no cached result '{test_a1_file_s}' for '{test_file_s}' found"
     ])
 
     # cached results
     run_and_assert_cppcheck([
-        "skipping analysis - loaded 1 cached finding(s) from '{}' for '{}'".format(test_a1_file_s, test_file_s)
+        f"skipping analysis - loaded 1 cached finding(s) from '{test_a1_file_s}' for '{test_file_s}'"
     ])
 
     # modified file
@@ -4318,7 +4317,7 @@ def run_and_assert_cppcheck(stdout_exp):
         f.write('\n#define DEF')
 
     run_and_assert_cppcheck([
-        "discarding cached result from '{}' for '{}' - hash mismatch".format(test_a1_file_s, test_file_s)
+        f"discarding cached result from '{test_a1_file_s}' for '{test_file_s}' - hash mismatch"
     ])
 
     # invalid XML
@@ -4326,7 +4325,7 @@ def run_and_assert_cppcheck(stdout_exp):
         f.write('.')
 
     run_and_assert_cppcheck([
-        "discarding cached result - failed to load '{}' for '{}' (XML_ERROR_PARSING_TEXT)".format(test_a1_file_s, test_file_s)
+        f"discarding cached result - failed to load '{test_a1_file_s}' for '{test_file_s}' (XML_ERROR_PARSING_TEXT)"
     ])
 
     # missing root node
@@ -4334,7 +4333,7 @@ def run_and_assert_cppcheck(stdout_exp):
         f.write('')
 
     run_and_assert_cppcheck([
-        "discarding cached result from '{}' for '{}' - no root node found".format(test_a1_file_s, test_file_s)
+        f"discarding cached result from '{test_a1_file_s}' for '{test_file_s}' - no root node found"
     ])
 
     # mismatched root node
@@ -4342,7 +4341,7 @@ def run_and_assert_cppcheck(stdout_exp):
         f.write('')
 
     run_and_assert_cppcheck([
-        "discarding cached result from '{}' for '{}' - unexpected root node".format(test_a1_file_s, test_file_s)
+        f"discarding cached result from '{test_a1_file_s}' for '{test_file_s}' - unexpected root node"
     ])
 
     # missing 'hash' attribute
@@ -4350,7 +4349,7 @@ def run_and_assert_cppcheck(stdout_exp):
         f.write('')
 
     run_and_assert_cppcheck([
-        "discarding cached result from '{}' for '{}' - no 'hash' attribute found".format(test_a1_file_s, test_file_s)
+        f"discarding cached result from '{test_a1_file_s}' for '{test_file_s}' - no 'hash' attribute found"
     ])
 
     # invalid 'hash' attribute
@@ -4358,7 +4357,7 @@ def run_and_assert_cppcheck(stdout_exp):
         f.write('')
 
     run_and_assert_cppcheck([
-        "discarding cached result from '{}' for '{}' - hash mismatch".format(test_a1_file_s, test_file_s)
+        f"discarding cached result from '{test_a1_file_s}' for '{test_file_s}' - hash mismatch"
     ])
 
     # TODO:
@@ -4388,7 +4387,7 @@ def test_ctu_function_call_path_slash(tmp_path):  # #14591
     args = [
         '-q',
         '--template=simple',
-        '--cppcheck-build-dir={}'.format(build_dir),
+        f'--cppcheck-build-dir={build_dir}',
         str(test_file)
     ]
 
diff --git a/test/cli/performance_test.py b/test/cli/performance_test.py
index 55da3b3b04e..34cbe1c228c 100644
--- a/test/cli/performance_test.py
+++ b/test/cli/performance_test.py
@@ -1,4 +1,3 @@
-
 # python -m pytest performance_test.py
 
 import os
@@ -15,7 +14,7 @@ def test_slow_array_many_floats(tmpdir):
     # 11649
     # cppcheck valueflow takes a long time when an array has many floats
     filename = os.path.join(tmpdir, 'hang.c')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write("const float f[] = {\n")
         for _ in range(20000):
             f.write('    13.6f,\n')
@@ -28,7 +27,7 @@ def test_slow_array_many_strings(tmpdir):
     # 11901
     # cppcheck valueflow takes a long time when analyzing a file with many strings
     filename = os.path.join(tmpdir, 'hang.c')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write("const char *strings[] = {\n")
         for _ in range(20000):
             f.write('    "abc",\n')
@@ -40,7 +39,7 @@ def test_slow_array_many_strings(tmpdir):
 def test_slow_long_line(tmpdir):
     # simplecpp #314
     filename = os.path.join(tmpdir, 'hang.c')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write("#define A() static const int a[] = {\\\n")
         for _ in range(5000):
             f.write(" -123, 456, -789,\\\n")
@@ -52,7 +51,7 @@ def test_slow_long_line(tmpdir):
 def test_slow_large_constant_expression(tmpdir):
     # 12182
     filename = os.path.join(tmpdir, 'hang.c')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write("""
 #define FLAG1 0
 #define FLAG2 0
@@ -122,7 +121,7 @@ def test_slow_large_constant_expression(tmpdir):
 def test_slow_exprid(tmpdir):
     # 11885
     filename = os.path.join(tmpdir, 'hang.c')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write("""
 int foo(int a, int b)
 {
@@ -150,7 +149,7 @@ def test_slow_exprid(tmpdir):
 def test_stack_overflow_AST(tmpdir):
     # 14435
     filename = os.path.join(tmpdir, 'hang.cpp')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write("""
 #define ROW 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
 #define ROW8 ROW ROW ROW ROW ROW ROW ROW ROW
@@ -174,7 +173,7 @@ def test_stack_overflow_AST(tmpdir):
 def test_slow_initlist_varchanged(tmpdir):
     # #12235
     filename = os.path.join(tmpdir, 'hang.cpp')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write(r"""
                 struct T {
                     int* q;
@@ -215,7 +214,7 @@ def test_slow_initlist_varchanged(tmpdir):
 def test_slow_many_scopes(tmpdir):
     # #12038
     filename = os.path.join(tmpdir, 'hang.cpp')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write(r"""
                 #define BLOCK {\
                     char buf[sizeof("x") + 5 * 3 + 16];\
@@ -250,7 +249,7 @@ def test_slow_many_scopes(tmpdir):
 def test_crash_array_in_namespace(tmpdir):
     # 12847
     filename = os.path.join(tmpdir, 'hang.cpp')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write(r"""
                 #define ROW A, A, A, A, A, A, A, A,
                 #define ROW8 ROW ROW ROW ROW ROW ROW ROW ROW
@@ -271,7 +270,7 @@ def test_crash_array_in_namespace(tmpdir):
 def test_crash_array_in_array(tmpdir):
     # 12861
     filename = os.path.join(tmpdir, 'hang.cpp')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write(r"""
                 #define ROW A, A, A, A, A, A, A, A,
                 #define ROW8 ROW ROW ROW ROW ROW ROW ROW ROW
@@ -291,7 +290,7 @@ def test_crash_array_in_array(tmpdir):
 def test_slow_bifurcate(tmpdir):
     # #14134
     filename = os.path.join(tmpdir, 'hang.cpp')
-    with open(filename, 'wt') as f:
+    with open(filename, 'w') as f:
         f.write(r"""
                 class C {
                 public:
diff --git a/test/cli/premium_test.py b/test/cli/premium_test.py
index ddf7b1e2fbe..74d3fb48f51 100644
--- a/test/cli/premium_test.py
+++ b/test/cli/premium_test.py
@@ -1,4 +1,3 @@
-
 # python -m pytest premium_test.py
 
 import os
@@ -19,11 +18,11 @@ def __copy_cppcheck_premium(tmpdir):
 
     # add minimum cfg/std.cfg
     test_cfg_folder = tmpdir.mkdir('cfg')
-    with open(test_cfg_folder.join('std.cfg'), 'wt') as f:
+    with open(test_cfg_folder.join('std.cfg'), 'w') as f:
         f.write('\n')
 
     # add simple cppcheck.cfg
-    with open(tmpdir.join('cppcheck.cfg'), 'wt') as f:
+    with open(tmpdir.join('cppcheck.cfg'), 'w') as f:
         f.write("""
                 {
                     "addons": [],
@@ -39,7 +38,7 @@ def __copy_cppcheck_premium(tmpdir):
 
 def test_misra_c_builtin_style_checks(tmpdir):
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('void foo() { int x; y = 0; }')
 
     exe = __copy_cppcheck_premium(tmpdir)
@@ -76,7 +75,7 @@ def test_build_dir_hash_cppcheck_product(tmpdir):
     # so that files are rescanned when cppcheck is switched
 
     test_file = os.path.join(tmpdir, 'test.cpp')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write(';')
 
     build_dir = tmpdir.mkdir('b')
@@ -94,7 +93,7 @@ def _get_hash(s:str):
         i += 7
         return s[i:s.find('"', i)]
 
-    with open(build_dir.join('test.a1'), 'rt') as f:
+    with open(build_dir.join('test.a1')) as f:
         f1 = f.read()
         hash1 = _get_hash(f1)
     assert re.match(r'^[0-9a-f]{6,}$', hash1), f1
@@ -105,7 +104,7 @@ def _get_hash(s:str):
     assert stderr == ''
     assert exitcode == 0
 
-    with open(build_dir.join('test.a1'), 'rt') as f:
+    with open(build_dir.join('test.a1')) as f:
         f2 = f.read()
         hash2 = _get_hash(f2)
     assert re.match(r'^[0-9a-f]{6,}$', hash2), f2
@@ -116,7 +115,7 @@ def _get_hash(s:str):
 def test_misra_py(tmpdir):
     # 13831 - do not execute misra.py when --premium=misra-c-2012 is used
     test_file = os.path.join(tmpdir, 'test.c')
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('void foo();\n')
 
     exe = __copy_cppcheck_premium(tmpdir)
@@ -139,18 +138,18 @@ def test_invalid_license_retry(tmpdir):
 
     os.mkdir(build_dir)
 
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('void foo();\n')
 
     args = [f"--addon={addon_file}", f"--cppcheck-build-dir={build_dir}", '--xml', '--enable=all', test_file]
 
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write('print(\'{"addon":"premium","column":0,"errorId":"invalidLicense","extra":"","file":"Cppcheck Premium","linenr":0,"message":"Invalid license: No license file was found, contact sales@cppchecksolutions.com","severity":"error"}\')')
 
     _, _, stderr = cppcheck(args)
     assert 'Invalid license' in stderr
 
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write('')
 
     _, _, stderr = cppcheck(args)
@@ -172,12 +171,12 @@ def test_cwe(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
     addon_file = os.path.join(tmpdir, 'premiumaddon.py')
 
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('void foo();\n')
 
     args = [f"--addon={addon_file}", '--xml', test_file]
 
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write('print(\'{"addon":"a","column":1,"errorId":"id","extra":"","file":"test.c","cwe":123,"linenr":1,"message":"bug","severity":"error"}\')')
 
     _, _, stderr = cppcheck(args)
@@ -189,12 +188,12 @@ def test_hash(tmpdir):
     test_file = os.path.join(tmpdir, 'test.c')
     addon_file = os.path.join(tmpdir, 'premiumaddon.py')
 
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write('void foo();\n')
 
     args = [f"--addon={addon_file}", '--xml', test_file]
 
-    with open(addon_file, 'wt') as f:
+    with open(addon_file, 'w') as f:
         f.write('print(\'{"addon":"a","column":1,"errorId":"id","extra":"","file":"test.c","hash":123,"linenr":1,"message":"bug","severity":"error"}\')')
 
     _, _, stderr = cppcheck(args)
diff --git a/test/cli/proj2_test.py b/test/cli/proj2_test.py
index c9516d9ddbf..813c612af53 100644
--- a/test/cli/proj2_test.py
+++ b/test/cli/proj2_test.py
@@ -1,4 +1,3 @@
-
 # python -m pytest test-proj2.py
 
 import json
@@ -22,7 +21,7 @@ def __create_compile_commands(proj_dir):
     proj_dir = str(proj_dir)
     j = [{'directory': os.path.join(proj_dir, 'a'), 'command': 'gcc -c a.c', 'file': 'a.c'},
          {'directory': proj_dir, 'command': 'gcc -c b/b.c', 'file': 'b/b.c'}]
-    with open(os.path.join(proj_dir, __COMPILE_COMMANDS_JSON), 'wt') as f:
+    with open(os.path.join(proj_dir, __COMPILE_COMMANDS_JSON), 'w') as f:
         f.write(json.dumps(j))
 
 
diff --git a/test/cli/project_test.py b/test/cli/project_test.py
index e8c120f2c08..8595dc2e3fa 100644
--- a/test/cli/project_test.py
+++ b/test/cli/project_test.py
@@ -9,16 +9,16 @@
 
 @pytest.mark.parametrize("project_ext", ["json", "sln", "vcxproj", "bpr", "cppcheck"])
 def test_missing_project(project_ext):
-    project_file = "file.{}".format(project_ext)
+    project_file = f"file.{project_ext}"
 
     ret, stdout, stderr = cppcheck(['--project=' + project_file, '--template=cppcheck1'])
     assert 1 == ret
-    assert "cppcheck: error: failed to open project '{}'. The file does not exist.\n".format(project_file) == stdout
+    assert f"cppcheck: error: failed to open project '{project_file}'. The file does not exist.\n" == stdout
     assert "" == stderr
 
 
 def __test_project_error(tmpdir, ext, content, expected):
-    project_file = os.path.join(tmpdir, "file.{}".format(ext))
+    project_file = os.path.join(tmpdir, f"file.{ext}")
 
     with open(project_file, 'w') as f:
         if content is not None:
@@ -26,7 +26,7 @@ def __test_project_error(tmpdir, ext, content, expected):
 
     ret, stdout, stderr = cppcheck(['--project=' + str(project_file)])
     assert 1 == ret
-    assert "cppcheck: error: " + expected + "\ncppcheck: error: failed to load project '{}'. An error occurred.\n".format(project_file) == stdout
+    assert "cppcheck: error: " + expected + f"\ncppcheck: error: failed to load project '{project_file}'. An error occurred.\n" == stdout
     assert "" == stderr
 
 
diff --git a/test/cli/qml_test.py b/test/cli/qml_test.py
index 5ce7428b0ae..5688b525575 100644
--- a/test/cli/qml_test.py
+++ b/test/cli/qml_test.py
@@ -1,4 +1,3 @@
-
 # python3 -m pytest test-qml.py
 
 import os
@@ -26,9 +25,9 @@ def __test_unused_functions(extra_args):
     lines.sort()
     # there are unused functions. But fillSampleData is not unused because that is referenced from main.qml
     assert lines == [
-        "{}samplemodel.cpp:15:23: style: The function 'data' is never used. [unusedFunction]".format(__project_dir_sep),
-        "{}samplemodel.cpp:38:37: style: The function 'roleNames' is never used. [unusedFunction]".format(__project_dir_sep),
-        "{}samplemodel.cpp:9:18: style: The function 'rowCount' is never used. [unusedFunction]".format(__project_dir_sep)
+        f"{__project_dir_sep}samplemodel.cpp:15:23: style: The function 'data' is never used. [unusedFunction]",
+        f"{__project_dir_sep}samplemodel.cpp:38:37: style: The function 'roleNames' is never used. [unusedFunction]",
+        f"{__project_dir_sep}samplemodel.cpp:9:18: style: The function 'rowCount' is never used. [unusedFunction]"
     ]
     assert ret == 0, stdout
 
@@ -56,7 +55,7 @@ def test_unused_functions_j():
 def test_unused_functions_builddir(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_functions(['--cppcheck-build-dir={}'.format(build_dir)])
+    __test_unused_functions([f'--cppcheck-build-dir={build_dir}'])
 
 
 # TODO: test with project file
diff --git a/test/cli/rules_test.py b/test/cli/rules_test.py
index 241cdf34c51..96c6af0b6ca 100644
--- a/test/cli/rules_test.py
+++ b/test/cli/rules_test.py
@@ -10,7 +10,7 @@
 
 def test_empty_catch_block(tmp_path):
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 void f()
 {
@@ -26,23 +26,23 @@ def test_empty_catch_block(tmp_path):
     rule_file = os.path.join(__rules_dir, 'empty-catch-block.xml')
     args = [
         '--template=simple',
-        '--rule-file={}'.format(rule_file),
+        f'--rule-file={rule_file}',
         str(test_file)
     ]
     ret, stdout, stderr = cppcheck(args)
     assert ret == 0
     assert stdout.splitlines() == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule: \\}\\s*catch\\s*\\(.*\\)\\s*\\{\\s*\\}'
     ]
     assert stderr.splitlines() == [
-        '{}:6:0: style: Empty catch block found. [rule]'.format(test_file)
+        f'{test_file}:6:0: style: Empty catch block found. [rule]'
     ]
 
 
 def test_show_all_defines(tmp_path):
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 #define DEF_1
 
@@ -55,27 +55,27 @@ def test_show_all_defines(tmp_path):
     args = [
         '--template=simple',
         '-DDEF_2',
-        '--rule-file={}'.format(rule_file),
+        f'--rule-file={rule_file}',
         str(test_file)
     ]
     ret, stdout, stderr = cppcheck(args)
     assert ret == 0
     assert stdout.splitlines() == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule: .*',
-        'Checking {}: DEF_2=1...'.format(test_file)
+        f'Checking {test_file}: DEF_2=1...'
     ]
     if sys.platform == 'win32':
         test_file = str(test_file).replace('\\', '/')
     assert stderr.splitlines() == [
         # TODO: this message looks strange
-        ":1:0: information: found ' # line 2 \"{}\" # define DEF_1' [showalldefines]".format(test_file)
+        f":1:0: information: found ' # line 2 \"{test_file}\" # define DEF_1' [showalldefines]"
     ]
 
 
 def test_stl(tmp_path):
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 void f()
 {
@@ -89,23 +89,23 @@ def test_stl(tmp_path):
     rule_file = os.path.join(__rules_dir, 'stl.xml')
     args = [
         '--template=simple',
-        '--rule-file={}'.format(rule_file),
+        f'--rule-file={rule_file}',
         str(test_file)
     ]
     ret, stdout, stderr = cppcheck(args)
     assert ret == 0
     assert stdout.splitlines() == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule:  \\. find \\( "[^"]+?" \\) == \\d+ '
     ]
     assert stderr.splitlines() == [
-        '{}:5:0: performance: When looking for a string at a fixed position compare [UselessSTDStringFind]'.format(test_file)
+        f'{test_file}:5:0: performance: When looking for a string at a fixed position compare [UselessSTDStringFind]'
     ]
 
 
 def test_strlen_empty_str(tmp_path):
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 void f(const char* s)
 {
@@ -118,23 +118,23 @@ def test_strlen_empty_str(tmp_path):
     rule_file = os.path.join(__rules_dir, 'strlen-empty-str.xml')
     args = [
         '--template=simple',
-        '--rule-file={}'.format(rule_file),
+        f'--rule-file={rule_file}',
         str(test_file)
     ]
     ret, stdout, stderr = cppcheck(args)
     assert ret == 0
     assert stdout.splitlines() == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule:  if \\( ([!] )*?(strlen) \\( \\w+? \\) ([>] [0] )*?\\) { '
     ]
     assert stderr.splitlines() == [
-        '{}:4:0: performance: Using strlen() to check if a string is empty is not efficient. [StrlenEmptyString]'.format(test_file)
+        f'{test_file}:4:0: performance: Using strlen() to check if a string is empty is not efficient. [StrlenEmptyString]'
     ]
 
 
 def test_suggest_nullptr(tmp_path):
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 void f()
 {
@@ -145,23 +145,23 @@ def test_suggest_nullptr(tmp_path):
     rule_file = os.path.join(__rules_dir, 'suggest_nullptr.xml')
     args = [
         '--template=simple',
-        '--rule-file={}'.format(rule_file),
+        f'--rule-file={rule_file}',
         str(test_file)
     ]
     ret, stdout, stderr = cppcheck(args)
     assert ret == 0
     assert stdout.splitlines() == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule: (\\b\\w+\\b) \\* (\\b\\w+\\b) = 0 ;'
     ]
     assert stderr.splitlines() == [
-        "{}:4:0: style: Prefer to use a 'nullptr' instead of initializing a pointer with 0. [modernizeUseNullPtr]".format(test_file)
+        f"{test_file}:4:0: style: Prefer to use a 'nullptr' instead of initializing a pointer with 0. [modernizeUseNullPtr]"
     ]
 
 
 def test_unused_deref(tmp_path):
     test_file = tmp_path / 'test.cpp'
-    with open(test_file, 'wt') as f:
+    with open(test_file, 'w') as f:
         f.write("""
 void f(const char* p)
 {
@@ -172,15 +172,15 @@ def test_unused_deref(tmp_path):
     rule_file = os.path.join(__rules_dir, 'unused-deref.xml')
     args = [
         '--template=simple',
-        '--rule-file={}'.format(rule_file),
+        f'--rule-file={rule_file}',
         str(test_file)
     ]
     ret, stdout, stderr = cppcheck(args)
     assert ret == 0
     assert stdout.splitlines() == [
-        'Checking {} ...'.format(test_file),
+        f'Checking {test_file} ...',
         'Processing rule:  [;{}] [*] \\w+? (\\+\\+|\\-\\-) ; '
     ]
     assert stderr.splitlines() == [
-        '{}:3:0: style: Redundant * found, "*p++" is the same as "*(p++)". [UnusedDeref]'.format(test_file)
+        f'{test_file}:3:0: style: Redundant * found, "*p++" is the same as "*(p++)". [UnusedDeref]'
     ]
diff --git a/test/cli/sarif_test.py b/test/cli/sarif_test.py
index d2fe9396350..47e87bd0679 100644
--- a/test/cli/sarif_test.py
+++ b/test/cli/sarif_test.py
@@ -464,7 +464,7 @@ def test_sarif_rule_coverage():
     rules = driver["rules"]
 
     # Collect all rule IDs
-    rule_ids = set(rule["id"] for rule in rules)
+    rule_ids = {rule["id"] for rule in rules}
 
     # Should have at least 5 different rules triggered
     assert (
@@ -590,8 +590,8 @@ def test_sarif_results_consistency():
     results = run["results"]
 
     # Collect rule IDs from both rules and results
-    rule_ids_in_rules = set(rule["id"] for rule in rules)
-    rule_ids_in_results = set(result["ruleId"] for result in results)
+    rule_ids_in_rules = {rule["id"] for rule in rules}
+    rule_ids_in_results = {result["ruleId"] for result in results}
 
     # Every rule ID in results should have a corresponding rule definition
     for result_rule_id in rule_ids_in_results:
diff --git a/test/cli/suppress-syntaxError_test.py b/test/cli/suppress-syntaxError_test.py
index 9b58bf4e7c3..1bdb6d53e4c 100644
--- a/test/cli/suppress-syntaxError_test.py
+++ b/test/cli/suppress-syntaxError_test.py
@@ -1,4 +1,3 @@
-
 # python -m pytest test-suppress-syntaxError.py
 
 import os
diff --git a/test/cli/testutils.py b/test/cli/testutils.py
index f352af49f15..2a442dd50c2 100644
--- a/test/cli/testutils.py
+++ b/test/cli/testutils.py
@@ -41,7 +41,7 @@ def create_gui_project_file(project_file, root_path=None, import_project=None, p
         cppcheck_xml += '  \n'
     cppcheck_xml += '\n'
 
-    with open(project_file, 'wt') as f:
+    with open(project_file, 'w') as f:
         f.write(cppcheck_xml)
 
 
@@ -70,7 +70,7 @@ def __lookup_cppcheck_exe():
 
     if exe_path:
         exe_path = os.path.abspath(exe_path)
-        print("using '{}'".format(exe_path))
+        print(f"using '{exe_path}'")
     return exe_path
 
 
diff --git a/test/cli/unused_function_test.py b/test/cli/unused_function_test.py
index 591986f1dde..413705eb7d1 100644
--- a/test/cli/unused_function_test.py
+++ b/test/cli/unused_function_test.py
@@ -1,4 +1,3 @@
-
 # python3 -m pytest test-unused_function_test.py
 
 import os
@@ -25,9 +24,9 @@ def __create_compdb(tmpdir, projpath):
         j.append({
             'directory': projpath,
             'file': os.path.join(projpath, f),
-            'command': 'gcc -c {}'.format(f)
+            'command': f'gcc -c {f}'
         })
-    with open(compile_commands, 'wt') as f:
+    with open(compile_commands, 'w') as f:
         f.write(json.dumps(j, indent=4))
     return compile_commands
 
@@ -44,7 +43,7 @@ def __test_unused_functions(extra_args):
     ret, stdout, stderr = cppcheck(args)
     assert stdout.splitlines() == []
     assert stderr.splitlines() == [
-        "{}3.c:3:6: style: The function 'f3_3' is never used. [unusedFunction]".format(__project_dir_sep)
+        f"{__project_dir_sep}3.c:3:6: style: The function 'f3_3' is never used. [unusedFunction]"
     ]
     assert ret == 0, stdout
 
@@ -74,20 +73,20 @@ def test_unused_functions_j():
 def test_unused_functions_builddir(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_functions(['-j1', '--cppcheck-build-dir={}'.format(build_dir)])
+    __test_unused_functions(['-j1', f'--cppcheck-build-dir={build_dir}'])
 
 
 def test_unused_functions_builddir_j_thread(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_functions(['-j2', '--cppcheck-build-dir={}'.format(build_dir), '--executor=thread'])
+    __test_unused_functions(['-j2', f'--cppcheck-build-dir={build_dir}', '--executor=thread'])
 
 
 @pytest.mark.skipif(sys.platform == 'win32', reason='ProcessExecutor not available on Windows')
 def test_unused_functions_builddir_j_process(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_functions(['-j2', '--cppcheck-build-dir={}'.format(build_dir), '--executor=process'])
+    __test_unused_functions(['-j2', f'--cppcheck-build-dir={build_dir}', '--executor=process'])
 
 
 def __test_unused_functions_project(extra_args):
@@ -97,13 +96,13 @@ def __test_unused_functions_project(extra_args):
         '--template=simple',
         '--enable=unusedFunction',
         '--inline-suppr',
-        '--project={}'.format(project_file),
+        f'--project={project_file}',
     ]
     args += extra_args
     ret, stdout, stderr = cppcheck(args)
     assert stdout.splitlines() == []
     assert [
-        "{}3.c:3:6: style: The function 'f3_3' is never used. [unusedFunction]".format(__project_dir_sep)
+        f"{__project_dir_sep}3.c:3:6: style: The function 'f3_3' is never used. [unusedFunction]"
     ] == stderr.splitlines()
     assert ret == 0, stdout
 
@@ -119,7 +118,7 @@ def test_unused_functions_project_j():
         '--template=simple',
         '--enable=unusedFunction',
         '--inline-suppr',
-        '--project={}'.format(project_file),
+        f'--project={project_file}',
         '-j2',
         '--no-cppcheck-build-dir'
     ]
@@ -134,20 +133,20 @@ def test_unused_functions_project_j():
 def test_unused_functions_project_builddir(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_functions_project(['-j1', '--cppcheck-build-dir={}'.format(build_dir)])
+    __test_unused_functions_project(['-j1', f'--cppcheck-build-dir={build_dir}'])
 
 
 def test_unused_functions_project_builddir_j_thread(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_functions_project(['-j2', '--cppcheck-build-dir={}'.format(build_dir), '--executor=thread'])
+    __test_unused_functions_project(['-j2', f'--cppcheck-build-dir={build_dir}', '--executor=thread'])
 
 
 @pytest.mark.skipif(sys.platform == 'win32', reason='ProcessExecutor not available on Windows')
 def test_unused_functions_project_builddir_j_process(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_functions_project(['-j2', '--cppcheck-build-dir={}'.format(build_dir), '--executor=process'])
+    __test_unused_functions_project(['-j2', f'--cppcheck-build-dir={build_dir}', '--executor=process'])
 
 
 def __test_unused_functions_compdb(tmpdir, extra_args):
@@ -157,13 +156,13 @@ def __test_unused_functions_compdb(tmpdir, extra_args):
         '--template=simple',
         '--enable=unusedFunction',
         '--inline-suppr',
-        '--project={}'.format(compdb_file)
+        f'--project={compdb_file}'
     ]
     args += extra_args
     ret, stdout, stderr = cppcheck(args)
     assert stdout.splitlines() == []
     assert stderr.splitlines() == [
-        "{}3.c:3:6: style: The function 'f3_3' is never used. [unusedFunction]".format(__project_dir_sep)
+        f"{__project_dir_sep}3.c:3:6: style: The function 'f3_3' is never used. [unusedFunction]"
     ]
     assert ret == 0, stdout
 
@@ -179,7 +178,7 @@ def test_unused_functions_compdb_j(tmpdir):
         '--template=simple',
         '--enable=unusedFunction',
         '--inline-suppr',
-        '--project={}'.format(compdb_file),
+        f'--project={compdb_file}',
         '-j2',
         '--no-cppcheck-build-dir'
     ]
@@ -194,11 +193,11 @@ def test_unused_functions_compdb_j(tmpdir):
 def test_unused_functions_compdb_buildir_j_thread(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_functions_compdb(tmpdir, ['-j2', '--cppcheck-build-dir={}'.format(build_dir), '--executor=thread'])
+    __test_unused_functions_compdb(tmpdir, ['-j2', f'--cppcheck-build-dir={build_dir}', '--executor=thread'])
 
 
 @pytest.mark.skipif(sys.platform == 'win32', reason='ProcessExecutor not available on Windows')
 def test_unused_functions_compdb_builddir_j_process(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_unused_functions_compdb(tmpdir, ['-j2', '--cppcheck-build-dir={}'.format(build_dir), '--executor=process'])
\ No newline at end of file
+    __test_unused_functions_compdb(tmpdir, ['-j2', f'--cppcheck-build-dir={build_dir}', '--executor=process'])
diff --git a/test/cli/whole-program_test.py b/test/cli/whole-program_test.py
index dfb4e8112d1..6ba2f7b9613 100644
--- a/test/cli/whole-program_test.py
+++ b/test/cli/whole-program_test.py
@@ -20,12 +20,12 @@ def __create_compile_commands(dir, entries):
         f = os.path.basename(e)
         obj = {
             'directory': os.path.dirname(os.path.abspath(e)),
-            'command': 'gcc -c {}'.format(f),
+            'command': f'gcc -c {f}',
             'file': f
         }
         j.append(obj)
     compile_commands = os.path.join(dir, 'compile_commmands.json')
-    with open(compile_commands, 'wt') as f:
+    with open(compile_commands, 'w') as f:
         f.write(json.dumps(j))
     return compile_commands
 
@@ -61,7 +61,7 @@ def test_addon_suppress_inline_project(tmpdir):
         '--enable=information,style',
         '--inline-suppr',
         '--error-exitcode=1',
-        '--project={}'.format(compile_db)
+        f'--project={compile_db}'
     ]
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
@@ -103,27 +103,27 @@ def test_suppress_inline_j():
 def test_suppress_inline_builddir(tmp_path):
     build_dir = tmp_path / 'b1'
     os.mkdir(build_dir)
-    __test_suppress_inline(['--cppcheck-build-dir={}'.format(build_dir), '-j1'])
+    __test_suppress_inline([f'--cppcheck-build-dir={build_dir}', '-j1'])
 
 
 def test_suppress_inline_builddir_cached(tmp_path):
     build_dir = tmp_path / 'b1'
     os.mkdir(build_dir)
-    __test_suppress_inline(['--cppcheck-build-dir={}'.format(build_dir), '-j1'])
-    __test_suppress_inline(['--cppcheck-build-dir={}'.format(build_dir), '-j1'])
+    __test_suppress_inline([f'--cppcheck-build-dir={build_dir}', '-j1'])
+    __test_suppress_inline([f'--cppcheck-build-dir={build_dir}', '-j1'])
 
 
 def test_suppress_inline_builddir_j(tmp_path):
     build_dir = tmp_path / 'b1'
     os.mkdir(build_dir)
-    __test_suppress_inline(['--cppcheck-build-dir={}'.format(build_dir), '-j2'])
+    __test_suppress_inline([f'--cppcheck-build-dir={build_dir}', '-j2'])
 
 
 def test_inline_suppr_builddir_j_cached(tmp_path):
     build_dir = tmp_path / 'b1'
     os.mkdir(build_dir)
-    __test_suppress_inline(['--cppcheck-build-dir={}'.format(build_dir), '-j2'])
-    __test_suppress_inline(['--cppcheck-build-dir={}'.format(build_dir), '-j2'])
+    __test_suppress_inline([f'--cppcheck-build-dir={build_dir}', '-j2'])
+    __test_suppress_inline([f'--cppcheck-build-dir={build_dir}', '-j2'])
 
 
 # TODO: remove overrides when it is fully working
@@ -139,7 +139,7 @@ def __test_suppress_inline_project(tmp_path, extra_args):
         '--enable=information,style',
         '--inline-suppr',
         '--error-exitcode=1',
-        '--project={}'.format(compile_db)
+        f'--project={compile_db}'
     ]
 
     args += extra_args
@@ -164,27 +164,27 @@ def test_suppress_inline_project_j(tmp_path):
 def test_suppress_inline_project_builddir(tmp_path):
     build_dir = tmp_path / 'b1'
     os.mkdir(build_dir)
-    __test_suppress_inline_project(tmp_path, ['--cppcheck-build-dir={}'.format(build_dir), '-j1'])
+    __test_suppress_inline_project(tmp_path, [f'--cppcheck-build-dir={build_dir}', '-j1'])
 
 
 def test_suppress_inline_project_builddir_cached(tmp_path):
     build_dir = tmp_path / 'b1'
     os.mkdir(build_dir)
-    __test_suppress_inline_project(tmp_path, ['--cppcheck-build-dir={}'.format(build_dir), '-j1'])
-    __test_suppress_inline_project(tmp_path, ['--cppcheck-build-dir={}'.format(build_dir), '-j1'])
+    __test_suppress_inline_project(tmp_path, [f'--cppcheck-build-dir={build_dir}', '-j1'])
+    __test_suppress_inline_project(tmp_path, [f'--cppcheck-build-dir={build_dir}', '-j1'])
 
 
 def test_suppress_inline_project_builddir_j(tmp_path):
     build_dir = tmp_path / 'b1'
     os.mkdir(build_dir)
-    __test_suppress_inline_project(tmp_path, ['--cppcheck-build-dir={}'.format(build_dir), '-j2'])
+    __test_suppress_inline_project(tmp_path, [f'--cppcheck-build-dir={build_dir}', '-j2'])
 
 
 def test_suppress_inline_project_builddir_j_cached(tmp_path):
     build_dir = tmp_path / 'b1'
     os.mkdir(build_dir)
-    __test_suppress_inline_project(tmp_path, ['--cppcheck-build-dir={}'.format(build_dir), '-j2'])
-    __test_suppress_inline_project(tmp_path, ['--cppcheck-build-dir={}'.format(build_dir), '-j2'])
+    __test_suppress_inline_project(tmp_path, [f'--cppcheck-build-dir={build_dir}', '-j2'])
+    __test_suppress_inline_project(tmp_path, [f'--cppcheck-build-dir={build_dir}', '-j2'])
 
 
 @pytest.mark.parametrize("builddir", (False,True))
@@ -215,9 +215,9 @@ def test_addon_builddir_use_ctuinfo(tmp_path):
         'whole-program']
     _, _, stderr = cppcheck(args, cwd=__script_dir)
     assert 'misra-c2012-5.8' in stderr
-    with open(tmp_path / 'whole1.a1.ctu-info', 'wt'):
+    with open(tmp_path / 'whole1.a1.ctu-info', 'w'):
         pass
-    with open(tmp_path / 'whole2.a1.ctu-info', 'wt'):
+    with open(tmp_path / 'whole2.a1.ctu-info', 'w'):
         pass
     _, _, stderr = cppcheck(args, cwd=__script_dir)
     assert 'misra-c2012-5.8' not in stderr
@@ -263,7 +263,7 @@ def __test_checkclass(extra_args):
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
     assert lines == [
-        "whole-program{}odr1.cpp:6:1: error: The one definition rule is violated, different classes/structs have the same name 'C' [ctuOneDefinitionRuleViolation]".format(os.path.sep)
+        f"whole-program{os.path.sep}odr1.cpp:6:1: error: The one definition rule is violated, different classes/structs have the same name 'C' [ctuOneDefinitionRuleViolation]"
     ]
     assert stdout == ''
     assert ret == 1, stdout
@@ -281,13 +281,13 @@ def test_checkclass_j():
 def test_checkclass_builddir(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_checkclass(['--cppcheck-build-dir={}'.format(build_dir)])
+    __test_checkclass([f'--cppcheck-build-dir={build_dir}'])
 
 
 def test_checkclass_builddir_j(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_checkclass(['-j2', '--cppcheck-build-dir={}'.format(build_dir)])
+    __test_checkclass(['-j2', f'--cppcheck-build-dir={build_dir}'])
 
 
 def __test_checkclass_project(tmpdir, extra_args):
@@ -303,7 +303,7 @@ def __test_checkclass_project(tmpdir, extra_args):
         '--template=simple',
         '--enable=information,style',
         '--error-exitcode=1',
-        '--project={}'.format(compile_db)
+        f'--project={compile_db}'
     ]
 
     args += extra_args
@@ -311,7 +311,7 @@ def __test_checkclass_project(tmpdir, extra_args):
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
     assert lines == [
-        "{}:6:1: error: The one definition rule is violated, different classes/structs have the same name 'C' [ctuOneDefinitionRuleViolation]".format(odr_file_1)
+        f"{odr_file_1}:6:1: error: The one definition rule is violated, different classes/structs have the same name 'C' [ctuOneDefinitionRuleViolation]"
     ]
     assert stdout == ''
     assert ret == 1, stdout
@@ -329,13 +329,13 @@ def test_checkclass_project_j(tmpdir):
 def test_checkclass_project_builddir(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_checkclass_project(tmpdir, ['-j1', '--cppcheck-build-dir={}'.format(build_dir)])
+    __test_checkclass_project(tmpdir, ['-j1', f'--cppcheck-build-dir={build_dir}'])
 
 
 def test_checkclass_project_builddir_j(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_checkclass_project(tmpdir, ['-j2', '--cppcheck-build-dir={}'.format(build_dir)])
+    __test_checkclass_project(tmpdir, ['-j2', f'--cppcheck-build-dir={build_dir}'])
 
 def test_ctu_odr_config():
     args = [
@@ -351,7 +351,7 @@ def test_ctu_odr_config():
     ret, stdout, stderr = cppcheck(args, cwd=__script_dir)
     lines = stderr.splitlines()
     assert lines == [
-        "whole-program{}odr_cfg1.cpp:2:1: error: The one definition rule is violated, different classes/structs have the same name 'S' [ctuOneDefinitionRuleViolation]".format(os.path.sep)
+        f"whole-program{os.path.sep}odr_cfg1.cpp:2:1: error: The one definition rule is violated, different classes/structs have the same name 'S' [ctuOneDefinitionRuleViolation]"
     ]
     assert stdout == ''
     assert ret == 1, stdout
@@ -390,7 +390,7 @@ def test_nullpointer_file0_j():
 def test_nullpointer_file0_builddir_j(tmpdir):
     build_dir = os.path.join(tmpdir, 'b1')
     os.mkdir(build_dir)
-    __test_nullpointer_file0(['-j2', '--cppcheck-build-dir={}'.format(build_dir)])
+    __test_nullpointer_file0(['-j2', f'--cppcheck-build-dir={build_dir}'])
 
 # TODO: this only succeeded because it depedent on the bugged unqiue message handling
 @pytest.mark.parametrize("single_file", [
@@ -404,14 +404,14 @@ def test_nullpointer_out_of_memory(tmpdir, single_file):
     code1 = 'void f(int* p) { *p = 0; }\n'
     code2 = 'int main() { int* p = malloc(10); f(p); return 0; }\n'
     if single_file:
-        with open(tmpdir / 'test.c', 'wt') as f:
+        with open(tmpdir / 'test.c', 'w') as f:
             f.write(code1 + code2)
     else:
-        with open(tmpdir / 'header.h', 'wt') as f:
+        with open(tmpdir / 'header.h', 'w') as f:
             f.write('void f(int* p);\n')
-        with open(tmpdir / 'test1.c', 'wt') as f:
+        with open(tmpdir / 'test1.c', 'w') as f:
             f.write('#include "header.h"\n' + code1)
-        with open(tmpdir / 'test2.c', 'wt') as f:
+        with open(tmpdir / 'test2.c', 'w') as f:
             f.write('#include "header.h"\n' + code2)
     args = [
         '--cppcheck-build-dir=.',
diff --git a/test/scripts/extracttests.py b/test/scripts/extracttests.py
index a45c8261d64..cbda331ba67 100755
--- a/test/scripts/extracttests.py
+++ b/test/scripts/extracttests.py
@@ -94,7 +94,7 @@ def parseFile(self, filename):
         start_code = None
         disable = False
 
-        for line in open(filename, 'r'):
+        for line in open(filename):
             # testclass starts
             res = re.match('class (' + name + ')', line)
             if res is not None:
@@ -383,7 +383,7 @@ def writeHtmlFile(nodes, functionName, filename, errorsOnly):
                     lines[line_number] += ' // ' + res.group(3)
                     code = '\n'.join(lines)
                 else:
-                    print('filename:%s expected:%s' % (filename, expected))
+                    print(f'filename:{filename} expected:{expected}')
 
             # source code
             with open(codedir + filename, 'w') as fout:
diff --git a/test/seh/test-sehwrapper.py b/test/seh/test-sehwrapper.py
index 199d96bc0d3..d381237c6aa 100644
--- a/test/seh/test-sehwrapper.py
+++ b/test/seh/test-sehwrapper.py
@@ -16,7 +16,7 @@ def _lookup_cppcheck_exe(exe_name):
         for path in ('', 'bin/', 'bin/debug/'):
             exe_path = base + path + exe_name
             if os.path.isfile(exe_path):
-                print("using '{}'".format(exe_path))
+                print(f"using '{exe_path}'")
                 return exe_path
 
     return None
diff --git a/test/signal/test-signalhandler.py b/test/signal/test-signalhandler.py
index 1ed700d7ec8..a35eff14f03 100644
--- a/test/signal/test-signalhandler.py
+++ b/test/signal/test-signalhandler.py
@@ -17,7 +17,7 @@ def __lookup_cppcheck_exe(exe_name):
         for path in ('', 'bin/', 'bin/debug/'):
             exe_path = base + path + exe_name
             if os.path.isfile(exe_path):
-                print("using '{}'".format(exe_path))
+                print(f"using '{exe_path}'")
                 return exe_path
 
     return None
diff --git a/test/signal/test-stacktrace.py b/test/signal/test-stacktrace.py
index 47e7b2d72a2..a9e423934a8 100644
--- a/test/signal/test-stacktrace.py
+++ b/test/signal/test-stacktrace.py
@@ -13,7 +13,7 @@ def __lookup_cppcheck_exe(exe_name):
         for path in ('', 'bin/', 'bin/debug/'):
             exe_path = base + path + exe_name
             if os.path.isfile(exe_path):
-                print("using '{}'".format(exe_path))
+                print(f"using '{exe_path}'")
                 return exe_path
 
     return None
diff --git a/tools/MT-Unsafe.py b/tools/MT-Unsafe.py
index 965a6261db8..4d9f6b65f9b 100755
--- a/tools/MT-Unsafe.py
+++ b/tools/MT-Unsafe.py
@@ -56,7 +56,7 @@ def man_search(manpage):
         if manpage.endswith('.gz'):
             MANPAGE = gzip.open(manpage, 'r')
         else:
-            MANPAGE = open(manpage, 'r')
+            MANPAGE = open(manpage)
     except OSError as filename:
         print('cannot open %s' % filename, file=sys.stderr)
         return  # None, None
@@ -94,7 +94,7 @@ def man_search(manpage):
         # vprint(1, '%s for %s' % (res, lineread))
         if res:
             apis.add(res.group(1))
-            dprint(1, 'found api %s in %s' % (res.group(1), lineread))
+            dprint(1, f'found api {res.group(1)} in {lineread}')
             continue
 
         if 'MT-Unsafe' in lineread:
diff --git a/tools/bisect/bisect_common.py b/tools/bisect/bisect_common.py
index 10494d36512..ef7b2bad6ac 100644
--- a/tools/bisect/bisect_common.py
+++ b/tools/bisect/bisect_common.py
@@ -12,7 +12,7 @@ def build_cppcheck(bisect_path):
     install_path = os.path.join(bisect_path, commit_hash)
     cppcheck_path = os.path.join(install_path, 'cppcheck')
     if os.path.exists(install_path):
-        print('binary for {} already exists'.format(commit_hash))
+        print(f'binary for {commit_hash} already exists')
         return cppcheck_path
 
     bisect_repo_dir = os.path.join(bisect_path, 'cppcheck')
@@ -32,12 +32,12 @@ def build_cppcheck(bisect_path):
     # TODO: make jobs configurable
     # TODO: use "make install"?
     # TODO: use CXXOPTS overrides to workaround compiling issues in older versions
-    print('building {}'.format(commit_hash))
+    print(f'building {commit_hash}')
     # we always need to use CXXFLAGS because we need to support older versions
     subprocess.check_call(['make', '-C', bisect_repo_dir, '-j6', 'MATCHCOMPILER=yes', 'CXXFLAGS=-O2 -w -pipe', '-s'])
 
     # TODO: remove folder if installation failed
-    print('installing {}'.format(commit_hash))
+    print(f'installing {commit_hash}')
     os.mkdir(install_path)
     if os.path.exists(os.path.join(bisect_repo_dir, 'cfg')):
         shutil.copytree(os.path.join(bisect_repo_dir, 'cfg'), os.path.join(install_path, 'cfg'))
diff --git a/tools/bisect/bisect_hang.py b/tools/bisect/bisect_hang.py
index ad0497396c9..cc4cd1f6694 100644
--- a/tools/bisect/bisect_hang.py
+++ b/tools/bisect/bisect_hang.py
@@ -11,7 +11,7 @@ def run(cppcheck_path, options, elapsed_time=None):
         timeout = elapsed_time * 2
     cmd = options.split()
     cmd.insert(0, cppcheck_path)
-    print('running {}'.format(cppcheck_path))
+    print(f'running {cppcheck_path}')
     with subprocess.Popen(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) as p:
         try:
             p.communicate(timeout=timeout)
@@ -54,7 +54,7 @@ def run(cppcheck_path, options, elapsed_time=None):
     # TODO: handle error result
     run(cppcheck_path, options)
     elapsed_time = time.perf_counter() - t
-    print('elapsed_time: {}'.format(elapsed_time))
+    print(f'elapsed_time: {elapsed_time}')
     # TODO: write to stdout and redirect all all printing to stderr
     sys.exit(round(elapsed_time + .5))  # return the time
 
@@ -64,7 +64,7 @@ def run(cppcheck_path, options, elapsed_time=None):
 
 if not elapsed_time:
     # TODO: handle error result
-    print('elapsed_time: {}'.format(run_time))
+    print(f'elapsed_time: {run_time}')
     # TODO: write to stdout and redirect all printing to stderr
     sys.exit(round(run_time + .5))  # return the time
 
@@ -74,6 +74,6 @@ def run(cppcheck_path, options, elapsed_time=None):
 if not run_res:
     sys.exit(EC_BAD if not invert else EC_GOOD)  # timeout occurred
 
-print('run_time: {}'.format(run_time))
+print(f'run_time: {run_time}')
 
 sys.exit(EC_GOOD if not invert else EC_BAD)  # no timeout
diff --git a/tools/bisect/bisect_res.py b/tools/bisect/bisect_res.py
index 68550d00815..ea54fb062bb 100644
--- a/tools/bisect/bisect_res.py
+++ b/tools/bisect/bisect_res.py
@@ -7,7 +7,7 @@
 def run(cppcheck_path, options):
     cmd = options.split()
     cmd.insert(0, cppcheck_path)
-    print('running {}'.format(cppcheck_path))
+    print(f'running {cppcheck_path}')
     with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) as p:
         stdout, stderr = p.communicate()
         rc = p.returncode
diff --git a/tools/compare-valueflow-options.py b/tools/compare-valueflow-options.py
index 11b7cf87d42..8440db89c3d 100755
--- a/tools/compare-valueflow-options.py
+++ b/tools/compare-valueflow-options.py
@@ -20,7 +20,7 @@
 
 def format_float(a, b=1):
     if a > 0 and b > 0:
-        return '{:.2f}'.format(a / b)
+        return f'{a / b:.2f}'
     return 'N/A'
 
 
@@ -88,7 +88,7 @@ def count_errors(errout:str, c:set):
         try:
             lib.clone_cppcheck(cppcheck_path, '')
         except Exception as e:
-            print('Failed to clone Cppcheck repository ({}), retry later'.format(e))
+            print(f'Failed to clone Cppcheck repository ({e}), retry later')
             sys.exit(1)
 
         if not lib.compile_cppcheck(cppcheck_path):
@@ -146,7 +146,7 @@ def count_errors(errout:str, c:set):
 
         libraries = lib.library_includes.get_libraries(source_path)
 
-        with open(results_file, 'at') as myfile:
+        with open(results_file, 'a') as myfile:
             myfile.write('package:' + package + '\n')
             myfile.write('libraries:' + ','.join(libraries) +'\n')
 
@@ -163,7 +163,7 @@ def count_errors(errout:str, c:set):
                 else:
                     error_text = f'{id} crash code={c}'
 
-            with open(results_file, 'at') as myfile:
+            with open(results_file, 'a') as myfile:
                 if error_text is not None:
                     myfile.write(f'{error_text}\n')
                 else:
@@ -183,7 +183,7 @@ def count_errors(errout:str, c:set):
                             time_factor = time / time0
                             myfile.write(f'{id}: Timefactor: %.3f\n' % time_factor)
 
-        with open(summary_file, 'wt') as myfile:
+        with open(summary_file, 'w') as myfile:
             all = {}
             for id, c in summary_results.items():
                 for error_id, count in c.items():
diff --git a/tools/compare_ast_symdb.py b/tools/compare_ast_symdb.py
index 5437c38853b..55457e97af7 100644
--- a/tools/compare_ast_symdb.py
+++ b/tools/compare_ast_symdb.py
@@ -12,7 +12,7 @@
 CPPCHECK = os.path.expanduser('~/cppcheck/cppcheck')
 
 def run_cppcheck(cppcheck_parameters:str, clang:str):
-    cmd = '{} {} {} --debug --verbose'.format(CPPCHECK, cppcheck_parameters, clang)
+    cmd = f'{CPPCHECK} {cppcheck_parameters} {clang} --debug --verbose'
     #print(cmd)
     with subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
         # TODO: handle p.returncode?
@@ -64,20 +64,20 @@ def compare_ast_symdb(cppcheck_parameters: str):
     ast1 = get_ast(debug1)
     ast2 = get_ast(debug2)
     if ast1 != ast2:
-        print("ast is not the same: {}".format(cppcheck_parameters))
-        with open('cppcheck.ast', 'wt') as f:
+        print(f"ast is not the same: {cppcheck_parameters}")
+        with open('cppcheck.ast', 'w') as f:
             f.write(ast1)
-        with open('clang.ast', 'wt') as f:
+        with open('clang.ast', 'w') as f:
             f.write(ast2)
         same = False
 
     symdb1 = get_symdb(debug1)
     symdb2 = get_symdb(debug2)
     if symdb1 != symdb2:
-        print("symdb is not the same: {}".format(cppcheck_parameters))
-        with open('cppcheck.symdb', 'wt') as f:
+        print(f"symdb is not the same: {cppcheck_parameters}")
+        with open('cppcheck.symdb', 'w') as f:
             f.write(symdb1)
-        with open('clang.symdb', 'wt') as f:
+        with open('clang.symdb', 'w') as f:
             f.write(symdb2)
         same = False
 
diff --git a/tools/creduce.py b/tools/creduce.py
index f6f9c7d8e47..198b0c32dbc 100644
--- a/tools/creduce.py
+++ b/tools/creduce.py
@@ -17,7 +17,7 @@ def print_lines(lines):
         print(line)
 
 def write_to(file, lines):
-    content = list((line + "\n" for line in lines))
+    content = list(line + "\n" for line in lines)
     if (len(content) > 0):
         with open(file, 'w') as f:
             f.writelines(content)
@@ -27,7 +27,7 @@ def make_executable(p):
 
 def quote(s):
     text = s.replace("'", "'\"'\"'")
-    return "'{}'".format(text)
+    return f"'{text}'"
 
 class ScriptBuilder:
     def __init__(self):
@@ -48,8 +48,8 @@ def grep(self, text, file=None):
     def check(self, equal_zero=False, result=1):
         op = 'eq' if equal_zero else 'ne'
         cmds = ['RES=$?',
-                'if [ $RES -{} "0" ]; then'.format(op),
-                '    exit {}'.format(result),
+                f'if [ $RES -{op} "0" ]; then',
+                f'    exit {result}',
                 'fi']
         self.commands.extend(cmds)
 
diff --git a/tools/daca2-download.py b/tools/daca2-download.py
index ee02a26f186..7f87f60330d 100755
--- a/tools/daca2-download.py
+++ b/tools/daca2-download.py
@@ -43,7 +43,7 @@ def getpackages():
         return []
     # TODO: handle exitcode?
     subprocess.call(['nice', 'gunzip', 'ls-lR.gz'])
-    with open('ls-lR', 'rt') as f:
+    with open('ls-lR') as f:
         lines = f.readlines()
     # TODO: handle exitcode?
     subprocess.call(['rm', 'ls-lR'])
@@ -94,7 +94,7 @@ def removeAll():
                 else:
                     os.remove(filename)
         # pylint: disable=undefined-variable
-        except WindowsError as err:
+        except OSError as err:
             time.sleep(30)
             if count == 0:
                 print('Failed to cleanup files/folders')
diff --git a/tools/daca2-getpackages.py b/tools/daca2-getpackages.py
index 992921082c1..4f376668661 100755
--- a/tools/daca2-getpackages.py
+++ b/tools/daca2-getpackages.py
@@ -46,7 +46,7 @@ def getpackages():
     subprocess.call(['nice', 'gunzip', 'ls-lR.gz'])
     if not os.path.isfile('ls-lR'):
         sys.exit(1)
-    with open('ls-lR', 'rt') as f:
+    with open('ls-lR') as f:
         lines = f.readlines()
     # TODO: handle exitcode?
     subprocess.call(['rm', 'ls-lR'])
diff --git a/tools/donate-cpu-server.py b/tools/donate-cpu-server.py
index 6f44ea679a7..47061f74ed8 100755
--- a/tools/donate-cpu-server.py
+++ b/tools/donate-cpu-server.py
@@ -54,7 +54,7 @@
 
 def print_ts(msg) -> None:
     dt = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
-    print('[{}] {}'.format(dt, msg))
+    print(f'[{dt}] {msg}')
 
 
 # Set up an exception hook for all uncaught exceptions so they can be logged
@@ -178,7 +178,7 @@ def latestReport(latestResults: list) -> str:
         count = ['0', '0']
         lost = 0
         added = 0
-        for line in open(filename, 'rt'):
+        for line in open(filename):
             line = line.strip()
             if datestr is None and line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-'):
                 datestr = line
@@ -214,7 +214,7 @@ def crashReport(results_path: str, query_params: dict):
     for filename in sorted(glob.glob(os.path.expanduser(results_path + '/*'))):
         if not os.path.isfile(filename) or filename.endswith('.diff'):
             continue
-        with open(filename, 'rt') as file_:
+        with open(filename) as file_:
             datestr = None
             package_url = None
             for line in file_:
@@ -244,7 +244,7 @@ def crashReport(results_path: str, query_params: dict):
                     if c_head != 'Crash':
                         break
                     if package_url is not None:
-                        pkgs += '{}\n'.format(package_url)
+                        pkgs += f'{package_url}\n'
                 elif line.find(' received signal ') != -1:
                     crash_line = next(file_, '').strip()
                     location_index = crash_line.rfind(' at ')
@@ -276,7 +276,7 @@ def crashReport(results_path: str, query_params: dict):
                             stack_trace.append(m.group('number') + ' ' + m.group('function') + '(...) at ' + m.group('location'))
                             continue
 
-                        print_ts('{} - unmatched stack frame - {}'.format(package, l))
+                        print_ts(f'{package} - unmatched stack frame - {l}')
                         break
                     key = hash(' '.join(stack_trace))
 
@@ -315,7 +315,7 @@ def timeoutReport(results_path: str) -> str:
     for filename in sorted(glob.glob(os.path.expanduser(results_path + '/*'))):
         if not os.path.isfile(filename) or filename.endswith('.diff'):
             continue
-        with open(filename, 'rt') as file_:
+        with open(filename) as file_:
             datestr = None
             for line in file_:
                 line = line.strip()
@@ -361,7 +361,7 @@ def staleReport(results_path: str, query_params: dict) -> str:
     for filename in sorted(glob.glob(os.path.expanduser(results_path + '/*'))):
         if filename.endswith('.diff') or not os.path.isfile(filename):
             continue
-        with open(filename, 'rt') as f:
+        with open(filename) as f:
             # first line is datetime string
             datestr = f.readline().strip()
             try:
@@ -425,7 +425,7 @@ def diffReport(resultsPath: str) -> str:
     for filename in sorted(glob.glob(resultsPath + '/*.diff')):
         if not os.path.isfile(filename):
             continue
-        with open(filename, 'rt') as f:
+        with open(filename) as f:
             data = json.loads(f.read())
         uploadedToday = data['date'] == today
         for messageId in data['sums']:
@@ -458,7 +458,7 @@ def generate_package_diff_statistics(filename: str) -> None:
 
     sums = {}
 
-    for line in open(filename, 'rt'):
+    for line in open(filename):
         line = line.strip()
         if line == 'diff:':
             is_diff = True
@@ -486,7 +486,7 @@ def generate_package_diff_statistics(filename: str) -> None:
 
     filename_diff = filename + '.diff'
     if sums:
-        with open(filename_diff, 'wt') as f:
+        with open(filename_diff, 'w') as f:
             f.write(json.dumps(output))
     elif os.path.isfile(filename_diff):
         os.remove(filename_diff)
@@ -498,7 +498,7 @@ def diffMessageIdReport(resultPath: str, messageId: str) -> str:
     for filename in sorted(glob.glob(resultPath + '/*.diff')):
         if not os.path.isfile(filename):
             continue
-        with open(filename, 'rt') as f:
+        with open(filename) as f:
             diff_stats = json.loads(f.read())
         if messageId not in diff_stats['sums']:
             continue
@@ -507,7 +507,7 @@ def diffMessageIdReport(resultPath: str, messageId: str) -> str:
 
         url = None
         diff = False
-        for line in open(filename[:-5], 'rt'):
+        for line in open(filename[:-5]):
             if line.startswith('ftp://'):
                 url = line
             elif line == 'diff:\n':
@@ -529,7 +529,7 @@ def diffMessageIdTodayReport(resultPath: str, messageId: str) -> str:
     for filename in sorted(glob.glob(resultPath + '/*.diff')):
         if not os.path.isfile(filename):
             continue
-        with open(filename, 'rt') as f:
+        with open(filename) as f:
             diff_stats = json.loads(f.read())
         if messageId not in diff_stats['sums']:
             continue
@@ -541,7 +541,7 @@ def diffMessageIdTodayReport(resultPath: str, messageId: str) -> str:
         url = None
         diff = False
         firstLine = True
-        for line in open(filename[:-5], 'rt'):
+        for line in open(filename[:-5]):
             if firstLine:
                 firstLine = False
                 if not line.startswith(today):
@@ -599,7 +599,7 @@ def summaryReport(resultsPath: str, name: str, prefix: str, marker: str) -> str:
         uploadedToday = False
         firstLine = True
         inResults = False
-        for line in open(filename, 'rt'):
+        for line in open(filename):
             if firstLine:
                 if line.startswith(today):
                     uploadedToday = True
@@ -641,7 +641,7 @@ def summaryReport(resultsPath: str, name: str, prefix: str, marker: str) -> str:
                 outToday[messageId] += 1
 
     html = '\n'
-    html += '{} report\n'.format(name)
+    html += f'{name} report\n'
     html += '

HEAD report

\n' html += '

Uploaded today

' html += summaryReportFromDict(outToday, prefix, 'today') @@ -668,7 +668,7 @@ def messageIdReport(resultPath: str, marker: str, messageId: str, query_params: continue url = None inResults = False - for line in open(filename, 'rt'): + for line in open(filename): if line.startswith('cppcheck: '): if OLD_VERSION not in line: # Package results seem to be too old, skip @@ -714,7 +714,7 @@ def messageIdTodayReport(resultPath: str, messageId: str, marker: str) -> str: url = None inResults = False firstLine = True - for line in open(filename, 'rt'): + for line in open(filename): if firstLine: firstLine = False if not line.startswith(today): @@ -754,8 +754,8 @@ def timeReport(resultPath: str, show_gt: bool, query_params: dict): title = 'Time report ({})'.format('regressed' if show_gt else 'improved') html = '\n' - html += '{}\n'.format(title) - html += '

{}

\n'.format(title) + html += f'{title}\n' + html += f'

{title}

\n' html += '
\n'
     column_width = [40, 10, 10, 10, 10, 10]
     html += ''
@@ -773,7 +773,7 @@ def timeReport(resultPath: str, show_gt: bool, query_params: dict):
             continue
         datestr = None
         package_url = None
-        for line in open(filename, 'rt'):
+        for line in open(filename):
             line = line.strip()
             if line.startswith('cppcheck: '):
                 if OLD_VERSION not in line:
@@ -820,13 +820,13 @@ def timeReport(resultPath: str, show_gt: bool, query_params: dict):
                 data[pkg_name] = (datestr, split_line[2], split_line[1], time_factor)
 
                 if package_url is not None:
-                    pkgs += '{}\n'.format(package_url)
+                    pkgs += f'{package_url}\n'
             break
 
     sorted_data = sorted(data.items(), key=lambda kv: kv[1][3], reverse=show_gt)
     sorted_dict = collections.OrderedDict(sorted_data)
     for key in sorted_dict:
-        html += fmt(key, sorted_dict[key][0], sorted_dict[key][1], sorted_dict[key][2], '{:.2f}'.format(sorted_dict[key][3]),
+        html += fmt(key, sorted_dict[key][0], sorted_dict[key][1], sorted_dict[key][2], f'{sorted_dict[key][3]:.2f}',
                     column_width=column_width) + '\n'
 
     html += '\n'
@@ -842,9 +842,9 @@ def timeReport(resultPath: str, show_gt: bool, query_params: dict):
     html += 'Time for all packages (not just the ones listed above):\n'
     html += fmt('Total time:',
             '',
-            '{:.1f}'.format(total_time_base),
-            '{:.1f}'.format(total_time_head),
-            '{:.2f}'.format(total_time_factor), link=False, column_width=column_width)
+            f'{total_time_base:.1f}',
+            f'{total_time_head:.1f}',
+            f'{total_time_factor:.2f}', link=False, column_width=column_width)
 
     html += '\n'
     html += '
\n' @@ -858,8 +858,8 @@ def timeReport(resultPath: str, show_gt: bool, query_params: dict): def timeReportSlow(resultPath: str) -> str: title = 'Time report (slowest)' html = '\n' - html += '{}\n'.format(title) - html += '

{}

\n'.format(title) + html += f'{title}\n' + html += f'

{title}

\n' html += '
\n'
     html += ''
     html += fmt('Package', 'Date       Time', OLD_VERSION, 'Head', link=False)
@@ -873,7 +873,7 @@ def timeReportSlow(resultPath: str) -> str:
         if not os.path.isfile(filename) or filename.endswith('.diff'):
             continue
         datestr = None
-        for line in open(filename, 'rt'):
+        for line in open(filename):
             line = line.strip()
             if line.startswith('cppcheck: '):
                 if OLD_VERSION not in line:
@@ -961,7 +961,7 @@ def check_library_report(result_path: str, message_id: str) -> str:
         if not os.path.isfile(filename) or filename.endswith('.diff'):
             continue
         in_results = False
-        for line in open(filename, 'rt'):
+        for line in open(filename):
             if line.startswith('cppcheck: '):
                 if OLD_VERSION not in line:
                     # Package results seem to be too old, skip
@@ -1026,7 +1026,7 @@ def check_library_function_name(result_path: str, function_name: str, query_para
         in_results = False
         package_url = None
         cppcheck_options = None
-        for line in open(filename, 'rt'):
+        for line in open(filename):
             if line.startswith('cppcheck: '):
                 if OLD_VERSION not in line:
                     # Package results seem to be too old, skip
@@ -1050,7 +1050,7 @@ def check_library_function_name(result_path: str, function_name: str, query_para
             if not (' ' + function_name + ' ') in line:
                 continue
             if pkgs is not None and package_url is not None:
-                pkgs += '{}\n'.format(package_url.strip())
+                pkgs += f'{package_url.strip()}\n'
                 break
             if package_url:
                 output_lines_list.append(package_url)
@@ -1079,7 +1079,7 @@ def clientsReport(results_path: str):
     for filename in sorted(glob.glob(os.path.expanduser(results_path + '/*'))):
         if not os.path.isfile(filename) or filename.endswith('.diff'):
             continue
-        with open(filename, 'rt') as file_:
+        with open(filename) as file_:
             datestr = None
             platform = None
             py_version = None
@@ -1180,7 +1180,7 @@ def run(self):
             cmd = self.cmd
             url, queryParams = self.parse_req(cmd)
             if url is None:
-                print_ts('invalid request: {}'.format(cmd))
+                print_ts(f'invalid request: {cmd}')
                 self.connection.close()
                 return
             t_start = time.perf_counter()
@@ -1280,10 +1280,10 @@ def run(self):
                     print_ts('HTTP/1.1 404 Not Found')
                     self.connection.send(b'HTTP/1.1 404 Not Found\r\n\r\n')
                 else:
-                    with open(filename, 'rt') as f:
+                    with open(filename) as f:
                         data = f.read()
                     httpGetResponse(self.connection, data, 'text/plain')
-            print_ts('{} finished in {}s'.format(url, (time.perf_counter() - t_start)))
+            print_ts(f'{url} finished in {(time.perf_counter() - t_start)}s')
         except:
             tb = "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]))
             print_ts(tb)
@@ -1303,7 +1303,7 @@ def read_data(connection, cmd, pos_nl, max_data_size, check_done, cmd_name, time
                 try:
                     text_received = bytes_received.decode('ascii', 'ignore')
                 except UnicodeDecodeError as e:
-                    print_ts('Error: Decoding failed ({}): {}'.format(cmd_name, e))
+                    print_ts(f'Error: Decoding failed ({cmd_name}): {e}')
                     data = None
                     break
                 t = 0.0
@@ -1313,21 +1313,21 @@ def read_data(connection, cmd, pos_nl, max_data_size, check_done, cmd_name, time
             else:
                 time.sleep(0.2)
                 t += 0.2
-    except socket.error as e:
-        print_ts('Socket error occurred ({}): {}'.format(cmd_name, e))
+    except OSError as e:
+        print_ts(f'Socket error occurred ({cmd_name}): {e}')
         data = None
 
     connection.close()
 
     if (timeout > 0) and (t >= timeout):
-        print_ts('Timeout occurred ({}).'.format(cmd_name))
+        print_ts(f'Timeout occurred ({cmd_name}).')
         data = None
 
     if data and (len(data) >= max_data_size):
-        print_ts('Maximum allowed data ({} bytes) exceeded ({}).'.format(max_data_size, cmd_name))
+        print_ts(f'Maximum allowed data ({max_data_size} bytes) exceeded ({cmd_name}).')
 
     elif data and check_done and not data.endswith('\nDONE'):
-        print_ts('Incomplete data received ({}).'.format(cmd_name))
+        print_ts(f'Incomplete data received ({cmd_name}).')
         data = None
 
     return data
@@ -1343,7 +1343,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
 
     latestResults = []
     if os.path.isfile('latest.txt'):
-        with open('latest.txt', 'rt') as f:
+        with open('latest.txt') as f:
             latestResults = f.read().strip().split(' ')
 
     print_ts('version ' + SERVER_VERSION)
@@ -1356,7 +1356,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
         try:
             bytes_received = connection.recv(128)
             cmd = bytes_received.decode('utf-8', 'ignore')
-        except socket.error as e:
+        except OSError as e:
             print_ts('Error: Recv error: ' + str(e))
             connection.close()
             continue
@@ -1366,12 +1366,12 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             continue
         pos_nl = cmd.find('\n')
         if pos_nl < 1:
-            print_ts("No newline found in data: '{}'".format(cmd))
+            print_ts(f"No newline found in data: '{cmd}'")
             connection.close()
             continue
         firstLine = cmd[:pos_nl]
         if re.match('[a-zA-Z0-9./ ]+', firstLine) is None:
-            print_ts('Unsupported characters found in command: {}'.format(firstLine))
+            print_ts(f'Unsupported characters found in command: {firstLine}')
             connection.close()
             continue
         if cmd.startswith('GET /'):
@@ -1395,7 +1395,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 if pkg is not None:
                     break
 
-            with open('package-index.txt', 'wt') as f:
+            with open('package-index.txt', 'w') as f:
                 f.write(str(packageIndex) + '\n')
 
             print_ts('get:' + pkg)
@@ -1452,17 +1452,17 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 if os.path.exists(filename):
                     os.remove(filename)
                 continue
-            with open(filename, 'wt') as f:
+            with open(filename, 'w') as f:
                 f.write(strDateTime() + '\n' + data)
             # track latest added results..
             if len(latestResults) >= 20:
                 latestResults = latestResults[1:]
             latestResults.append(filename)
-            with open('latest.txt', 'wt') as f:
+            with open('latest.txt', 'w') as f:
                 f.write(' '.join(latestResults))
             # generate package.diff..
             generate_package_diff_statistics(filename)
-            print_ts('write finished for {} ({} bytes / {}s)'.format(res.group(1), len(data), (time.perf_counter() - t_start)))
+            print_ts(f'write finished for {res.group(1)} ({len(data)} bytes / {(time.perf_counter() - t_start)}s)')
             continue
         if cmd.startswith('write_info\nftp://') or cmd.startswith('write_info\nhttp://'):
             t_start = time.perf_counter()
@@ -1500,9 +1500,9 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 if os.path.exists(filename):
                     os.remove(filename)
                 continue
-            with open(filename, 'wt') as f:
+            with open(filename, 'w') as f:
                 f.write(strDateTime() + '\n' + data)
-            print_ts('write_info finished for {} ({} bytes / {}s)'.format(res.group(1), len(data), (time.perf_counter() - t_start)))
+            print_ts(f'write_info finished for {res.group(1)} ({len(data)} bytes / {(time.perf_counter() - t_start)}s)')
             continue
         if cmd == 'getPackagesCount\n':
             packages_count = str(len(packages))
@@ -1517,7 +1517,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 connection.send(pkg.encode('utf-8', 'ignore'))
                 print_ts('getPackageIdx: ' + pkg)
             else:
-                print_ts('getPackageIdx: index {} is out of range'.format(request_idx))
+                print_ts(f'getPackageIdx: index {request_idx} is out of range')
             connection.close()
             continue
         if cmd.startswith('write_nodata\nftp://'):
@@ -1530,7 +1530,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 print_ts('No newline found in data. Ignoring no-data data.')
                 continue
             if pos < 10:
-                print_ts('Data is less than 10 characters ({}). Ignoring no-data data.'.format(pos))
+                print_ts(f'Data is less than 10 characters ({pos}). Ignoring no-data data.')
                 continue
             url = data[:pos]
 
@@ -1541,7 +1541,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                     packages[currentIdx] = None
                     print_ts('write_nodata:' + url)
 
-                    with open('packages_nodata.txt', 'at') as f:
+                    with open('packages_nodata.txt', 'a') as f:
                         f.write(url + '\n')
                     break
                 if currentIdx == 0:
@@ -1573,20 +1573,20 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
     print_ts('work path: ' + workPath)
     resultPath = workPath + '/donated-results'
     if not os.path.isdir(resultPath):
-        print_ts("fatal: result path '{}' is missing".format(resultPath))
+        print_ts(f"fatal: result path '{resultPath}' is missing")
         sys.exit(1)
 
-    with open('packages.txt', 'rt') as f:
+    with open('packages.txt') as f:
         packages = [val.strip() for val in f.readlines()]
 
-    print_ts('packages: {}'.format(len(packages)))
+    print_ts(f'packages: {len(packages)}')
 
     if os.path.isfile('packages_nodata.txt'):
-        with open('packages_nodata.txt', 'rt') as f:
+        with open('packages_nodata.txt') as f:
             packages_nodata = [val.strip() for val in f.readlines()]
             packages_nodata.sort()
 
-        print_ts('packages_nodata: {}'.format(len(packages_nodata)))
+        print_ts(f'packages_nodata: {len(packages_nodata)}')
 
         print_ts('removing packages with no files to process')
         packages_nodata_clean = []
@@ -1597,13 +1597,13 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
 
         packages_nodata_diff = len(packages_nodata) - len(packages_nodata_clean)
         if packages_nodata_diff:
-            with open('packages_nodata.txt', 'wt') as f:
+            with open('packages_nodata.txt', 'w') as f:
                 for pkg in packages_nodata_clean:
                     f.write(pkg + '\n')
 
-            print_ts('removed {} packages from packages_nodata.txt'.format(packages_nodata_diff))
+            print_ts(f'removed {packages_nodata_diff} packages from packages_nodata.txt')
 
-        print_ts('packages: {}'.format(len(packages)))
+        print_ts(f'packages: {len(packages)}')
 
     if len(packages) == 0:
         print_ts('fatal: there are no packages')
@@ -1611,7 +1611,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
 
     packageIndex = 0
     if os.path.isfile('package-index.txt'):
-        with open('package-index.txt', 'rt') as f:
+        with open('package-index.txt') as f:
             packageIndex = int(f.read())
         if packageIndex < 0 or packageIndex >= len(packages):
             packageIndex = 0
diff --git a/tools/donate-cpu.py b/tools/donate-cpu.py
index 81c752b141f..08f68a67ee7 100755
--- a/tools/donate-cpu.py
+++ b/tools/donate-cpu.py
@@ -55,7 +55,7 @@
         print('Stop time:' + stop_time)
     elif arg.startswith('-j'):
         if not re.match(r'-j\d+', arg):
-            print('Argument "{}" is invalid.'.format(arg))
+            print(f'Argument "{arg}" is invalid.')
             print('"-j" must be followed by a positive number.')
             sys.exit(1)
         print('Jobs:' + arg[2:])
@@ -66,7 +66,7 @@
         print('Added Package:' + pkg)
     elif arg.startswith('--packages='):
         pkg_cnt = len(package_urls)
-        with open(arg[arg.find('=')+1:], 'rt') as f:
+        with open(arg[arg.find('=')+1:]) as f:
             for package_url in f:
                 package_url = package_url.strip()
                 if not package_url:
@@ -92,7 +92,7 @@
         if max_packages < 0:
             max_packages = None
         if max_packages is None:
-            print('Error: Max. packages value "{}" is invalid. Must be a positive number or 0.'.format(arg_value))
+            print(f'Error: Max. packages value "{arg_value}" is invalid. Must be a positive number or 0.')
             sys.exit(1)
         # 0 means infinitely, no counting needed.
         if max_packages == 0:
@@ -145,7 +145,7 @@
 if package_urls:
     max_packages = len(package_urls)
 if max_packages:
-    print('Maximum number of packages to download and analyze: {}'.format(max_packages))
+    print(f'Maximum number of packages to download and analyze: {max_packages}')
 if not os.path.exists(work_path):
     os.mkdir(work_path)
 repo_path = os.path.join(work_path, 'repo')
@@ -158,15 +158,15 @@
 try:
     lib.try_retry(lib.clone_cppcheck, fargs=(repo_path, migrate_repo_path))
 except Exception as e:
-    print('Error: Failed to clone Cppcheck ({}), retry later'.format(e))
+    print(f'Error: Failed to clone Cppcheck ({e}), retry later')
     sys.exit(1)
 
 while True:
     if max_packages:
         if packages_processed >= max_packages:
-            print('Processed the specified number of {} package(s). Exiting now.'.format(max_packages))
+            print(f'Processed the specified number of {max_packages} package(s). Exiting now.')
             break
-        print('Processing package {} of the specified {} package(s).'.format(packages_processed + 1, max_packages))
+        print(f'Processing package {packages_processed + 1} of the specified {max_packages} package(s).')
         packages_processed += 1
     if stop_time:
         print('stop_time:' + stop_time + '. Time:' + time.strftime('%H:%M') + '.')
@@ -176,31 +176,31 @@
     try:
         cppcheck_versions = lib.try_retry(lib.get_cppcheck_versions, max_tries=3, sleep_duration=30.0, sleep_factor=1.0)
     except Exception as e:
-        print('Failed to get cppcheck versions from server ({}), retry later'.format(e))
+        print(f'Failed to get cppcheck versions from server ({e}), retry later')
         sys.exit(1)
     for ver in cppcheck_versions:
         if ver == 'head':
             ver = 'main'
         current_cppcheck_dir = os.path.join(work_path, 'tree-'+ver)
         if ver != 'main' and lib.has_binary(current_cppcheck_dir):
-            print('No need to check Cppcheck-{} for changes - binary already exists'.format(ver))
+            print(f'No need to check Cppcheck-{ver} for changes - binary already exists')
             continue
-        print('Checking Cppcheck-{} for changes..'.format(ver))
+        print(f'Checking Cppcheck-{ver} for changes..')
         try:
             has_changes = lib.try_retry(lib.checkout_cppcheck_version, fargs=(repo_path, ver, current_cppcheck_dir), max_tries=3, sleep_duration=30.0, sleep_factor=1.0)
         except KeyboardInterrupt as e:
             # Passthrough for user abort
             raise e
         except Exception as e:
-            print('Failed to update Cppcheck-{} ({}), retry later'.format(ver, e))
+            print(f'Failed to update Cppcheck-{ver} ({e}), retry later')
             sys.exit(1)
         if ver == 'main':
             if (has_changes or not lib.has_binary(current_cppcheck_dir)) and not lib.compile_cppcheck(current_cppcheck_dir):
-                print('Failed to compile Cppcheck-{}, retry later'.format(ver))
+                print(f'Failed to compile Cppcheck-{ver}, retry later')
                 sys.exit(1)
         else:
             if not lib.compile_version(current_cppcheck_dir):
-                print('Failed to compile Cppcheck-{}, retry later'.format(ver))
+                print(f'Failed to compile Cppcheck-{ver}, retry later')
                 sys.exit(1)
     if package_urls:
         package = package_urls[packages_processed-1]
@@ -208,7 +208,7 @@
         try:
             package = lib.get_package()
         except Exception as e:
-            print('Error: Failed to get package ({}), retry later'.format(e))
+            print(f'Error: Failed to get package ({e}), retry later')
             sys.exit(1)
     tgz = lib.download_package(work_path, package, bandwidth_limit)
     if tgz is None:
@@ -272,7 +272,7 @@ def get_client_version_head(path):
                 count += ' Crash!'
         else:
             count += ' ' + str(c)
-        elapsed_time += " {:.1f}".format(t)
+        elapsed_time += f" {t:.1f}"
         errout = errout.replace(work_path, '[...]')
         results_to_diff.append(errout)
         if ver == 'head':
@@ -310,5 +310,5 @@ def get_client_version_head(path):
     if not max_packages or packages_processed < max_packages:
         print('Sleep 5 seconds..')
         if (client_version_head is not None) and (Version(client_version_head) > Version(lib.get_client_version())):
-            print("ATTENTION: A newer client version ({}) is available - please update!".format(client_version_head))
+            print(f"ATTENTION: A newer client version ({client_version_head}) is available - please update!")
         time.sleep(5)
diff --git a/tools/donate_cpu_lib.py b/tools/donate_cpu_lib.py
index 44d3157bc40..7651ef92596 100644
--- a/tools/donate_cpu_lib.py
+++ b/tools/donate_cpu_lib.py
@@ -41,7 +41,7 @@ def detect_make():
         except OSError:
             continue
 
-        print("using '{}'".format(m))
+        print(f"using '{m}'")
         return m
 
     print("Error: a make command ({}) is required".format(','.join(make_cmds)))
@@ -66,14 +66,14 @@ def check_requirements():
             #print('{} --version'.format(app))
             subprocess.check_call([app, '--version'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
         except OSError:
-            print("Error: '{}' is required".format(app))
+            print(f"Error: '{app}' is required")
             result = False
 
     try:
         # pylint: disable-next=unused-import - intentional
         import psutil
     except ImportError as e:
-        print("Error: {}. Module is required.".format(e))
+        print(f"Error: {e}. Module is required.")
         result = False
 
     return result
@@ -90,13 +90,13 @@ def try_retry(fun, fargs=(), max_tries=5, sleep_duration=5.0, sleep_factor=2.0):
             raise e
         except BaseException as e:
             if i < max_tries - 1:
-                print("{} in {}: {}".format(type(e).__name__, fun.__name__, str(e)))
-                print("Trying {} again in {} seconds".format(fun.__name__, sleep_duration))
+                print(f"{type(e).__name__} in {fun.__name__}: {str(e)}")
+                print(f"Trying {fun.__name__} again in {sleep_duration} seconds")
                 time.sleep(sleep_duration)
                 sleep_duration *= sleep_factor
                 # do not return - re-try
             else:
-                print("Maximum number of tries reached for {}".format(fun.__name__))
+                print(f"Maximum number of tries reached for {fun.__name__}")
                 raise e
 
 
@@ -123,7 +123,7 @@ def checkout_cppcheck_version(repo_path, version, cppcheck_path):
     if not os.path.isabs(cppcheck_path):
         raise ValueError("cppcheck_path is not an absolute path")
     if os.path.exists(cppcheck_path):
-        print('Checking out {}'.format(version))
+        print(f'Checking out {version}')
         subprocess.check_call(['git', 'checkout', '-f', version], cwd=cppcheck_path)
 
         # It is possible to pull branches, not tags
@@ -132,7 +132,7 @@ def checkout_cppcheck_version(repo_path, version, cppcheck_path):
 
         hash_old = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=cppcheck_path).strip()
 
-        print('Pulling {}'.format(version))
+        print(f'Pulling {version}')
         # --rebase is a workaround for a dropped commit - see https://github.com/danmar/cppcheck/pull/6904
         # TODO: drop the commit in question
         # TOD: remove --rebase
@@ -146,18 +146,18 @@ def checkout_cppcheck_version(repo_path, version, cppcheck_path):
         return has_changes
 
     if version != 'main':
-        print('Fetching {}'.format(version))
+        print(f'Fetching {version}')
         # Since this is a shallow clone, explicitly fetch the remote version tag
         refspec = 'refs/tags/' + version + ':ref/tags/' + version
         subprocess.check_call(['git', 'fetch', '--depth=1', 'origin', refspec], cwd=repo_path)
-    print('Adding worktree \'{}\' for {}'.format(cppcheck_path, version))
+    print(f'Adding worktree \'{cppcheck_path}\' for {version}')
     subprocess.check_call(['git', 'worktree', 'add', cppcheck_path,  version], cwd=repo_path)
     return True
 
 
 def get_cppcheck_info(cppcheck_path):
     try:
-        return subprocess.check_output(['git', 'show', "--pretty=%h (%ci)", 'HEAD', '--no-patch', '--no-notes'], universal_newlines=True, cwd=cppcheck_path).strip()
+        return subprocess.check_output(['git', 'show', "--pretty=%h (%ci)", 'HEAD', '--no-patch', '--no-notes'], text=True, cwd=cppcheck_path).strip()
     except:
         return ''
 
@@ -201,7 +201,7 @@ def compile_version(cppcheck_path):
 
 
 def compile_cppcheck(cppcheck_path):
-    print('Compiling {}'.format(os.path.basename(cppcheck_path)))
+    print(f'Compiling {os.path.basename(cppcheck_path)}')
 
     cppcheck_bin = __get_cppcheck_binary(cppcheck_path)
     # remove file so interrupted "main" branch compilation is being resumed
@@ -228,7 +228,7 @@ def compile_cppcheck(cppcheck_path):
                 build_cmd.append('RDYNAMIC=-lshlwapi')
             subprocess.check_call(build_cmd, cwd=cppcheck_path, env=build_env)
     except Exception as e:
-        print('Compilation failed: {}'.format(e))
+        print(f'Compilation failed: {e}')
         return False
 
     try:
@@ -237,7 +237,7 @@ def compile_cppcheck(cppcheck_path):
         else:
             subprocess.check_call([os.path.join(cppcheck_path, 'cppcheck'), '--version'], cwd=cppcheck_path)
     except Exception as e:
-        print('Running Cppcheck failed: {}'.format(e))
+        print(f'Running Cppcheck failed: {e}')
         # remove faulty binary
         if os.path.isfile(cppcheck_bin):
             os.remove(cppcheck_bin)
@@ -312,7 +312,7 @@ def rmtree_func():
     try:
         try_retry(rmtree_func, max_tries=5, sleep_duration=30, sleep_factor=1)
     except Exception as e:
-        print('Failed to cleanup {}: {}'.format(folder_name, e))
+        print(f'Failed to cleanup {folder_name}: {e}')
         sys.exit(1)
 
 
@@ -447,7 +447,7 @@ def scan_package(cppcheck_path, source_path, libraries, capture_callstack=True,
 
     # TODO: temporarily disabled timing information - use --showtime=top5_summary when next version is released
     # Reference for GNU C: https://gcc.gnu.org/onlinedocs/cpp/Common-Predefined-Macros.html
-    options = '{} --inconclusive --enable={} --inline-suppr --template=daca2'.format(libs, enable)
+    options = f'{libs} --inconclusive --enable={enable} --inline-suppr --template=daca2'
     if 'information' in enable:
         # TODO: remove missingInclude disabling after 2.16 has been released
         options += ' --disable=missingInclude --suppress=unmatchedSuppression'
@@ -459,7 +459,7 @@ def scan_package(cppcheck_path, source_path, libraries, capture_callstack=True,
         options += ' --check-library --debug-warnings --suppress=autoNoType --suppress=valueFlowBailout' \
                    ' --suppress=bailoutUninitVar --suppress=symbolDatabaseWarning --suppress=normalCheckLevelConditionExpressions'
     options += ' -D__GNUC__ --platform=unix64'
-    options_rp = options + ' -rp={}'.format(dir_to_scan)
+    options_rp = options + f' -rp={dir_to_scan}'
     if __make_cmd == 'msbuild.exe':
         cppcheck_cmd = os.path.join(cppcheck_path, 'bin', 'cppcheck.exe') + ' ' + options_rp
         cmd = cppcheck_cmd + ' ' + __jobs + ' ' + dir_to_scan
@@ -519,7 +519,7 @@ def scan_package(cppcheck_path, source_path, libraries, capture_callstack=True,
                 sig_num = int(ie_line[sig_start_pos:ie_line.find(' ', sig_start_pos)])
             # break on the first signalled file for now
             break
-    print('cppcheck finished with ' + str(returncode) + ('' if sig_num == -1 else ' (signal ' + str(sig_num) + ')') + ' in {:.1f}s'.format(elapsed_time))
+    print('cppcheck finished with ' + str(returncode) + ('' if sig_num == -1 else ' (signal ' + str(sig_num) + ')') + f' in {elapsed_time:.1f}s')
 
     options_j = options + ' ' + __jobs
 
@@ -637,8 +637,8 @@ def __send_all(connection, data):
 def __upload(cmd, data, cmd_info):
     with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
         sock.connect(__server_address)
-        __send_all(sock, '{}\n{}'.format(cmd, data))
-    print('{} has been successfully uploaded.'.format(cmd_info))
+        __send_all(sock, f'{cmd}\n{data}')
+    print(f'{cmd_info} has been successfully uploaded.')
     return True
 
 
@@ -651,7 +651,7 @@ def upload_results(package, results):
     try:
         try_retry(__upload, fargs=('write\n' + package, results + '\nDONE', 'Result'), max_tries=20, sleep_duration=15, sleep_factor=1)
     except Exception as e:
-        print('Result upload failed ({})!'.format(e))
+        print(f'Result upload failed ({e})!')
         return False
 
     return True
@@ -666,7 +666,7 @@ def upload_info(package, info_output):
     try:
         try_retry(__upload, fargs=('write_info\n' + package, info_output + '\nDONE', 'Information'), max_tries=20, sleep_duration=15, sleep_factor=1)
     except Exception as e:
-        print('Information upload failed ({})!'.format(e))
+        print(f'Information upload failed ({e})!')
         return False
 
     return True
@@ -676,7 +676,7 @@ def upload_nodata(package):
     try:
         try_retry(__upload, fargs=('write_nodata\n' + package, '', 'No-data status'), max_tries=3, sleep_duration=30, sleep_factor=1)
     except Exception as e:
-        print('No-data upload failed ({})!'.format(e))
+        print(f'No-data upload failed ({e})!')
         return False
 
     return True
@@ -738,10 +738,10 @@ def __iterate_files(self, path, has_include_cb):
             for name in files:
                 filename = os.path.join(root, name)
                 try:
-                    with open(filename, 'rt', errors='ignore') as f:
+                    with open(filename, errors='ignore') as f:
                         filedata = f.read()
                     has_include_cb(filedata)
-                except IOError:
+                except OSError:
                     pass
 
     def get_libraries(self, folder):
@@ -762,7 +762,7 @@ def has_include(filedata):
                 del library_includes_re[lib_d]
 
         self.__iterate_files(folder, has_include)
-        print('Found libraries: {}'.format(libraries))
+        print(f'Found libraries: {libraries}')
         return libraries
 
 
diff --git a/tools/get_checkers.py b/tools/get_checkers.py
index 4594b773dc2..93377d74d6a 100644
--- a/tools/get_checkers.py
+++ b/tools/get_checkers.py
@@ -7,7 +7,7 @@
 def print_checkers(glob_pattern:str):
     checkers = {}
     for filename in glob.glob(glob_pattern):
-        for line in open(filename,'rt'):
+        for line in open(filename):
             res = re.match(r'[ \t]*logChecker\(\s*"([^"]+)"\s*\);.*', line)
             if res is None:
                 continue
@@ -17,7 +17,7 @@ def print_checkers(glob_pattern:str):
                 req = ''
             checkers[res.group(1)] = req
     for c,req in dict(sorted(checkers.items())).items():
-        print('        {"%s","%s"},' % (c, req))
+        print('        {{"{}","{}"}},'.format(c, req))
 
 
 print("""/*
@@ -56,7 +56,7 @@ def print_checkers(glob_pattern:str):
     const char Dis[] = "Disapplied";""")
 
 for version in (2012, 2023, 2025):
-    with open(os.path.expanduser('~/cppchecksolutions/addon/coverage/misra-c-%i.txt' % version), 'rt') as f:
+    with open(os.path.expanduser('~/cppchecksolutions/addon/coverage/misra-c-%i.txt' % version)) as f:
         all_guidelines = f.read()
 
     if version == 2012:
@@ -68,7 +68,7 @@ def print_checkers(glob_pattern:str):
     for line in all_guidelines.split('\n'):
         res = re.match(r'Dir\s+(\d+)[.](\d+)\s+(\w+).*', line)
         if res:
-            a = amd.get('%s.%s' % (res.group(1), res.group(2)), 0)
+            a = amd.get('{}.{}'.format(res.group(1), res.group(2)), 0)
             print('        {%s,%s,%s,%i},' % (res.group(1), res.group(2), res.group(3)[:3], a))
     print('    };')
 
@@ -120,7 +120,7 @@ def print_checkers(glob_pattern:str):
     for line in all_guidelines.split('\n'):
         res = re.match(r'Rule\s+(\d+)[.](\d+)\s+(\w+).*', line)
         if res:
-            a = amd.get('%s.%s' % (res.group(1), res.group(2)), 0)
+            a = amd.get('{}.{}'.format(res.group(1), res.group(2)), 0)
             comment = '' if a == 0 else ' // Amendment %i' % a
             print('        {%s,%s,%s,%i},%s' % (res.group(1), res.group(2), res.group(3)[:3], a, comment))
     print('    };')
@@ -929,7 +929,7 @@ def getCertCInfo(main_url:str):
             if res:
                 if res.group(1) == 'EXP40-C' and 'EXP39-C' not in rules:
                     print('    {"EXP39-C", "L2"},')
-                print('    {"%s", "%s"},' % (res.group(1), res.group(2)))
+                print('    {{"{}", "{}"}},'.format(res.group(1), res.group(2)))
                 rules.append(res.group(1))
         if 'EXP45-C' in rules:
             if 'EXP46-C' not in rules:
diff --git a/tools/listErrorsWithoutCWE.py b/tools/listErrorsWithoutCWE.py
index 0a78ff5bd36..185af5abcce 100755
--- a/tools/listErrorsWithoutCWE.py
+++ b/tools/listErrorsWithoutCWE.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-from __future__ import print_function
 import argparse
 import xml.etree.ElementTree as ET
 
diff --git a/tools/matchcompiler.py b/tools/matchcompiler.py
index bb03b6822e2..fc010b275a2 100755
--- a/tools/matchcompiler.py
+++ b/tools/matchcompiler.py
@@ -189,8 +189,8 @@ def _compileCmd(tok):
         if (len(tok) > 2) and (tok[0] == "%"):
             print("unhandled:" + tok)
         elif tok in tokTypes:
-            cond = ' || '.join(['tok->tokType() == Token::{}'.format(tokType) for tokType in tokTypes[tok]])
-            return '(({cond}) && tok->str() == MatchCompiler::makeConstString("{tok}"))'.format(cond=cond, tok=tok)
+            cond = ' || '.join([f'tok->tokType() == Token::{tokType}' for tokType in tokTypes[tok]])
+            return f'(({cond}) && tok->str() == MatchCompiler::makeConstString("{tok}"))'
         return (
             '(tok->str() == MatchCompiler::makeConstString("' + tok + '"))'
         )
@@ -679,7 +679,7 @@ def _replaceCStrings(self, line):
     def convertFile(self, srcname, destname, line_directive):
         self._reset()
 
-        with io.open(srcname, "rt", encoding="utf-8") as fin:
+        with open(srcname, encoding="utf-8") as fin:
             srclines = fin.readlines()
 
         code = ''
@@ -736,7 +736,7 @@ def convertFile(self, srcname, destname, line_directive):
             footer += '#endif\n'
             footer += '#undef MAYBE_UNUSED\n'
 
-        with io.open(destname, 'wt', encoding="utf-8") as fout:
+        with open(destname, 'w', encoding="utf-8") as fout:
             if modified or len(self._rawMatchFunctions):
                 fout.write(header)
                 fout.write(strFunctions)
diff --git a/tools/parse-glibc.py b/tools/parse-glibc.py
index 6aaef2be32c..5bd58b3569c 100644
--- a/tools/parse-glibc.py
+++ b/tools/parse-glibc.py
@@ -33,13 +33,13 @@ def checknonnull(cfg, functionName, nonnull):
 
 
 def parseheader(cppcheckpath, filename):
-    with open(filename, 'rt') as f:
+    with open(filename) as f:
         data = f.read()
 
-    with open(cppcheckpath + '/cfg/std.cfg', 'rt') as f:
+    with open(cppcheckpath + '/cfg/std.cfg') as f:
         stdcfg = f.read()
 
-    with open(cppcheckpath + '/cfg/posix.cfg', 'rt') as f:
+    with open(cppcheckpath + '/cfg/posix.cfg') as f:
         posixcfg = f.read()
 
     while '/*' in data:
diff --git a/tools/reduce.py b/tools/reduce.py
index 472dff7d98e..aa791de0c76 100755
--- a/tools/reduce.py
+++ b/tools/reduce.py
@@ -84,11 +84,11 @@ def runtool(self, filedata=None):
         else:
             # Something could be wrong, for example the command line for Cppcheck (CMD).
             # Print the output to give a hint how to fix it.
-            print('Error: {}\n{}'.format(stdout, stderr))
+            print(f'Error: {stdout}\n{stderr}')
         return False
 
     def __writefile(self, filename, filedata):
-        with open(filename, 'wt') as f:
+        with open(filename, 'w') as f:
             for line in filedata:
                 f.write(line)
 
@@ -316,9 +316,9 @@ def show_syntax():
         sys.exit(1)
     elapsed_time = time.time() - t
     reduce.set_elapsed_time(elapsed_time)
-    print('elapsed_time: {}'.format(elapsed_time))
+    print(f'elapsed_time: {elapsed_time}')
 
-    with open(arg_file, 'rt') as f:
+    with open(arg_file) as f:
         filedata = f.readlines()
 
     reduce.writeorigfile(filedata)
diff --git a/tools/test-my-pr.py b/tools/test-my-pr.py
index 4a40f544ee1..599724d35ee 100755
--- a/tools/test-my-pr.py
+++ b/tools/test-my-pr.py
@@ -16,7 +16,7 @@
 
 def format_float(a, b=1):
     if a > 0 and b > 0:
-        return '{:.2f}'.format(a / b)
+        return f'{a / b:.2f}'
     return 'N/A'
 
 
@@ -65,13 +65,13 @@ def format_float(a, b=1):
     try:
         lib.clone_cppcheck(repo_dir, old_repo_dir)
     except Exception as e:
-        print('Failed to clone Cppcheck repository ({}), retry later'.format(e))
+        print(f'Failed to clone Cppcheck repository ({e}), retry later')
         sys.exit(1)
 
     try:
         lib.checkout_cppcheck_version(repo_dir, 'main', main_dir)
     except Exception as e:
-        print('Failed to checkout main ({}), retry later'.format(e))
+        print(f'Failed to checkout main ({e}), retry later')
         sys.exit(1)
 
     try:
@@ -87,7 +87,7 @@ def format_float(a, b=1):
         subprocess.check_call(['git', 'fetch', '--depth=1', 'origin', commit_id])
         subprocess.check_call(['git', 'checkout', '-f', commit_id])
     except BaseException as e:
-        print('Error: {}'.format(e))
+        print(f'Error: {e}')
         print('Failed to switch to common ancestor of your branch and main')
         sys.exit(1)
 
diff --git a/tools/trac-keywords.py b/tools/trac-keywords.py
index 7a8f9473517..40b040acaab 100644
--- a/tools/trac-keywords.py
+++ b/tools/trac-keywords.py
@@ -1,4 +1,3 @@
-
 import subprocess
 import sys
 
diff --git a/tools/triage_py/triage_version.py b/tools/triage_py/triage_version.py
index c87af3bcdf8..fc0c9ffb9c5 100644
--- a/tools/triage_py/triage_version.py
+++ b/tools/triage_py/triage_version.py
@@ -67,11 +67,11 @@ def sort_commit_hashes(commits):
     versions.append(filename)
 
 if not len(versions):
-    print("error: no versions found in '{}'".format(directory))
+    print(f"error: no versions found in '{directory}'")
     sys.exit(1)
 
 if verbose:
-    print("found {} versions in '{}'".format(len(versions), directory))
+    print(f"found {len(versions)} versions in '{directory}'")
 
 try:
     Version(versions[0])
@@ -79,12 +79,12 @@ def sort_commit_hashes(commits):
     versions.sort(key=Version)
 except:
     if verbose:
-        print("'{}' not a version - assuming commit hashes".format(versions[0]))
+        print(f"'{versions[0]}' not a version - assuming commit hashes")
     if not git_repo:
         print('error: git repository argument required for commit hash sorting')
         sys.exit(1)
     if verbose:
-        print("using git repository '{}' to sort commit hashes".format(git_repo))
+        print(f"using git repository '{git_repo}' to sort commit hashes")
     use_hashes = True
     # if you use the folder from the bisect script that contains the repo as a folder - so remove it from the list
     if versions.count('cppcheck'):
@@ -110,7 +110,7 @@ def sort_commit_hashes(commits):
         sys.exit(1)
 
 if verbose:
-    print("analyzing '{}'".format(input_file))
+    print(f"analyzing '{input_file}'")
 
 last_udiff_version = ''
 last_ec = None
@@ -209,15 +209,15 @@ def sort_commit_hashes(commits):
         if not use_hashes:
             ver_str = version
         else:
-            ver_str = '{} ({})'.format(entry, version)
+            ver_str = f'{entry} ({version})'
         if args.perf:
             if out == "timeout":
                 data_str = "0.0" # TODO: how to handle these properly?
             elif ec != 0:
                 continue # skip errors
             else:
-                data_str = '{}'.format((end - start) / 1000.0 / 1000.0 / 1000.0)
-            print('"{}",{}'.format(ver_str, data_str))
+                data_str = f'{(end - start) / 1000.0 / 1000.0 / 1000.0}'
+            print(f'"{ver_str}",{data_str}')
             continue
         print(ver_str)
         print(ec)
@@ -244,7 +244,7 @@ def sort_commit_hashes(commits):
         if not use_hashes:
             print(version)
         else:
-            print('{} ({})'.format(entry, version))
+            print(f'{entry} ({version})')
 
         last_ec = ec
         last_out = out
@@ -255,12 +255,12 @@ def sort_commit_hashes(commits):
 
     if last_ec != ec:
         if verbose:
-            print("{}: exitcode changed".format(version))
+            print(f"{version}: exitcode changed")
         do_print = True
 
     if last_out != out:
         if verbose:
-            print("{}: output changed".format(version))
+            print(f"{version}: output changed")
         do_print = True
         if args.diff:
             udiff = difflib.unified_diff(last_out.splitlines(True), out.splitlines(True), fromfile=last_udiff_version, tofile=version)
@@ -278,7 +278,7 @@ def sort_commit_hashes(commits):
         if not use_hashes:
             print(version)
         else:
-            print('{} ({})'.format(entry, version))
+            print(f'{entry} ({version})')
 
     last_ec = ec
     last_out = out