diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..6fddca0 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,7 @@ +version: 2 +updates: + # Maintain dependencies for GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 71cd06e..b08e831 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -13,43 +13,59 @@ jobs: image: centos:7 steps: - uses: actions/checkout@v2 + - uses: codespell-project/actions-codespell@master + with: + ignore_words_list: fo,wee - name: Install dependencies run: | curl -L -O https://github.com/Kitware/CMake/releases/download/v3.16.4/cmake-3.16.4-Linux-x86_64.sh chmod +x cmake-3.16.4-Linux-x86_64.sh ./cmake-3.16.4-Linux-x86_64.sh --skip-license --prefix=/usr/local - curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-16.02-10.el7.x86_64.rpm - curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-plugins-16.02-10.el7.x86_64.rpm - rpm -U --quiet p7zip-16.02-10.el7.x86_64.rpm - rpm -U --quiet p7zip-plugins-16.02-10.el7.x86_64.rpm - yum install -y make gcc-c++ + curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-16.02-20.el7.x86_64.rpm + curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-plugins-16.02-20.el7.x86_64.rpm + rpm -U --quiet p7zip-16.02-20.el7.x86_64.rpm + rpm -U --quiet p7zip-plugins-16.02-20.el7.x86_64.rpm + yum install -y make gcc-c++ libasan clang-analyzer - - name: Build ninja + - name: Build debug ninja + shell: bash + env: + CFLAGS: -fstack-protector-all -fsanitize=address + CXXFLAGS: -fstack-protector-all -fsanitize=address + run: | + scan-build -o scanlogs cmake -DCMAKE_BUILD_TYPE=Debug -B debug-build + scan-build -o scanlogs cmake --build debug-build --parallel --config Debug + + - name: Test debug ninja + run: ./ninja_test + working-directory: debug-build + + - name: Build release ninja shell: bash run: | - cmake -DCMAKE_BUILD_TYPE=Release -B build - cmake --build build --parallel --config Release - strip build/ninja + cmake -DCMAKE_BUILD_TYPE=Release -B release-build + cmake --build release-build --parallel --config Release + strip release-build/ninja - - name: Test ninja + - name: Test release ninja run: ./ninja_test - working-directory: build + working-directory: release-build - name: Create ninja archive run: | mkdir artifact - 7z a artifact/ninja-linux.zip ./build/ninja + 7z a artifact/ninja-linux.zip ./release-build/ninja # Upload ninja binary archive as an artifact - name: Upload artifact - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v3 with: name: ninja-binary-archives path: artifact - name: Upload release asset if: github.event.action == 'published' - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -57,3 +73,136 @@ jobs: asset_path: ./artifact/ninja-linux.zip asset_name: ninja-linux.zip asset_content_type: application/zip + + test: + runs-on: [ubuntu-latest] + container: + image: ubuntu:20.04 + steps: + - uses: actions/checkout@v2 + - name: Install dependencies + run: | + apt update + apt install -y python3-pytest ninja-build clang-tidy python3-pip clang libgtest-dev + pip3 install cmake==3.17.* + - name: Configure (GCC) + run: cmake 
-Bbuild-gcc -DCMAKE_BUILD_TYPE=Debug -G'Ninja Multi-Config' + + - name: Build (GCC, Debug) + run: cmake --build build-gcc --config Debug + - name: Unit tests (GCC, Debug) + run: ./build-gcc/Debug/ninja_test + - name: Python tests (GCC, Debug) + run: pytest-3 --color=yes ../.. + working-directory: build-gcc/Debug + + - name: Build (GCC, Release) + run: cmake --build build-gcc --config Release + - name: Unit tests (GCC, Release) + run: ./build-gcc/Release/ninja_test + - name: Python tests (GCC, Release) + run: pytest-3 --color=yes ../.. + working-directory: build-gcc/Release + + - name: Configure (Clang) + run: CC=clang CXX=clang++ cmake -Bbuild-clang -DCMAKE_BUILD_TYPE=Debug -G'Ninja Multi-Config' -DCMAKE_EXPORT_COMPILE_COMMANDS=1 + + - name: Build (Clang, Debug) + run: cmake --build build-clang --config Debug + - name: Unit tests (Clang, Debug) + run: ./build-clang/Debug/ninja_test + - name: Python tests (Clang, Debug) + run: pytest-3 --color=yes ../.. + working-directory: build-clang/Debug + + - name: Build (Clang, Release) + run: cmake --build build-clang --config Release + - name: Unit tests (Clang, Release) + run: ./build-clang/Release/ninja_test + - name: Python tests (Clang, Release) + run: pytest-3 --color=yes ../.. + working-directory: build-clang/Release + + - name: clang-tidy + run: /usr/lib/llvm-10/share/clang/run-clang-tidy.py -header-filter=src + working-directory: build-clang + + build-with-python: + runs-on: [ubuntu-latest] + container: + image: ${{ matrix.image }} + strategy: + matrix: + image: ['ubuntu:14.04', 'ubuntu:16.04', 'ubuntu:18.04'] + steps: + - uses: actions/checkout@v2 + - name: Install dependencies + run: | + apt update + apt install -y g++ python3 + - name: ${{ matrix.image }} + run: | + python3 configure.py --bootstrap + ./ninja all + python3 misc/ninja_syntax_test.py + ./misc/output_test.py + + build-aarch64: + name: Build Linux ARM64 + runs-on: [ubuntu-latest] + steps: + - uses: actions/checkout@v3 + + - name: Build + uses: uraimo/run-on-arch-action@v2 + with: + arch: aarch64 + distro: ubuntu18.04 + githubToken: ${{ github.token }} + dockerRunArgs: | + --volume "${PWD}:/ninja" + install: | + apt-get update -q -y + apt-get install -q -y make gcc g++ libasan5 clang-tools curl p7zip-full file + run: | + set -x + cd /ninja + + # INSTALL CMAKE + CMAKE_VERSION=3.23.4 + curl -L -O https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-Linux-aarch64.sh + chmod +x cmake-${CMAKE_VERSION}-Linux-aarch64.sh + ./cmake-${CMAKE_VERSION}-Linux-aarch64.sh --skip-license --prefix=/usr/local + + # BUILD + cmake -DCMAKE_BUILD_TYPE=Release -B release-build + cmake --build release-build --parallel --config Release + strip release-build/ninja + file release-build/ninja + + # TEST + pushd release-build + ./ninja_test + popd + + # CREATE ARCHIVE + mkdir artifact + 7z a artifact/ninja-linux-aarch64.zip ./release-build/ninja + + # Upload ninja binary archive as an artifact + - name: Upload artifact + uses: actions/upload-artifact@v3 + with: + name: ninja-binary-archives + path: artifact + + - name: Upload release asset + if: github.event.action == 'published' + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ github.event.release.upload_url }} + asset_path: ./artifact/ninja-linux-aarch64.zip + asset_name: ninja-linux-aarch64.zip + asset_content_type: application/zip diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 411cfe1..d3dd9ee 100644 --- 
a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -8,7 +8,7 @@ on: jobs: build: - runs-on: macOS-latest + runs-on: macos-12 steps: - uses: actions/checkout@v2 @@ -19,31 +19,31 @@ jobs: - name: Build ninja shell: bash env: - MACOSX_DEPLOYMENT_TARGET: 10.12 + MACOSX_DEPLOYMENT_TARGET: 10.15 run: | - cmake -DCMAKE_BUILD_TYPE=Release -B build - cmake --build build --parallel --config Release + cmake -Bbuild -GXcode '-DCMAKE_OSX_ARCHITECTURES=arm64;x86_64' + cmake --build build --config Release - name: Test ninja - run: ctest -vv + run: ctest -C Release -vv working-directory: build - name: Create ninja archive shell: bash run: | mkdir artifact - 7z a artifact/ninja-mac.zip ./build/ninja + 7z a artifact/ninja-mac.zip ./build/Release/ninja # Upload ninja binary archive as an artifact - name: Upload artifact - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v3 with: name: ninja-binary-archives path: artifact - name: Upload release asset if: github.event.action == 'published' - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 04fc2f6..e169eb4 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -10,6 +10,15 @@ jobs: build: runs-on: windows-latest + strategy: + fail-fast: false + matrix: + include: + - arch: 'x64' + suffix: '' + - arch: 'arm64' + suffix: 'arm64' + steps: - uses: actions/checkout@v2 @@ -19,10 +28,17 @@ jobs: - name: Build ninja shell: bash run: | - cmake -DCMAKE_BUILD_TYPE=Release -B build + cmake -Bbuild -A ${{ matrix.arch }} + cmake --build build --parallel --config Debug cmake --build build --parallel --config Release - - name: Test ninja + - name: Test ninja (Debug) + if: matrix.arch != 'arm64' + run: .\ninja_test.exe + working-directory: build/Debug + + - name: Test ninja (Release) + if: matrix.arch != 'arm64' run: .\ninja_test.exe working-directory: build/Release @@ -30,22 +46,22 @@ jobs: shell: bash run: | mkdir artifact - 7z a artifact/ninja-win.zip ./build/Release/ninja.exe + 7z a artifact/ninja-win${{ matrix.suffix }}.zip ./build/Release/ninja.exe # Upload ninja binary archive as an artifact - name: Upload artifact - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v3 with: name: ninja-binary-archives path: artifact - name: Upload release asset if: github.event.action == 'published' - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ github.event.release.upload_url }} - asset_path: ./artifact/ninja-win.zip - asset_name: ninja-win.zip + asset_path: ./artifact/ninja-win${{ matrix.suffix }}.zip + asset_name: ninja-win${{ matrix.suffix }}.zip asset_content_type: application/zip diff --git a/.gitignore b/.gitignore index dca1129..ca36ec8 100644 --- a/.gitignore +++ b/.gitignore @@ -38,3 +38,12 @@ # Qt Creator project files /CMakeLists.txt.user + +# clangd +/.clangd/ +/compile_commands.json +/.cache/ + +# Visual Studio files +/.vs/ +/out/ diff --git a/CMakeLists.txt b/CMakeLists.txt index 70fc5e9..89bf88a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -3,7 +3,10 @@ cmake_minimum_required(VERSION 3.15) include(CheckSymbolExists) include(CheckIPOSupported) -project(ninja) +option(NINJA_BUILD_BINARY "Build ninja binary" ON) +option(NINJA_FORCE_PSELECT "Use pselect() even on platforms that provide ppoll()" OFF) + +project(ninja CXX) # --- 
optional link-time optimization check_ipo_supported(RESULT lto_supported OUTPUT error) @@ -19,6 +22,8 @@ endif() if(MSVC) set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$:Debug>") string(REPLACE "/GR" "" CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS}) + # Note that these settings are separately specified in configure.py, and + # these lists should be kept in sync. add_compile_options(/W4 /wd4100 /wd4267 /wd4706 /wd4702 /wd4244 /GR- /Zc:__cplusplus) add_compile_definitions(_CRT_SECURE_NO_WARNINGS) else() @@ -31,11 +36,34 @@ else() if(flag_color_diag) add_compile_options(-fdiagnostics-color) endif() + + if(NOT NINJA_FORCE_PSELECT) + # Check whether ppoll() is usable on the target platform. + # Set -DUSE_PPOLL=1 if this is the case. + # + # NOTE: Use check_cxx_symbol_exists() instead of check_symbol_exists() + # because on Linux, only exposes the symbol when _GNU_SOURCE + # is defined. + # + # Both g++ and clang++ define the symbol by default, because the C++ + # standard library headers require it, but *not* gcc and clang, which + # are used by check_symbol_exists(). + include(CheckSymbolExists) + check_symbol_exists(ppoll poll.h HAVE_PPOLL) + if(HAVE_PPOLL) + add_compile_definitions(USE_PPOLL=1) + endif() + endif() endif() # --- optional re2c +set(RE2C_MAJOR_VERSION 0) find_program(RE2C re2c) if(RE2C) + execute_process(COMMAND "${RE2C}" --vernum OUTPUT_VARIABLE RE2C_RAW_VERSION) + math(EXPR RE2C_MAJOR_VERSION "${RE2C_RAW_VERSION} / 10000") +endif() +if(${RE2C_MAJOR_VERSION} GREATER 1) # the depfile parser and ninja lexers are generated using re2c. function(re2c IN OUT) add_custom_command(DEPENDS ${IN} OUTPUT ${OUT} @@ -46,7 +74,7 @@ if(RE2C) re2c(${PROJECT_SOURCE_DIR}/src/lexer.in.cc ${PROJECT_BINARY_DIR}/lexer.cc) add_library(libninja-re2c OBJECT ${PROJECT_BINARY_DIR}/depfile_parser.cc ${PROJECT_BINARY_DIR}/lexer.cc) else() - message(WARNING "re2c was not found; changes to src/*.in.cc will not affect your build.") + message(WARNING "re2c 2 or later was not found; changes to src/*.in.cc will not affect your build.") add_library(libninja-re2c OBJECT src/depfile_parser.cc src/lexer.cc) endif() target_include_directories(libninja-re2c PRIVATE src) @@ -86,6 +114,8 @@ function(check_platform_supports_browse_mode RESULT) endfunction() +set(NINJA_PYTHON "python" CACHE STRING "Python interpreter to use for the browse tool") + check_platform_supports_browse_mode(platform_supports_ninja_browse) # Core source files all build into ninja library. @@ -124,10 +154,18 @@ if(WIN32) src/getopt.c src/minidump-win32.cc ) + # Build getopt.c, which can be compiled as either C or C++, as C++ + # so that build environments which lack a C compiler, but have a C++ + # compiler may build ninja. + set_source_files_properties(src/getopt.c PROPERTIES LANGUAGE CXX) else() target_sources(libninja PRIVATE src/subprocess-posix.cc) if(CMAKE_SYSTEM_NAME STREQUAL "OS400" OR CMAKE_SYSTEM_NAME STREQUAL "AIX") target_sources(libninja PRIVATE src/getopt.c) + # Build getopt.c, which can be compiled as either C or C++, as C++ + # so that build environments which lack a C compiler, but have a C++ + # compiler may build ninja. 
+ set_source_files_properties(src/getopt.c PROPERTIES LANGUAGE CXX) endif() # Needed for perfstat_cpu_total @@ -136,6 +174,8 @@ else() endif() endif() +target_compile_features(libninja PUBLIC cxx_std_11) + #Fixes GetActiveProcessorCount on MinGW if(MINGW) target_compile_definitions(libninja PRIVATE _WIN32_WINNT=0x0601 __USE_MINGW_ANSI_STDIO=1) @@ -148,11 +188,13 @@ if(CMAKE_SYSTEM_NAME STREQUAL "OS400" OR CMAKE_SYSTEM_NAME STREQUAL "AIX") endif() # Main executable is library plus main() function. -add_executable(ninja src/ninja.cc) -target_link_libraries(ninja PRIVATE libninja libninja-re2c) +if(NINJA_BUILD_BINARY) + add_executable(ninja src/ninja.cc) + target_link_libraries(ninja PRIVATE libninja libninja-re2c) -if(WIN32) - target_sources(ninja PRIVATE windows/ninja.manifest) + if(WIN32) + target_sources(ninja PRIVATE windows/ninja.manifest) + endif() endif() # Adds browse mode into the ninja binary if it's supported by the host platform. @@ -171,18 +213,58 @@ if(platform_supports_ninja_browse) VERBATIM ) - target_compile_definitions(ninja PRIVATE NINJA_HAVE_BROWSE) - target_sources(ninja PRIVATE src/browse.cc) + if(NINJA_BUILD_BINARY) + target_compile_definitions(ninja PRIVATE NINJA_HAVE_BROWSE) + target_sources(ninja PRIVATE src/browse.cc) + endif() set_source_files_properties(src/browse.cc PROPERTIES OBJECT_DEPENDS "${PROJECT_BINARY_DIR}/build/browse_py.h" INCLUDE_DIRECTORIES "${PROJECT_BINARY_DIR}" - COMPILE_DEFINITIONS NINJA_PYTHON="python" + COMPILE_DEFINITIONS NINJA_PYTHON="${NINJA_PYTHON}" ) endif() include(CTest) if(BUILD_TESTING) + find_package(GTest) + if(NOT GTest_FOUND) + include(FetchContent) + FetchContent_Declare( + googletest + URL https://github.com/google/googletest/archive/release-1.10.0.tar.gz + URL_HASH SHA1=9c89be7df9c5e8cb0bc20b3c4b39bf7e82686770 + ) + FetchContent_MakeAvailable(googletest) + + # Before googletest-1.11.0, the CMake files provided by the source archive + # did not define the GTest::gtest target, only the gtest one, so define + # an alias when needed to ensure the rest of this file works with all + # GoogleTest releases. + # + # Note that surprisingly, this is not needed when using GTEST_ROOT to + # point to a local installation, because this one contains CMake-generated + # files that contain the right target definition, and which will be + # picked up by the find_package(GTest) file above. + # + # This comment and the four lines below can be removed once Ninja only + # depends on release-1.11.0 or above. + if (NOT TARGET GTest::gtest) + message(STATUS "Defining GTest::gtest alias to work-around bug in older release.") + add_library(GTest::gtest ALIAS gtest) + + # NOTE: gtest uninit some variables, gcc >= 1.11.3 may cause error on compile. + # Remove this comment and six lines below, once ninja deps gtest-1.11.0 or above. + if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL "1.11.3") + check_cxx_compiler_flag(-Wmaybe-uninitialized flag_maybe_uninit) + if (flag_maybe_uninit) + target_compile_options(gtest PRIVATE -Wno-maybe-uninitialized) + endif() + endif() + + endif() + endif() + # Tests all build into ninja_test executable. 
add_executable(ninja_test src/build_log_test.cc @@ -207,9 +289,11 @@ if(BUILD_TESTING) src/util_test.cc ) if(WIN32) - target_sources(ninja_test PRIVATE src/includes_normalize_test.cc src/msvc_helper_test.cc) + target_sources(ninja_test PRIVATE src/includes_normalize_test.cc src/msvc_helper_test.cc + windows/ninja.manifest) endif() - target_link_libraries(ninja_test PRIVATE libninja libninja-re2c) + find_package(Threads REQUIRED) + target_link_libraries(ninja_test PRIVATE libninja libninja-re2c GTest::gtest Threads::Threads) foreach(perftest build_log_perftest @@ -232,4 +316,6 @@ if(BUILD_TESTING) add_test(NAME NinjaTest COMMAND ninja_test) endif() -install(TARGETS ninja) +if(NINJA_BUILD_BINARY) + install(TARGETS ninja) +endif() diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index be1fc02..37f6ebc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -14,14 +14,10 @@ Generally it's the [Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html) with a few additions: -* Any code merged into the Ninja codebase which will be part of the main - executable must compile as C++03. You may use C++11 features in a test or an - unimportant tool if you guard your code with `#if __cplusplus >= 201103L`. * We have used `using namespace std;` a lot in the past. For new contributions, please try to avoid relying on it and instead whenever possible use `std::`. However, please do not change existing code simply to add `std::` unless your contribution already needs to change that line of code anyway. -* All source files should have the Google Inc. license header. * Use `///` for [Doxygen](http://www.doxygen.nl/) (use `\a` to refer to arguments). * It's not necessary to document each argument, especially when they're diff --git a/README.md b/README.md index d763766..732ef28 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ See [the manual](https://ninja-build.org/manual.html) or `doc/manual.asciidoc` included in the distribution for background and more details. -Binaries for Linux, Mac, and Windows are available at +Binaries for Linux, Mac and Windows are available on [GitHub](https://github.com/ninja-build/ninja/releases). Run `./ninja -h` for Ninja help. @@ -49,3 +49,39 @@ To run the unit tests: ``` ./build-cmake/ninja_test ``` + +## Generating documentation + +### Ninja Manual + +You must have `asciidoc` and `xsltproc` in your PATH, then do: + +``` +./configure.py +ninja manual doc/manual.pdf +``` + +Which will generate `doc/manual.html`. + +To generate the PDF version of the manual, you must have `dblatext` in your PATH then do: + +``` +./configure.py # only if you didn't do it previously. +ninja doc/manual.pdf +``` + +Which will generate `doc/manual.pdf`. + +### Doxygen documentation + +If you have `doxygen` installed, you can build documentation extracted from C++ +declarations and comments to help you navigate the code. Note that Ninja is a standalone +executable, not a library, so there is no public API, all details exposed here are +internal. + +``` +./configure.py # if needed +ninja doxygen +``` + +Then open `doc/doxygen/html/index.html` in a browser to look at it. diff --git a/RELEASING.md b/RELEASING.md new file mode 100644 index 0000000..4e3a4bd --- /dev/null +++ b/RELEASING.md @@ -0,0 +1,41 @@ +Notes to myself on all the steps to make for a Ninja release. + +### Push new release branch: +1. Run afl-fuzz for a day or so and run ninja_test +2. Consider sending a heads-up to the ninja-build mailing list first +3. 
Make sure branches 'master' and 'release' are synced up locally +4. Update src/version.cc with new version (with ".git"), then + ``` + git commit -am 'mark this 1.5.0.git' + ``` +5. git checkout release; git merge master +6. Fix version number in src/version.cc (it will likely conflict in the above) +7. Fix version in doc/manual.asciidoc (exists only on release branch) +8. commit, tag, push (don't forget to push --tags) + ``` + git commit -am v1.5.0; git push origin release + git tag v1.5.0; git push --tags + # Push the 1.5.0.git change on master too: + git checkout master; git push origin master + ``` +9. Construct release notes from prior notes + + credits: `git shortlog -s --no-merges REV..` + + +### Release on GitHub: +1. Go to [Tags](https://github.com/ninja-build/ninja/tags) +2. Open the newly created tag and select "Create release from tag" +3. Create the release which will trigger a build which automatically attaches + the binaries + +### Make announcement on mailing list: +1. copy old mail + +### Update website: +1. Make sure your ninja checkout is on the v1.5.0 tag +2. Clone https://github.com/ninja-build/ninja-build.github.io +3. In that repo, `./update-docs.sh` +4. Update index.html with newest version and link to release notes +5. `git commit -m 'run update-docs.sh, 1.5.0 release'` +6. `git push origin master` diff --git a/appveyor.yml b/appveyor.yml index f0b92b8..ecc9f98 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -8,7 +8,6 @@ environment: CHERE_INVOKING: 1 # Tell Bash to inherit the current working directory matrix: - MSYSTEM: MINGW64 - - MSYSTEM: MSVC - MSYSTEM: LINUX matrix: @@ -17,8 +16,6 @@ matrix: MSYSTEM: LINUX - image: Ubuntu1804 MSYSTEM: MINGW64 - - image: Ubuntu1804 - MSYSTEM: MSVC for: - @@ -30,31 +27,13 @@ for: pacman -S --quiet --noconfirm --needed re2c 2>&1\n ./configure.py --bootstrap --platform mingw 2>&1\n ./ninja all\n - ./ninja_test 2>&1\n ./misc/ninja_syntax_test.py 2>&1\n\"@" - - - matrix: - only: - - MSYSTEM: MSVC - build_script: - - cmd: >- - call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvars64.bat" - - python configure.py --bootstrap - - ninja.bootstrap.exe all - - ninja_test - - python misc/ninja_syntax_test.py - - matrix: only: - image: Ubuntu1804 build_script: - ./configure.py --bootstrap - ./ninja all - - ./ninja_test - misc/ninja_syntax_test.py - misc/output_test.py diff --git a/build_description.md b/build_description.md index 542a167..c553a90 100644 --- a/build_description.md +++ b/build_description.md @@ -1,19 +1,13 @@ -# build ninja - -## 获取代码 -``` +#build ninja + +#获取代码 git clone https://gitee.com/openharmony/third_party_ninja.git -``` - -## 通过python脚本构建生成二进制文件 -``` + +#通过python脚本构建生成二进制文件 cd third_party_ninja ./configure.py --bootstrap -``` - -## 通过CMake编译生成二进制文件 -``` + +#通过CMake编译生成二进制文件 cd third_party_ninja -cmake -Bbuild-cmake +cmake -Bbuild-cmake -H. cmake --build build-cmake -``` diff --git a/configure.py b/configure.py index 4390434..6ee64a8 100755 --- a/configure.py +++ b/configure.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # Copyright 2001 Google Inc. All Rights Reserved. 
# @@ -19,12 +19,9 @@ Projects that use ninja themselves should either write a similar script or use a meta-build system that supports Ninja output.""" -from __future__ import print_function - from optparse import OptionParser import os -import pipes -import string +import shlex import subprocess import sys @@ -264,7 +261,7 @@ n.variable('configure_args', ' '.join(configure_args)) env_keys = set(['CXX', 'AR', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS']) configure_env = dict((k, os.environ[k]) for k in os.environ if k in env_keys) if configure_env: - config_str = ' '.join([k + '=' + pipes.quote(configure_env[k]) + config_str = ' '.join([k + '=' + shlex.quote(configure_env[k]) for k in configure_env]) n.variable('configure_env', config_str + '$ ') n.newline() @@ -305,7 +302,18 @@ if platform.is_msvc(): else: n.variable('ar', configure_env.get('AR', 'ar')) +def search_system_path(file_name): + """Find a file in the system path.""" + for dir in os.environ['path'].split(';'): + path = os.path.join(dir, file_name) + if os.path.exists(path): + return path + +# Note that build settings are separately specified in CMakeLists.txt and +# these lists should be kept in sync. if platform.is_msvc(): + if not search_system_path('cl.exe'): + raise Exception('cl.exe not found. Run again from the Developer Command Prompt for VS') cflags = ['/showIncludes', '/nologo', # Don't print startup banner. '/Zi', # Create pdb with debug info. @@ -320,6 +328,7 @@ if platform.is_msvc(): # Disable warnings about ignored typedef in DbgHelp.h '/wd4091', '/GR-', # Disable RTTI. + '/Zc:__cplusplus', # Disable size_t -> int truncation warning. # We never have strings or arrays larger than 2**31. '/wd4267', @@ -339,6 +348,7 @@ else: '-Wno-unused-parameter', '-fno-rtti', '-fno-exceptions', + '-std=c++11', '-fvisibility=hidden', '-pipe', '-DNINJA_PYTHON="%s"' % options.with_python] if options.debug: @@ -474,7 +484,7 @@ n.comment('the depfile parser and ninja lexers are generated using re2c.') def has_re2c(): try: proc = subprocess.Popen(['re2c', '-V'], stdout=subprocess.PIPE) - return int(proc.communicate()[0], 10) >= 1103 + return int(proc.communicate()[0], 10) >= 1503 except OSError: return False if has_re2c(): @@ -485,20 +495,31 @@ if has_re2c(): n.build(src('depfile_parser.cc'), 're2c', src('depfile_parser.in.cc')) n.build(src('lexer.cc'), 're2c', src('lexer.in.cc')) else: - print("warning: A compatible version of re2c (>= 0.11.3) was not found; " + print("warning: A compatible version of re2c (>= 0.15.3) was not found; " "changes to src/*.in.cc will not affect your build.") n.newline() -n.comment('Core source files all build into ninja library.') cxxvariables = [] if platform.is_msvc(): cxxvariables = [('pdb', 'ninja.pdb')] + +n.comment('Generate a library for `ninja-re2c`.') +re2c_objs = [] +for name in ['depfile_parser', 'lexer']: + re2c_objs += cxx(name, variables=cxxvariables) +if platform.is_msvc(): + n.build(built('ninja-re2c.lib'), 'ar', re2c_objs) +else: + n.build(built('libninja-re2c.a'), 'ar', re2c_objs) +n.newline() + +n.comment('Core source files all build into ninja library.') +objs.extend(re2c_objs) for name in ['build', 'build_log', 'clean', 'clparser', 'debug_flags', - 'depfile_parser', 'deps_log', 'disk_interface', 'dyndep', @@ -508,7 +529,6 @@ for name in ['build', 'graph', 'graphviz', 'json', - 'lexer', 'line_printer', 'manifest_parser', 'metrics', @@ -562,44 +582,6 @@ if options.bootstrap: # build.ninja file. 
n = ninja_writer -n.comment('Tests all build into ninja_test executable.') - -objs = [] -if platform.is_msvc(): - cxxvariables = [('pdb', 'ninja_test.pdb')] - -for name in ['build_log_test', - 'build_test', - 'clean_test', - 'clparser_test', - 'depfile_parser_test', - 'deps_log_test', - 'dyndep_parser_test', - 'disk_interface_test', - 'edit_distance_test', - 'graph_test', - 'json_test', - 'lexer_test', - 'manifest_parser_test', - 'missing_deps_test', - 'ninja_test', - 'state_test', - 'status_test', - 'string_piece_util_test', - 'subprocess_test', - 'test', - 'util_test']: - objs += cxx(name, variables=cxxvariables) -if platform.is_windows(): - for name in ['includes_normalize_test', 'msvc_helper_test']: - objs += cxx(name, variables=cxxvariables) - -ninja_test = n.build(binary('ninja_test'), 'link', objs, implicit=ninja_lib, - variables=[('libs', libs)]) -n.newline() -all_targets += ninja_test - - n.comment('Ancillary executables.') if platform.is_aix() and '-maix64' not in ldflags: diff --git a/doc/manual.asciidoc b/doc/manual.asciidoc index a78bbf5..d7dc490 100644 --- a/doc/manual.asciidoc +++ b/doc/manual.asciidoc @@ -1,6 +1,6 @@ The Ninja build system ====================== -v1.11.0, Nov 2020 +v1.12.0, Apr 2024 Introduction @@ -25,7 +25,7 @@ Where other build systems are high-level languages, Ninja aims to be an assembler. Build systems get slow when they need to make decisions. When you are -in a edit-compile cycle you want it to be as fast as possible -- you +in an edit-compile cycle you want it to be as fast as possible -- you want the build system to do the minimum work necessary to figure out what needs to be built immediately. @@ -205,7 +205,11 @@ Several placeholders are available: `%o`:: Overall rate of finished edges per second `%c`:: Current rate of finished edges per second (average over builds specified by `-j` or its default) -`%e`:: Elapsed time in seconds. _(Available since Ninja 1.2.)_ +`%e`:: Elapsed time in seconds. _(Available since Ninja 1.2.)_ +`%E`:: Remaining time (ETA) in seconds. _(Available since Ninja 1.12.)_ +`%w`:: Elapsed time in [h:]mm:ss format. _(Available since Ninja 1.12.)_ +`%W`:: Remaining time (ETA) in [h:]mm:ss format. _(Available since Ninja 1.12.)_ +`%P`:: The percentage (in ppp% format) of time elapsed out of predicted total runtime. _(Available since Ninja 1.12.)_ `%%`:: A plain `%` character. The default progress status is `"[%f/%t] "` (note the trailing space @@ -223,14 +227,14 @@ found useful during Ninja's development. The current tools are: `browse`:: browse the dependency graph in a web browser. Clicking a file focuses the view on that file, showing inputs and outputs. This -feature requires a Python installation. By default port 8000 is used +feature requires a Python installation. By default, port 8000 is used and a web browser will be opened. This can be changed as follows: + ---- ninja -t browse --port=8000 --no-browser mytarget ---- + -`graph`:: output a file in the syntax used by `graphviz`, a automatic +`graph`:: output a file in the syntax used by `graphviz`, an automatic graph layout tool. Use it like: + ---- @@ -262,7 +266,7 @@ output files are out of date. rebuild those targets. _Available since Ninja 1.11._ -`clean`:: remove built files. By default it removes all built files +`clean`:: remove built files. By default, it removes all built files except for those created by the generator. Adding the `-g` flag also removes built files created by the generator (see <>). 
Additional arguments are @@ -325,20 +329,19 @@ Where `ENVFILE` is a binary file that contains an environment block suitable for CreateProcessA() on Windows (i.e. a series of zero-terminated strings that look like NAME=VALUE, followed by an extra zero terminator). Note that this uses the local codepage encoding. - ++ This tool also supports a deprecated way of parsing the compiler's output when -the `/showIncludes` flag is used, and generating a GCC-compatible depfile from it. +the `/showIncludes` flag is used, and generating a GCC-compatible depfile from it: + ---- +---- ninja -t msvc -o DEPFILE [-p STRING] -- cl.exe /showIncludes ---- +---- + - When using this option, `-p STRING` can be used to pass the localized line prefix that `cl.exe` uses to output dependency information. For English-speaking regions this is `"Note: including file: "` without the double quotes, but will be different for other regions. - ++ Note that Ninja supports this natively now, with the use of `deps = msvc` and `msvc_deps_prefix` in Ninja files. Native support also avoids launching an extra tool process each time the compiler must be called, which can speed up builds @@ -675,14 +678,14 @@ Ninja supports this processing in two forms. as a temporary). 2. `deps = msvc` specifies that the tool outputs header dependencies - in the form produced by Visual Studio's compiler's + in the form produced by the Visual Studio compiler's http://msdn.microsoft.com/en-us/library/hdkef6tk(v=vs.90).aspx[`/showIncludes` flag]. Briefly, this means the tool outputs specially-formatted lines to its stdout. Ninja then filters these lines from the displayed output. No `depfile` attribute is necessary, but the localized string - in front of the the header file path. For instance + in front of the header file path should be globally defined. For instance, `msvc_deps_prefix = Note: including file:` - for a English Visual Studio (the default). Should be globally defined. + for an English Visual Studio (the default). + ---- msvc_deps_prefix = Note: including file: @@ -965,14 +968,14 @@ Fundamentally, command lines behave differently on Unixes and Windows. On Unixes, commands are arrays of arguments. The Ninja `command` variable is passed directly to `sh -c`, which is then responsible for -interpreting that string into an argv array. Therefore the quoting +interpreting that string into an argv array. Therefore, the quoting rules are those of the shell, and you can use all the normal shell operators, like `&&` to chain multiple commands, or `VAR=value cmd` to set environment variables. On Windows, commands are strings, so Ninja passes the `command` string directly to `CreateProcess`. (In the common case of simply executing -a compiler this means there is less overhead.) Consequently the +a compiler this means there is less overhead.) Consequently, the quoting rules are determined by the called program, which on Windows are usually provided by the C library. If you need shell interpretation of the command (such as the use of `&&` to chain @@ -1047,6 +1050,9 @@ relative path, pointing to the same file, are considered different by Ninja. [[validations]] Validations ~~~~~~~~~~~ + +_Available since Ninja 1.11._ + Validations listed on the build line cause the specified files to be added to the top level of the build graph (as if they were specified on the Ninja command line) whenever the build line is a transitive @@ -1062,7 +1068,7 @@ A build edge can list another build edge as a validation even if the second edge depends on the first. 
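To make the scheduling behaviour described above concrete, the sketch below writes a tiny manifest that uses the `|@` validation separator and, when a `ninja` binary with validation support (1.11 or later) and a C compiler are on `PATH`, builds it; every rule and file name in it is invented for illustration.

```python
#!/usr/bin/env python3
"""Minimal sketch of a build edge with a validation.

Rule names, file names and the use of `cc` are made up; only the `|@`
separator and its scheduling behaviour come from the manual text above.
"""
import os
import shutil
import subprocess
import tempfile

MANIFEST = """\
rule cc
  command = cc -c $in -o $out

rule analyze
  command = cc -fsyntax-only $in && touch $out

# foo.analysis is scheduled whenever foo.o is built, but foo.o does not
# wait for it to finish.
build foo.o: cc foo.c |@ foo.analysis
build foo.analysis: analyze foo.c
"""

def main() -> None:
    with tempfile.TemporaryDirectory() as d:
        with open(os.path.join(d, "foo.c"), "w") as f:
            f.write("int main(void) { return 0; }\n")
        with open(os.path.join(d, "build.ninja"), "w") as f:
            f.write(MANIFEST)
        if shutil.which("ninja") and shutil.which("cc"):
            # Requesting only foo.o still runs the foo.analysis validation.
            subprocess.run(["ninja", "foo.o"], cwd=d, check=True)
        else:
            print(MANIFEST)

if __name__ == "__main__":
    main()
```

Requesting only `foo.o` still schedules `foo.analysis`; the compile does not wait for the analysis, but since any failing command fails the build, a failing analysis fails the build as a whole.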
Validations are designed to handle rules that perform error checking but -don't produce any artifacts needed by the build, for example static +don't produce any artifacts needed by the build, for example, static analysis tools. Marking the static analysis rule as an implicit input of the main build rule of the source files or of the rules that depend on the main build rule would slow down the critical path of the build, diff --git a/doc/style.css b/doc/style.css index 9976c03..363e272 100644 --- a/doc/style.css +++ b/doc/style.css @@ -1,15 +1,22 @@ +:root { + color-scheme: light dark; +} + body { margin: 5ex 10ex; max-width: 80ex; line-height: 1.5; font-family: sans-serif; } + h1, h2, h3 { font-weight: normal; } + pre, code { font-family: x, monospace; } + pre { padding: 1ex; background: #eee; @@ -17,13 +24,32 @@ pre { min-width: 0; font-size: 90%; } +@media (prefers-color-scheme: dark) { + pre { + background: #333; + border: solid 1px #444; + } +} + code { color: #007; } +@media (prefers-color-scheme: dark) { + code { + color: #a7cec8; + } +} + div.chapter { margin-top: 4em; border-top: solid 2px black; } +@media (prefers-color-scheme: dark) { + div.chapter { + border-top: solid 2px white; + } +} + p { margin-top: 0; } diff --git a/misc/measure.py b/misc/measure.py index 8ce95e6..f3825ef 100755 --- a/misc/measure.py +++ b/misc/measure.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright 2011 Google Inc. All Rights Reserved. # @@ -17,8 +17,6 @@ """measure the runtime of a command by repeatedly running it. """ -from __future__ import print_function - import time import subprocess import sys diff --git a/misc/ninja-mode.el b/misc/ninja-mode.el index 8b975d5..d4f06e6 100644 --- a/misc/ninja-mode.el +++ b/misc/ninja-mode.el @@ -19,16 +19,22 @@ ;;; Commentary: ;; Simple emacs mode for editing .ninja files. -;; Just some syntax highlighting for now. ;;; Code: +(defcustom ninja-indent-offset 2 + "*Amount of offset per level of indentation." + :type 'integer + :safe 'natnump + :group 'ninja) + +(defconst ninja-keywords-re + (concat "^" (regexp-opt '("rule" "build" "subninja" "include" "pool" "default") + 'words))) + (defvar ninja-keywords - `((,(concat "^" (regexp-opt '("rule" "build" "subninja" "include" - "pool" "default") - 'words)) - . font-lock-keyword-face) - ("\\([[:alnum:]_]+\\) =" 1 font-lock-variable-name-face) + `((,ninja-keywords-re . font-lock-keyword-face) + ("^[[:space:]]*\\([[:alnum:]_]+\\)[[:space:]]*=" 1 font-lock-variable-name-face) ;; Variable expansion. ("$[[:alnum:]_]+" . font-lock-variable-name-face) ("${[[:alnum:]._]+}" . font-lock-variable-name-face) @@ -69,11 +75,30 @@ (unless (= line-end (1+ (buffer-size))) (put-text-property line-end (1+ line-end) 'syntax-table '(12))))))))) +(defun ninja-compute-indentation () + "Calculate indentation for the current line." + (save-excursion + (beginning-of-line) + (if (or (looking-at ninja-keywords-re) + (= (line-number-at-pos) 1)) + 0 + (forward-line -1) + (if (looking-at ninja-keywords-re) + ninja-indent-offset + (current-indentation))))) + +(defun ninja-indent-line () + "Indent the current line. 
Uses previous indentation level if + available or `ninja-indent-offset'" + (interactive "*") + (indent-line-to (ninja-compute-indentation))) + ;;;###autoload (define-derived-mode ninja-mode prog-mode "ninja" (set (make-local-variable 'comment-start) "#") (set (make-local-variable 'parse-sexp-lookup-properties) t) (set (make-local-variable 'syntax-propertize-function) #'ninja-syntax-propertize) + (set (make-local-variable 'indent-line-function) 'ninja-indent-line) (setq font-lock-defaults '(ninja-keywords))) ;; Run ninja-mode for files ending in .ninja. diff --git a/misc/ninja_syntax.py b/misc/ninja_syntax.py index ca73b5b..2aa8456 100644 --- a/misc/ninja_syntax.py +++ b/misc/ninja_syntax.py @@ -23,37 +23,54 @@ use Python. import re import textwrap +from io import TextIOWrapper +from typing import Dict, List, Match, Optional, Tuple, Union -def escape_path(word): +def escape_path(word: str) -> str: return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:') class Writer(object): - def __init__(self, output, width=78): + def __init__(self, output: TextIOWrapper, width: int = 78) -> None: self.output = output self.width = width - def newline(self): + def newline(self) -> None: self.output.write('\n') - def comment(self, text): + def comment(self, text: str) -> None: for line in textwrap.wrap(text, self.width - 2, break_long_words=False, break_on_hyphens=False): self.output.write('# ' + line + '\n') - def variable(self, key, value, indent=0): + def variable( + self, + key: str, + value: Optional[Union[bool, int, float, str, List[str]]], + indent: int = 0, + ) -> None: if value is None: return if isinstance(value, list): value = ' '.join(filter(None, value)) # Filter out empty strings. self._line('%s = %s' % (key, value), indent) - def pool(self, name, depth): + def pool(self, name: str, depth: int) -> None: self._line('pool %s' % name) self.variable('depth', depth, indent=1) - def rule(self, name, command, description=None, depfile=None, - generator=False, pool=None, restat=False, rspfile=None, - rspfile_content=None, deps=None): + def rule( + self, + name: str, + command: str, + description: Optional[str] = None, + depfile: Optional[str] = None, + generator: bool = False, + pool: Optional[str] = None, + restat: bool = False, + rspfile: Optional[str] = None, + rspfile_content: Optional[str] = None, + deps: Optional[Union[str, List[str]]] = None, + ) -> None: self._line('rule %s' % name) self.variable('command', command, indent=1) if description: @@ -73,8 +90,23 @@ class Writer(object): if deps: self.variable('deps', deps, indent=1) - def build(self, outputs, rule, inputs=None, implicit=None, order_only=None, - variables=None, implicit_outputs=None, pool=None, dyndep=None): + def build( + self, + outputs: Union[str, List[str]], + rule: str, + inputs: Optional[Union[str, List[str]]] = None, + implicit: Optional[Union[str, List[str]]] = None, + order_only: Optional[Union[str, List[str]]] = None, + variables: Optional[ + Union[ + List[Tuple[str, Optional[Union[str, List[str]]]]], + Dict[str, Optional[Union[str, List[str]]]], + ] + ] = None, + implicit_outputs: Optional[Union[str, List[str]]] = None, + pool: Optional[str] = None, + dyndep: Optional[str] = None, + ) -> List[str]: outputs = as_list(outputs) out_outputs = [escape_path(x) for x in outputs] all_inputs = [escape_path(x) for x in as_list(inputs)] @@ -111,16 +143,16 @@ class Writer(object): return outputs - def include(self, path): + def include(self, path: str) -> None: self._line('include %s' % path) - def subninja(self, path): 
+ def subninja(self, path: str) -> None: self._line('subninja %s' % path) - def default(self, paths): + def default(self, paths: Union[str, List[str]]) -> None: self._line('default %s' % ' '.join(as_list(paths))) - def _count_dollars_before_index(self, s, i): + def _count_dollars_before_index(self, s: str, i: int) -> int: """Returns the number of '$' characters right in front of s[i].""" dollar_count = 0 dollar_index = i - 1 @@ -129,7 +161,7 @@ class Writer(object): dollar_index -= 1 return dollar_count - def _line(self, text, indent=0): + def _line(self, text: str, indent: int = 0) -> None: """Write 'text' word-wrapped at self.width characters.""" leading_space = ' ' * indent while len(leading_space) + len(text) > self.width: @@ -165,11 +197,11 @@ class Writer(object): self.output.write(leading_space + text + '\n') - def close(self): + def close(self) -> None: self.output.close() -def as_list(input): +def as_list(input: Optional[Union[str, List[str]]]) -> List[str]: if input is None: return [] if isinstance(input, list): @@ -177,7 +209,7 @@ def as_list(input): return [input] -def escape(string): +def escape(string: str) -> str: """Escape a string such that it can be embedded into a Ninja file without further interpretation.""" assert '\n' not in string, 'Ninja syntax does not allow newlines' @@ -185,13 +217,13 @@ def escape(string): return string.replace('$', '$$') -def expand(string, vars, local_vars={}): +def expand(string: str, vars: Dict[str, str], local_vars: Dict[str, str] = {}) -> str: """Expand a string containing $vars as Ninja would. Note: doesn't handle the full Ninja variable syntax, but it's enough to make configure.py's use of it work. """ - def exp(m): + def exp(m: Match[str]) -> str: var = m.group(1) if var == '$': return '$' diff --git a/misc/ninja_syntax_test.py b/misc/ninja_syntax_test.py index 90ff9c6..61fb177 100755 --- a/misc/ninja_syntax_test.py +++ b/misc/ninja_syntax_test.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright 2011 Google Inc. All Rights Reserved. # diff --git a/misc/oss-fuzz/build.sh b/misc/oss-fuzz/build.sh index cacb3d3..4328feb 100644 --- a/misc/oss-fuzz/build.sh +++ b/misc/oss-fuzz/build.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/bash -eu # Copyright 2020 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +15,6 @@ # ################################################################################ -set -eu cmake -Bbuild-cmake -H. 
cmake --build build-cmake diff --git a/misc/output_test.py b/misc/output_test.py index 141716c..13b0926 100755 --- a/misc/output_test.py +++ b/misc/output_test.py @@ -13,29 +13,26 @@ import tempfile import unittest default_env = dict(os.environ) -if 'NINJA_STATUS' in default_env: - del default_env['NINJA_STATUS'] -if 'CLICOLOR_FORCE' in default_env: - del default_env['CLICOLOR_FORCE'] +default_env.pop('NINJA_STATUS', None) +default_env.pop('CLICOLOR_FORCE', None) default_env['TERM'] = '' NINJA_PATH = os.path.abspath('./ninja') def run(build_ninja, flags='', pipe=False, env=default_env): with tempfile.TemporaryDirectory() as d: - os.chdir(d) - with open('build.ninja', 'w') as f: + with open(os.path.join(d, 'build.ninja'), 'w') as f: f.write(build_ninja) f.flush() ninja_cmd = '{} {}'.format(NINJA_PATH, flags) try: if pipe: - output = subprocess.check_output([ninja_cmd], shell=True, env=env) + output = subprocess.check_output([ninja_cmd], shell=True, cwd=d, env=env) elif platform.system() == 'Darwin': output = subprocess.check_output(['script', '-q', '/dev/null', 'bash', '-c', ninja_cmd], - env=env) + cwd=d, env=env) else: output = subprocess.check_output(['script', '-qfec', ninja_cmd, '/dev/null'], - env=env) + cwd=d, env=env) except subprocess.CalledProcessError as err: sys.stdout.buffer.write(err.output) raise err @@ -112,14 +109,51 @@ red \x1b[31mred\x1b[0m ''') + def test_issue_1966(self): + self.assertEqual(run( +'''rule cat + command = cat $rspfile $rspfile > $out + rspfile = cat.rsp + rspfile_content = a b c + +build a: cat +''', '-j3'), +'''[1/1] cat cat.rsp cat.rsp > a\x1b[K +''') + + def test_pr_1685(self): # Running those tools without .ninja_deps and .ninja_log shouldn't fail. self.assertEqual(run('', flags='-t recompact'), '') self.assertEqual(run('', flags='-t restat'), '') + def test_issue_2048(self): + with tempfile.TemporaryDirectory() as d: + with open(os.path.join(d, 'build.ninja'), 'w'): + pass + + with open(os.path.join(d, '.ninja_log'), 'w') as f: + f.write('# ninja log v4\n') + + try: + output = subprocess.check_output([NINJA_PATH, '-t', 'recompact'], + cwd=d, + env=default_env, + stderr=subprocess.STDOUT, + text=True + ) + + self.assertEqual( + output.strip(), + "ninja: warning: build log version is too old; starting over" + ) + except subprocess.CalledProcessError as err: + self.fail("non-zero exit code with: " + err.output) + def test_status(self): self.assertEqual(run(''), 'ninja: no work to do.\n') self.assertEqual(run('', pipe=True), 'ninja: no work to do.\n') + self.assertEqual(run('', flags='--quiet'), '') def test_ninja_status_default(self): 'Do we show the default status by default?' diff --git a/misc/write_fake_manifests.py b/misc/write_fake_manifests.py index abcb677..bf9cf7d 100644 --- a/misc/write_fake_manifests.py +++ b/misc/write_fake_manifests.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """Writes large manifest files, for manifest parser performance testing. diff --git a/misc/zsh-completion b/misc/zsh-completion index 4cee3b8..d439df3 100644 --- a/misc/zsh-completion +++ b/misc/zsh-completion @@ -16,7 +16,7 @@ # Add the following to your .zshrc to tab-complete ninja targets # fpath=(path/to/ninja/misc/zsh-completion $fpath) -__get_targets() { +(( $+functions[_ninja-get-targets] )) || _ninja-get-targets() { dir="." 
if [ -n "${opt_args[-C]}" ]; then @@ -31,42 +31,45 @@ __get_targets() { eval ${targets_command} 2>/dev/null | cut -d: -f1 } -__get_tools() { - ninja -t list 2>/dev/null | while read -r a b; do echo $a; done | tail -n +2 +(( $+functions[_ninja-get-tools] )) || _ninja-get-tools() { + # remove the first line; remove the leading spaces; replace spaces with colon + ninja -t list 2> /dev/null | sed -e '1d;s/^ *//;s/ \+/:/' } -__get_modes() { - ninja -d list 2>/dev/null | while read -r a b; do echo $a; done | tail -n +2 | sed '$d' +(( $+functions[_ninja-get-modes] )) || _ninja-get-modes() { + # remove the first line; remove the last line; remove the leading spaces; replace spaces with colon + ninja -d list 2> /dev/null | sed -e '1d;$d;s/^ *//;s/ \+/:/' } -__modes() { +(( $+functions[_ninja-modes] )) || _ninja-modes() { local -a modes - modes=(${(fo)"$(__get_modes)"}) + modes=(${(fo)"$(_ninja-get-modes)"}) _describe 'modes' modes } -__tools() { +(( $+functions[_ninja-tools] )) || _ninja-tools() { local -a tools - tools=(${(fo)"$(__get_tools)"}) + tools=(${(fo)"$(_ninja-get-tools)"}) _describe 'tools' tools } -__targets() { +(( $+functions[_ninja-targets] )) || _ninja-targets() { local -a targets - targets=(${(fo)"$(__get_targets)"}) + targets=(${(fo)"$(_ninja-get-targets)"}) _describe 'targets' targets } _arguments \ - {-h,--help}'[Show help]' \ - '--version[Print ninja version]' \ + '(- *)'{-h,--help}'[Show help]' \ + '(- *)--version[Print ninja version]' \ '-C+[Change to directory before doing anything else]:directories:_directories' \ '-f+[Specify input build file (default=build.ninja)]:files:_files' \ '-j+[Run N jobs in parallel (default=number of CPUs available)]:number of jobs' \ '-l+[Do not start new jobs if the load average is greater than N]:number of jobs' \ '-k+[Keep going until N jobs fail (default=1)]:number of jobs' \ '-n[Dry run (do not run commands but act like they succeeded)]' \ - '-v[Show all command lines while building]' \ - '-d+[Enable debugging (use -d list to list modes)]:modes:__modes' \ - '-t+[Run a subtool (use -t list to list subtools)]:tools:__tools' \ - '*::targets:__targets' + '(-v --verbose --quiet)'{-v,--verbose}'[Show all command lines while building]' \ + "(-v --verbose --quiet)--quiet[Don't show progress status, just command output]" \ + '-d+[Enable debugging (use -d list to list modes)]:modes:_ninja-modes' \ + '-t+[Run a subtool (use -t list to list subtools)]:tools:_ninja-tools' \ + '*::targets:_ninja-targets' diff --git a/src/browse.cc b/src/browse.cc old mode 100644 new mode 100755 index 76bee07..ac54207 --- a/src/browse.cc +++ b/src/browse.cc @@ -71,8 +71,13 @@ void RunBrowsePython(State* state, const char* ninja_command, close(pipefd[0]); // Write the script file into the stdin of the Python process. - ssize_t len = write(pipefd[1], kBrowsePy, sizeof(kBrowsePy)); - if (len < (ssize_t)sizeof(kBrowsePy)) + // Only write n - 1 bytes, because Python 3.11 does not allow null + // bytes in source code anymore, so avoid writing the null string + // terminator. 
+ // See https://github.com/python/cpython/issues/96670 + auto kBrowsePyLength = sizeof(kBrowsePy) - 1; + ssize_t len = write(pipefd[1], kBrowsePy, kBrowsePyLength); + if (len < (ssize_t)kBrowsePyLength) perror("ninja: write"); close(pipefd[1]); exit(0); diff --git a/src/browse.h b/src/browse.h old mode 100644 new mode 100755 diff --git a/src/browse.py b/src/browse.py index 653cbe9..b125e80 100755 --- a/src/browse.py +++ b/src/browse.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # Copyright 2001 Google Inc. All Rights Reserved. # @@ -20,8 +20,6 @@ This script is inlined into the final executable and spawned by it when needed. """ -from __future__ import print_function - try: import http.server as httpserver import socketserver diff --git a/src/build.cc b/src/build.cc old mode 100644 new mode 100755 index 17e5dcb..abc73ee --- a/src/build.cc +++ b/src/build.cc @@ -18,6 +18,8 @@ #include #include #include +#include +#include #include #if defined(__SVR4) && defined(__sun) @@ -46,7 +48,7 @@ struct DryRunCommandRunner : public CommandRunner { virtual ~DryRunCommandRunner() {} // Overridden from CommandRunner: - virtual bool CanRunMore() const; + virtual size_t CanRunMore() const; virtual bool StartCommand(Edge* edge); virtual bool WaitForCommand(Result* result); @@ -54,8 +56,8 @@ struct DryRunCommandRunner : public CommandRunner { queue finished_; }; -bool DryRunCommandRunner::CanRunMore() const { - return true; +size_t DryRunCommandRunner::CanRunMore() const { + return SIZE_MAX; } bool DryRunCommandRunner::StartCommand(Edge* edge) { @@ -95,15 +97,20 @@ bool Plan::AddTarget(const Node* target, string* err) { bool Plan::AddSubTarget(const Node* node, const Node* dependent, string* err, set* dyndep_walk) { Edge* edge = node->in_edge(); - if (!edge) { // Leaf node. - if (node->dirty()) { - string referenced; - if (dependent) - referenced = ", needed by '" + dependent->path() + "',"; - *err = "'" + node->path() + "'" + referenced + " missing " - "and no known rule to make it"; - } - return false; + if (!edge) { + // Leaf node, this can be either a regular input from the manifest + // (e.g. a source file), or an implicit input from a depfile or dyndep + // file. In the first case, a dirty flag means the file is missing, + // and the build should stop. In the second, do not do anything here + // since there is no producing edge to add to the plan. 
+ if (node->dirty() && !node->generated_by_dep_loader()) { + string referenced; + if (dependent) + referenced = ", needed by '" + dependent->path() + "',"; + *err = "'" + node->path() + "'" + referenced + + " missing and no known rule to make it"; + } + return false; } if (edge->outputs_ready()) @@ -144,13 +151,17 @@ bool Plan::AddSubTarget(const Node* node, const Node* dependent, string* err, void Plan::EdgeWanted(const Edge* edge) { ++wanted_edges_; - if (!edge->is_phony()) + if (!edge->is_phony()) { ++command_edges_; + if (builder_) + builder_->status_->EdgeAddedToPlan(edge); + } } Edge* Plan::FindWork() { if (ready_.empty()) return NULL; + EdgeSet::iterator e = ready_.begin(); Edge* edge = *e; ready_.erase(e); @@ -294,8 +305,11 @@ bool Plan::CleanNode(DependencyScan* scan, Node* node, string* err) { want_e->second = kWantNothing; --wanted_edges_; - if (!(*oe)->is_phony()) + if (!(*oe)->is_phony()) { --command_edges_; + if (builder_) + builder_->status_->EdgeRemovedFromPlan(*oe); + } } } } @@ -450,7 +464,7 @@ void Plan::Dump() const { struct RealCommandRunner : public CommandRunner { explicit RealCommandRunner(const BuildConfig& config) : config_(config) {} virtual ~RealCommandRunner() {} - virtual bool CanRunMore() const; + virtual size_t CanRunMore() const; virtual bool StartCommand(Edge* edge); virtual bool WaitForCommand(Result* result); virtual vector GetActiveEdges(); @@ -473,12 +487,26 @@ void RealCommandRunner::Abort() { subprocs_.Clear(); } -bool RealCommandRunner::CanRunMore() const { +size_t RealCommandRunner::CanRunMore() const { size_t subproc_number = subprocs_.running_.size() + subprocs_.finished_.size(); - return (int)subproc_number < config_.parallelism - && ((subprocs_.running_.empty() || config_.max_load_average <= 0.0f) - || GetLoadAverage() < config_.max_load_average); + + int64_t capacity = config_.parallelism - subproc_number; + + if (config_.max_load_average > 0.0f) { + int load_capacity = config_.max_load_average - GetLoadAverage(); + if (load_capacity < capacity) + capacity = load_capacity; + } + + if (capacity < 0) + capacity = 0; + + if (capacity == 0 && subprocs_.running_.empty()) + // Ensure that we make progress. + capacity = 1; + + return capacity; } bool RealCommandRunner::StartCommand(Edge* edge) { @@ -518,6 +546,10 @@ Builder::Builder(State* state, const BuildConfig& config, start_time_millis_(start_time_millis), disk_interface_(disk_interface), scan_(state, build_log, deps_log, disk_interface, &config_.depfile_parser_options) { + lock_file_path_ = ".ninja_lock"; + string build_dir = state_->bindings_.LookupVariable("builddir"); + if (!build_dir.empty()) + lock_file_path_ = build_dir + "/" + lock_file_path_; } Builder::~Builder() { @@ -552,6 +584,10 @@ void Builder::Cleanup() { disk_interface_->RemoveFile(depfile); } } + + string err; + if (disk_interface_->Stat(lock_file_path_, &err) > 0) + disk_interface_->RemoveFile(lock_file_path_); } Node* Builder::AddTarget(const string& name, string* err) { @@ -592,108 +628,6 @@ bool Builder::AddTarget(Node* target, string* err) { return true; } -bool Builder::AlreadyUpToDate() const { - return !plan_.more_to_do(); -} - -bool Builder::Build(string* err) { - assert(!AlreadyUpToDate()); - - status_->PlanHasTotalEdges(plan_.command_edge_count()); - int pending_commands = 0; - int failures_allowed = config_.failures_allowed; - - // Set up the command runner if we haven't done so already. 
- if (!command_runner_.get()) { - if (config_.dry_run) - command_runner_.reset(new DryRunCommandRunner); - else - command_runner_.reset(new RealCommandRunner(config_)); - } - - // We are about to start the build process. - status_->BuildStarted(); - - // This main loop runs the entire build process. - // It is structured like this: - // First, we attempt to start as many commands as allowed by the - // command runner. - // Second, we attempt to wait for / reap the next finished command. - while (plan_.more_to_do()) { - // See if we can start any more commands. - if (failures_allowed && command_runner_->CanRunMore()) { - if (Edge* edge = plan_.FindWork()) { - if (edge->GetBindingBool("generator")) { - scan_.build_log()->Close(); - } - - if (!StartEdge(edge, err)) { - Cleanup(); - status_->BuildFinished(); - return false; - } - - if (edge->is_phony()) { - if (!plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, err)) { - Cleanup(); - status_->BuildFinished(); - return false; - } - } else { - ++pending_commands; - } - - // We made some progress; go back to the main loop. - continue; - } - } - - // See if we can reap any finished commands. - if (pending_commands) { - CommandRunner::Result result; - if (!command_runner_->WaitForCommand(&result) || - result.status == ExitInterrupted) { - Cleanup(); - status_->BuildFinished(); - *err = "interrupted by user"; - return false; - } - - --pending_commands; - if (!FinishCommand(&result, err)) { - Cleanup(); - status_->BuildFinished(); - return false; - } - - if (!result.success()) { - if (failures_allowed) - failures_allowed--; - } - - // We made some progress; start the main loop over. - continue; - } - - // If we get here, we cannot make any more progress. - status_->BuildFinished(); - if (failures_allowed == 0) { - if (config_.failures_allowed > 1) - *err = "subcommands failed"; - else - *err = "subcommand failed"; - } else if (failures_allowed < config_.failures_allowed) - *err = "cannot make progress due to previous errors"; - else - *err = "stuck [this is a bug]"; - - return false; - } - - status_->BuildFinished(); - return true; -} - static std::string &Trim(std::string &s) { if (s.empty()) { @@ -769,6 +703,120 @@ std::string Builder::GetContent(Edge* edge) { } } +bool Builder::AlreadyUpToDate() const { + return !plan_.more_to_do(); +} + +bool Builder::Build(string* err) { + assert(!AlreadyUpToDate()); + + int pending_commands = 0; + int failures_allowed = config_.failures_allowed; + + // Set up the command runner if we haven't done so already. + if (!command_runner_.get()) { + if (config_.dry_run) + command_runner_.reset(new DryRunCommandRunner); + else + command_runner_.reset(new RealCommandRunner(config_)); + } + + // We are about to start the build process. + status_->BuildStarted(); + + // This main loop runs the entire build process. + // It is structured like this: + // First, we attempt to start as many commands as allowed by the + // command runner. + // Second, we attempt to wait for / reap the next finished command. + while (plan_.more_to_do()) { + // See if we can start any more commands. 
+    if (failures_allowed) {
+      size_t capacity = command_runner_->CanRunMore();
+      while (capacity > 0) {
+        Edge* edge = plan_.FindWork();
+        if (!edge)
+          break;
+
+        if (edge->GetBindingBool("generator")) {
+          scan_.build_log()->Close();
+        }
+
+        if (!StartEdge(edge, err)) {
+          Cleanup();
+          status_->BuildFinished();
+          return false;
+        }
+
+        if (edge->is_phony()) {
+          if (!plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, err)) {
+            Cleanup();
+            status_->BuildFinished();
+            return false;
+          }
+        } else {
+          ++pending_commands;
+
+          --capacity;
+
+          // Re-evaluate capacity.
+          size_t current_capacity = command_runner_->CanRunMore();
+          if (current_capacity < capacity)
+            capacity = current_capacity;
+        }
+      }
+
+      // We are finished with all work items and have no pending
+      // commands. Therefore, break out of the main loop.
+      if (pending_commands == 0 && !plan_.more_to_do()) break;
+    }
+
+    // See if we can reap any finished commands.
+    if (pending_commands) {
+      CommandRunner::Result result;
+      if (!command_runner_->WaitForCommand(&result) ||
+          result.status == ExitInterrupted) {
+        Cleanup();
+        status_->BuildFinished();
+        *err = "interrupted by user";
+        return false;
+      }
+
+      --pending_commands;
+      if (!FinishCommand(&result, err)) {
+        Cleanup();
+        status_->BuildFinished();
+        return false;
+      }
+
+      if (!result.success()) {
+        if (failures_allowed)
+          failures_allowed--;
+      }
+
+      // We made some progress; start the main loop over.
+      continue;
+    }
+
+    // If we get here, we cannot make any more progress.
+    status_->BuildFinished();
+    if (failures_allowed == 0) {
+      if (config_.failures_allowed > 1)
+        *err = "subcommands failed";
+      else
+        *err = "subcommand failed";
+    } else if (failures_allowed < config_.failures_allowed)
+      *err = "cannot make progress due to previous errors";
+    else
+      *err = "stuck [this is a bug]";
+
+    return false;
+  }
+
+  status_->BuildFinished();
+  return true;
+}
+
 bool Builder::StartEdge(Edge* edge, string* err) {
   METRIC_RECORD("StartEdge");
   if (edge->is_phony())
@@ -779,19 +827,30 @@ bool Builder::StartEdge(Edge* edge, string* err) {
 
   status_->BuildEdgeStarted(edge, start_time_millis);
 
-  // Create directories necessary for outputs.
+  TimeStamp build_start = -1;
+
+  // Create directories necessary for outputs and remember the current
+  // filesystem mtime to record later.
   // XXX: this will block; do we care?
   for (vector<Node*>::iterator o = edge->outputs_.begin();
        o != edge->outputs_.end(); ++o) {
     if (!disk_interface_->MakeDirs((*o)->path()))
       return false;
+    if (build_start == -1) {
+      disk_interface_->WriteFile(lock_file_path_, "");
+      build_start = disk_interface_->Stat(lock_file_path_, err);
+      if (build_start == -1)
+        build_start = 0;
+    }
   }
 
+  edge->command_start_time_ = build_start;
+
   // Create response file, if needed
   // XXX: this may also block; do we care?
   string rspfile = edge->GetUnescapedRspfile();
   if (!rspfile.empty()) {
-    string content = GetContent(edge);
+    string content = GetContent(edge);
     if (!disk_interface_->WriteFile(rspfile, content))
       return false;
   }
@@ -836,8 +895,8 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) {
   end_time_millis = GetTimeMillis() - start_time_millis_;
   running_edges_.erase(it);
 
-  status_->BuildEdgeFinished(edge, end_time_millis, result->success(),
-                             result->output);
+  status_->BuildEdgeFinished(edge, start_time_millis, end_time_millis,
+                             result->success(), result->output);
 
   // The rest of this function only applies to successful commands.
if (!result->success()) { @@ -845,55 +904,38 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) { } // Restat the edge outputs - TimeStamp output_mtime = 0; - bool restat = edge->GetBindingBool("restat"); + TimeStamp record_mtime = 0; if (!config_.dry_run) { + const bool restat = edge->GetBindingBool("restat"); + const bool generator = edge->GetBindingBool("generator"); bool node_cleaned = false; + record_mtime = edge->command_start_time_; - for (vector::iterator o = edge->outputs_.begin(); - o != edge->outputs_.end(); ++o) { - TimeStamp new_mtime = disk_interface_->Stat((*o)->path(), err); - if (new_mtime == -1) - return false; - if (new_mtime > output_mtime) - output_mtime = new_mtime; - if ((*o)->mtime() == new_mtime && restat) { - // The rule command did not change the output. Propagate the clean - // state through the build graph. - // Note that this also applies to nonexistent outputs (mtime == 0). - if (!plan_.CleanNode(&scan_, *o, err)) + // restat and generator rules must restat the outputs after the build + // has finished. if record_mtime == 0, then there was an error while + // attempting to touch/stat the temp file when the edge started and + // we should fall back to recording the outputs' current mtime in the + // log. + if (record_mtime == 0 || restat || generator) { + for (vector::iterator o = edge->outputs_.begin(); + o != edge->outputs_.end(); ++o) { + TimeStamp new_mtime = disk_interface_->Stat((*o)->path(), err); + if (new_mtime == -1) return false; - node_cleaned = true; + if (new_mtime > record_mtime) + record_mtime = new_mtime; + if ((*o)->mtime() == new_mtime && restat) { + // The rule command did not change the output. Propagate the clean + // state through the build graph. + // Note that this also applies to nonexistent outputs (mtime == 0). + if (!plan_.CleanNode(&scan_, *o, err)) + return false; + node_cleaned = true; + } } } - if (node_cleaned) { - TimeStamp restat_mtime = 0; - // If any output was cleaned, find the most recent mtime of any - // (existing) non-order-only input or the depfile. - for (vector::iterator i = edge->inputs_.begin(); - i != edge->inputs_.end() - edge->order_only_deps_; ++i) { - TimeStamp input_mtime = disk_interface_->Stat((*i)->path(), err); - if (input_mtime == -1) - return false; - if (input_mtime > restat_mtime) - restat_mtime = input_mtime; - } - - string depfile = edge->GetUnescapedDepfile(); - if (restat_mtime != 0 && deps_type.empty() && !depfile.empty()) { - TimeStamp depfile_mtime = disk_interface_->Stat(depfile, err); - if (depfile_mtime == -1) - return false; - if (depfile_mtime > restat_mtime) - restat_mtime = depfile_mtime; - } - - // The total number of edges in the plan may have changed as a result - // of a restat. - status_->PlanHasTotalEdges(plan_.command_edge_count()); - - output_mtime = restat_mtime; + record_mtime = edge->command_start_time_; } } @@ -907,7 +949,7 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) { if (scan_.build_log()) { if (!scan_.build_log()->RecordCommand(edge, start_time_millis, - end_time_millis, output_mtime)) { + end_time_millis, record_mtime)) { *err = string("Error writing to build log: ") + strerror(errno); return false; } @@ -1007,8 +1049,5 @@ bool Builder::LoadDyndeps(Node* node, string* err) { if (!plan_.DyndepsLoaded(&scan_, node, ddf, err)) return false; - // New command edges may have been added to the plan. 
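(Editor's note: illustration only, not part of the patch.) The StartEdge()/FinishCommand() hunks above change what mtime the build log records: rather than stat'ing the outputs after the command finishes, ninja now stamps a scratch .ninja_lock file when the command starts and records that timestamp, falling back to the outputs' mtimes only for restat/generator rules or when the stamp could not be taken. An input touched while the command is still running then looks newer than the recorded mtime on the next run, which is the race the new TestInputMtimeRaceCondition tests exercise. A hedged sketch of the capture step, written against the patch's own DiskInterface API (the helper name is illustrative):

    #include <string>
    #include "disk_interface.h"  // ninja's DiskInterface / TimeStamp

    // Sketch: capture the filesystem's notion of "now" by touching a scratch
    // file and reading its mtime back.
    TimeStamp CaptureCommandStartTime(DiskInterface* disk,
                                      const std::string& lock_file_path,
                                      std::string* err) {
      disk->WriteFile(lock_file_path, "");
      TimeStamp start = disk->Stat(lock_file_path, err);
      // On stat failure return 0 so the caller falls back to re-stat'ing the
      // outputs after the command, as FinishCommand() does above.
      return start == -1 ? 0 : start;
    }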
- status_->PlanHasTotalEdges(plan_.command_edge_count()); - return true; } diff --git a/src/build.h b/src/build.h old mode 100644 new mode 100755 index 91ee1c4..d5d0b8b --- a/src/build.h +++ b/src/build.h @@ -18,12 +18,13 @@ #include #include #include -#include #include #include +#include #include "depfile_parser.h" #include "graph.h" // XXX needed for DependencyScan; should rearrange. +#include "graph.h" #include "exit_status.h" #include "util.h" // int64_t @@ -80,6 +81,7 @@ struct Plan { /// by information loaded from a dyndep file. bool DyndepsLoaded(DependencyScan* scan, const Node* node, const DyndepFile& ddf, std::string* err); + private: bool RefreshDyndepDependents(DependencyScan* scan, const Node* node, std::string* err); void UnmarkDependents(const Node* node, std::set* dependents); @@ -91,7 +93,7 @@ private: /// loads dynamic dependencies from the node's path. /// Returns 'false' if loading dyndep info fails and 'true' otherwise. bool NodeFinished(Node* node, std::string* err); - + /// Enumerate possible steps we want for an edge. enum Want { @@ -135,7 +137,7 @@ private: /// RealCommandRunner is an implementation that actually runs commands. struct CommandRunner { virtual ~CommandRunner() {} - virtual bool CanRunMore() const = 0; + virtual size_t CanRunMore() const = 0; virtual bool StartCommand(Edge* edge) = 0; /// The result of waiting for a command. @@ -197,7 +199,7 @@ struct Builder { /// Run the build. Returns false on error. /// It is an error to call this function when AlreadyUpToDate() is true. bool Build(std::string* err); - + bool StartEdge(Edge* edge, std::string* err); std::string GetContent(Edge* edge); @@ -217,11 +219,7 @@ struct Builder { State* state_; const BuildConfig& config_; Plan plan_; -#if __cplusplus < 201703L - std::auto_ptr command_runner_; -#else - std::unique_ptr command_runner_; // auto_ptr was removed in C++17. -#endif + std::unique_ptr command_runner_; Status* status_; private: @@ -236,6 +234,7 @@ struct Builder { /// Time the build started. 
int64_t start_time_millis_; + std::string lock_file_path_; DiskInterface* disk_interface_; DependencyScan scan_; diff --git a/src/build_log.cc b/src/build_log.cc old mode 100644 new mode 100755 index 4dcd6ce..792d1a3 --- a/src/build_log.cc +++ b/src/build_log.cc @@ -53,8 +53,8 @@ using namespace std; namespace { const char kFileSignature[] = "# ninja log v%d\n"; -const int kOldestSupportedVersion = 4; -const int kCurrentVersion = 5; +const int kOldestSupportedVersion = 6; +const int kCurrentVersion = 6; // 64bit MurmurHash2, by Austin Appleby #if defined(_MSC_VER) @@ -116,9 +116,9 @@ BuildLog::LogEntry::LogEntry(const string& output) : output(output) {} BuildLog::LogEntry::LogEntry(const string& output, uint64_t command_hash, - int start_time, int end_time, TimeStamp restat_mtime) + int start_time, int end_time, TimeStamp mtime) : output(output), command_hash(command_hash), - start_time(start_time), end_time(end_time), mtime(restat_mtime) + start_time(start_time), end_time(end_time), mtime(mtime) {} BuildLog::BuildLog() @@ -279,14 +279,21 @@ LoadStatus BuildLog::Load(const string& path, string* err) { if (!log_version) { sscanf(line_start, kFileSignature, &log_version); + bool invalid_log_version = false; if (log_version < kOldestSupportedVersion) { - *err = ("build log version invalid, perhaps due to being too old; " - "starting over"); + invalid_log_version = true; + *err = "build log version is too old; starting over"; + + } else if (log_version > kCurrentVersion) { + invalid_log_version = true; + *err = "build log version is too new; starting over"; + } + if (invalid_log_version) { fclose(file); unlink(path.c_str()); - // Don't report this as a failure. An empty build log will cause + // Don't report this as a failure. A missing build log will cause // us to rebuild the outputs anyway. - return LOAD_SUCCESS; + return LOAD_NOT_FOUND; } } @@ -303,7 +310,7 @@ LoadStatus BuildLog::Load(const string& path, string* err) { *end = 0; int start_time = 0, end_time = 0; - TimeStamp restat_mtime = 0; + TimeStamp mtime = 0; start_time = atoi(start); start = end + 1; @@ -319,7 +326,7 @@ LoadStatus BuildLog::Load(const string& path, string* err) { if (!end) continue; *end = 0; - restat_mtime = strtoll(start, NULL, 10); + mtime = strtoll(start, NULL, 10); start = end + 1; end = (char*)memchr(start, kFieldSeparator, line_end - start); @@ -343,15 +350,10 @@ LoadStatus BuildLog::Load(const string& path, string* err) { entry->start_time = start_time; entry->end_time = end_time; - entry->mtime = restat_mtime; - if (log_version >= 5) { - char c = *end; *end = '\0'; - entry->command_hash = (uint64_t)strtoull(start, NULL, 16); - *end = c; - } else { - entry->command_hash = LogEntry::HashCommand(StringPiece(start, - end - start)); - } + entry->mtime = mtime; + char c = *end; *end = '\0'; + entry->command_hash = (uint64_t)strtoull(start, NULL, 16); + *end = c; } fclose(file); diff --git a/src/build_log.h b/src/build_log.h old mode 100644 new mode 100755 index 88551e3..dd72c4c --- a/src/build_log.h +++ b/src/build_log.h @@ -73,7 +73,7 @@ struct BuildLog { explicit LogEntry(const std::string& output); LogEntry(const std::string& output, uint64_t command_hash, - int start_time, int end_time, TimeStamp restat_mtime); + int start_time, int end_time, TimeStamp mtime); }; /// Lookup a previously-run command by its output path. 
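(Editor's note: illustration only, not part of the patch.) The build_log.cc hunks above raise both kOldestSupportedVersion and kCurrentVersion to 6, drop the old plain-text command fallback, and rename restat_mtime to mtime. The tests later in this diff rely on the v6 record layout: tab-separated start time, end time, mtime, output path, and the 64-bit command hash in hex. A small sketch of emitting one such record (the helper name is hypothetical; the hash is whatever HashCommand/MurmurHash2 produces):

    #include <cinttypes>
    #include <cstdio>

    // Sketch: write one "# ninja log v6" data record.
    void WriteLogRecord(FILE* f, int start_time, int end_time, int64_t mtime,
                        const char* output, uint64_t command_hash) {
      fprintf(f, "%d\t%d\t%" PRId64 "\t%s\t%" PRIx64 "\n",
              start_time, end_time, mtime, output, command_hash);
    }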
diff --git a/src/build_log_perftest.cc b/src/build_log_perftest.cc old mode 100644 new mode 100755 diff --git a/src/build_log_test.cc b/src/build_log_test.cc old mode 100644 new mode 100755 index 3718299..12c2dc7 --- a/src/build_log_test.cc +++ b/src/build_log_test.cc @@ -104,9 +104,11 @@ TEST_F(BuildLogTest, FirstWriteAddsSignature) { TEST_F(BuildLogTest, DoubleEntry) { FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v4\n"); - fprintf(f, "0\t1\t2\tout\tcommand abc\n"); - fprintf(f, "3\t4\t5\tout\tcommand def\n"); + fprintf(f, "# ninja log v6\n"); + fprintf(f, "0\t1\t2\tout\t%" PRIx64 "\n", + BuildLog::LogEntry::HashCommand("command abc")); + fprintf(f, "0\t1\t2\tout\t%" PRIx64 "\n", + BuildLog::LogEntry::HashCommand("command def")); fclose(f); string err; @@ -133,9 +135,13 @@ TEST_F(BuildLogTest, Truncate) { log1.RecordCommand(state_.edges_[1], 20, 25); log1.Close(); } - +#ifdef __USE_LARGEFILE64 + struct stat64 statbuf; + ASSERT_EQ(0, stat64(kTestFilename, &statbuf)); +#else struct stat statbuf; ASSERT_EQ(0, stat(kTestFilename, &statbuf)); +#endif ASSERT_GT(statbuf.st_size, 0); // For all possible truncations of the input file, assert that we don't @@ -169,10 +175,11 @@ TEST_F(BuildLogTest, ObsoleteOldVersion) { ASSERT_NE(err.find("version"), string::npos); } -TEST_F(BuildLogTest, SpacesInOutputV4) { +TEST_F(BuildLogTest, SpacesInOutput) { FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v4\n"); - fprintf(f, "123\t456\t456\tout with space\tcommand\n"); + fprintf(f, "# ninja log v6\n"); + fprintf(f, "123\t456\t456\tout with space\t%" PRIx64 "\n", + BuildLog::LogEntry::HashCommand("command")); fclose(f); string err; @@ -193,10 +200,12 @@ TEST_F(BuildLogTest, DuplicateVersionHeader) { // build log on Windows. This shouldn't crash, and the second version header // should be ignored. FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v4\n"); - fprintf(f, "123\t456\t456\tout\tcommand\n"); - fprintf(f, "# ninja log v4\n"); - fprintf(f, "456\t789\t789\tout2\tcommand2\n"); + fprintf(f, "# ninja log v6\n"); + fprintf(f, "123\t456\t456\tout\t%" PRIx64 "\n", + BuildLog::LogEntry::HashCommand("command")); + fprintf(f, "# ninja log v6\n"); + fprintf(f, "456\t789\t789\tout2\t%" PRIx64 "\n", + BuildLog::LogEntry::HashCommand("command2")); fclose(f); string err; @@ -243,7 +252,7 @@ struct TestDiskInterface : public DiskInterface { TEST_F(BuildLogTest, Restat) { FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v4\n" + fprintf(f, "# ninja log v6\n" "1\t2\t3\tout\tcommand\n"); fclose(f); std::string err; @@ -271,12 +280,13 @@ TEST_F(BuildLogTest, VeryLongInputLine) { // Ninja's build log buffer is currently 256kB. Lines longer than that are // silently ignored, but don't affect parsing of other lines. 
FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v4\n"); + fprintf(f, "# ninja log v6\n"); fprintf(f, "123\t456\t456\tout\tcommand start"); for (size_t i = 0; i < (512 << 10) / strlen(" more_command"); ++i) fputs(" more_command", f); fprintf(f, "\n"); - fprintf(f, "456\t789\t789\tout2\tcommand2\n"); + fprintf(f, "456\t789\t789\tout2\t%" PRIx64 "\n", + BuildLog::LogEntry::HashCommand("command2")); fclose(f); string err; diff --git a/src/build_test.cc b/src/build_test.cc old mode 100644 new mode 100755 index 4ef62b2..74aaf7e --- a/src/build_test.cc +++ b/src/build_test.cc @@ -15,6 +15,8 @@ #include "build.h" #include +#include +#include #include "build_log.h" #include "deps_log.h" @@ -63,6 +65,7 @@ TEST_F(PlanTest, Basic) { EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); ASSERT_EQ("", err); ASSERT_TRUE(plan_.more_to_do()); + PrepareForTarget("out"); Edge* edge = plan_.FindWork(); ASSERT_TRUE(edge); @@ -95,7 +98,6 @@ TEST_F(PlanTest, DoubleOutputDirect) { GetNode("mid1")->MarkDirty(); GetNode("mid2")->MarkDirty(); GetNode("out")->MarkDirty(); - string err; EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); ASSERT_EQ("", err); @@ -104,6 +106,7 @@ TEST_F(PlanTest, DoubleOutputDirect) { Edge* edge; edge = plan_.FindWork(); ASSERT_TRUE(edge); // cat in + string err; plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); ASSERT_EQ("", err); @@ -169,7 +172,6 @@ TEST_F(PlanTest, DoubleDependent) { GetNode("a1")->MarkDirty(); GetNode("a2")->MarkDirty(); GetNode("out")->MarkDirty(); - string err; EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); ASSERT_EQ("", err); @@ -284,7 +286,6 @@ TEST_F(PlanTest, PoolsWithDepthTwo) { GetNode("outb" + string(1, '1' + static_cast(i)))->MarkDirty(); } GetNode("allTheThings")->MarkDirty(); - string err; EXPECT_TRUE(plan_.AddTarget(GetNode("allTheThings"), &err)); ASSERT_EQ("", err); @@ -473,7 +474,7 @@ struct FakeCommandRunner : public CommandRunner { max_active_edges_(1), fs_(fs) {} // CommandRunner impl - virtual bool CanRunMore() const; + virtual size_t CanRunMore() const; virtual bool StartCommand(Edge* edge); virtual bool WaitForCommand(Result* result); virtual vector GetActiveEdges(); @@ -574,8 +575,11 @@ void BuildTest::RebuildTarget(const string& target, const char* manifest, builder.command_runner_.release(); } -bool FakeCommandRunner::CanRunMore() const { - return active_edges_.size() < max_active_edges_; +size_t FakeCommandRunner::CanRunMore() const { + if (active_edges_.size() < max_active_edges_) + return SIZE_MAX; + + return 0; } bool FakeCommandRunner::StartCommand(Edge* edge) { @@ -611,6 +615,7 @@ bool FakeCommandRunner::StartCommand(Edge* edge) { fs_->WriteFile(edge->outputs_[0]->path(), content); } else if (edge->rule().name() == "touch-implicit-dep-out") { string dep = edge->GetBinding("test_dependency"); + fs_->Tick(); fs_->Create(dep, ""); fs_->Tick(); for (vector::iterator out = edge->outputs_.begin(); @@ -627,7 +632,12 @@ bool FakeCommandRunner::StartCommand(Edge* edge) { fs_->Create(dep, ""); } else if (edge->rule().name() == "generate-depfile") { string dep = edge->GetBinding("test_dependency"); + bool touch_dep = edge->GetBindingBool("touch_dependency"); string depfile = edge->GetUnescapedDepfile(); + if (touch_dep) { + fs_->Tick(); + fs_->Create(dep, ""); + } string contents; for (vector::iterator out = edge->outputs_.begin(); out != edge->outputs_.end(); ++out) { @@ -635,6 +645,20 @@ bool FakeCommandRunner::StartCommand(Edge* edge) { fs_->Create((*out)->path(), ""); } fs_->Create(depfile, contents); + } else if 
(edge->rule().name() == "long-cc") {
+    string dep = edge->GetBinding("test_dependency");
+    string depfile = edge->GetUnescapedDepfile();
+    string contents;
+    for (vector<Node*>::iterator out = edge->outputs_.begin();
+         out != edge->outputs_.end(); ++out) {
+      fs_->Tick();
+      fs_->Tick();
+      fs_->Tick();
+      fs_->Create((*out)->path(), "");
+      contents += (*out)->path() + ": " + dep + "\n";
+    }
+    if (!dep.empty() && !depfile.empty())
+      fs_->Create(depfile, contents);
   } else {
     printf("unknown command\n");
     return false;
@@ -690,6 +714,18 @@ bool FakeCommandRunner::WaitForCommand(Result* result) {
   else
     result->status = ExitSuccess;
 
+  // This rule simulates an external process modifying files while the build command runs.
+  // See TestInputMtimeRaceCondition and TestInputMtimeRaceConditionWithDepFile.
+  // Note: only the first and third time the rule is run per test is the file modified, so
+  // the test can verify that subsequent runs without the race have no work to do.
+  if (edge->rule().name() == "long-cc") {
+    string dep = edge->GetBinding("test_dependency");
+    if (fs_->now_ == 4)
+      fs_->files_[dep].mtime = 3;
+    if (fs_->now_ == 10)
+      fs_->files_[dep].mtime = 9;
+  }
+
   // Provide a way for test cases to verify when an edge finishes that
   // some other edge is still active. This is useful for test cases
   // covering behavior involving multiple active edges.
@@ -954,9 +990,19 @@ TEST_F(BuildTest, DepFileOK) {
   ASSERT_EQ(1u, fs_.files_read_.size());
   EXPECT_EQ("foo.o.d", fs_.files_read_[0]);
 
-  // Expect three new edges: one generating foo.o, and two more from
-  // loading the depfile.
-  ASSERT_EQ(orig_edges + 3, (int)state_.edges_.size());
+  // Expect one new edge generating foo.o. Loading the depfile should have
+  // added nodes, but not phony edges to the graph.
+  ASSERT_EQ(orig_edges + 1, (int)state_.edges_.size());
+
+  // Verify that nodes for blah.h and bar.h were added and that they
+  // are marked as generated by a dep loader.
+  ASSERT_FALSE(state_.LookupNode("foo.o")->generated_by_dep_loader());
+  ASSERT_FALSE(state_.LookupNode("foo.c")->generated_by_dep_loader());
+  ASSERT_TRUE(state_.LookupNode("blah.h"));
+  ASSERT_TRUE(state_.LookupNode("blah.h")->generated_by_dep_loader());
+  ASSERT_TRUE(state_.LookupNode("bar.h"));
+  ASSERT_TRUE(state_.LookupNode("bar.h")->generated_by_dep_loader());
+
   // Expect our edge to now have three inputs: foo.c and two headers.
   ASSERT_EQ(3u, edge->inputs_.size());
 
@@ -1122,7 +1168,6 @@ TEST_F(BuildTest, DepFileCanonicalize) {
   ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
"rule cc\n  command = cc $in\n  depfile = $out.d\n"
"build gen/stuff\\things/foo.o: cc x\\y/z\\foo.c\n"));
-  Edge* edge = state_.edges_.back();
 
   fs_.Create("x/y/z/foo.c", "");
   GetNode("bar.h")->MarkDirty();  // Mark bar.h as missing.
@@ -1135,10 +1180,10 @@ TEST_F(BuildTest, DepFileCanonicalize) {
   // The depfile path does not get Canonicalize as it seems unnecessary.
   EXPECT_EQ("gen/stuff\\things/foo.o.d", fs_.files_read_[0]);
 
-  // Expect three new edges: one generating foo.o, and two more from
-  // loading the depfile.
-  ASSERT_EQ(orig_edges + 3, (int)state_.edges_.size());
+  // Expect one new edge generating foo.o.
+  ASSERT_EQ(orig_edges + 1, (int)state_.edges_.size());
 
   // Expect our edge to now have three inputs: foo.c and two headers.
+ Edge* edge = state_.edges_.back(); ASSERT_EQ(3u, edge->inputs_.size()); // Expect the command line we generate to only use the original input, and @@ -1471,7 +1516,7 @@ TEST_F(BuildWithLogTest, ImplicitGeneratedOutOfDate) { TEST_F(BuildWithLogTest, ImplicitGeneratedOutOfDate2) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, "rule touch-implicit-dep-out\n" -" command = touch $test_dependency ; sleep 1 ; touch $out\n" +" command = sleep 1 ; touch $test_dependency ; sleep 1 ; touch $out\n" " generator = 1\n" "build out.imp: touch-implicit-dep-out | inimp inimp2\n" " test_dependency = inimp\n")); @@ -1497,6 +1542,29 @@ TEST_F(BuildWithLogTest, ImplicitGeneratedOutOfDate2) { EXPECT_TRUE(builder_.AddTarget("out.imp", &err)); EXPECT_TRUE(builder_.AlreadyUpToDate()); EXPECT_FALSE(GetNode("out.imp")->dirty()); + + command_runner_.commands_ran_.clear(); + state_.Reset(); + builder_.Cleanup(); + builder_.plan_.Reset(); + + fs_.Tick(); + fs_.Create("inimp", ""); + + EXPECT_TRUE(builder_.AddTarget("out.imp", &err)); + EXPECT_FALSE(builder_.AlreadyUpToDate()); + + EXPECT_TRUE(builder_.Build(&err)); + EXPECT_TRUE(builder_.AlreadyUpToDate()); + + command_runner_.commands_ran_.clear(); + state_.Reset(); + builder_.Cleanup(); + builder_.plan_.Reset(); + + EXPECT_TRUE(builder_.AddTarget("out.imp", &err)); + EXPECT_TRUE(builder_.AlreadyUpToDate()); + EXPECT_FALSE(GetNode("out.imp")->dirty()); } TEST_F(BuildWithLogTest, NotInLogButOnDisk) { @@ -1800,6 +1868,52 @@ TEST_F(BuildWithLogTest, RestatMissingInput) { ASSERT_EQ(restat_mtime, log_entry->mtime); } +TEST_F(BuildWithLogTest, RestatInputChangesDueToRule) { + ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, +"rule generate-depfile\n" +" command = sleep 1 ; touch $touch_dependency; touch $out ; echo \"$out: $test_dependency\" > $depfile\n" +"build out1: generate-depfile || cat1\n" +" test_dependency = in2\n" +" touch_dependency = 1\n" +" restat = 1\n" +" depfile = out.d\n")); + + // Perform the first build. out1 is a restat rule, so its recorded mtime in the build + // log should be the time the command completes, not the time the command started. One + // of out1's discovered dependencies will have a newer mtime than when out1 started + // running, due to its command touching the dependency itself. + string err; + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ("", err); + EXPECT_EQ(2u, command_runner_.commands_ran_.size()); + EXPECT_EQ(2u, builder_.plan_.command_edge_count()); + BuildLog::LogEntry* log_entry = build_log_.LookupByOutput("out1"); + ASSERT_TRUE(NULL != log_entry); + ASSERT_EQ(2u, log_entry->mtime); + + command_runner_.commands_ran_.clear(); + state_.Reset(); + builder_.Cleanup(); + builder_.plan_.Reset(); + + fs_.Tick(); + fs_.Create("in1", ""); + + // Touching a dependency of an order-only dependency of out1 should not cause out1 to + // rebuild. 
If out1 were not a restat rule, then it would rebuild here because its + // recorded mtime would have been an earlier mtime than its most recent input's (in2) + // mtime + EXPECT_TRUE(builder_.AddTarget("out1", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(!state_.GetNode("out1", 0)->dirty()); + EXPECT_TRUE(builder_.Build(&err)); + ASSERT_EQ("", err); + EXPECT_EQ(1u, command_runner_.commands_ran_.size()); + EXPECT_EQ(1u, builder_.plan_.command_edge_count()); +} + TEST_F(BuildWithLogTest, GeneratedPlainDepfileMtime) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, "rule generate-depfile\n" @@ -1904,10 +2018,11 @@ TEST_F(BuildTest, RspFileSuccess) EXPECT_TRUE(builder_.Build(&err)); ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - // The RSP files were created - ASSERT_EQ(files_created + 2, fs_.files_created_.size()); + // The RSP files and temp file to acquire output mtimes were created + ASSERT_EQ(files_created + 3, fs_.files_created_.size()); ASSERT_EQ(1u, fs_.files_created_.count("out 2.rsp")); ASSERT_EQ(1u, fs_.files_created_.count("out 3.rsp")); + ASSERT_EQ(1u, fs_.files_created_.count(".ninja_lock")); // The RSP files were removed ASSERT_EQ(files_removed + 2, fs_.files_removed_.size()); @@ -1941,9 +2056,10 @@ TEST_F(BuildTest, RspFileFailure) { ASSERT_EQ("subcommand failed", err); ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - // The RSP file was created - ASSERT_EQ(files_created + 1, fs_.files_created_.size()); + // The RSP file and temp file to acquire output mtimes were created + ASSERT_EQ(files_created + 2, fs_.files_created_.size()); ASSERT_EQ(1u, fs_.files_created_.count("out.rsp")); + ASSERT_EQ(1u, fs_.files_created_.count(".ninja_lock")); // The RSP file was NOT removed ASSERT_EQ(files_removed, fs_.files_removed_.size()); @@ -2094,11 +2210,28 @@ TEST_F(BuildTest, DepsGccWithEmptyDepfileErrorsOut) { ASSERT_EQ(1u, command_runner_.commands_ran_.size()); } -TEST_F(BuildTest, StatusFormatElapsed) { +TEST_F(BuildTest, StatusFormatElapsed_e) { status_.BuildStarted(); // Before any task is done, the elapsed time must be zero. - EXPECT_EQ("[%/e0.000]", - status_.FormatProgressStatus("[%%/e%e]", 0)); + EXPECT_EQ("[%/e0.000]", status_.FormatProgressStatus("[%%/e%e]", 0)); +} + +TEST_F(BuildTest, StatusFormatElapsed_w) { + status_.BuildStarted(); + // Before any task is done, the elapsed time must be zero. + EXPECT_EQ("[%/e00:00]", status_.FormatProgressStatus("[%%/e%w]", 0)); +} + +TEST_F(BuildTest, StatusFormatETA) { + status_.BuildStarted(); + // Before any task is done, the ETA time must be unknown. + EXPECT_EQ("[%/E?]", status_.FormatProgressStatus("[%%/E%E]", 0)); +} + +TEST_F(BuildTest, StatusFormatTimeProgress) { + status_.BuildStarted(); + // Before any task is done, the percentage of elapsed time must be zero. 
+ EXPECT_EQ("[%/p 0%]", status_.FormatProgressStatus("[%%/p%p]", 0)); } TEST_F(BuildTest, StatusFormatReplacePlaceholder) { @@ -2125,8 +2258,8 @@ TEST_F(BuildTest, FailedDepsParse) { } struct BuildWithQueryDepsLogTest : public BuildTest { - BuildWithQueryDepsLogTest() : BuildTest(&log_) { - } + BuildWithQueryDepsLogTest() + : BuildTest(&log_), deps_log_file_("ninja_deps") {} ~BuildWithQueryDepsLogTest() { log_.Close(); @@ -2138,12 +2271,13 @@ struct BuildWithQueryDepsLogTest : public BuildTest { temp_dir_.CreateAndEnter("BuildWithQueryDepsLogTest"); std::string err; - ASSERT_TRUE(log_.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(log_.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); } ScopedTempDir temp_dir_; + ScopedFilePath deps_log_file_; DepsLog log_; }; @@ -2337,7 +2471,8 @@ TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCOnlySecondaryOutput) { /// builder_ it sets up, because we want pristine objects for /// each build. struct BuildWithDepsLogTest : public BuildTest { - BuildWithDepsLogTest() {} + BuildWithDepsLogTest() + : build_log_file_("build_log"), deps_log_file_("ninja_deps") {} virtual void SetUp() { BuildTest::SetUp(); @@ -2350,6 +2485,8 @@ struct BuildWithDepsLogTest : public BuildTest { } ScopedTempDir temp_dir_; + ScopedFilePath build_log_file_; + ScopedFilePath deps_log_file_; /// Shadow parent class builder_ so we don't accidentally use it. void* builder_; @@ -2363,6 +2500,7 @@ TEST_F(BuildWithDepsLogTest, Straightforward) { "build out: cat in1\n" " deps = gcc\n" " depfile = in1.d\n"; + { State state; ASSERT_NO_FATAL_FAILURE(AddCatRule(&state)); @@ -2370,7 +2508,7 @@ TEST_F(BuildWithDepsLogTest, Straightforward) { // Run the build once, everything should be ok. DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); @@ -2400,8 +2538,8 @@ TEST_F(BuildWithDepsLogTest, Straightforward) { // Run the build again. DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); builder.command_runner_.reset(&command_runner_); @@ -2441,7 +2579,7 @@ TEST_F(BuildWithDepsLogTest, ObsoleteDeps) { // Run the build once, everything should be ok. 
DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); @@ -2470,8 +2608,8 @@ TEST_F(BuildWithDepsLogTest, ObsoleteDeps) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); builder.command_runner_.reset(&command_runner_); @@ -2522,6 +2660,210 @@ TEST_F(BuildWithDepsLogTest, DepsIgnoredInDryRun) { builder.command_runner_.release(); } +TEST_F(BuildWithDepsLogTest, TestInputMtimeRaceCondition) { + string err; + const char* manifest = + "rule long-cc\n" + " command = long-cc\n" + "build out: long-cc in1\n" + " test_dependency = in1\n"; + + State state; + ASSERT_NO_FATAL_FAILURE(AddCatRule(&state)); + ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); + + BuildLog build_log; + ASSERT_TRUE(build_log.Load(build_log_file_.path(), &err)); + ASSERT_TRUE(build_log.OpenForWrite(build_log_file_.path(), *this, &err)); + + DepsLog deps_log; + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); + + BuildLog::LogEntry* log_entry = NULL; + { + Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); + builder.command_runner_.reset(&command_runner_); + command_runner_.commands_ran_.clear(); + + // Run the build, out gets built, dep file is created + EXPECT_TRUE(builder.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder.Build(&err)); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + + // See that an entry in the logfile is created. 
the input_mtime is 1 since that was + // the mtime of in1 when the command was started + log_entry = build_log.LookupByOutput("out"); + ASSERT_TRUE(NULL != log_entry); + ASSERT_EQ(1u, log_entry->mtime); + + builder.command_runner_.release(); + } + + { + Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); + builder.command_runner_.reset(&command_runner_); + command_runner_.commands_ran_.clear(); + + // Trigger the build again - "out" should rebuild despite having a newer mtime than + // "in1", since "in1" was touched during the build of out (simulated by changing its + // mtime in the the test builder's WaitForCommand() which runs before FinishCommand() + command_runner_.commands_ran_.clear(); + state.Reset(); + EXPECT_TRUE(builder.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder.Build(&err)); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + + // Check that the logfile entry is still correct + log_entry = build_log.LookupByOutput("out"); + ASSERT_TRUE(NULL != log_entry); + ASSERT_TRUE(fs_.files_["in1"].mtime < log_entry->mtime); + builder.command_runner_.release(); + } + + { + Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); + builder.command_runner_.reset(&command_runner_); + command_runner_.commands_ran_.clear(); + + // And a subsequent run should not have any work to do + command_runner_.commands_ran_.clear(); + state.Reset(); + EXPECT_TRUE(builder.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder.AlreadyUpToDate()); + + builder.command_runner_.release(); + } +} + +TEST_F(BuildWithDepsLogTest, TestInputMtimeRaceConditionWithDepFile) { + string err; + const char* manifest = + "rule long-cc\n" + " command = long-cc\n" + "build out: long-cc\n" + " deps = gcc\n" + " depfile = out.d\n" + " test_dependency = header.h\n"; + + fs_.Create("header.h", ""); + + State state; + ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); + + BuildLog build_log; + ASSERT_TRUE(build_log.Load(build_log_file_.path(), &err)); + ASSERT_TRUE(build_log.OpenForWrite(build_log_file_.path(), *this, &err)); + + DepsLog deps_log; + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); + + { + Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); + builder.command_runner_.reset(&command_runner_); + + // Run the build, out gets built, dep file is created + EXPECT_TRUE(builder.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder.Build(&err)); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + + // See that an entry in the logfile is created. the mtime is 1 due to the command + // starting when the file system's mtime was 1. 
+ BuildLog::LogEntry* log_entry = build_log.LookupByOutput("out"); + ASSERT_TRUE(NULL != log_entry); + ASSERT_EQ(1u, log_entry->mtime); + + builder.command_runner_.release(); + } + + { + // Trigger the build again - "out" will rebuild since its newest input mtime (header.h) + // is newer than the recorded mtime of out in the build log + Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); + builder.command_runner_.reset(&command_runner_); + command_runner_.commands_ran_.clear(); + + state.Reset(); + EXPECT_TRUE(builder.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder.Build(&err)); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + + builder.command_runner_.release(); + } + + { + // Trigger the build again - "out" won't rebuild since the file wasn't updated during + // the previous build + Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); + builder.command_runner_.reset(&command_runner_); + command_runner_.commands_ran_.clear(); + + state.Reset(); + EXPECT_TRUE(builder.AddTarget("out", &err)); + ASSERT_EQ("", err); + ASSERT_TRUE(builder.AlreadyUpToDate()); + + builder.command_runner_.release(); + } + + // touch the header to trigger a rebuild + fs_.Create("header.h", ""); + ASSERT_EQ(fs_.now_, 7); + + { + // Rebuild. This time, long-cc will cause header.h to be updated while the build is + // in progress + Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); + builder.command_runner_.reset(&command_runner_); + command_runner_.commands_ran_.clear(); + + state.Reset(); + EXPECT_TRUE(builder.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder.Build(&err)); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + + builder.command_runner_.release(); + } + + { + // Rebuild. Because header.h is now in the deplog for out, it should be detectable as + // a change-while-in-progress and should cause a rebuild of out. + Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); + builder.command_runner_.reset(&command_runner_); + command_runner_.commands_ran_.clear(); + + state.Reset(); + EXPECT_TRUE(builder.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder.Build(&err)); + ASSERT_EQ(1u, command_runner_.commands_ran_.size()); + + builder.command_runner_.release(); + } + + { + // This time, the header.h file was not updated during the build, so the target should + // not be considered dirty. + Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); + builder.command_runner_.reset(&command_runner_); + command_runner_.commands_ran_.clear(); + + state.Reset(); + EXPECT_TRUE(builder.AddTarget("out", &err)); + ASSERT_EQ("", err); + EXPECT_TRUE(builder.AlreadyUpToDate()); + + builder.command_runner_.release(); + } +} + /// Check that a restat rule generating a header cancels compilations correctly. TEST_F(BuildTest, RestatDepfileDependency) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, @@ -2564,7 +2906,7 @@ TEST_F(BuildWithDepsLogTest, RestatDepfileDependencyDepsLog) { // Run the build once, everything should be ok. DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); @@ -2590,8 +2932,8 @@ TEST_F(BuildWithDepsLogTest, RestatDepfileDependencyDepsLog) { // Run the build again. 
DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); builder.command_runner_.reset(&command_runner_); @@ -2623,7 +2965,7 @@ TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) { // Run the build once, everything should be ok. DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); @@ -2643,8 +2985,8 @@ TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); @@ -2656,9 +2998,9 @@ TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) { EXPECT_TRUE(builder.AddTarget("fo o.o", &err)); ASSERT_EQ("", err); - // Expect three new edges: one generating fo o.o, and two more from - // loading the depfile. - ASSERT_EQ(3u, state.edges_.size()); + // Expect one new edge generating fo o.o, loading the depfile should + // not generate new edges. + ASSERT_EQ(1u, state.edges_.size()); // Expect our edge to now have three inputs: foo.c and two headers. ASSERT_EQ(3u, edge->inputs_.size()); @@ -2694,7 +3036,7 @@ TEST_F(BuildWithDepsLogTest, DiscoveredDepDuringBuildChanged) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); @@ -2717,8 +3059,8 @@ TEST_F(BuildWithDepsLogTest, DiscoveredDepDuringBuildChanged) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); @@ -2740,8 +3082,8 @@ TEST_F(BuildWithDepsLogTest, DiscoveredDepDuringBuildChanged) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0); @@ -2769,7 +3111,7 @@ TEST_F(BuildWithDepsLogTest, DepFileDepsLogCanonicalize) { // Run the build once, everything should be ok. 
DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); @@ -2791,23 +3133,21 @@ TEST_F(BuildWithDepsLogTest, DepFileDepsLogCanonicalize) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); builder.command_runner_.reset(&command_runner_); - Edge* edge = state.edges_.back(); - state.GetNode("bar.h", 0)->MarkDirty(); // Mark bar.h as missing. EXPECT_TRUE(builder.AddTarget("a/b/c/d/e/fo o.o", &err)); ASSERT_EQ("", err); - // Expect three new edges: one generating fo o.o, and two more from - // loading the depfile. - ASSERT_EQ(3u, state.edges_.size()); + // Expect one new edge generating fo o.o. + ASSERT_EQ(1u, state.edges_.size()); // Expect our edge to now have three inputs: foo.c and two headers. + Edge* edge = state.edges_.back(); ASSERT_EQ(3u, edge->inputs_.size()); // Expect the command line we generate to only use the original input. @@ -2862,11 +3202,13 @@ TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) { fs_.Create("out.d", "out: header.h"); fs_.Create("header.h", ""); - RebuildTarget("out", manifest, "build_log", "ninja_deps"); + RebuildTarget("out", manifest, build_log_file_.c_str(), + deps_log_file_.c_str()); ASSERT_EQ(2u, command_runner_.commands_ran_.size()); // Sanity: this rebuild should be NOOP - RebuildTarget("out", manifest, "build_log", "ninja_deps"); + RebuildTarget("out", manifest, build_log_file_.c_str(), + deps_log_file_.c_str()); ASSERT_EQ(0u, command_runner_.commands_ran_.size()); // Touch 'header.in', blank dependencies log (create a different one). 
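(Editor's note: illustration only, not part of the patch.) The test hunks above replace hard-coded log file names such as "ninja_deps" and "build_log" with ScopedFilePath members (build_log_file_, deps_log_file_) so each test owns and cleans up its on-disk state. The class itself is defined elsewhere in the patch's test harness and may differ; a plausible minimal shape, assuming it simply removes the file when it goes out of scope, would be:

    #include <cstdio>
    #include <string>

    // Hypothetical sketch of a ScopedFilePath-like helper.
    struct ScopedFilePath {
      explicit ScopedFilePath(const std::string& path) : path_(path) {}
      ~ScopedFilePath() { std::remove(path_.c_str()); }  // best-effort cleanup
      const std::string& path() const { return path_; }
      const char* c_str() const { return path_.c_str(); }
     private:
      std::string path_;
    };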
@@ -2875,12 +3217,14 @@ TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) { fs_.Tick(); fs_.Create("header.in", ""); + ScopedFilePath deps2_file_("ninja_deps2"); + // (switch to a new blank deps_log "ninja_deps2") - RebuildTarget("out", manifest, "build_log", "ninja_deps2"); + RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str()); ASSERT_EQ(2u, command_runner_.commands_ran_.size()); // Sanity: this build should be NOOP - RebuildTarget("out", manifest, "build_log", "ninja_deps2"); + RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str()); ASSERT_EQ(0u, command_runner_.commands_ran_.size()); // Check that invalidating deps by target timestamp also works here @@ -2888,11 +3232,11 @@ TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) { fs_.Tick(); fs_.Create("header.in", ""); fs_.Create("out", ""); - RebuildTarget("out", manifest, "build_log", "ninja_deps2"); + RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str()); ASSERT_EQ(2u, command_runner_.commands_ran_.size()); // And this build should be NOOP again - RebuildTarget("out", manifest, "build_log", "ninja_deps2"); + RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str()); ASSERT_EQ(0u, command_runner_.commands_ran_.size()); } @@ -2909,7 +3253,10 @@ TEST_F(BuildTest, WrongOutputInDepfileCausesRebuild) { fs_.Create("header.h", ""); fs_.Create("foo.o.d", "bar.o.d: header.h\n"); - RebuildTarget("foo.o", manifest, "build_log", "ninja_deps"); + ScopedFilePath build_log("build_log"); + ScopedFilePath deps_file("ninja_deps"); + + RebuildTarget("foo.o", manifest, build_log.c_str(), deps_file.c_str()); ASSERT_EQ(1u, command_runner_.commands_ran_.size()); } @@ -3042,9 +3389,10 @@ TEST_F(BuildTest, DyndepBuild) { ASSERT_EQ(2u, fs_.files_read_.size()); EXPECT_EQ("dd-in", fs_.files_read_[0]); EXPECT_EQ("dd", fs_.files_read_[1]); - ASSERT_EQ(2u + files_created, fs_.files_created_.size()); + ASSERT_EQ(3u + files_created, fs_.files_created_.size()); EXPECT_EQ(1u, fs_.files_created_.count("dd")); EXPECT_EQ(1u, fs_.files_created_.count("out")); + EXPECT_EQ(1u, fs_.files_created_.count(".ninja_lock")); } TEST_F(BuildTest, DyndepBuildSyntaxError) { @@ -3355,8 +3703,8 @@ TEST_F(BuildTest, DyndepBuildDiscoverOutputAndDepfileInput) { EXPECT_TRUE(builder_.AddTarget("out", &err)); ASSERT_EQ("", err); - // Loading the depfile gave tmp.imp a phony input edge. - ASSERT_TRUE(GetNode("tmp.imp")->in_edge()->is_phony()); + // Loading the depfile did not give tmp.imp a phony input edge. 
+ ASSERT_FALSE(GetNode("tmp.imp")->in_edge()); EXPECT_TRUE(builder_.Build(&err)); EXPECT_EQ("", err); @@ -3865,7 +4213,7 @@ TEST_F(BuildWithDepsLogTest, ValidationThroughDepfile) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); @@ -3900,8 +4248,8 @@ TEST_F(BuildWithDepsLogTest, ValidationThroughDepfile) { ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); + ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err)); + ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err)); ASSERT_EQ("", err); Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0); diff --git a/src/canon_perftest.cc b/src/canon_perftest.cc old mode 100644 new mode 100755 diff --git a/src/clean.cc b/src/clean.cc old mode 100644 new mode 100755 index 575bf6b..ceffe64 --- a/src/clean.cc +++ b/src/clean.cc @@ -127,6 +127,7 @@ int Cleaner::CleanAll(bool generator) { int Cleaner::CleanDead(const BuildLog::Entries& entries) { Reset(); PrintHeader(); + LoadDyndeps(); for (BuildLog::Entries::const_iterator i = entries.begin(); i != entries.end(); ++i) { Node* n = state_->LookupNode(i->first); // Detecting stale outputs works as follows: @@ -292,7 +293,8 @@ void Cleaner::LoadDyndeps() { // Load dyndep files that exist, before they are cleaned. for (vector::iterator e = state_->edges_.begin(); e != state_->edges_.end(); ++e) { - if (Node* dyndep = (*e)->dyndep_) { + Node* dyndep; + if ((dyndep = (*e)->dyndep_) && dyndep->dyndep_pending()) { // Capture and ignore errors loading the dyndep file. // We clean as much of the graph as we know. std::string err; diff --git a/src/clean.h b/src/clean.h old mode 100644 new mode 100755 diff --git a/src/clean_test.cc b/src/clean_test.cc old mode 100644 new mode 100755 diff --git a/src/clparser.cc b/src/clparser.cc old mode 100644 new mode 100755 diff --git a/src/clparser.h b/src/clparser.h old mode 100644 new mode 100755 diff --git a/src/clparser_perftest.cc b/src/clparser_perftest.cc old mode 100644 new mode 100755 diff --git a/src/clparser_test.cc b/src/clparser_test.cc old mode 100644 new mode 100755 diff --git a/src/debug_flags.cc b/src/debug_flags.cc old mode 100644 new mode 100755 diff --git a/src/debug_flags.h b/src/debug_flags.h old mode 100644 new mode 100755 diff --git a/src/depfile_parser.cc b/src/depfile_parser.cc old mode 100644 new mode 100755 index 98fba2e..7ce7290 --- a/src/depfile_parser.cc +++ b/src/depfile_parser.cc @@ -54,6 +54,7 @@ bool DepfileParser::Parse(string* content, string* err) { bool have_target = false; bool parsing_targets = true; bool poisoned_input = false; + bool is_empty = true; while (in < end) { bool have_newline = false; // out: current output point (typically same as in, but can fall behind @@ -335,6 +336,7 @@ yy32: } if (len > 0) { + is_empty = false; StringPiece piece = StringPiece(filename, len); // If we've seen this as an input before, skip it. 
std::vector::iterator pos = std::find(ins_.begin(), ins_.end(), piece); @@ -363,7 +365,7 @@ yy32: poisoned_input = false; } } - if (!have_target) { + if (!have_target && !is_empty) { *err = "expected ':' in depfile"; return false; } diff --git a/src/depfile_parser.h b/src/depfile_parser.h old mode 100644 new mode 100755 diff --git a/src/depfile_parser.in.cc b/src/depfile_parser.in.cc old mode 100644 new mode 100755 index 75ba982..4b5f5fe --- a/src/depfile_parser.in.cc +++ b/src/depfile_parser.in.cc @@ -53,6 +53,7 @@ bool DepfileParser::Parse(string* content, string* err) { bool have_target = false; bool parsing_targets = true; bool poisoned_input = false; + bool is_empty = true; while (in < end) { bool have_newline = false; // out: current output point (typically same as in, but can fall behind @@ -171,6 +172,7 @@ bool DepfileParser::Parse(string* content, string* err) { } if (len > 0) { + is_empty = false; StringPiece piece = StringPiece(filename, len); // If we've seen this as an input before, skip it. std::vector::iterator pos = std::find(ins_.begin(), ins_.end(), piece); @@ -199,7 +201,7 @@ bool DepfileParser::Parse(string* content, string* err) { poisoned_input = false; } } - if (!have_target) { + if (!have_target && !is_empty) { *err = "expected ':' in depfile"; return false; } diff --git a/src/depfile_parser_perftest.cc b/src/depfile_parser_perftest.cc old mode 100644 new mode 100755 diff --git a/src/depfile_parser_test.cc b/src/depfile_parser_test.cc old mode 100644 new mode 100755 index 8886258..947ae76 --- a/src/depfile_parser_test.cc +++ b/src/depfile_parser_test.cc @@ -378,3 +378,24 @@ TEST_F(DepfileParserTest, BuggyMP) { "z:\n", &err)); ASSERT_EQ("inputs may not also have inputs", err); } + +TEST_F(DepfileParserTest, EmptyFile) { + std::string err; + EXPECT_TRUE(Parse("", &err)); + ASSERT_EQ(0u, parser_.outs_.size()); + ASSERT_EQ(0u, parser_.ins_.size()); +} + +TEST_F(DepfileParserTest, EmptyLines) { + std::string err; + EXPECT_TRUE(Parse("\n\n", &err)); + ASSERT_EQ(0u, parser_.outs_.size()); + ASSERT_EQ(0u, parser_.ins_.size()); +} + +TEST_F(DepfileParserTest, MissingColon) { + // The file is not empty but is missing a colon separator. + std::string err; + EXPECT_FALSE(Parse("foo.o foo.c\n", &err)); + EXPECT_EQ("expected ':' in depfile", err); +} diff --git a/src/deps_log.cc b/src/deps_log.cc old mode 100644 new mode 100755 index 7e48b38..e32a7a9 --- a/src/deps_log.cc +++ b/src/deps_log.cc @@ -361,7 +361,7 @@ bool DepsLog::Recompact(const string& path, string* err) { return true; } -bool DepsLog::IsDepsEntryLiveFor(Node* node) { +bool DepsLog::IsDepsEntryLiveFor(const Node* node) { // Skip entries that don't have in-edges or whose edges don't have a // "deps" attribute. They were in the deps log from previous builds, but // the the files they were for were removed from the build and their deps diff --git a/src/deps_log.h b/src/deps_log.h old mode 100644 new mode 100755 index 09cc41c..2a1b188 --- a/src/deps_log.h +++ b/src/deps_log.h @@ -97,7 +97,7 @@ struct DepsLog { /// past but are no longer part of the manifest. This function returns if /// this is the case for a given node. This function is slow, don't call /// it from code that runs on every build. - bool IsDepsEntryLiveFor(Node* node); + static bool IsDepsEntryLiveFor(const Node* node); /// Used for tests. 
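(Editor's note: illustration only, not part of the patch.) The depfile_parser hunks above add an is_empty flag so that an empty or whitespace-only depfile no longer fails with "expected ':' in depfile"; the error is now reserved for depfiles that contain tokens but no colon, exactly what the new EmptyFile, EmptyLines, and MissingColon tests pin down. A short usage sketch against ninja's DepfileParser (headers and construction assumed from the surrounding diff):

    #include <string>
    #include "depfile_parser.h"

    // Sketch: empty input parses cleanly; a colon-less non-empty input does not.
    bool CheckEmptyDepfileBehaviour() {
      std::string err;

      DepfileParser empty_parser;
      std::string empty;                                   // ""
      bool ok_empty = empty_parser.Parse(&empty, &err);    // expected: true

      DepfileParser bad_parser;
      std::string no_colon = "foo.o foo.c\n";
      bool ok_bad = bad_parser.Parse(&no_colon, &err);     // expected: false

      return ok_empty && !ok_bad;
    }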
const std::vector& nodes() const { return nodes_; } diff --git a/src/deps_log_test.cc b/src/deps_log_test.cc old mode 100644 new mode 100755 index 13fcc78..cb1c925 --- a/src/deps_log_test.cc +++ b/src/deps_log_test.cc @@ -138,9 +138,13 @@ TEST_F(DepsLogTest, DoubleEntry) { deps.push_back(state.GetNode("bar.h", 0)); log.RecordDeps(state.GetNode("out.o", 0), 1, deps); log.Close(); - +#ifdef __USE_LARGEFILE64 + struct stat64 st; + ASSERT_EQ(0, stat64(kTestFilename, &st)); +#else struct stat st; ASSERT_EQ(0, stat(kTestFilename, &st)); +#endif file_size = (int)st.st_size; ASSERT_GT(file_size, 0); } @@ -160,9 +164,13 @@ TEST_F(DepsLogTest, DoubleEntry) { deps.push_back(state.GetNode("bar.h", 0)); log.RecordDeps(state.GetNode("out.o", 0), 1, deps); log.Close(); - +#ifdef __USE_LARGEFILE64 + struct stat64 st; + ASSERT_EQ(0, stat64(kTestFilename, &st)); +#else struct stat st; ASSERT_EQ(0, stat(kTestFilename, &st)); +#endif int file_size_2 = (int)st.st_size; ASSERT_EQ(file_size, file_size_2); } @@ -198,9 +206,13 @@ TEST_F(DepsLogTest, Recompact) { log.RecordDeps(state.GetNode("other_out.o", 0), 1, deps); log.Close(); - +#ifdef __USE_LARGEFILE64 + struct stat64 st; + ASSERT_EQ(0, stat64(kTestFilename, &st)); +#else struct stat st; ASSERT_EQ(0, stat(kTestFilename, &st)); +#endif file_size = (int)st.st_size; ASSERT_GT(file_size, 0); } @@ -222,8 +234,13 @@ TEST_F(DepsLogTest, Recompact) { log.RecordDeps(state.GetNode("out.o", 0), 1, deps); log.Close(); +#ifdef __USE_LARGEFILE64 + struct stat64 st; + ASSERT_EQ(0, stat64(kTestFilename, &st)); +#else struct stat st; ASSERT_EQ(0, stat(kTestFilename, &st)); +#endif file_size_2 = (int)st.st_size; // The file should grow to record the new deps. ASSERT_GT(file_size_2, file_size); @@ -273,8 +290,13 @@ TEST_F(DepsLogTest, Recompact) { ASSERT_EQ(other_out, log.nodes()[other_out->id()]); // The file should have shrunk a bit for the smaller deps. +#ifdef __USE_LARGEFILE64 + struct stat64 st; + ASSERT_EQ(0, stat64(kTestFilename, &st)); +#else struct stat st; ASSERT_EQ(0, stat(kTestFilename, &st)); +#endif file_size_3 = (int)st.st_size; ASSERT_LT(file_size_3, file_size_2); } @@ -317,8 +339,13 @@ TEST_F(DepsLogTest, Recompact) { ASSERT_EQ(-1, state.LookupNode("baz.h")->id()); // The file should have shrunk more. +#ifdef __USE_LARGEFILE64 + struct stat64 st; + ASSERT_EQ(0, stat64(kTestFilename, &st)); +#else struct stat st; ASSERT_EQ(0, stat(kTestFilename, &st)); +#endif int file_size_4 = (int)st.st_size; ASSERT_LT(file_size_4, file_size_3); } @@ -374,8 +401,13 @@ TEST_F(DepsLogTest, Truncated) { } // Get the file size. +#ifdef __USE_LARGEFILE64 + struct stat64 st; + ASSERT_EQ(0, stat64(kTestFilename, &st)); +#else struct stat st; ASSERT_EQ(0, stat(kTestFilename, &st)); +#endif // Try reloading at truncated sizes. // Track how many nodes/deps were found; they should decrease with @@ -434,8 +466,13 @@ TEST_F(DepsLogTest, TruncatedRecovery) { // Shorten the file, corrupting the last record. 
{ +#ifdef __USE_LARGEFILE64 + struct stat64 st; + ASSERT_EQ(0, stat64(kTestFilename, &st)); +#else struct stat st; ASSERT_EQ(0, stat(kTestFilename, &st)); +#endif string err; ASSERT_TRUE(Truncate(kTestFilename, st.st_size - 2, &err)); } diff --git a/src/disk_interface.cc b/src/disk_interface.cc old mode 100644 new mode 100755 index e73d901..0f27e9d --- a/src/disk_interface.cc +++ b/src/disk_interface.cc @@ -23,9 +23,10 @@ #include #ifdef _WIN32 -#include -#include #include // _mkdir +#include + +#include #else #include #endif @@ -110,7 +111,8 @@ bool StatAllFilesInDir(const string& dir, map* stamps, if (find_handle == INVALID_HANDLE_VALUE) { DWORD win_err = GetLastError(); - if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) + if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND || + win_err == ERROR_DIRECTORY) return true; *err = "FindFirstFileExA(" + dir + "): " + GetLastErrorString(); return false; @@ -156,13 +158,33 @@ bool DiskInterface::MakeDirs(const string& path) { } // RealDiskInterface ----------------------------------------------------------- +RealDiskInterface::RealDiskInterface() +#ifdef _WIN32 +: use_cache_(false), long_paths_enabled_(false) { + setlocale(LC_ALL, ""); + + // Probe ntdll.dll for RtlAreLongPathsEnabled, and call it if it exists. + HINSTANCE ntdll_lib = ::GetModuleHandleW(L"ntdll"); + if (ntdll_lib) { + typedef BOOLEAN(WINAPI FunctionType)(); + auto* func_ptr = reinterpret_cast( + ::GetProcAddress(ntdll_lib, "RtlAreLongPathsEnabled")); + if (func_ptr) { + long_paths_enabled_ = (*func_ptr)(); + } + } +} +#else +{} +#endif TimeStamp RealDiskInterface::Stat(const string& path, string* err) const { METRIC_RECORD("node stat"); #ifdef _WIN32 // MSDN: "Naming Files, Paths, and Namespaces" // http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx - if (!path.empty() && path[0] != '\\' && path.size() > MAX_PATH) { + if (!path.empty() && !AreLongPathsEnabled() && path[0] != '\\' && + path.size() > MAX_PATH) { ostringstream err_stream; err_stream << "Stat(" << path << "): Filename longer than " << MAX_PATH << " characters"; @@ -194,9 +216,14 @@ TimeStamp RealDiskInterface::Stat(const string& path, string* err) const { } DirCache::iterator di = ci->second.find(base); return di != ci->second.end() ? di->second : 0; +#else +#ifdef __USE_LARGEFILE64 + struct stat64 st; + if (stat64(path.c_str(), &st) < 0) { #else struct stat st; if (stat(path.c_str(), &st) < 0) { +#endif if (errno == ENOENT || errno == ENOTDIR) return 0; *err = "stat(" + path + "): " + strerror(errno); @@ -267,7 +294,7 @@ FileReader::Status RealDiskInterface::ReadFile(const string& path, int RealDiskInterface::RemoveFile(const string& path) { #ifdef _WIN32 - DWORD attributes = GetFileAttributes(path.c_str()); + DWORD attributes = GetFileAttributesA(path.c_str()); if (attributes == INVALID_FILE_ATTRIBUTES) { DWORD win_err = GetLastError(); if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) { @@ -278,7 +305,7 @@ int RealDiskInterface::RemoveFile(const string& path) { // On Windows Ninja should behave the same: // https://github.com/ninja-build/ninja/issues/1886 // Skip error checking. If this fails, accept whatever happens below. - SetFileAttributes(path.c_str(), attributes & ~FILE_ATTRIBUTE_READONLY); + SetFileAttributesA(path.c_str(), attributes & ~FILE_ATTRIBUTE_READONLY); } if (attributes & FILE_ATTRIBUTE_DIRECTORY) { // remove() deletes both files and directories. 
On Windows we have to @@ -286,7 +313,7 @@ int RealDiskInterface::RemoveFile(const string& path) { // used on a directory) // This fixes the behavior of ninja -t clean in some cases // https://github.com/ninja-build/ninja/issues/828 - if (!RemoveDirectory(path.c_str())) { + if (!RemoveDirectoryA(path.c_str())) { DWORD win_err = GetLastError(); if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) { return 1; @@ -296,7 +323,7 @@ int RealDiskInterface::RemoveFile(const string& path) { return -1; } } else { - if (!DeleteFile(path.c_str())) { + if (!DeleteFileA(path.c_str())) { DWORD win_err = GetLastError(); if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) { return 1; @@ -327,3 +354,9 @@ void RealDiskInterface::AllowStatCache(bool allow) { cache_.clear(); #endif } + +#ifdef _WIN32 +bool RealDiskInterface::AreLongPathsEnabled(void) const { + return long_paths_enabled_; +} +#endif diff --git a/src/disk_interface.h b/src/disk_interface.h old mode 100644 new mode 100755 index bc29ab7..74200b8 --- a/src/disk_interface.h +++ b/src/disk_interface.h @@ -69,11 +69,7 @@ struct DiskInterface: public FileReader { /// Implementation of DiskInterface that actually hits the disk. struct RealDiskInterface : public DiskInterface { - RealDiskInterface() -#ifdef _WIN32 - : use_cache_(false) -#endif - {} + RealDiskInterface(); virtual ~RealDiskInterface() {} virtual TimeStamp Stat(const std::string& path, std::string* err) const; virtual bool MakeDir(const std::string& path); @@ -85,11 +81,19 @@ struct RealDiskInterface : public DiskInterface { /// Whether stat information can be cached. Only has an effect on Windows. void AllowStatCache(bool allow); +#ifdef _WIN32 + /// Whether long paths are enabled. Only has an effect on Windows. + bool AreLongPathsEnabled() const; +#endif + private: #ifdef _WIN32 /// Whether stat information can be cached. bool use_cache_; + /// Whether long paths are enabled. + bool long_paths_enabled_; + typedef std::map DirCache; // TODO: Neither a map nor a hashmap seems ideal here. If the statcache // works out, come up with a better data structure. diff --git a/src/disk_interface_test.cc b/src/disk_interface_test.cc old mode 100644 new mode 100755 index 5e952ed..e8d869c --- a/src/disk_interface_test.cc +++ b/src/disk_interface_test.cc @@ -17,6 +17,7 @@ #ifdef _WIN32 #include #include +#include #endif #include "disk_interface.h" @@ -65,6 +66,17 @@ TEST_F(DiskInterfaceTest, StatMissingFile) { EXPECT_EQ("", err); } +TEST_F(DiskInterfaceTest, StatMissingFileWithCache) { + disk_.AllowStatCache(true); + string err; + + // On Windows, the errno for FindFirstFileExA, which is used when the stat + // cache is enabled, is different when the directory name is not a directory. 
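// Sketch of the runtime probe performed by the new RealDiskInterface
// constructor: RtlAreLongPathsEnabled() only exists in newer ntdll builds, so
// it is resolved with GetProcAddress instead of being linked directly.
// ProbeLongPaths is an illustrative stand-in, not the ninja function itself.
#ifdef _WIN32
#include <windows.h>

static bool ProbeLongPaths() {
  typedef BOOLEAN(WINAPI* FunctionType)();
  HMODULE ntdll = ::GetModuleHandleW(L"ntdll");
  if (!ntdll)
    return false;
  FunctionType func = reinterpret_cast<FunctionType>(
      ::GetProcAddress(ntdll, "RtlAreLongPathsEnabled"));
  return func != NULL && func() != FALSE;
}
#endif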
+ ASSERT_TRUE(Touch("notadir")); + EXPECT_EQ(0, disk_.Stat("notadir/nosuchfile", &err)); + EXPECT_EQ("", err); +} + TEST_F(DiskInterfaceTest, StatBadPath) { string err; #ifdef _WIN32 @@ -85,6 +97,24 @@ TEST_F(DiskInterfaceTest, StatExistingFile) { EXPECT_EQ("", err); } +#ifdef _WIN32 +TEST_F(DiskInterfaceTest, StatExistingFileWithLongPath) { + string err; + char currentdir[32767]; + _getcwd(currentdir, sizeof(currentdir)); + const string filename = string(currentdir) + +"\\filename_with_256_characters_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\ +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\ +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\ +xxxxxxxxxxxxxxxxxxxxx"; + const string prefixed = "\\\\?\\" + filename; + ASSERT_TRUE(Touch(prefixed.c_str())); + EXPECT_GT(disk_.Stat(disk_.AreLongPathsEnabled() ? + filename : prefixed, &err), 1); + EXPECT_EQ("", err); +} +#endif + TEST_F(DiskInterfaceTest, StatExistingDir) { string err; ASSERT_TRUE(disk_.MakeDir("subdir")); @@ -198,7 +228,7 @@ TEST_F(DiskInterfaceTest, MakeDirs) { EXPECT_EQ(0, fclose(f)); #ifdef _WIN32 string path2 = "another\\with\\back\\\\slashes\\"; - EXPECT_TRUE(disk_.MakeDirs(path2.c_str())); + EXPECT_TRUE(disk_.MakeDirs(path2)); FILE* f2 = fopen((path2 + "a_file").c_str(), "w"); EXPECT_TRUE(f2); EXPECT_EQ(0, fclose(f2)); diff --git a/src/dyndep.cc b/src/dyndep.cc old mode 100644 new mode 100755 index dd4ed09..a0d699d --- a/src/dyndep.cc +++ b/src/dyndep.cc @@ -97,15 +97,10 @@ bool DyndepLoader::UpdateEdge(Edge* edge, Dyndeps const* dyndeps, for (std::vector::const_iterator i = dyndeps->implicit_outputs_.begin(); i != dyndeps->implicit_outputs_.end(); ++i) { - if (Edge* old_in_edge = (*i)->in_edge()) { - // This node already has an edge producing it. Fail with an error - // unless the edge was generated by ImplicitDepLoader, in which - // case we can replace it with the now-known real producer. - if (!old_in_edge->generated_by_dep_loader_) { - *err = "multiple rules generate " + (*i)->path(); - return false; - } - old_in_edge->outputs_.clear(); + if ((*i)->in_edge()) { + // This node already has an edge producing it. 
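// Illustration of the "\\?\" extended-length prefix exercised by the new
// StatExistingFileWithLongPath test: unless the Windows 10 long-path policy
// is enabled, Win32 path APIs reject paths longer than MAX_PATH (260) that do
// not carry the prefix. MaybeAddLongPathPrefix is a hypothetical helper, not
// part of the patch.
#ifdef _WIN32
#include <windows.h>
#include <string>

static std::string MaybeAddLongPathPrefix(const std::string& absolute_path,
                                          bool long_paths_enabled) {
  if (long_paths_enabled || absolute_path.size() <= MAX_PATH)
    return absolute_path;
  return "\\\\?\\" + absolute_path;  // e.g. \\?\C:\some\very\long\path\file
}
#endif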
+ *err = "multiple rules generate " + (*i)->path(); + return false; } (*i)->set_in_edge(edge); } diff --git a/src/dyndep.h b/src/dyndep.h old mode 100644 new mode 100755 diff --git a/src/dyndep_parser.cc b/src/dyndep_parser.cc old mode 100644 new mode 100755 diff --git a/src/dyndep_parser.h b/src/dyndep_parser.h old mode 100644 new mode 100755 diff --git a/src/dyndep_parser_test.cc b/src/dyndep_parser_test.cc old mode 100644 new mode 100755 diff --git a/src/edit_distance.cc b/src/edit_distance.cc old mode 100644 new mode 100755 diff --git a/src/edit_distance.h b/src/edit_distance.h old mode 100644 new mode 100755 diff --git a/src/edit_distance_test.cc b/src/edit_distance_test.cc old mode 100644 new mode 100755 diff --git a/src/eval_env.cc b/src/eval_env.cc old mode 100644 new mode 100755 diff --git a/src/eval_env.h b/src/eval_env.h old mode 100644 new mode 100755 diff --git a/src/exit_status.h b/src/exit_status.h old mode 100644 new mode 100755 diff --git a/src/getopt.c b/src/getopt.c old mode 100644 new mode 100755 diff --git a/src/getopt.h b/src/getopt.h old mode 100644 new mode 100755 diff --git a/src/graph.cc b/src/graph.cc old mode 100644 new mode 100755 index 43ba45a..31b109a --- a/src/graph.cc +++ b/src/graph.cc @@ -32,7 +32,6 @@ using namespace std; bool Node::Stat(DiskInterface* disk_interface, string* err) { - METRIC_RECORD("node stat"); mtime_ = disk_interface->Stat(path_, err); if (mtime_ == -1) { return false; @@ -298,37 +297,34 @@ bool DependencyScan::RecomputeOutputDirty(const Edge* edge, return false; } - BuildLog::LogEntry* entry = 0; - // Dirty if we're missing the output. if (!output->exists()) { EXPLAIN("output %s doesn't exist", output->path().c_str()); return true; } + BuildLog::LogEntry* entry = 0; + + // If this is a restat rule, we may have cleaned the output in a + // previous run and stored the command start time in the build log. + // We don't want to consider a restat rule's outputs as dirty unless + // an input changed since the last run, so we'll skip checking the + // output file's actual mtime and simply check the recorded mtime from + // the log against the most recent input's mtime (see below) + bool used_restat = false; + if (edge->GetBindingBool("restat") && build_log() && + (entry = build_log()->LookupByOutput(output->path()))) { + used_restat = true; + } + // Dirty if the output is older than the input. - if (most_recent_input && output->mtime() < most_recent_input->mtime()) { - TimeStamp output_mtime = output->mtime(); - - // If this is a restat rule, we may have cleaned the output with a restat - // rule in a previous run and stored the most recent input mtime in the - // build log. Use that mtime instead, so that the file will only be - // considered dirty if an input was modified since the previous run. - bool used_restat = false; - if (edge->GetBindingBool("restat") && build_log() && - (entry = build_log()->LookupByOutput(output->path()))) { - output_mtime = entry->mtime; - used_restat = true; - } - - if (output_mtime < most_recent_input->mtime()) { - EXPLAIN("%soutput %s older than most recent input %s " - "(%" PRId64 " vs %" PRId64 ")", - used_restat ? 
"restat of " : "", output->path().c_str(), - most_recent_input->path().c_str(), - output_mtime, most_recent_input->mtime()); - return true; - } + if (!used_restat && most_recent_input && output->mtime() < most_recent_input->mtime()) { + EXPLAIN("output %s older than most recent input %s " + "(%" PRId64 " vs %" PRId64 ")", + output->path().c_str(), + most_recent_input->path().c_str(), + output->mtime(), most_recent_input->mtime()); + return true; } if (build_log()) { @@ -346,7 +342,9 @@ bool DependencyScan::RecomputeOutputDirty(const Edge* edge, // May also be dirty due to the mtime in the log being older than the // mtime of the most recent input. This can occur even when the mtime // on disk is newer if a previous run wrote to the output file but - // exited with an error or was interrupted. + // exited with an error or was interrupted. If this was a restat rule, + // then we only check the recorded mtime against the most recent input + // mtime and ignore the actual output's mtime above. EXPLAIN("recorded mtime of %s older than most recent input %s (%" PRId64 " vs %" PRId64 ")", output->path().c_str(), most_recent_input->path().c_str(), entry->mtime, most_recent_input->mtime()); @@ -393,7 +391,7 @@ struct EdgeEnv : public Env { std::string MakePathList(const Node* const* span, size_t size, char sep) const; private: - vector lookups_; + std::vector lookups_; const Edge* const edge_; EscapeKind escape_in_out_; bool recursive_; @@ -403,21 +401,50 @@ string EdgeEnv::LookupVariable(const string& var) { if (var == "in" || var == "in_newline") { int explicit_deps_count = edge_->inputs_.size() - edge_->implicit_deps_ - edge_->order_only_deps_; -#if __cplusplus >= 201103L return MakePathList(edge_->inputs_.data(), explicit_deps_count, -#else - return MakePathList(&edge_->inputs_[0], explicit_deps_count, -#endif var == "in" ? ' ' : '\n'); } else if (var == "out") { int explicit_outs_count = edge_->outputs_.size() - edge_->implicit_outs_; return MakePathList(&edge_->outputs_[0], explicit_outs_count, ' '); } + // Technical note about the lookups_ vector. + // + // This is used to detect cycles during recursive variable expansion + // which can be seen as a graph traversal problem. Consider the following + // example: + // + // rule something + // command = $foo $foo $var1 + // var1 = $var2 + // var2 = $var3 + // var3 = $var1 + // foo = FOO + // + // Each variable definition can be seen as a node in a graph that looks + // like the following: + // + // command --> foo + // | + // v + // var1 <-----. + // | | + // v | + // var2 ---> var3 + // + // The lookups_ vector is used as a stack of visited nodes/variables + // during recursive expansion. Entering a node adds an item to the + // stack, leaving the node removes it. + // + // The recursive_ flag is used as a small performance optimization + // to never record the starting node in the stack when beginning a new + // expansion, since in most cases, expansions are not recursive + // at all. + // if (recursive_) { - vector::const_iterator it; - if ((it = find(lookups_.begin(), lookups_.end(), var)) != lookups_.end()) { - string cycle; + auto it = std::find(lookups_.begin(), lookups_.end(), var); + if (it != lookups_.end()) { + std::string cycle; for (; it != lookups_.end(); ++it) cycle.append(*it + " -> "); cycle.append(var); @@ -427,13 +454,17 @@ string EdgeEnv::LookupVariable(const string& var) { // See notes on BindingEnv::LookupWithFallback. 
const EvalString* eval = edge_->rule_->GetBinding(var); - if (recursive_ && eval) + bool record_varname = recursive_ && eval; + if (record_varname) lookups_.push_back(var); // In practice, variables defined on rules never use another rule variable. // For performance, only start checking for cycles after the first lookup. recursive_ = true; - return edge_->env_->LookupWithFallback(var, eval, this); + std::string result = edge_->env_->LookupWithFallback(var, eval, this); + if (record_varname) + lookups_.pop_back(); + return result; } std::string EdgeEnv::MakePathList(const Node* const* const span, @@ -696,7 +727,6 @@ bool ImplicitDepLoader::ProcessDepfileDeps( Node* node = state_->GetNode(*i, slash_bits); *implicit_dep = node; node->AddOutEdge(edge); - CreatePhonyInEdge(node); } return true; @@ -724,7 +754,6 @@ bool ImplicitDepLoader::LoadDepsFromLog(Edge* edge, string* err) { Node* node = deps->nodes[i]; *implicit_dep = node; node->AddOutEdge(edge); - CreatePhonyInEdge(node); } return true; } @@ -736,21 +765,3 @@ vector::iterator ImplicitDepLoader::PreallocateSpace(Edge* edge, edge->implicit_deps_ += count; return edge->inputs_.end() - edge->order_only_deps_ - count; } - -void ImplicitDepLoader::CreatePhonyInEdge(Node* node) { - if (node->in_edge()) - return; - - Edge* phony_edge = state_->AddEdge(&State::kPhonyRule); - phony_edge->generated_by_dep_loader_ = true; - node->set_in_edge(phony_edge); - phony_edge->outputs_.push_back(node); - - // RecomputeDirty might not be called for phony_edge if a previous call - // to RecomputeDirty had caused the file to be stat'ed. Because previous - // invocations of RecomputeDirty would have seen this node without an - // input edge (and therefore ready), we have to set outputs_ready_ to true - // to avoid a potential stuck build. If we do call RecomputeDirty for - // this node, it will simply set outputs_ready_ to the correct value. - phony_edge->outputs_ready_ = true; -} diff --git a/src/graph.h b/src/graph.h old mode 100644 new mode 100755 index 9de67d2..433b7e3 --- a/src/graph.h +++ b/src/graph.h @@ -38,14 +38,7 @@ struct State; /// it's dirty, mtime, etc. struct Node { Node(const std::string& path, uint64_t slash_bits) - : path_(path), - slash_bits_(slash_bits), - mtime_(-1), - exists_(ExistenceStatusUnknown), - dirty_(false), - dyndep_pending_(false), - in_edge_(NULL), - id_(-1) {} + : path_(path), slash_bits_(slash_bits) {} /// Return false on error. bool Stat(DiskInterface* disk_interface, std::string* err); @@ -104,6 +97,14 @@ struct Node { Edge* in_edge() const { return in_edge_; } void set_in_edge(Edge* edge) { in_edge_ = edge; } + /// Indicates whether this node was generated from a depfile or dyndep file, + /// instead of being a regular input or output from the Ninja manifest. + bool generated_by_dep_loader() const { return generated_by_dep_loader_; } + + void set_generated_by_dep_loader(bool value) { + generated_by_dep_loader_ = value; + } + int id() const { return id_; } void set_id(int id) { id_ = id; } @@ -119,13 +120,13 @@ private: /// Set bits starting from lowest for backslashes that were normalized to /// forward slashes by CanonicalizePath. See |PathDecanonicalized|. - uint64_t slash_bits_; + uint64_t slash_bits_ = 0; /// Possible values of mtime_: /// -1: file hasn't been examined /// 0: we looked, and file doesn't exist /// >0: actual file's mtime, or the latest mtime of its dependencies if it doesn't exist - TimeStamp mtime_; + TimeStamp mtime_ = -1; enum ExistenceStatus { /// The file hasn't been examined. 
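// Minimal sketch of the visited-stack idea behind EdgeEnv::lookups_ (the
// CycleDetector type here is illustrative): push a variable when expansion
// enters it, pop when expansion leaves, and report a cycle if the variable is
// already somewhere on the stack.
#include <algorithm>
#include <string>
#include <vector>

struct CycleDetector {
  std::vector<std::string> stack_;

  // Returns an "a -> b -> a" style description if expanding 'var' would
  // recurse; otherwise pushes 'var' and returns an empty string.
  std::string EnterOrReport(const std::string& var) {
    std::vector<std::string>::iterator it =
        std::find(stack_.begin(), stack_.end(), var);
    if (it != stack_.end()) {
      std::string cycle;
      for (; it != stack_.end(); ++it)
        cycle += *it + " -> ";
      return cycle + var;
    }
    stack_.push_back(var);
    return std::string();
  }

  void Leave() { stack_.pop_back(); }
};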
@@ -135,20 +136,27 @@ private: /// The path is an actual file. mtime_ will be the file's mtime. ExistenceStatusExists }; - ExistenceStatus exists_; + ExistenceStatus exists_ = ExistenceStatusUnknown; /// Dirty is true when the underlying file is out-of-date. /// But note that Edge::outputs_ready_ is also used in judging which /// edges to build. - bool dirty_; + bool dirty_ = false; /// Store whether dyndep information is expected from this node but /// has not yet been loaded. - bool dyndep_pending_; + bool dyndep_pending_ = false; + + /// Set to true when this node comes from a depfile, a dyndep file or the + /// deps log. If it does not have a producing edge, the build should not + /// abort if it is missing (as for regular source inputs). By default + /// all nodes have this flag set to true, since the deps and build logs + /// can be loaded before the manifest. + bool generated_by_dep_loader_ = true; /// The Edge that produces this Node, or NULL when there is no /// known edge to produce it. - Edge* in_edge_; + Edge* in_edge_ = nullptr; /// All Edges that use this Node as an input. std::vector out_edges_; @@ -157,7 +165,7 @@ private: std::vector validation_out_edges_; /// A dense integer id for the node, assigned and used by DepsLog. - int id_; + int id_ = -1; }; /// An edge in the dependency graph; links between Nodes using Rules. @@ -172,7 +180,8 @@ struct Edge { : rule_(NULL), pool_(NULL), dyndep_(NULL), env_(NULL), mark_(VisitNone), id_(0), outputs_ready_(false), deps_loaded_(false), deps_missing_(false), generated_by_dep_loader_(false), - implicit_deps_(0), order_only_deps_(0), implicit_outs_(0) {} + command_start_time_(0), implicit_deps_(0), order_only_deps_(0), + implicit_outs_(0) {} /// Return true if all inputs' in-edges are ready. bool AllInputsReady() const; @@ -198,19 +207,20 @@ struct Edge { // Append all edge explicit inputs to |*out|. Possibly with shell escaping. void CollectInputs(bool shell_escape, std::vector* out) const; - const Rule* rule_; - Pool* pool_; + const Rule* rule_ = nullptr; + Pool* pool_ = nullptr; std::vector inputs_; std::vector outputs_; std::vector validations_; - Node* dyndep_; - BindingEnv* env_; - VisitMark mark_; - size_t id_; - bool outputs_ready_; - bool deps_loaded_; - bool deps_missing_; - bool generated_by_dep_loader_; + Node* dyndep_ = nullptr; + BindingEnv* env_ = nullptr; + VisitMark mark_ = VisitNone; + size_t id_ = 0; + bool outputs_ready_ = false; + bool deps_loaded_ = false; + bool deps_missing_ = false; + bool generated_by_dep_loader_ = false; + TimeStamp command_start_time_ = 0; const Rule& rule() const { return *rule_; } Pool* pool() const { return pool_; } @@ -225,8 +235,8 @@ struct Edge { // don't cause the target to rebuild. // These are stored in inputs_ in that order, and we keep counts of // #2 and #3 when we need to access the various subsets. - int implicit_deps_; - int order_only_deps_; + int implicit_deps_ = 0; + int order_only_deps_ = 0; bool is_implicit(size_t index) { return index >= inputs_.size() - order_only_deps_ - implicit_deps_ && !is_order_only(index); @@ -240,7 +250,7 @@ struct Edge { // 2) implicit outs, which the target generates but are not part of $out. // These are stored in outputs_ in that order, and we keep a count of // #2 to use when we need to access the various subsets. 
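// Illustration (not the real Node/Edge declarations) of the pattern these
// graph.h hunks apply: C++11 in-class default member initializers replace a
// long constructor initializer list, so adding a field can no longer silently
// leave it uninitialized in some constructor.
#include <stdint.h>
#include <string>

struct NodeLike {
  explicit NodeLike(const std::string& path) : path_(path) {}

  std::string path_;
  int64_t mtime_ = -1;    // -1 means "not stat()ed yet"
  bool dirty_ = false;
  void* in_edge_ = nullptr;
  int id_ = -1;           // dense id assigned later
};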
- int implicit_outs_; + int implicit_outs_ = 0; bool is_implicit_out(size_t index) const { return index >= outputs_.size() - implicit_outs_; } @@ -248,6 +258,10 @@ struct Edge { bool is_phony() const; bool use_console() const; bool maybe_phonycycle_diagnostic() const; + + // Historical info: how long did this edge take last time, + // as per .ninja_log, if known? Defaults to -1 if unknown. + int64_t prev_elapsed_time_millis = -1; }; struct EdgeCmp { @@ -295,11 +309,6 @@ struct ImplicitDepLoader { /// an iterator pointing at the first new space. std::vector::iterator PreallocateSpace(Edge* edge, int count); - /// If we don't have a edge that generates this input already, - /// create one; this makes us not abort if the input is missing, - /// but instead will rebuild in that circumstance. - void CreatePhonyInEdge(Node* node); - State* state_; DiskInterface* disk_interface_; DepsLog* deps_log_; diff --git a/src/graph_test.cc b/src/graph_test.cc old mode 100644 new mode 100755 index 9dba8af..110a729 --- a/src/graph_test.cc +++ b/src/graph_test.cc @@ -977,3 +977,4 @@ TEST_F(GraphTest, PhonyDepsMtimes) { EXPECT_EQ(out1->mtime(), out1Mtime1); EXPECT_TRUE(out1->dirty()); } + diff --git a/src/graphviz.cc b/src/graphviz.cc old mode 100644 new mode 100755 diff --git a/src/graphviz.h b/src/graphviz.h old mode 100644 new mode 100755 diff --git a/src/hash_collision_bench.cc b/src/hash_collision_bench.cc old mode 100644 new mode 100755 diff --git a/src/hash_map.h b/src/hash_map.h old mode 100644 new mode 100755 index 55d2c9d..4353609 --- a/src/hash_map.h +++ b/src/hash_map.h @@ -53,7 +53,6 @@ unsigned int MurmurHash2(const void* key, size_t len) { return h; } -#if (__cplusplus >= 201103L) || (_MSC_VER >= 1900) #include namespace std { @@ -68,56 +67,13 @@ struct hash { }; } -#elif defined(_MSC_VER) -#include - -using stdext::hash_map; -using stdext::hash_compare; - -struct StringPieceCmp : public hash_compare { - size_t operator()(const StringPiece& key) const { - return MurmurHash2(key.str_, key.len_); - } - bool operator()(const StringPiece& a, const StringPiece& b) const { - int cmp = memcmp(a.str_, b.str_, min(a.len_, b.len_)); - if (cmp < 0) { - return true; - } else if (cmp > 0) { - return false; - } else { - return a.len_ < b.len_; - } - } -}; - -#else -#include - -using __gnu_cxx::hash_map; - -namespace __gnu_cxx { -template<> -struct hash { - size_t operator()(StringPiece key) const { - return MurmurHash2(key.str_, key.len_); - } -}; -} -#endif - /// A template for hash_maps keyed by a StringPiece whose string is /// owned externally (typically by the values). Use like: /// ExternalStringHash::Type foos; to make foos into a hash /// mapping StringPiece => Foo*. template struct ExternalStringHashMap { -#if (__cplusplus >= 201103L) || (_MSC_VER >= 1900) typedef std::unordered_map Type; -#elif defined(_MSC_VER) - typedef hash_map Type; -#else - typedef hash_map Type; -#endif }; #endif // NINJA_MAP_H_ diff --git a/src/includes_normalize-win32.cc b/src/includes_normalize-win32.cc old mode 100644 new mode 100755 diff --git a/src/includes_normalize.h b/src/includes_normalize.h old mode 100644 new mode 100755 index 7d50556..8d29a64 --- a/src/includes_normalize.h +++ b/src/includes_normalize.h @@ -12,6 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. 
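// Sketch of the single code path hash_map.h keeps after dropping the
// pre-C++11 branches: a std::hash specialization for a non-owning string view
// keyed into std::unordered_map. ViewLike and the FNV-style hash below are
// stand-ins; ninja itself hashes StringPiece with MurmurHash2.
#include <cstring>
#include <functional>
#include <unordered_map>

struct ViewLike {
  const char* str_;
  size_t len_;
  bool operator==(const ViewLike& o) const {
    return len_ == o.len_ && std::memcmp(str_, o.str_, len_) == 0;
  }
};

namespace std {
template <>
struct hash<ViewLike> {
  size_t operator()(ViewLike key) const {
    unsigned long long h = 0xcbf29ce484222325ULL;  // FNV-1a offset basis
    for (size_t i = 0; i < key.len_; ++i)
      h = (h ^ (unsigned char)key.str_[i]) * 0x100000001b3ULL;
    return (size_t)h;
  }
};
}  // namespace std

template <typename V>
struct ExternalStringHashMapLike {
  typedef std::unordered_map<ViewLike, V> Type;
};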
+#ifndef INCLUDES_NORMALIZE_H_ +#define INCLUDES_NORMALIZE_H_ + #include #include @@ -38,3 +41,5 @@ struct IncludesNormalize { std::string relative_to_; std::vector split_relative_to_; }; + +#endif // INCLUDES_NORMALIZE_H_ diff --git a/src/includes_normalize_test.cc b/src/includes_normalize_test.cc old mode 100644 new mode 100755 index 9214f53..12965f9 --- a/src/includes_normalize_test.cc +++ b/src/includes_normalize_test.cc @@ -117,10 +117,10 @@ TEST(IncludesNormalize, LongInvalidPath) { // Construct max size path having cwd prefix. // kExactlyMaxPath = "$cwd\\a\\aaaa...aaaa\0"; char kExactlyMaxPath[_MAX_PATH + 1]; - ASSERT_NE(_getcwd(kExactlyMaxPath, sizeof kExactlyMaxPath), NULL); + ASSERT_STRNE(_getcwd(kExactlyMaxPath, sizeof kExactlyMaxPath), NULL); int cwd_len = strlen(kExactlyMaxPath); - ASSERT_LE(cwd_len + 3 + 1, _MAX_PATH) + ASSERT_LE(cwd_len + 3 + 1, _MAX_PATH); kExactlyMaxPath[cwd_len] = '\\'; kExactlyMaxPath[cwd_len + 1] = 'a'; kExactlyMaxPath[cwd_len + 2] = '\\'; diff --git a/src/inline.sh b/src/inline.sh index bf20a7d..5092fa2 100755 --- a/src/inline.sh +++ b/src/inline.sh @@ -18,7 +18,6 @@ # It expects the name of the variable as its first argument, and reads # stdin and writes stdout. -set -e varname="$1" # 'od' and 'sed' may not be available on all platforms, and may not support the diff --git a/src/json.cc b/src/json.cc old mode 100644 new mode 100755 diff --git a/src/json.h b/src/json.h old mode 100644 new mode 100755 diff --git a/src/json_test.cc b/src/json_test.cc old mode 100644 new mode 100755 diff --git a/src/lexer.cc b/src/lexer.cc old mode 100644 new mode 100755 diff --git a/src/lexer.h b/src/lexer.h old mode 100644 new mode 100755 diff --git a/src/lexer.in.cc b/src/lexer.in.cc old mode 100644 new mode 100755 diff --git a/src/lexer_test.cc b/src/lexer_test.cc old mode 100644 new mode 100755 diff --git a/src/line_printer.cc b/src/line_printer.cc old mode 100644 new mode 100755 index a3d0528..12e82b3 --- a/src/line_printer.cc +++ b/src/line_printer.cc @@ -46,10 +46,6 @@ LinePrinter::LinePrinter() : have_blank_line_(true), console_locked_(false) { } #endif supports_color_ = smart_terminal_; - if (!supports_color_) { - const char* clicolor_force = getenv("CLICOLOR_FORCE"); - supports_color_ = clicolor_force && string(clicolor_force) != "0"; - } #ifdef _WIN32 // Try enabling ANSI escape sequence support on Windows 10 terminals. 
if (supports_color_) { @@ -61,6 +57,10 @@ LinePrinter::LinePrinter() : have_blank_line_(true), console_locked_(false) { } } #endif + if (!supports_color_) { + const char* clicolor_force = getenv("CLICOLOR_FORCE"); + supports_color_ = clicolor_force && std::string(clicolor_force) != "0"; + } } void LinePrinter::Print(string to_print, LineType type) { @@ -118,6 +118,7 @@ void LinePrinter::Print(string to_print, LineType type) { have_blank_line_ = false; } else { printf("%s\n", to_print.c_str()); + fflush(stdout); } } diff --git a/src/line_printer.h b/src/line_printer.h old mode 100644 new mode 100755 diff --git a/src/load_status.h b/src/load_status.h old mode 100644 new mode 100755 diff --git a/src/manifest_parser.cc b/src/manifest_parser.cc old mode 100644 new mode 100755 index 8db6eb3..103c365 --- a/src/manifest_parser.cc +++ b/src/manifest_parser.cc @@ -14,8 +14,10 @@ #include "manifest_parser.h" +#include #include #include + #include #include "graph.h" @@ -416,6 +418,7 @@ bool ManifestParser::ParseEdge(string* err) { if (dgi == edge->inputs_.end()) { return lexer_.Error("dyndep '" + dyndep + "' is not an input", err); } + assert(!edge->dyndep_->generated_by_dep_loader()); } return true; diff --git a/src/manifest_parser.h b/src/manifest_parser.h old mode 100644 new mode 100755 diff --git a/src/manifest_parser_perftest.cc b/src/manifest_parser_perftest.cc old mode 100644 new mode 100755 diff --git a/src/manifest_parser_test.cc b/src/manifest_parser_test.cc old mode 100644 new mode 100755 diff --git a/src/metrics.cc b/src/metrics.cc old mode 100644 new mode 100755 index dbaf221..632ae43 --- a/src/metrics.cc +++ b/src/metrics.cc @@ -18,13 +18,8 @@ #include #include -#ifndef _WIN32 -#include -#else -#include -#endif - #include +#include #include "util.h" @@ -34,49 +29,40 @@ Metrics* g_metrics = NULL; namespace { -#ifndef _WIN32 /// Compute a platform-specific high-res timer value that fits into an int64. int64_t HighResTimer() { - timeval tv; - if (gettimeofday(&tv, NULL) < 0) - Fatal("gettimeofday: %s", strerror(errno)); - return (int64_t)tv.tv_sec * 1000*1000 + tv.tv_usec; + auto now = chrono::steady_clock::now(); + return chrono::duration_cast( + now.time_since_epoch()) + .count(); } -/// Convert a delta of HighResTimer() values to microseconds. -int64_t TimerToMicros(int64_t dt) { - // No conversion necessary. - return dt; -} -#else -int64_t LargeIntegerToInt64(const LARGE_INTEGER& i) { - return ((int64_t)i.HighPart) << 32 | i.LowPart; -} - -int64_t HighResTimer() { - LARGE_INTEGER counter; - if (!QueryPerformanceCounter(&counter)) - Fatal("QueryPerformanceCounter: %s", GetLastErrorString().c_str()); - return LargeIntegerToInt64(counter); +constexpr int64_t GetFrequency() { + // If numerator isn't 1 then we lose precision and that will need to be + // assessed. + static_assert(std::chrono::steady_clock::period::num == 1, + "Numerator must be 1"); + return std::chrono::steady_clock::period::den / + std::chrono::steady_clock::period::num; } int64_t TimerToMicros(int64_t dt) { - static int64_t ticks_per_sec = 0; - if (!ticks_per_sec) { - LARGE_INTEGER freq; - if (!QueryPerformanceFrequency(&freq)) - Fatal("QueryPerformanceFrequency: %s", GetLastErrorString().c_str()); - ticks_per_sec = LargeIntegerToInt64(freq); - } - // dt is in ticks. We want microseconds. - return (dt * 1000000) / ticks_per_sec; + return chrono::duration_cast( + std::chrono::steady_clock::duration{ dt }) + .count(); +} + +int64_t TimerToMicros(double dt) { + // dt is in ticks. We want microseconds. 
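// Sketch of the portable timer that replaces gettimeofday() and
// QueryPerformanceCounter() in metrics.cc: raw std::chrono::steady_clock
// ticks are stored, and converted to microseconds only when reported, which
// is what lets ScopedMetric skip the conversion on every measurement.
#include <chrono>
#include <stdint.h>

static int64_t HighResTimerTicks() {
  return std::chrono::steady_clock::now().time_since_epoch().count();
}

static int64_t TicksToMicros(int64_t dt) {
  return std::chrono::duration_cast<std::chrono::microseconds>(
             std::chrono::steady_clock::duration(dt))
      .count();
}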
+ using DoubleSteadyClock = + std::chrono::duration; + return chrono::duration_cast(DoubleSteadyClock{ dt }) + .count(); } -#endif } // anonymous namespace - ScopedMetric::ScopedMetric(Metric* metric) { metric_ = metric; if (!metric_) @@ -87,7 +73,9 @@ ScopedMetric::~ScopedMetric() { if (!metric_) return; metric_->count++; - int64_t dt = TimerToMicros(HighResTimer() - start_); + // Leave in the timer's natural frequency to avoid paying the conversion cost + // on every measurement. + int64_t dt = HighResTimer() - start_; metric_->sum += dt; } @@ -112,18 +100,23 @@ void Metrics::Report() { for (vector::iterator i = metrics_.begin(); i != metrics_.end(); ++i) { Metric* metric = *i; - double total = metric->sum / (double)1000; - double avg = metric->sum / (double)metric->count; + uint64_t micros = TimerToMicros(metric->sum); + double total = micros / (double)1000; + double avg = micros / (double)metric->count; printf("%-*s\t%-6d\t%-8.1f\t%.1f\n", width, metric->name.c_str(), metric->count, avg, total); } } -uint64_t Stopwatch::Now() const { - return TimerToMicros(HighResTimer()); +double Stopwatch::Elapsed() const { + // Convert to micros after converting to double to minimize error. + return 1e-6 * TimerToMicros(static_cast(NowRaw() - started_)); +} + +uint64_t Stopwatch::NowRaw() const { + return HighResTimer(); } int64_t GetTimeMillis() { return TimerToMicros(HighResTimer()) / 1000; } - diff --git a/src/metrics.h b/src/metrics.h old mode 100644 new mode 100755 index 11239b5..937d905 --- a/src/metrics.h +++ b/src/metrics.h @@ -28,11 +28,10 @@ struct Metric { std::string name; /// Number of times we've hit the code path. int count; - /// Total time (in micros) we've spent on the code path. + /// Total time (in platform-dependent units) we've spent on the code path. int64_t sum; }; - /// A scoped object for recording a metric across the body of a function. /// Used by the METRIC_RECORD macro. struct ScopedMetric { @@ -68,15 +67,15 @@ struct Stopwatch { Stopwatch() : started_(0) {} /// Seconds since Restart() call. - double Elapsed() const { - return 1e-6 * static_cast(Now() - started_); - } + double Elapsed() const; - void Restart() { started_ = Now(); } + void Restart() { started_ = NowRaw(); } private: uint64_t started_; - uint64_t Now() const; + // Return the current time using the native frequency of the high resolution + // timer. + uint64_t NowRaw() const; }; /// The primary interface to metrics. Use METRIC_RECORD("foobar") at the top @@ -86,6 +85,13 @@ struct Stopwatch { g_metrics ? g_metrics->NewMetric(name) : NULL; \ ScopedMetric metrics_h_scoped(metrics_h_metric); +/// A variant of METRIC_RECORD that doesn't record anything if |condition| +/// is false. +#define METRIC_RECORD_IF(name, condition) \ + static Metric* metrics_h_metric = \ + g_metrics ? g_metrics->NewMetric(name) : NULL; \ + ScopedMetric metrics_h_scoped((condition) ? 
metrics_h_metric : NULL); + extern Metrics* g_metrics; #endif // NINJA_METRICS_H_ diff --git a/src/minidump-win32.cc b/src/minidump-win32.cc old mode 100644 new mode 100755 diff --git a/src/missing_deps.cc b/src/missing_deps.cc old mode 100644 new mode 100755 diff --git a/src/missing_deps.h b/src/missing_deps.h old mode 100644 new mode 100755 index ae57074..7a615da --- a/src/missing_deps.h +++ b/src/missing_deps.h @@ -19,9 +19,7 @@ #include #include -#if __cplusplus >= 201103L #include -#endif struct DepsLog; struct DiskInterface; @@ -68,13 +66,8 @@ struct MissingDependencyScanner { int missing_dep_path_count_; private: -#if __cplusplus >= 201103L using InnerAdjacencyMap = std::unordered_map; using AdjacencyMap = std::unordered_map; -#else - typedef std::map InnerAdjacencyMap; - typedef std::map AdjacencyMap; -#endif AdjacencyMap adjacency_map_; }; diff --git a/src/missing_deps_test.cc b/src/missing_deps_test.cc old mode 100644 new mode 100755 index db66885..12ae8ed --- a/src/missing_deps_test.cc +++ b/src/missing_deps_test.cc @@ -33,7 +33,12 @@ struct MissingDependencyScannerTest : public testing::Test { scanner_(&delegate_, &deps_log_, &state_, &filesystem_) { std::string err; deps_log_.OpenForWrite(kTestDepsLogFilename, &err); - ASSERT_EQ("", err); + EXPECT_EQ("", err); + } + + ~MissingDependencyScannerTest() { + // Remove test file. + deps_log_.Close(); } MissingDependencyScanner& scanner() { return scanner_; } @@ -79,6 +84,7 @@ struct MissingDependencyScannerTest : public testing::Test { ASSERT_EQ(1u, scanner().generator_rules_.count(rule)); } + ScopedFilePath scoped_file_path_ = kTestDepsLogFilename; MissingDependencyTestDelegate delegate_; Rule generator_rule_; Rule compile_rule_; @@ -159,4 +165,3 @@ TEST_F(MissingDependencyScannerTest, CycleInGraph) { std::vector nodes = state_.RootNodes(&err); ASSERT_NE("", err); } - diff --git a/src/msvc_helper-win32.cc b/src/msvc_helper-win32.cc old mode 100644 new mode 100755 diff --git a/src/msvc_helper.h b/src/msvc_helper.h old mode 100644 new mode 100755 index 568b9f9..699b0a1 --- a/src/msvc_helper.h +++ b/src/msvc_helper.h @@ -12,6 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. +#ifndef MSVC_HELPER_H_ +#define MSVC_HELPER_H_ + #include std::string EscapeForDepfile(const std::string& path); @@ -30,3 +33,5 @@ struct CLWrapper { void* env_block_; }; + +#endif // MSVC_HELPER_H_ diff --git a/src/msvc_helper_main-win32.cc b/src/msvc_helper_main-win32.cc old mode 100644 new mode 100755 diff --git a/src/msvc_helper_test.cc b/src/msvc_helper_test.cc old mode 100644 new mode 100755 diff --git a/src/ninja.cc b/src/ninja.cc old mode 100644 new mode 100755 index 2b71eb1..d2a7501 --- a/src/ninja.cc +++ b/src/ninja.cc @@ -156,6 +156,10 @@ struct NinjaMain : public BuildLogUser { /// @return true if the manifest was rebuilt. bool RebuildManifest(const char* input_file, string* err, Status* status); + /// For each edge, lookup in build log how long it took last time, + /// and record that in the edge itself. It will be used for ETA predicton. + void ParsePreviousElapsedTimes(); + /// Build the targets listed on the command line. /// @return an exit code. 
int RunBuild(int argc, char** argv, Status* status); @@ -289,6 +293,19 @@ bool NinjaMain::RebuildManifest(const char* input_file, string* err, return true; } +void NinjaMain::ParsePreviousElapsedTimes() { + for (Edge* edge : state_.edges_) { + for (Node* out : edge->outputs_) { + BuildLog::LogEntry* log_entry = build_log_.LookupByOutput(out->path()); + if (!log_entry) + continue; // Maybe we'll have log entry for next output of this edge? + edge->prev_elapsed_time_millis = + log_entry->end_time - log_entry->start_time; + break; // Onto next edge. + } + } +} + Node* NinjaMain::CollectTarget(const char* cpath, string* err) { string path = cpath; if (path.empty()) { @@ -532,7 +549,7 @@ int NinjaMain::ToolDeps(const Options* options, int argc, char** argv) { if (argc == 0) { for (vector::const_iterator ni = deps_log_.nodes().begin(); ni != deps_log_.nodes().end(); ++ni) { - if (deps_log_.IsDepsEntryLiveFor(*ni)) + if (DepsLog::IsDepsEntryLiveFor(*ni)) nodes.push_back(*ni); } } else { @@ -672,6 +689,7 @@ int NinjaMain::ToolRules(const Options* options, int argc, char* argv[]) { } } printf("\n"); + fflush(stdout); } return 0; } @@ -881,7 +899,10 @@ std::string EvaluateCommandWithRspfile(const Edge* edge, return command; size_t index = command.find(rspfile); - if (index == 0 || index == string::npos || command[index - 1] != '@') + if (index == 0 || index == string::npos || + (command[index - 1] != '@' && + command.find("--option-file=") != index - 14 && + command.find("-f ") != index - 3)) return command; string rspfile_content = edge->GetBinding("rspfile_content"); @@ -891,7 +912,13 @@ std::string EvaluateCommandWithRspfile(const Edge* edge, rspfile_content.replace(newline_index, 1, 1, ' '); ++newline_index; } - command.replace(index - 1, rspfile.length() + 1, rspfile_content); + if (command[index - 1] == '@') { + command.replace(index - 1, rspfile.length() + 1, rspfile_content); + } else if (command.find("-f ") == index - 3) { + command.replace(index - 3, rspfile.length() + 3, rspfile_content); + } else { // --option-file syntax + command.replace(index - 14, rspfile.length() + 14, rspfile_content); + } return command; } @@ -1356,7 +1383,9 @@ int NinjaMain::RunBuild(int argc, char** argv, Status* status) { disk_interface_.AllowStatCache(false); if (builder.AlreadyUpToDate()) { - status->Info("no work to do."); + if (config_.verbosity != BuildConfig::NO_STATUS_UPDATE) { + status->Info("no work to do."); + } return 0; } @@ -1400,7 +1429,7 @@ class DeferGuessParallelism { BuildConfig* config; DeferGuessParallelism(BuildConfig* config) - : config(config), needGuess(true) {} + : needGuess(true), config(config) {} void Refresh() { if (needGuess) { @@ -1588,6 +1617,8 @@ NORETURN void real_main(int argc, char** argv) { exit(1); } + ninja.ParsePreviousElapsedTimes(); + int result = ninja.RunBuild(argc, argv, status); if (g_metrics) ninja.DumpMetrics(); diff --git a/src/ninja_test.cc b/src/ninja_test.cc old mode 100644 new mode 100755 index 6720dec..7616c85 --- a/src/ninja_test.cc +++ b/src/ninja_test.cc @@ -12,151 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. 
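// Simplified sketch of the extended rspfile inlining in
// EvaluateCommandWithRspfile: besides the MSVC-style "@file" syntax, commands
// that reference the response file as "--option-file=file" or "-f file" now
// also get its content substituted. InlineRspfile is a hypothetical
// stand-alone helper, not the patched function itself.
#include <string>

static std::string InlineRspfile(std::string command, const std::string& rspfile,
                                 const std::string& content) {
  size_t index = command.find(rspfile);
  if (index == 0 || index == std::string::npos)
    return command;
  if (command[index - 1] == '@')
    command.replace(index - 1, rspfile.size() + 1, content);
  else if (index >= 3 && command.compare(index - 3, 3, "-f ") == 0)
    command.replace(index - 3, rspfile.size() + 3, content);
  else if (index >= 14 && command.compare(index - 14, 14, "--option-file=") == 0)
    command.replace(index - 14, rspfile.size() + 14, content);
  return command;
}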
-#include -#include -#include - -#ifdef _WIN32 -#include "getopt.h" -#elif defined(_AIX) -#include "getopt.h" -#include -#else -#include -#endif - -#include "test.h" -#include "line_printer.h" - -using namespace std; - -struct RegisteredTest { - testing::Test* (*factory)(); - const char *name; - bool should_run; -}; -// This can't be a vector because tests call RegisterTest from static -// initializers and the order static initializers run it isn't specified. So -// the vector constructor isn't guaranteed to run before all of the -// RegisterTest() calls. -static RegisteredTest tests[10000]; -testing::Test* g_current_test; -static int ntests; -static LinePrinter printer; - -void RegisterTest(testing::Test* (*factory)(), const char* name) { - tests[ntests].factory = factory; - tests[ntests++].name = name; -} - -namespace { -string StringPrintf(const char* format, ...) { - const int N = 1024; - char buf[N]; - - va_list ap; - va_start(ap, format); - vsnprintf(buf, N, format, ap); - va_end(ap); - - return buf; -} - -void Usage() { - fprintf(stderr, -"usage: ninja_tests [options]\n" -"\n" -"options:\n" -" --gtest_filter=POSITIVE_PATTERN[-NEGATIVE_PATTERN]\n" -" Run tests whose names match the positive but not the negative pattern.\n" -" '*' matches any substring. (gtest's ':', '?' are not implemented).\n"); -} - -bool PatternMatchesString(const char* pattern, const char* str) { - switch (*pattern) { - case '\0': - case '-': return *str == '\0'; - case '*': return (*str != '\0' && PatternMatchesString(pattern, str + 1)) || - PatternMatchesString(pattern + 1, str); - default: return *pattern == *str && - PatternMatchesString(pattern + 1, str + 1); - } -} - -bool TestMatchesFilter(const char* test, const char* filter) { - // Split --gtest_filter at '-' into positive and negative filters. - const char* const dash = strchr(filter, '-'); - const char* pos = dash == filter ? "*" : filter; //Treat '-test1' as '*-test1' - const char* neg = dash ? dash + 1 : ""; - return PatternMatchesString(pos, test) && !PatternMatchesString(neg, test); -} - -bool ReadFlags(int* argc, char*** argv, const char** test_filter) { - enum { OPT_GTEST_FILTER = 1 }; - const option kLongOptions[] = { - { "gtest_filter", required_argument, NULL, OPT_GTEST_FILTER }, - { NULL, 0, NULL, 0 } - }; - - int opt; - while ((opt = getopt_long(*argc, *argv, "h", kLongOptions, NULL)) != -1) { - switch (opt) { - case OPT_GTEST_FILTER: - if (strchr(optarg, '?') == NULL && strchr(optarg, ':') == NULL) { - *test_filter = optarg; - break; - } // else fall through. 
- default: - Usage(); - return false; - } - } - *argv += optind; - *argc -= optind; - return true; -} - -} // namespace - -bool testing::Test::Check(bool condition, const char* file, int line, - const char* error) { - if (!condition) { - printer.PrintOnNewLine( - StringPrintf("*** Failure in %s:%d\n%s\n", file, line, error)); - failed_ = true; - } - return condition; -} +#include int main(int argc, char **argv) { - int tests_started = 0; - - const char* test_filter = "*"; - if (!ReadFlags(&argc, &argv, &test_filter)) - return 1; - - int nactivetests = 0; - for (int i = 0; i < ntests; i++) - if ((tests[i].should_run = TestMatchesFilter(tests[i].name, test_filter))) - ++nactivetests; - - bool passed = true; - for (int i = 0; i < ntests; i++) { - if (!tests[i].should_run) continue; - - ++tests_started; - testing::Test* test = tests[i].factory(); - printer.Print( - StringPrintf("[%d/%d] %s", tests_started, nactivetests, tests[i].name), - LinePrinter::ELIDE); - test->SetUp(); - test->Run(); - test->TearDown(); - if (test->Failed()) - passed = false; - delete test; - } - - printer.PrintOnNewLine(passed ? "passed\n" : "failed\n"); - return passed ? EXIT_SUCCESS : EXIT_FAILURE; + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); } diff --git a/src/parser.cc b/src/parser.cc old mode 100644 new mode 100755 index 756922d..139a347 --- a/src/parser.cc +++ b/src/parser.cc @@ -20,7 +20,10 @@ using namespace std; bool Parser::Load(const string& filename, string* err, Lexer* parent) { - METRIC_RECORD(".ninja parse"); + // If |parent| is not NULL, metrics collection has been started by a parent + // Parser::Load() in our call stack. Do not start a new one here to avoid + // over-counting parsing times. + METRIC_RECORD_IF(".ninja parse", parent == NULL); string contents; string read_err; if (file_reader_->ReadFile(filename, &contents, &read_err) != @@ -31,13 +34,6 @@ bool Parser::Load(const string& filename, string* err, Lexer* parent) { return false; } - // The lexer needs a nul byte at the end of its input, to know when it's done. - // It takes a StringPiece, and StringPiece's string constructor uses - // string::data(). data()'s return value isn't guaranteed to be - // null-terminated (although in practice - libc++, libstdc++, msvc's stl -- - // it is, and C++11 demands that too), so add an explicit nul byte. 
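// Simplified sketch of what METRIC_RECORD_IF buys Parser::Load(): nested
// loads (include/subninja) pass a non-NULL parent lexer, so only the
// outermost call opens a ".ninja parse" scope and the parse time is not
// counted twice. The types below are stand-ins for ninja's Metric and
// ScopedMetric.
struct Metric { const char* name; int count; };

struct ScopedMetric {
  explicit ScopedMetric(Metric* m) : metric_(m) {}    // would start a timer
  ~ScopedMetric() { if (metric_) ++metric_->count; }  // would add elapsed time
  Metric* metric_;
};

static Metric g_parse_metric = { ".ninja parse", 0 };

void LoadManifest(bool has_parent) {
  // Only the top-level load records the metric; nested loads pass NULL.
  ScopedMetric scoped(has_parent ? nullptr : &g_parse_metric);
  // ... read the file, parse it, recurse into includes with has_parent=true ...
}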
- contents.resize(contents.size() + 1); - return Parse(filename, contents, err); } diff --git a/src/parser.h b/src/parser.h old mode 100644 new mode 100755 diff --git a/src/state.cc b/src/state.cc old mode 100644 new mode 100755 index 556b0d8..01bd8a8 --- a/src/state.cc +++ b/src/state.cc @@ -128,16 +128,19 @@ Node* State::SpellcheckNode(const string& path) { void State::AddIn(Edge* edge, StringPiece path, uint64_t slash_bits) { Node* node = GetNode(path, slash_bits); + node->set_generated_by_dep_loader(false); edge->inputs_.push_back(node); node->AddOutEdge(edge); } bool State::AddOut(Edge* edge, StringPiece path, uint64_t slash_bits) { Node* node = GetNode(path, slash_bits); - if (node->in_edge()) + if (node->in_edge()) { return false; + } edge->outputs_.push_back(node); node->set_in_edge(edge); + node->set_generated_by_dep_loader(false); return true; } @@ -145,6 +148,7 @@ void State::AddValidation(Edge* edge, StringPiece path, uint64_t slash_bits) { Node* node = GetNode(path, slash_bits); edge->validations_.push_back(node); node->AddValidationOutEdge(edge); + node->set_generated_by_dep_loader(false); } bool State::AddDefault(StringPiece path, string* err) { diff --git a/src/state.h b/src/state.h old mode 100644 new mode 100755 index 878ac6d..886b78f --- a/src/state.h +++ b/src/state.h @@ -105,6 +105,9 @@ struct State { Node* LookupNode(StringPiece path) const; Node* SpellcheckNode(const std::string& path); + /// Add input / output / validation nodes to a given edge. This also + /// ensures that the generated_by_dep_loader() flag for all these nodes + /// is set to false, to indicate that they come from the input manifest. void AddIn(Edge* edge, StringPiece path, uint64_t slash_bits); bool AddOut(Edge* edge, StringPiece path, uint64_t slash_bits); void AddValidation(Edge* edge, StringPiece path, uint64_t slash_bits); diff --git a/src/state_test.cc b/src/state_test.cc old mode 100644 new mode 100755 diff --git a/src/status.cc b/src/status.cc old mode 100644 new mode 100755 index 88b7781..06f3c20 --- a/src/status.cc +++ b/src/status.cc @@ -14,6 +14,15 @@ #include "status.h" +#ifdef _WIN32 +#include "win32port.h" +#else +#ifndef __STDC_FORMAT_MACROS +#define __STDC_FORMAT_MACROS +#endif +#include +#endif + #include #include @@ -27,11 +36,9 @@ using namespace std; StatusPrinter::StatusPrinter(const BuildConfig& config) - : config_(config), - started_edges_(0), finished_edges_(0), total_edges_(0), running_edges_(0), - time_millis_(0), progress_status_format_(NULL), + : config_(config), started_edges_(0), finished_edges_(0), total_edges_(0), + running_edges_(0), progress_status_format_(NULL), current_rate_(config.parallelism) { - // Don't do anything fancy in verbose mode. if (config_.verbosity != BuildConfig::NORMAL) printer_.set_smart_terminal(false); @@ -41,8 +48,32 @@ StatusPrinter::StatusPrinter(const BuildConfig& config) progress_status_format_ = "[%f/%t] "; } -void StatusPrinter::PlanHasTotalEdges(int total) { - total_edges_ = total; +void StatusPrinter::EdgeAddedToPlan(const Edge* edge) { + ++total_edges_; + + // Do we know how long did this edge take last time? 
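// Sketch of the flag convention the state.cc hunks establish (types here are
// illustrative): every Node starts out flagged as "generated by a dep
// loader", since deps and build logs may be loaded before the manifest, and
// the flag is cleared as soon as the parsed manifest references the node as
// an input, output, or validation. Per the new Node comment, a missing node
// that only ever came from a depfile, dyndep file, or the deps log should not
// abort the build the way a missing manifest-declared source would.
struct NodeLike {
  bool generated_by_dep_loader_ = true;  // default matches the new Node field
  void set_generated_by_dep_loader(bool v) { generated_by_dep_loader_ = v; }
};

// Called from the AddIn/AddOut/AddValidation equivalents while parsing.
static void MarkManifestDeclared(NodeLike* node) {
  node->set_generated_by_dep_loader(false);
}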
+ if (edge->prev_elapsed_time_millis != -1) { + ++eta_predictable_edges_total_; + ++eta_predictable_edges_remaining_; + eta_predictable_cpu_time_total_millis_ += edge->prev_elapsed_time_millis; + eta_predictable_cpu_time_remaining_millis_ += + edge->prev_elapsed_time_millis; + } else + ++eta_unpredictable_edges_remaining_; +} + +void StatusPrinter::EdgeRemovedFromPlan(const Edge* edge) { + --total_edges_; + + // Do we know how long did this edge take last time? + if (edge->prev_elapsed_time_millis != -1) { + --eta_predictable_edges_total_; + --eta_predictable_edges_remaining_; + eta_predictable_cpu_time_total_millis_ -= edge->prev_elapsed_time_millis; + eta_predictable_cpu_time_remaining_millis_ -= + edge->prev_elapsed_time_millis; + } else + --eta_unpredictable_edges_remaining_; } void StatusPrinter::BuildEdgeStarted(const Edge* edge, @@ -58,11 +89,102 @@ void StatusPrinter::BuildEdgeStarted(const Edge* edge, printer_.SetConsoleLocked(true); } -void StatusPrinter::BuildEdgeFinished(Edge* edge, int64_t end_time_millis, - bool success, const string& output) { +void StatusPrinter::RecalculateProgressPrediction() { + time_predicted_percentage_ = 0.0; + + // Sometimes, the previous and actual times may be wildly different. + // For example, the previous build may have been fully recovered from ccache, + // so it was blazing fast, while the new build no longer gets hits from ccache + // for whatever reason, so it actually compiles code, which takes much longer. + // We should detect such cases, and avoid using "wrong" previous times. + + // Note that we will only use the previous times if there are edges with + // previous time knowledge remaining. + bool use_previous_times = eta_predictable_edges_remaining_ && + eta_predictable_cpu_time_remaining_millis_; + + // Iff we have sufficient statistical information for the current run, + // that is, if we have took at least 15 sec AND finished at least 5% of edges, + // we can check whether our performance so far matches the previous one. + if (use_previous_times && total_edges_ && finished_edges_ && + (time_millis_ >= 15 * 1e3) && + (((double)finished_edges_ / total_edges_) >= 0.05)) { + // Over the edges we've just run, how long did they take on average? + double actual_average_cpu_time_millis = + (double)cpu_time_millis_ / finished_edges_; + // What is the previous average, for the edges with such knowledge? + double previous_average_cpu_time_millis = + (double)eta_predictable_cpu_time_total_millis_ / + eta_predictable_edges_total_; + + double ratio = std::max(previous_average_cpu_time_millis, + actual_average_cpu_time_millis) / + std::min(previous_average_cpu_time_millis, + actual_average_cpu_time_millis); + + // Let's say that the average times should differ by less than 10x + use_previous_times = ratio < 10; + } + + int edges_with_known_runtime = finished_edges_; + if (use_previous_times) + edges_with_known_runtime += eta_predictable_edges_remaining_; + if (edges_with_known_runtime == 0) + return; + + int edges_with_unknown_runtime = use_previous_times + ? eta_unpredictable_edges_remaining_ + : (total_edges_ - finished_edges_); + + // Given the time elapsed on the edges we've just run, + // and the runtime of the edges for which we know previous runtime, + // what's the edge's average runtime? 
+ int64_t edges_known_runtime_total_millis = cpu_time_millis_; + if (use_previous_times) + edges_known_runtime_total_millis += + eta_predictable_cpu_time_remaining_millis_; + + double average_cpu_time_millis = + (double)edges_known_runtime_total_millis / edges_with_known_runtime; + + // For the edges for which we do not have the previous runtime, + // let's assume that their average runtime is the same as for the other edges, + // and we therefore can predict their remaining runtime. + double unpredictable_cpu_time_remaining_millis = + average_cpu_time_millis * edges_with_unknown_runtime; + + // And therefore we can predict the remaining and total runtimes. + double total_cpu_time_remaining_millis = + unpredictable_cpu_time_remaining_millis; + if (use_previous_times) + total_cpu_time_remaining_millis += + eta_predictable_cpu_time_remaining_millis_; + double total_cpu_time_millis = + cpu_time_millis_ + total_cpu_time_remaining_millis; + if (total_cpu_time_millis == 0.0) + return; + + // After that we can tell how much work we've completed, in time units. + time_predicted_percentage_ = cpu_time_millis_ / total_cpu_time_millis; +} + +void StatusPrinter::BuildEdgeFinished(Edge* edge, int64_t start_time_millis, + int64_t end_time_millis, bool success, + const string& output) { time_millis_ = end_time_millis; ++finished_edges_; + int64_t elapsed = end_time_millis - start_time_millis; + cpu_time_millis_ += elapsed; + + // Do we know how long did this edge take last time? + if (edge->prev_elapsed_time_millis != -1) { + --eta_predictable_edges_remaining_; + eta_predictable_cpu_time_remaining_millis_ -= + edge->prev_elapsed_time_millis; + } else + --eta_unpredictable_edges_remaining_; + if (edge->use_console()) printer_.SetConsoleLocked(false); @@ -201,16 +323,78 @@ string StatusPrinter::FormatProgressStatus(const char* progress_status_format, out += buf; break; - // Percentage + // Percentage of edges completed case 'p': { - int percent = (100 * finished_edges_) / total_edges_; + int percent = 0; + if (finished_edges_ != 0 && total_edges_ != 0) + percent = (100 * finished_edges_) / total_edges_; snprintf(buf, sizeof(buf), "%3i%%", percent); out += buf; break; } - case 'e': { - snprintf(buf, sizeof(buf), "%.3f", time_millis_ / 1e3); +#define FORMAT_TIME_HMMSS(t) \ + "%" PRId64 ":%02" PRId64 ":%02" PRId64 "", (t) / 3600, ((t) % 3600) / 60, \ + (t) % 60 +#define FORMAT_TIME_MMSS(t) "%02" PRId64 ":%02" PRId64 "", (t) / 60, (t) % 60 + + // Wall time + case 'e': // elapsed, seconds + case 'w': // elapsed, human-readable + case 'E': // ETA, seconds + case 'W': // ETA, human-readable + { + double elapsed_sec = time_millis_ / 1e3; + double eta_sec = -1; // To be printed as "?". + if (time_predicted_percentage_ != 0.0) { + // So, we know that we've spent time_millis_ wall clock, + // and that is time_predicted_percentage_ percent. + // How much time will we need to complete 100%? + double total_wall_time = time_millis_ / time_predicted_percentage_; + // Naturally, that gives us the time remaining. 
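// Worked example of the ETA formula behind the new %E/%W placeholders: if
// time_predicted_percentage_ says p of the predicted total has elapsed after
// t milliseconds of wall clock, the predicted total is t / p and the
// remainder is t / p - t. Standalone arithmetic sketch, numbers made up.
#include <cstdio>

int main() {
  double time_millis = 90000.0;        // 90 s of wall clock elapsed so far
  double predicted_percentage = 0.30;  // 30% of the predicted work is done
  double total_wall_time = time_millis / predicted_percentage;  // 300000 ms
  double eta_sec = (total_wall_time - time_millis) / 1e3;       // 210 s left
  std::printf("ETA: %.0f s\n", eta_sec);
  return 0;
}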
+ eta_sec = (total_wall_time - time_millis_) / 1e3; + } + + const bool print_with_hours = + elapsed_sec >= 60 * 60 || eta_sec >= 60 * 60; + + double sec = -1; + switch (*s) { + case 'e': // elapsed, seconds + case 'w': // elapsed, human-readable + sec = elapsed_sec; + break; + case 'E': // ETA, seconds + case 'W': // ETA, human-readable + sec = eta_sec; + break; + } + + if (sec < 0) + snprintf(buf, sizeof(buf), "?"); + else { + switch (*s) { + case 'e': // elapsed, seconds + case 'E': // ETA, seconds + snprintf(buf, sizeof(buf), "%.3f", sec); + break; + case 'w': // elapsed, human-readable + case 'W': // ETA, human-readable + if (print_with_hours) + snprintf(buf, sizeof(buf), FORMAT_TIME_HMMSS((int64_t)sec)); + else + snprintf(buf, sizeof(buf), FORMAT_TIME_MMSS((int64_t)sec)); + break; + } + } + out += buf; + break; + } + + // Percentage of time spent out of the predicted time total + case 'P': { + snprintf(buf, sizeof(buf), "%3i%%", + (int)(100. * time_predicted_percentage_)); out += buf; break; } @@ -232,6 +416,8 @@ void StatusPrinter::PrintStatus(const Edge* edge, int64_t time_millis) { || config_.verbosity == BuildConfig::NO_STATUS_UPDATE) return; + RecalculateProgressPrediction(); + bool force_full_command = config_.verbosity == BuildConfig::VERBOSE; string to_print = edge->GetBinding("description"); diff --git a/src/status.h b/src/status.h old mode 100644 new mode 100755 index e211ba3..a1a8fdd --- a/src/status.h +++ b/src/status.h @@ -24,10 +24,13 @@ /// Abstract interface to object that tracks the status of a build: /// completion fraction, printing updates. struct Status { - virtual void PlanHasTotalEdges(int total) = 0; - virtual void BuildEdgeStarted(const Edge* edge, int64_t start_time_millis) = 0; - virtual void BuildEdgeFinished(Edge* edge, int64_t end_time_millis, - bool success, const std::string& output) = 0; + virtual void EdgeAddedToPlan(const Edge* edge) = 0; + virtual void EdgeRemovedFromPlan(const Edge* edge) = 0; + virtual void BuildEdgeStarted(const Edge* edge, + int64_t start_time_millis) = 0; + virtual void BuildEdgeFinished(Edge* edge, int64_t start_time_millis, + int64_t end_time_millis, bool success, + const std::string& output) = 0; virtual void BuildLoadDyndeps() = 0; virtual void BuildStarted() = 0; virtual void BuildFinished() = 0; @@ -43,10 +46,15 @@ struct Status { /// human-readable strings to stdout struct StatusPrinter : Status { explicit StatusPrinter(const BuildConfig& config); - virtual void PlanHasTotalEdges(int total); + + /// Callbacks for the Plan to notify us about adding/removing Edge's. + virtual void EdgeAddedToPlan(const Edge* edge); + virtual void EdgeRemovedFromPlan(const Edge* edge); + virtual void BuildEdgeStarted(const Edge* edge, int64_t start_time_millis); - virtual void BuildEdgeFinished(Edge* edge, int64_t end_time_millis, - bool success, const std::string& output); + virtual void BuildEdgeFinished(Edge* edge, int64_t start_time_millis, + int64_t end_time_millis, bool success, + const std::string& output); virtual void BuildLoadDyndeps(); virtual void BuildStarted(); virtual void BuildFinished(); @@ -71,7 +79,30 @@ struct StatusPrinter : Status { const BuildConfig& config_; int started_edges_, finished_edges_, total_edges_, running_edges_; - int64_t time_millis_; + + /// How much wall clock elapsed so far? + int64_t time_millis_ = 0; + + /// How much cpu clock elapsed so far? + int64_t cpu_time_millis_ = 0; + + /// What percentage of predicted total time have elapsed already? 
+ double time_predicted_percentage_ = 0.0; + + /// Out of all the edges, for how many do we know previous time? + int eta_predictable_edges_total_ = 0; + /// And how much time did they all take? + int64_t eta_predictable_cpu_time_total_millis_ = 0; + + /// Out of all the non-finished edges, for how many do we know previous time? + int eta_predictable_edges_remaining_ = 0; + /// And how much time will they all take? + int64_t eta_predictable_cpu_time_remaining_millis_ = 0; + + /// For how many edges we don't know the previous run time? + int eta_unpredictable_edges_remaining_ = 0; + + void RecalculateProgressPrediction(); /// Prints progress output. LinePrinter printer_; @@ -92,14 +123,14 @@ struct StatusPrinter : Status { double rate() { return rate_; } - void UpdateRate(int update_hint, int64_t time_millis_) { + void UpdateRate(int update_hint, int64_t time_millis) { if (update_hint == last_update_) return; last_update_ = update_hint; if (times_.size() == N) times_.pop(); - times_.push(time_millis_); + times_.push(time_millis); if (times_.back() != times_.front()) rate_ = times_.size() / ((times_.back() - times_.front()) / 1e3); } diff --git a/src/status_test.cc b/src/status_test.cc old mode 100644 new mode 100755 index 6e42490..411d3ed --- a/src/status_test.cc +++ b/src/status_test.cc @@ -22,8 +22,9 @@ TEST(StatusTest, StatusFormatElapsed) { status.BuildStarted(); // Before any task is done, the elapsed time must be zero. - EXPECT_EQ("[%/e0.000]", - status.FormatProgressStatus("[%%/e%e]", 0)); + EXPECT_EQ("[%/e0.000]", status.FormatProgressStatus("[%%/e%e]", 0)); + // Before any task is done, the elapsed time must be zero. + EXPECT_EQ("[%/e00:00]", status.FormatProgressStatus("[%%/e%w]", 0)); } TEST(StatusTest, StatusFormatReplacePlaceholder) { diff --git a/src/string_piece.h b/src/string_piece.h old mode 100644 new mode 100755 diff --git a/src/string_piece_util.cc b/src/string_piece_util.cc old mode 100644 new mode 100755 diff --git a/src/string_piece_util.h b/src/string_piece_util.h old mode 100644 new mode 100755 diff --git a/src/string_piece_util_test.cc b/src/string_piece_util_test.cc old mode 100644 new mode 100755 diff --git a/src/subprocess-posix.cc b/src/subprocess-posix.cc old mode 100644 new mode 100755 diff --git a/src/subprocess-win32.cc b/src/subprocess-win32.cc old mode 100644 new mode 100755 diff --git a/src/subprocess.h b/src/subprocess.h old mode 100644 new mode 100755 diff --git a/src/subprocess_test.cc b/src/subprocess_test.cc old mode 100644 new mode 100755 diff --git a/src/test.cc b/src/test.cc old mode 100644 new mode 100755 index 11b1c9e..4d063da --- a/src/test.cc +++ b/src/test.cc @@ -235,3 +235,29 @@ void ScopedTempDir::Cleanup() { temp_dir_name_.clear(); } + +ScopedFilePath::ScopedFilePath(ScopedFilePath&& other) noexcept + : path_(std::move(other.path_)), released_(other.released_) { + other.released_ = true; +} + +/// It would be nice to use '= default' here instead but some old compilers +/// such as GCC from Ubuntu 16.06 will not compile it with "noexcept", so just +/// write it manually. 
+ScopedFilePath& ScopedFilePath::operator=(ScopedFilePath&& other) noexcept { + if (this != &other) { + this->~ScopedFilePath(); + new (this) ScopedFilePath(std::move(other)); + } + return *this; +} + +ScopedFilePath::~ScopedFilePath() { + if (!released_) { + unlink(path_.c_str()); + } +} + +void ScopedFilePath::Release() { + released_ = true; +} diff --git a/src/test.h b/src/test.h old mode 100644 new mode 100755 index 4552c34..a4b9e19 --- a/src/test.h +++ b/src/test.h @@ -15,94 +15,11 @@ #ifndef NINJA_TEST_H_ #define NINJA_TEST_H_ +#include + #include "disk_interface.h" #include "manifest_parser.h" #include "state.h" -#include "util.h" - -// A tiny testing framework inspired by googletest, but much simpler and -// faster to compile. It supports most things commonly used from googltest. The -// most noticeable things missing: EXPECT_* and ASSERT_* don't support -// streaming notes to them with operator<<, and for failing tests the lhs and -// rhs are not printed. That's so that this header does not have to include -// sstream, which slows down building ninja_test almost 20%. -namespace testing { -class Test { - bool failed_; - int assertion_failures_; - public: - Test() : failed_(false), assertion_failures_(0) {} - virtual ~Test() {} - virtual void SetUp() {} - virtual void TearDown() {} - virtual void Run() = 0; - - bool Failed() const { return failed_; } - int AssertionFailures() const { return assertion_failures_; } - void AddAssertionFailure() { assertion_failures_++; } - bool Check(bool condition, const char* file, int line, const char* error); -}; -} - -void RegisterTest(testing::Test* (*)(), const char*); - -extern testing::Test* g_current_test; -#define TEST_F_(x, y, name) \ - struct y : public x { \ - static testing::Test* Create() { return g_current_test = new y; } \ - virtual void Run(); \ - }; \ - struct Register##y { \ - Register##y() { RegisterTest(y::Create, name); } \ - }; \ - Register##y g_register_##y; \ - void y::Run() - -#define TEST_F(x, y) TEST_F_(x, x##y, #x "." #y) -#define TEST(x, y) TEST_F_(testing::Test, x##y, #x "." 
#y)
-
-#define EXPECT_EQ(a, b) \
-  g_current_test->Check(a == b, __FILE__, __LINE__, #a " == " #b)
-#define EXPECT_NE(a, b) \
-  g_current_test->Check(a != b, __FILE__, __LINE__, #a " != " #b)
-#define EXPECT_GT(a, b) \
-  g_current_test->Check(a > b, __FILE__, __LINE__, #a " > " #b)
-#define EXPECT_LT(a, b) \
-  g_current_test->Check(a < b, __FILE__, __LINE__, #a " < " #b)
-#define EXPECT_GE(a, b) \
-  g_current_test->Check(a >= b, __FILE__, __LINE__, #a " >= " #b)
-#define EXPECT_LE(a, b) \
-  g_current_test->Check(a <= b, __FILE__, __LINE__, #a " <= " #b)
-#define EXPECT_TRUE(a) \
-  g_current_test->Check(static_cast<bool>(a), __FILE__, __LINE__, #a)
-#define EXPECT_FALSE(a) \
-  g_current_test->Check(!static_cast<bool>(a), __FILE__, __LINE__, #a)
-
-#define ASSERT_EQ(a, b) \
-  if (!EXPECT_EQ(a, b)) { g_current_test->AddAssertionFailure(); return; }
-#define ASSERT_NE(a, b) \
-  if (!EXPECT_NE(a, b)) { g_current_test->AddAssertionFailure(); return; }
-#define ASSERT_GT(a, b) \
-  if (!EXPECT_GT(a, b)) { g_current_test->AddAssertionFailure(); return; }
-#define ASSERT_LT(a, b) \
-  if (!EXPECT_LT(a, b)) { g_current_test->AddAssertionFailure(); return; }
-#define ASSERT_GE(a, b) \
-  if (!EXPECT_GE(a, b)) { g_current_test->AddAssertionFailure(); return; }
-#define ASSERT_LE(a, b) \
-  if (!EXPECT_LE(a, b)) { g_current_test->AddAssertionFailure(); return; }
-#define ASSERT_TRUE(a) \
-  if (!EXPECT_TRUE(a)) { g_current_test->AddAssertionFailure(); return; }
-#define ASSERT_FALSE(a) \
-  if (!EXPECT_FALSE(a)) { g_current_test->AddAssertionFailure(); return; }
-#define ASSERT_NO_FATAL_FAILURE(a) \
-  { \
-    int fail_count = g_current_test->AssertionFailures(); \
-    a; \
-    if (fail_count != g_current_test->AssertionFailures()) { \
-      g_current_test->AddAssertionFailure(); \
-      return; \
-    } \
-  }
 
 // Support utilities for tests.
 
@@ -182,4 +99,31 @@ struct ScopedTempDir {
   std::string temp_dir_name_;
 };
 
+/// A class that records a file path and removes that file on destruction,
+/// so that tests do not leave stale files in the directory where they run,
+/// even when an assertion fails.
+struct ScopedFilePath {
+  /// Constructors just record the file path.
+  ScopedFilePath(const std::string& path) : path_(path) {}
+  ScopedFilePath(const char* path) : path_(path) {}
+
+  /// Allow move operations.
+  ScopedFilePath(ScopedFilePath&&) noexcept;
+  ScopedFilePath& operator=(ScopedFilePath&&) noexcept;
+
+  /// The destructor removes the file, unless Release() was called.
+  ~ScopedFilePath();
+
+  /// Release the file: the destructor will no longer remove it.
+  void Release();
+
+  const char* c_str() const { return path_.c_str(); }
+  const std::string& path() const { return path_; }
+  bool released() const { return released_; }
+
+ private:
+  std::string path_;
+  bool released_ = false;
+};
+
 #endif  // NINJA_TEST_H_
diff --git a/src/timestamp.h b/src/timestamp.h
old mode 100644
new mode 100755
diff --git a/src/util.cc b/src/util.cc
old mode 100644
new mode 100755
index 483f4a6..dfdf6c8
--- a/src/util.cc
+++ b/src/util.cc
@@ -143,20 +143,19 @@ void CanonicalizePath(char* path, size_t* len, uint64_t* slash_bits) {
     return;
   }
 
-  const int kMaxPathComponents = 60;
-  char* components[kMaxPathComponents];
-  int component_count = 0;
-
   char* start = path;
   char* dst = start;
+  char* dst_start = dst;
   const char* src = start;
   const char* end = start + *len;
+  const char* src_next;
+
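The rewritten loop that follows walks the path one component at a time, and the old kMaxPathComponents limit removed above is gone, so paths with more than 60 components are now accepted. A small illustration of the resulting behaviour, using the char*/length/slash_bits overload named in the hunk header; the Canon() wrapper exists only for this example, and the expected results mirror the new cases added to util_test.cc later in this diff.

// Example wrapper around CanonicalizePath(char* path, size_t* len,
// uint64_t* slash_bits); the Canon() helper is illustrative, not part of
// the change.
#include <cstddef>
#include <cstdint>
#include <string>
#include "util.h"

static std::string Canon(std::string path) {
  uint64_t slash_bits = 0;
  size_t len = path.size();
  CanonicalizePath(&path[0], &len, &slash_bits);
  path.resize(len);  // the function reports the new length through *len
  return path;
}

// Canon("foo/./bar//baz/../x") == "foo/bar/x"
// Canon("../foo/")             == "../foo"   (a leading "../" is preserved)
// Canon("/foo/..")             == "/"        (used to collapse to "")
// Canon("foo/..")              == "."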
+  // For absolute paths, skip the leading directory separator
+  // as this one should never be removed from the result.
   if (IsPathSeparator(*src)) {
 #ifdef _WIN32
-
-    // network path starts with //
-    if (*len > 1 && IsPathSeparator(*(src + 1))) {
+    // Windows network path starts with //
+    if (src + 2 <= end && IsPathSeparator(src[1])) {
       src += 2;
       dst += 2;
     } else {
@@ -167,50 +166,126 @@ void CanonicalizePath(char* path, size_t* len, uint64_t* slash_bits) {
     ++src;
     ++dst;
 #endif
+    dst_start = dst;
+  } else {
+    // For relative paths, skip any leading ../ as these are quite common
+    // to reference source files in build plans, and doing this here makes
+    // the loop below work faster in general.
+    while (src + 3 <= end && src[0] == '.' && src[1] == '.' &&
+           IsPathSeparator(src[2])) {
+      src += 3;
+      dst += 3;
+    }
   }
 
-  while (src < end) {
-    if (*src == '.') {
-      if (src + 1 == end || IsPathSeparator(src[1])) {
-        // '.' component; eliminate.
-        src += 2;
-        continue;
-      } else if (src[1] == '.' && (src + 2 == end || IsPathSeparator(src[2]))) {
-        // '..' component.  Back up if possible.
-        if (component_count > 0) {
-          dst = components[component_count - 1];
-          src += 3;
-          --component_count;
-        } else {
-          *dst++ = *src++;
-          *dst++ = *src++;
-          *dst++ = *src++;
-        }
-        continue;
-      }
-    }
-
-    if (IsPathSeparator(*src)) {
-      src++;
-      continue;
-    }
-
-    if (component_count == kMaxPathComponents)
-      Fatal("path has too many components : %s", path);
-    components[component_count] = dst;
-    ++component_count;
-
-    while (src != end && !IsPathSeparator(*src))
-      *dst++ = *src++;
-    *dst++ = *src++;  // Copy '/' or final \0 character as well.
+  // Loop over all components of the path _except_ the last one, in
+  // order to simplify the loop's code and make it faster.
+  int component_count = 0;
+  char* dst0 = dst;
+  for (; src < end; src = src_next) {
+#ifndef _WIN32
+    // Use memchr() for faster lookups thanks to the optimized C library
+    // implementation. `hyperfine canon_perftest` shows a significant
+    // difference (e.g. 484ms vs 437ms).
+    const char* next_sep =
+        static_cast<const char*>(::memchr(src, '/', end - src));
+    if (!next_sep) {
+      // This is the last component; it is handled after the loop.
+      break;
+    }
+#else
+    // Need to check for both '/' and '\\' so do not use memchr().
+    // Cannot use strpbrk() because end[0] can be \0 or something else!
+    const char* next_sep = src;
+    while (next_sep != end && !IsPathSeparator(*next_sep))
+      ++next_sep;
+    if (next_sep == end) {
+      // This is the last component; it is handled after the loop.
+      break;
+    }
+#endif
+    // Position for the next loop iteration.
+    src_next = next_sep + 1;
+    // Length of the component, excluding the trailing directory separator.
+    size_t component_len = next_sep - src;
+
+    if (component_len <= 2) {
+      if (component_len == 0) {
+        continue;  // Ignore empty component, e.g. 'foo//bar' -> 'foo/bar'.
+      }
+      if (src[0] == '.') {
+        if (component_len == 1) {
+          continue;  // Ignore '.' component, e.g. './foo' -> 'foo'.
+        } else if (src[1] == '.') {
+          // Process a '..' component: back up if possible.
+          if (component_count > 0) {
+            // Move back to the start of the previous component.
+            --component_count;
+            while (--dst > dst0 && !IsPathSeparator(dst[-1])) {
+              // Nothing to do here; the decrement happens in the condition.
+            }
+          } else {
+            dst[0] = '.';
+            dst[1] = '.';
+            dst[2] = src[2];
+            dst += 3;
+          }
+          continue;
+        }
+      }
+    }
+    ++component_count;
+
+    // Copy or skip the component, including its trailing directory separator.
+    if (dst != src) {
+      ::memmove(dst, src, src_next - src);
+    }
+    dst += src_next - src;
   }
 
+  // Handling the last component that does not have a trailing separator.
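Both the '..' case above and the last-component handling described by the comment continued just below back the output pointer up over the most recently copied component. A standalone sketch of that back-up step, shown POSIX-only for brevity (using '/' instead of IsPathSeparator()); the helper name is illustrative:

// Sketch of the back-up step: dst points one past the previous component's
// trailing '/', and dst0 marks the prefix that must never be removed.
static inline char* BackUpOneComponent(char* dst, char* dst0) {
  while (--dst > dst0 && dst[-1] != '/') {
    // The decrement happens in the loop condition; nothing to do here.
  }
  return dst;
}
// With the buffer holding "foo/bar/" and dst just past it, the call returns
// a pointer to where "bar" begins, so the next component overwrites "bar/".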
+ // The logic here is _slightly_ different since there is no trailing + // directory separator. + size_t component_len = end - src; + do { + if (component_len == 0) + break; // Ignore empty component (e.g. 'foo//' -> 'foo/') + if (src[0] == '.') { + if (component_len == 1) + break; // Ignore trailing '.' (e.g. 'foo/.' -> 'foo/') + if (component_len == 2 && src[1] == '.') { + // Handle '..'. Back up if possible. + if (component_count > 0) { + while (--dst > dst0 && !IsPathSeparator(dst[-1])) { + // nothing to do here, decrement happens before condition check. + } + } else { + dst[0] = '.'; + dst[1] = '.'; + dst += 2; + // No separator to add here. + } + break; + } + } + // Skip or copy last component, no trailing separator. + if (dst != src) { + ::memmove(dst, src, component_len); + } + dst += component_len; + } while (0); + + // Remove trailing path separator if any, but keep the initial + // path separator(s) if there was one (or two on Windows). + if (dst > dst_start && IsPathSeparator(dst[-1])) + dst--; + if (dst == start) { + // Handle special cases like "aa/.." -> "." *dst++ = '.'; - *dst++ = '\0'; } - *len = dst - start - 1; + *len = dst - start; // dst points after the trailing char here. #ifdef _WIN32 uint64_t bits = 0; uint64_t bits_mask = 1; @@ -369,8 +444,13 @@ int ReadFile(const string& path, string* contents, string* err) { return -errno; } +#ifdef __USE_LARGEFILE64 + struct stat64 st; + if (fstat64(fileno(f), &st) < 0) { +#else struct stat st; if (fstat(fileno(f), &st) < 0) { +#endif err->assign(strerror(errno)); fclose(f); return -errno; @@ -456,10 +536,17 @@ string GetLastErrorString() { FORMAT_MESSAGE_IGNORE_INSERTS, NULL, err, - MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + MAKELANGID(LANG_ENGLISH, SUBLANG_DEFAULT), (char*)&msg_buf, 0, NULL); + + if (msg_buf == nullptr) { + char fallback_msg[128] = {0}; + snprintf(fallback_msg, sizeof(fallback_msg), "GetLastError() = %d", err); + return fallback_msg; + } + string msg = msg_buf; LocalFree(msg_buf); return msg; @@ -647,6 +734,8 @@ int ParseCPUFromCGroup() { readCount(cpu->second + "/cpu.cfs_period_us"); if (!period.second) return -1; + if (period.first == 0) + return -1; return quota.first / period.first; } #endif @@ -714,7 +803,7 @@ int GetProcessorCount() { return CPU_COUNT(&mask); } #elif defined(CPU_COUNT) - cpu_set_t set; + cpu_set_t set; if (sched_getaffinity(getpid(), sizeof(set), &set) == 0) { schedCount = CPU_COUNT(&set); } diff --git a/src/util.h b/src/util.h old mode 100644 new mode 100755 diff --git a/src/util_test.cc b/src/util_test.cc old mode 100644 new mode 100755 index d58b170..d76954c --- a/src/util_test.cc +++ b/src/util_test.cc @@ -89,13 +89,57 @@ TEST(CanonicalizePath, PathSamples) { EXPECT_EQ("/foo", path); #endif + path = ".."; + CanonicalizePath(&path); + EXPECT_EQ("..", path); + + path = "../"; + CanonicalizePath(&path); + EXPECT_EQ("..", path); + + path = "../foo"; + CanonicalizePath(&path); + EXPECT_EQ("../foo", path); + + path = "../foo/"; + CanonicalizePath(&path); + EXPECT_EQ("../foo", path); + + path = "../.."; + CanonicalizePath(&path); + EXPECT_EQ("../..", path); + + path = "../../"; + CanonicalizePath(&path); + EXPECT_EQ("../..", path); + + path = "./../"; + CanonicalizePath(&path); + EXPECT_EQ("..", path); + + path = "/.."; + CanonicalizePath(&path); + EXPECT_EQ("/..", path); + + path = "/../"; + CanonicalizePath(&path); + EXPECT_EQ("/..", path); + + path = "/../.."; + CanonicalizePath(&path); + EXPECT_EQ("/../..", path); + + path = "/../../"; + CanonicalizePath(&path); + 
EXPECT_EQ("/../..", path); + path = "/"; CanonicalizePath(&path); - EXPECT_EQ("", path); + EXPECT_EQ("/", path); path = "/foo/.."; CanonicalizePath(&path); - EXPECT_EQ("", path); + EXPECT_EQ("/", path); path = "."; CanonicalizePath(&path); @@ -108,6 +152,10 @@ TEST(CanonicalizePath, PathSamples) { path = "foo/.."; CanonicalizePath(&path); EXPECT_EQ(".", path); + + path = "foo/.._bar"; + CanonicalizePath(&path); + EXPECT_EQ("foo/.._bar", path); } #ifdef _WIN32 @@ -171,7 +219,7 @@ TEST(CanonicalizePath, PathSamplesWindows) { path = "\\"; CanonicalizePath(&path); - EXPECT_EQ("", path); + EXPECT_EQ("/", path); } TEST(CanonicalizePath, SlashTracking) { @@ -321,8 +369,53 @@ TEST(CanonicalizePath, TooManyComponents) { EXPECT_EQ(58, std::count(path.begin(), path.end(), '\\')); CanonicalizePath(&path, &slash_bits); EXPECT_EQ(slash_bits, 0x3ffffffffffffff); + + // More than 60 components is now completely ok too. + path = + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" + "a\\a\\a\\a\\a\\a\\a\\a\\a\\x\\y.h"; + EXPECT_EQ(218, std::count(path.begin(), path.end(), '\\')); + CanonicalizePath(&path, &slash_bits); + EXPECT_EQ(slash_bits, 0xffffffffffffffff); } -#endif +#else // !_WIN32 +TEST(CanonicalizePath, TooManyComponents) { + string path; + uint64_t slash_bits; + + // More than 60 components is now completely ok. + path = + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" + "a/a/a/a/a/a/a/a/a/x/y.h"; + EXPECT_EQ(218, std::count(path.begin(), path.end(), '/')); + CanonicalizePath(&path, &slash_bits); + EXPECT_EQ(slash_bits, 0x0); +} +#endif // !_WIN32 TEST(CanonicalizePath, UpDir) { string path, err; @@ -353,11 +446,13 @@ TEST(CanonicalizePath, NotNullTerminated) { EXPECT_EQ(strlen("foo"), len); EXPECT_EQ("foo/. bar/.", string(path)); + // Verify that foo/..file gets canonicalized to 'file' without + // touching the rest of the string. 
path = "foo/../file bar/."; len = strlen("foo/../file"); CanonicalizePath(&path[0], &len, &unused); EXPECT_EQ(strlen("file"), len); - EXPECT_EQ("file ./file bar/.", string(path)); + EXPECT_EQ("file../file bar/.", string(path)); } TEST(PathEscaping, TortureTest) { diff --git a/src/version.cc b/src/version.cc old mode 100644 new mode 100755 index 34167b6..ca54c37 --- a/src/version.cc +++ b/src/version.cc @@ -20,7 +20,7 @@ using namespace std; -const char* kNinjaVersion = "1.11.0"; +const char* kNinjaVersion = "1.12.0"; void ParseVersion(const string& version, int* major, int* minor) { size_t end = version.find('.'); diff --git a/src/version.h b/src/version.h old mode 100644 new mode 100755 diff --git a/src/win32port.h b/src/win32port.h old mode 100644 new mode 100755 diff --git a/windows/ninja.manifest b/windows/ninja.manifest index dab929e..47949dd 100644 --- a/windows/ninja.manifest +++ b/windows/ninja.manifest @@ -3,6 +3,7 @@ UTF-8 + true