Mirror of https://gitee.com/openharmony/third_party_ninja, synced 2024-11-23 07:20:07 +00:00

Signed-off-by: liangxinyan <liangxinyan2@huawei.com>

commit e9e12353f0 (parent cbcf2bd78e)
7
.github/dependabot.yml
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
version: 2
|
||||
updates:
|
||||
# Maintain dependencies for GitHub Actions
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
177
.github/workflows/linux.yml
vendored
@ -13,43 +13,59 @@ jobs:
|
||||
image: centos:7
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: codespell-project/actions-codespell@master
|
||||
with:
|
||||
ignore_words_list: fo,wee
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
curl -L -O https://github.com/Kitware/CMake/releases/download/v3.16.4/cmake-3.16.4-Linux-x86_64.sh
|
||||
chmod +x cmake-3.16.4-Linux-x86_64.sh
|
||||
./cmake-3.16.4-Linux-x86_64.sh --skip-license --prefix=/usr/local
|
||||
curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-16.02-10.el7.x86_64.rpm
|
||||
curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-plugins-16.02-10.el7.x86_64.rpm
|
||||
rpm -U --quiet p7zip-16.02-10.el7.x86_64.rpm
|
||||
rpm -U --quiet p7zip-plugins-16.02-10.el7.x86_64.rpm
|
||||
yum install -y make gcc-c++
|
||||
curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-16.02-20.el7.x86_64.rpm
|
||||
curl -L -O https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-plugins-16.02-20.el7.x86_64.rpm
|
||||
rpm -U --quiet p7zip-16.02-20.el7.x86_64.rpm
|
||||
rpm -U --quiet p7zip-plugins-16.02-20.el7.x86_64.rpm
|
||||
yum install -y make gcc-c++ libasan clang-analyzer
|
||||
|
||||
- name: Build ninja
|
||||
- name: Build debug ninja
|
||||
shell: bash
|
||||
env:
|
||||
CFLAGS: -fstack-protector-all -fsanitize=address
|
||||
CXXFLAGS: -fstack-protector-all -fsanitize=address
|
||||
run: |
|
||||
scan-build -o scanlogs cmake -DCMAKE_BUILD_TYPE=Debug -B debug-build
|
||||
scan-build -o scanlogs cmake --build debug-build --parallel --config Debug
|
||||
|
||||
- name: Test debug ninja
|
||||
run: ./ninja_test
|
||||
working-directory: debug-build
|
||||
|
||||
- name: Build release ninja
|
||||
shell: bash
|
||||
run: |
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -B build
|
||||
cmake --build build --parallel --config Release
|
||||
strip build/ninja
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -B release-build
|
||||
cmake --build release-build --parallel --config Release
|
||||
strip release-build/ninja
|
||||
|
||||
- name: Test ninja
|
||||
- name: Test release ninja
|
||||
run: ./ninja_test
|
||||
working-directory: build
|
||||
working-directory: release-build
|
||||
|
||||
- name: Create ninja archive
|
||||
run: |
|
||||
mkdir artifact
|
||||
7z a artifact/ninja-linux.zip ./build/ninja
|
||||
7z a artifact/ninja-linux.zip ./release-build/ninja
|
||||
|
||||
# Upload ninja binary archive as an artifact
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v1
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ninja-binary-archives
|
||||
path: artifact
|
||||
|
||||
- name: Upload release asset
|
||||
if: github.event.action == 'published'
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
@ -57,3 +73,136 @@ jobs:
|
||||
asset_path: ./artifact/ninja-linux.zip
|
||||
asset_name: ninja-linux.zip
|
||||
asset_content_type: application/zip
|
||||
|
||||
test:
|
||||
runs-on: [ubuntu-latest]
|
||||
container:
|
||||
image: ubuntu:20.04
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
apt update
|
||||
apt install -y python3-pytest ninja-build clang-tidy python3-pip clang libgtest-dev
|
||||
pip3 install cmake==3.17.*
|
||||
- name: Configure (GCC)
|
||||
run: cmake -Bbuild-gcc -DCMAKE_BUILD_TYPE=Debug -G'Ninja Multi-Config'
|
||||
|
||||
- name: Build (GCC, Debug)
|
||||
run: cmake --build build-gcc --config Debug
|
||||
- name: Unit tests (GCC, Debug)
|
||||
run: ./build-gcc/Debug/ninja_test
|
||||
- name: Python tests (GCC, Debug)
|
||||
run: pytest-3 --color=yes ../..
|
||||
working-directory: build-gcc/Debug
|
||||
|
||||
- name: Build (GCC, Release)
|
||||
run: cmake --build build-gcc --config Release
|
||||
- name: Unit tests (GCC, Release)
|
||||
run: ./build-gcc/Release/ninja_test
|
||||
- name: Python tests (GCC, Release)
|
||||
run: pytest-3 --color=yes ../..
|
||||
working-directory: build-gcc/Release
|
||||
|
||||
- name: Configure (Clang)
|
||||
run: CC=clang CXX=clang++ cmake -Bbuild-clang -DCMAKE_BUILD_TYPE=Debug -G'Ninja Multi-Config' -DCMAKE_EXPORT_COMPILE_COMMANDS=1
|
||||
|
||||
- name: Build (Clang, Debug)
|
||||
run: cmake --build build-clang --config Debug
|
||||
- name: Unit tests (Clang, Debug)
|
||||
run: ./build-clang/Debug/ninja_test
|
||||
- name: Python tests (Clang, Debug)
|
||||
run: pytest-3 --color=yes ../..
|
||||
working-directory: build-clang/Debug
|
||||
|
||||
- name: Build (Clang, Release)
|
||||
run: cmake --build build-clang --config Release
|
||||
- name: Unit tests (Clang, Release)
|
||||
run: ./build-clang/Release/ninja_test
|
||||
- name: Python tests (Clang, Release)
|
||||
run: pytest-3 --color=yes ../..
|
||||
working-directory: build-clang/Release
|
||||
|
||||
- name: clang-tidy
|
||||
run: /usr/lib/llvm-10/share/clang/run-clang-tidy.py -header-filter=src
|
||||
working-directory: build-clang
|
||||
|
||||
build-with-python:
|
||||
runs-on: [ubuntu-latest]
|
||||
container:
|
||||
image: ${{ matrix.image }}
|
||||
strategy:
|
||||
matrix:
|
||||
image: ['ubuntu:14.04', 'ubuntu:16.04', 'ubuntu:18.04']
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
apt update
|
||||
apt install -y g++ python3
|
||||
- name: ${{ matrix.image }}
|
||||
run: |
|
||||
python3 configure.py --bootstrap
|
||||
./ninja all
|
||||
python3 misc/ninja_syntax_test.py
|
||||
./misc/output_test.py
|
||||
|
||||
build-aarch64:
|
||||
name: Build Linux ARM64
|
||||
runs-on: [ubuntu-latest]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build
|
||||
uses: uraimo/run-on-arch-action@v2
|
||||
with:
|
||||
arch: aarch64
|
||||
distro: ubuntu18.04
|
||||
githubToken: ${{ github.token }}
|
||||
dockerRunArgs: |
|
||||
--volume "${PWD}:/ninja"
|
||||
install: |
|
||||
apt-get update -q -y
|
||||
apt-get install -q -y make gcc g++ libasan5 clang-tools curl p7zip-full file
|
||||
run: |
|
||||
set -x
|
||||
cd /ninja
|
||||
|
||||
# INSTALL CMAKE
|
||||
CMAKE_VERSION=3.23.4
|
||||
curl -L -O https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-Linux-aarch64.sh
|
||||
chmod +x cmake-${CMAKE_VERSION}-Linux-aarch64.sh
|
||||
./cmake-${CMAKE_VERSION}-Linux-aarch64.sh --skip-license --prefix=/usr/local
|
||||
|
||||
# BUILD
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -B release-build
|
||||
cmake --build release-build --parallel --config Release
|
||||
strip release-build/ninja
|
||||
file release-build/ninja
|
||||
|
||||
# TEST
|
||||
pushd release-build
|
||||
./ninja_test
|
||||
popd
|
||||
|
||||
# CREATE ARCHIVE
|
||||
mkdir artifact
|
||||
7z a artifact/ninja-linux-aarch64.zip ./release-build/ninja
|
||||
|
||||
# Upload ninja binary archive as an artifact
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ninja-binary-archives
|
||||
path: artifact
|
||||
|
||||
- name: Upload release asset
|
||||
if: github.event.action == 'published'
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ github.event.release.upload_url }}
|
||||
asset_path: ./artifact/ninja-linux-aarch64.zip
|
||||
asset_name: ninja-linux-aarch64.zip
|
||||
asset_content_type: application/zip
|
||||
|
16
.github/workflows/macos.yml
vendored
@ -8,7 +8,7 @@ on:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: macOS-latest
|
||||
runs-on: macos-12
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@ -19,31 +19,31 @@ jobs:
|
||||
- name: Build ninja
|
||||
shell: bash
|
||||
env:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.12
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.15
|
||||
run: |
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -B build
|
||||
cmake --build build --parallel --config Release
|
||||
cmake -Bbuild -GXcode '-DCMAKE_OSX_ARCHITECTURES=arm64;x86_64'
|
||||
cmake --build build --config Release
|
||||
|
||||
- name: Test ninja
|
||||
run: ctest -vv
|
||||
run: ctest -C Release -vv
|
||||
working-directory: build
|
||||
|
||||
- name: Create ninja archive
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir artifact
|
||||
7z a artifact/ninja-mac.zip ./build/ninja
|
||||
7z a artifact/ninja-mac.zip ./build/Release/ninja
|
||||
|
||||
# Upload ninja binary archive as an artifact
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v1
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ninja-binary-archives
|
||||
path: artifact
|
||||
|
||||
- name: Upload release asset
|
||||
if: github.event.action == 'published'
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
|
30
.github/workflows/windows.yml
vendored
@ -10,6 +10,15 @@ jobs:
|
||||
build:
|
||||
runs-on: windows-latest
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arch: 'x64'
|
||||
suffix: ''
|
||||
- arch: 'arm64'
|
||||
suffix: 'arm64'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
@ -19,10 +28,17 @@ jobs:
|
||||
- name: Build ninja
|
||||
shell: bash
|
||||
run: |
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -B build
|
||||
cmake -Bbuild -A ${{ matrix.arch }}
|
||||
cmake --build build --parallel --config Debug
|
||||
cmake --build build --parallel --config Release
|
||||
|
||||
- name: Test ninja
|
||||
- name: Test ninja (Debug)
|
||||
if: matrix.arch != 'arm64'
|
||||
run: .\ninja_test.exe
|
||||
working-directory: build/Debug
|
||||
|
||||
- name: Test ninja (Release)
|
||||
if: matrix.arch != 'arm64'
|
||||
run: .\ninja_test.exe
|
||||
working-directory: build/Release
|
||||
|
||||
@ -30,22 +46,22 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir artifact
|
||||
7z a artifact/ninja-win.zip ./build/Release/ninja.exe
|
||||
7z a artifact/ninja-win${{ matrix.suffix }}.zip ./build/Release/ninja.exe
|
||||
|
||||
# Upload ninja binary archive as an artifact
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v1
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ninja-binary-archives
|
||||
path: artifact
|
||||
|
||||
- name: Upload release asset
|
||||
if: github.event.action == 'published'
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ github.event.release.upload_url }}
|
||||
asset_path: ./artifact/ninja-win.zip
|
||||
asset_name: ninja-win.zip
|
||||
asset_path: ./artifact/ninja-win${{ matrix.suffix }}.zip
|
||||
asset_name: ninja-win${{ matrix.suffix }}.zip
|
||||
asset_content_type: application/zip
|
||||
|
9
.gitignore
vendored
@ -38,3 +38,12 @@
|
||||
|
||||
# Qt Creator project files
|
||||
/CMakeLists.txt.user
|
||||
|
||||
# clangd
|
||||
/.clangd/
|
||||
/compile_commands.json
|
||||
/.cache/
|
||||
|
||||
# Visual Studio files
|
||||
/.vs/
|
||||
/out/
|
||||
|
110
CMakeLists.txt
@ -3,7 +3,10 @@ cmake_minimum_required(VERSION 3.15)
|
||||
include(CheckSymbolExists)
|
||||
include(CheckIPOSupported)
|
||||
|
||||
project(ninja)
|
||||
option(NINJA_BUILD_BINARY "Build ninja binary" ON)
|
||||
option(NINJA_FORCE_PSELECT "Use pselect() even on platforms that provide ppoll()" OFF)
|
||||
|
||||
project(ninja CXX)
|
||||
|
||||
# --- optional link-time optimization
|
||||
check_ipo_supported(RESULT lto_supported OUTPUT error)
|
||||
@ -19,6 +22,8 @@ endif()
|
||||
if(MSVC)
|
||||
set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
|
||||
string(REPLACE "/GR" "" CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
|
||||
# Note that these settings are separately specified in configure.py, and
|
||||
# these lists should be kept in sync.
|
||||
add_compile_options(/W4 /wd4100 /wd4267 /wd4706 /wd4702 /wd4244 /GR- /Zc:__cplusplus)
|
||||
add_compile_definitions(_CRT_SECURE_NO_WARNINGS)
|
||||
else()
|
||||
@ -31,11 +36,34 @@ else()
|
||||
if(flag_color_diag)
|
||||
add_compile_options(-fdiagnostics-color)
|
||||
endif()
|
||||
|
||||
if(NOT NINJA_FORCE_PSELECT)
|
||||
# Check whether ppoll() is usable on the target platform.
|
||||
# Set -DUSE_PPOLL=1 if this is the case.
|
||||
#
|
||||
# NOTE: Use check_cxx_symbol_exists() instead of check_symbol_exists()
|
||||
# because on Linux, <poll.h> only exposes the symbol when _GNU_SOURCE
|
||||
# is defined.
|
||||
#
|
||||
# Both g++ and clang++ define the symbol by default, because the C++
|
||||
# standard library headers require it, but *not* gcc and clang, which
|
||||
# are used by check_symbol_exists().
|
||||
include(CheckSymbolExists)
|
||||
check_symbol_exists(ppoll poll.h HAVE_PPOLL)
|
||||
if(HAVE_PPOLL)
|
||||
add_compile_definitions(USE_PPOLL=1)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# --- optional re2c
|
||||
set(RE2C_MAJOR_VERSION 0)
|
||||
find_program(RE2C re2c)
|
||||
if(RE2C)
|
||||
execute_process(COMMAND "${RE2C}" --vernum OUTPUT_VARIABLE RE2C_RAW_VERSION)
|
||||
math(EXPR RE2C_MAJOR_VERSION "${RE2C_RAW_VERSION} / 10000")
|
||||
endif()
|
||||
if(${RE2C_MAJOR_VERSION} GREATER 1)
|
||||
# the depfile parser and ninja lexers are generated using re2c.
|
||||
function(re2c IN OUT)
|
||||
add_custom_command(DEPENDS ${IN} OUTPUT ${OUT}
|
||||
@ -46,7 +74,7 @@ if(RE2C)
|
||||
re2c(${PROJECT_SOURCE_DIR}/src/lexer.in.cc ${PROJECT_BINARY_DIR}/lexer.cc)
|
||||
add_library(libninja-re2c OBJECT ${PROJECT_BINARY_DIR}/depfile_parser.cc ${PROJECT_BINARY_DIR}/lexer.cc)
|
||||
else()
|
||||
message(WARNING "re2c was not found; changes to src/*.in.cc will not affect your build.")
|
||||
message(WARNING "re2c 2 or later was not found; changes to src/*.in.cc will not affect your build.")
|
||||
add_library(libninja-re2c OBJECT src/depfile_parser.cc src/lexer.cc)
|
||||
endif()
|
||||
target_include_directories(libninja-re2c PRIVATE src)
|
||||
@ -86,6 +114,8 @@ function(check_platform_supports_browse_mode RESULT)
|
||||
|
||||
endfunction()
|
||||
|
||||
set(NINJA_PYTHON "python" CACHE STRING "Python interpreter to use for the browse tool")
|
||||
|
||||
check_platform_supports_browse_mode(platform_supports_ninja_browse)
|
||||
|
||||
# Core source files all build into ninja library.
|
||||
@ -124,10 +154,18 @@ if(WIN32)
|
||||
src/getopt.c
|
||||
src/minidump-win32.cc
|
||||
)
|
||||
# Build getopt.c, which can be compiled as either C or C++, as C++
|
||||
# so that build environments which lack a C compiler, but have a C++
|
||||
# compiler may build ninja.
|
||||
set_source_files_properties(src/getopt.c PROPERTIES LANGUAGE CXX)
|
||||
else()
|
||||
target_sources(libninja PRIVATE src/subprocess-posix.cc)
|
||||
if(CMAKE_SYSTEM_NAME STREQUAL "OS400" OR CMAKE_SYSTEM_NAME STREQUAL "AIX")
|
||||
target_sources(libninja PRIVATE src/getopt.c)
|
||||
# Build getopt.c, which can be compiled as either C or C++, as C++
|
||||
# so that build environments which lack a C compiler, but have a C++
|
||||
# compiler may build ninja.
|
||||
set_source_files_properties(src/getopt.c PROPERTIES LANGUAGE CXX)
|
||||
endif()
|
||||
|
||||
# Needed for perfstat_cpu_total
|
||||
@ -136,6 +174,8 @@ else()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
target_compile_features(libninja PUBLIC cxx_std_11)
|
||||
|
||||
#Fixes GetActiveProcessorCount on MinGW
|
||||
if(MINGW)
|
||||
target_compile_definitions(libninja PRIVATE _WIN32_WINNT=0x0601 __USE_MINGW_ANSI_STDIO=1)
|
||||
@ -148,11 +188,13 @@ if(CMAKE_SYSTEM_NAME STREQUAL "OS400" OR CMAKE_SYSTEM_NAME STREQUAL "AIX")
|
||||
endif()
|
||||
|
||||
# Main executable is library plus main() function.
|
||||
add_executable(ninja src/ninja.cc)
|
||||
target_link_libraries(ninja PRIVATE libninja libninja-re2c)
|
||||
if(NINJA_BUILD_BINARY)
|
||||
add_executable(ninja src/ninja.cc)
|
||||
target_link_libraries(ninja PRIVATE libninja libninja-re2c)
|
||||
|
||||
if(WIN32)
|
||||
target_sources(ninja PRIVATE windows/ninja.manifest)
|
||||
if(WIN32)
|
||||
target_sources(ninja PRIVATE windows/ninja.manifest)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Adds browse mode into the ninja binary if it's supported by the host platform.
|
||||
@ -171,18 +213,58 @@ if(platform_supports_ninja_browse)
|
||||
VERBATIM
|
||||
)
|
||||
|
||||
target_compile_definitions(ninja PRIVATE NINJA_HAVE_BROWSE)
|
||||
target_sources(ninja PRIVATE src/browse.cc)
|
||||
if(NINJA_BUILD_BINARY)
|
||||
target_compile_definitions(ninja PRIVATE NINJA_HAVE_BROWSE)
|
||||
target_sources(ninja PRIVATE src/browse.cc)
|
||||
endif()
|
||||
set_source_files_properties(src/browse.cc
|
||||
PROPERTIES
|
||||
OBJECT_DEPENDS "${PROJECT_BINARY_DIR}/build/browse_py.h"
|
||||
INCLUDE_DIRECTORIES "${PROJECT_BINARY_DIR}"
|
||||
COMPILE_DEFINITIONS NINJA_PYTHON="python"
|
||||
COMPILE_DEFINITIONS NINJA_PYTHON="${NINJA_PYTHON}"
|
||||
)
|
||||
endif()
|
||||
|
||||
include(CTest)
|
||||
if(BUILD_TESTING)
|
||||
find_package(GTest)
|
||||
if(NOT GTest_FOUND)
|
||||
include(FetchContent)
|
||||
FetchContent_Declare(
|
||||
googletest
|
||||
URL https://github.com/google/googletest/archive/release-1.10.0.tar.gz
|
||||
URL_HASH SHA1=9c89be7df9c5e8cb0bc20b3c4b39bf7e82686770
|
||||
)
|
||||
FetchContent_MakeAvailable(googletest)
|
||||
|
||||
# Before googletest-1.11.0, the CMake files provided by the source archive
|
||||
# did not define the GTest::gtest target, only the gtest one, so define
|
||||
# an alias when needed to ensure the rest of this file works with all
|
||||
# GoogleTest releases.
|
||||
#
|
||||
# Note that surprisingly, this is not needed when using GTEST_ROOT to
|
||||
# point to a local installation, because this one contains CMake-generated
|
||||
# files that contain the right target definition, and which will be
|
||||
# picked up by the find_package(GTest) file above.
|
||||
#
|
||||
# This comment and the four lines below can be removed once Ninja only
|
||||
# depends on release-1.11.0 or above.
|
||||
if (NOT TARGET GTest::gtest)
|
||||
message(STATUS "Defining GTest::gtest alias to work-around bug in older release.")
|
||||
add_library(GTest::gtest ALIAS gtest)
|
||||
|
||||
# NOTE: gtest leaves some variables uninitialized; with gcc >= 1.11.3 this may cause a compile error.
|
||||
# Remove this comment and the six lines below once ninja depends on gtest-1.11.0 or above.
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL "1.11.3")
|
||||
check_cxx_compiler_flag(-Wmaybe-uninitialized flag_maybe_uninit)
|
||||
if (flag_maybe_uninit)
|
||||
target_compile_options(gtest PRIVATE -Wno-maybe-uninitialized)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Tests all build into ninja_test executable.
|
||||
add_executable(ninja_test
|
||||
src/build_log_test.cc
|
||||
@ -207,9 +289,11 @@ if(BUILD_TESTING)
|
||||
src/util_test.cc
|
||||
)
|
||||
if(WIN32)
|
||||
target_sources(ninja_test PRIVATE src/includes_normalize_test.cc src/msvc_helper_test.cc)
|
||||
target_sources(ninja_test PRIVATE src/includes_normalize_test.cc src/msvc_helper_test.cc
|
||||
windows/ninja.manifest)
|
||||
endif()
|
||||
target_link_libraries(ninja_test PRIVATE libninja libninja-re2c)
|
||||
find_package(Threads REQUIRED)
|
||||
target_link_libraries(ninja_test PRIVATE libninja libninja-re2c GTest::gtest Threads::Threads)
|
||||
|
||||
foreach(perftest
|
||||
build_log_perftest
|
||||
@ -232,4 +316,6 @@ if(BUILD_TESTING)
|
||||
add_test(NAME NinjaTest COMMAND ninja_test)
|
||||
endif()
|
||||
|
||||
install(TARGETS ninja)
|
||||
if(NINJA_BUILD_BINARY)
|
||||
install(TARGETS ninja)
|
||||
endif()
|
||||
|
@ -14,14 +14,10 @@ Generally it's the
|
||||
[Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html) with
|
||||
a few additions:
|
||||
|
||||
* Any code merged into the Ninja codebase which will be part of the main
|
||||
executable must compile as C++03. You may use C++11 features in a test or an
|
||||
unimportant tool if you guard your code with `#if __cplusplus >= 201103L`.
|
||||
* We have used `using namespace std;` a lot in the past. For new contributions,
|
||||
please try to avoid relying on it and instead whenever possible use `std::`.
|
||||
However, please do not change existing code simply to add `std::` unless your
|
||||
contribution already needs to change that line of code anyway.
|
||||
* All source files should have the Google Inc. license header.
|
||||
* Use `///` for [Doxygen](http://www.doxygen.nl/) (use `\a` to refer to
|
||||
arguments).
|
||||
* It's not necessary to document each argument, especially when they're
|
||||
|
38
README.md
@ -7,7 +7,7 @@ See [the manual](https://ninja-build.org/manual.html) or
|
||||
`doc/manual.asciidoc` included in the distribution for background
|
||||
and more details.
|
||||
|
||||
Binaries for Linux, Mac, and Windows are available at
|
||||
Binaries for Linux, Mac and Windows are available on
|
||||
[GitHub](https://github.com/ninja-build/ninja/releases).
|
||||
Run `./ninja -h` for Ninja help.
|
||||
|
||||
@ -49,3 +49,39 @@ To run the unit tests:
|
||||
```
|
||||
./build-cmake/ninja_test
|
||||
```
|
||||
|
||||
## Generating documentation
|
||||
|
||||
### Ninja Manual
|
||||
|
||||
You must have `asciidoc` and `xsltproc` in your PATH, then do:
|
||||
|
||||
```
|
||||
./configure.py
|
||||
ninja manual
|
||||
```
|
||||
|
||||
Which will generate `doc/manual.html`.
|
||||
|
||||
To generate the PDF version of the manual, you must have `dblatex` in your PATH, then do:
|
||||
|
||||
```
|
||||
./configure.py # only if you didn't do it previously.
|
||||
ninja doc/manual.pdf
|
||||
```
|
||||
|
||||
Which will generate `doc/manual.pdf`.
|
||||
|
||||
### Doxygen documentation
|
||||
|
||||
If you have `doxygen` installed, you can build documentation extracted from C++
|
||||
declarations and comments to help you navigate the code. Note that Ninja is a standalone
|
||||
executable, not a library, so there is no public API; all details exposed here are
|
||||
internal.
|
||||
|
||||
```
|
||||
./configure.py # if needed
|
||||
ninja doxygen
|
||||
```
|
||||
|
||||
Then open `doc/doxygen/html/index.html` in a browser to look at it.
|
||||
|
41
RELEASING.md
Normal file
@ -0,0 +1,41 @@
|
||||
Notes to myself on all the steps to make for a Ninja release.
|
||||
|
||||
### Push new release branch:
|
||||
1. Run afl-fuzz for a day or so and run ninja_test
|
||||
2. Consider sending a heads-up to the ninja-build mailing list first
|
||||
3. Make sure branches 'master' and 'release' are synced up locally
|
||||
4. Update src/version.cc with new version (with ".git"), then
|
||||
```
|
||||
git commit -am 'mark this 1.5.0.git'
|
||||
```
|
||||
5. git checkout release; git merge master
|
||||
6. Fix version number in src/version.cc (it will likely conflict in the above)
|
||||
7. Fix version in doc/manual.asciidoc (exists only on release branch)
|
||||
8. commit, tag, push (don't forget to push --tags)
|
||||
```
|
||||
git commit -am v1.5.0; git push origin release
|
||||
git tag v1.5.0; git push --tags
|
||||
# Push the 1.5.0.git change on master too:
|
||||
git checkout master; git push origin master
|
||||
```
|
||||
9. Construct release notes from prior notes
|
||||
|
||||
credits: `git shortlog -s --no-merges REV..`
|
||||
|
||||
|
||||
### Release on GitHub:
|
||||
1. Go to [Tags](https://github.com/ninja-build/ninja/tags)
|
||||
2. Open the newly created tag and select "Create release from tag"
|
||||
3. Create the release which will trigger a build which automatically attaches
|
||||
the binaries
|
||||
|
||||
### Make announcement on mailing list:
|
||||
1. copy old mail
|
||||
|
||||
### Update website:
|
||||
1. Make sure your ninja checkout is on the v1.5.0 tag
|
||||
2. Clone https://github.com/ninja-build/ninja-build.github.io
|
||||
3. In that repo, `./update-docs.sh`
|
||||
4. Update index.html with newest version and link to release notes
|
||||
5. `git commit -m 'run update-docs.sh, 1.5.0 release'`
|
||||
6. `git push origin master`
|
21
appveyor.yml
@ -8,7 +8,6 @@ environment:
|
||||
CHERE_INVOKING: 1 # Tell Bash to inherit the current working directory
|
||||
matrix:
|
||||
- MSYSTEM: MINGW64
|
||||
- MSYSTEM: MSVC
|
||||
- MSYSTEM: LINUX
|
||||
|
||||
matrix:
|
||||
@ -17,8 +16,6 @@ matrix:
|
||||
MSYSTEM: LINUX
|
||||
- image: Ubuntu1804
|
||||
MSYSTEM: MINGW64
|
||||
- image: Ubuntu1804
|
||||
MSYSTEM: MSVC
|
||||
|
||||
for:
|
||||
-
|
||||
@ -30,31 +27,13 @@ for:
|
||||
pacman -S --quiet --noconfirm --needed re2c 2>&1\n
|
||||
./configure.py --bootstrap --platform mingw 2>&1\n
|
||||
./ninja all\n
|
||||
./ninja_test 2>&1\n
|
||||
./misc/ninja_syntax_test.py 2>&1\n\"@"
|
||||
-
|
||||
matrix:
|
||||
only:
|
||||
- MSYSTEM: MSVC
|
||||
build_script:
|
||||
- cmd: >-
|
||||
call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvars64.bat"
|
||||
|
||||
python configure.py --bootstrap
|
||||
|
||||
ninja.bootstrap.exe all
|
||||
|
||||
ninja_test
|
||||
|
||||
python misc/ninja_syntax_test.py
|
||||
|
||||
- matrix:
|
||||
only:
|
||||
- image: Ubuntu1804
|
||||
build_script:
|
||||
- ./configure.py --bootstrap
|
||||
- ./ninja all
|
||||
- ./ninja_test
|
||||
- misc/ninja_syntax_test.py
|
||||
- misc/output_test.py
|
||||
|
||||
|
@ -1,19 +1,13 @@
|
||||
# build ninja
|
||||
|
||||
## Get the code
|
||||
```
|
||||
#build ninja
|
||||
|
||||
# Get the code
|
||||
git clone https://gitee.com/openharmony/third_party_ninja.git
|
||||
```
|
||||
|
||||
## Build the binary with the Python script
|
||||
```
|
||||
|
||||
# Build the binary with the Python script
|
||||
cd third_party_ninja
|
||||
./configure.py --bootstrap
|
||||
```
|
||||
|
||||
## Build the binary with CMake
|
||||
```
|
||||
|
||||
# Build the binary with CMake
|
||||
cd third_party_ninja
|
||||
cmake -Bbuild-cmake
|
||||
cmake -Bbuild-cmake -H.
|
||||
cmake --build build-cmake
|
||||
```
|
||||
|
80
configure.py
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright 2001 Google Inc. All Rights Reserved.
|
||||
#
|
||||
@ -19,12 +19,9 @@
|
||||
Projects that use ninja themselves should either write a similar script
|
||||
or use a meta-build system that supports Ninja output."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from optparse import OptionParser
|
||||
import os
|
||||
import pipes
|
||||
import string
|
||||
import shlex
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
@ -264,7 +261,7 @@ n.variable('configure_args', ' '.join(configure_args))
|
||||
env_keys = set(['CXX', 'AR', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS'])
|
||||
configure_env = dict((k, os.environ[k]) for k in os.environ if k in env_keys)
|
||||
if configure_env:
|
||||
config_str = ' '.join([k + '=' + pipes.quote(configure_env[k])
|
||||
config_str = ' '.join([k + '=' + shlex.quote(configure_env[k])
|
||||
for k in configure_env])
|
||||
n.variable('configure_env', config_str + '$ ')
|
||||
n.newline()
|
||||
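As a side note on the `pipes.quote` to `shlex.quote` swap above: `pipes.quote` was an undocumented alias that has since been removed from the standard library, while `shlex.quote` is the supported spelling and produces the same POSIX-shell quoting. A minimal standalone sketch, not part of configure.py, with a made-up flag string:

```python
import shlex

# Quote an environment value the way configure.py records it in build.ninja.
cxxflags = '-O2 -fno-exceptions -DNINJA_PYTHON="python3"'
print('CXXFLAGS=' + shlex.quote(cxxflags))
# CXXFLAGS='-O2 -fno-exceptions -DNINJA_PYTHON="python3"'
```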
@ -305,7 +302,18 @@ if platform.is_msvc():
|
||||
else:
|
||||
n.variable('ar', configure_env.get('AR', 'ar'))
|
||||
|
||||
def search_system_path(file_name):
|
||||
"""Find a file in the system path."""
|
||||
for dir in os.environ['path'].split(';'):
|
||||
path = os.path.join(dir, file_name)
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
|
||||
# Note that build settings are separately specified in CMakeLists.txt and
|
||||
# these lists should be kept in sync.
|
||||
if platform.is_msvc():
|
||||
if not search_system_path('cl.exe'):
|
||||
raise Exception('cl.exe not found. Run again from the Developer Command Prompt for VS')
|
||||
cflags = ['/showIncludes',
|
||||
'/nologo', # Don't print startup banner.
|
||||
'/Zi', # Create pdb with debug info.
|
||||
@ -320,6 +328,7 @@ if platform.is_msvc():
|
||||
# Disable warnings about ignored typedef in DbgHelp.h
|
||||
'/wd4091',
|
||||
'/GR-', # Disable RTTI.
|
||||
'/Zc:__cplusplus',
|
||||
# Disable size_t -> int truncation warning.
|
||||
# We never have strings or arrays larger than 2**31.
|
||||
'/wd4267',
|
||||
@ -339,6 +348,7 @@ else:
|
||||
'-Wno-unused-parameter',
|
||||
'-fno-rtti',
|
||||
'-fno-exceptions',
|
||||
'-std=c++11',
|
||||
'-fvisibility=hidden', '-pipe',
|
||||
'-DNINJA_PYTHON="%s"' % options.with_python]
|
||||
if options.debug:
|
||||
@ -474,7 +484,7 @@ n.comment('the depfile parser and ninja lexers are generated using re2c.')
|
||||
def has_re2c():
|
||||
try:
|
||||
proc = subprocess.Popen(['re2c', '-V'], stdout=subprocess.PIPE)
|
||||
return int(proc.communicate()[0], 10) >= 1103
|
||||
return int(proc.communicate()[0], 10) >= 1503
|
||||
except OSError:
|
||||
return False
|
||||
if has_re2c():
|
||||
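For context on the `>= 1503` threshold above: `re2c -V` / `--vernum` prints a single integer that both this check and the CMake `RE2C_RAW_VERSION / 10000` math treat as MAJOR*10000 + MINOR*100 + PATCH. A small illustrative decoder, a hypothetical helper that is not part of configure.py, assuming that encoding:

```python
def decode_re2c_vernum(raw: int) -> str:
    """Turn re2c's --vernum integer into a dotted version string."""
    major, rest = divmod(raw, 10000)
    minor, patch = divmod(rest, 100)
    return f"{major}.{minor}.{patch}"

assert decode_re2c_vernum(1503) == "0.15.3"   # the minimum configure.py now accepts
assert decode_re2c_vernum(20001) == "2.0.1"   # a re2c 2.x release, which also passes CMake's major-version check
```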
@ -485,20 +495,31 @@ if has_re2c():
|
||||
n.build(src('depfile_parser.cc'), 're2c', src('depfile_parser.in.cc'))
|
||||
n.build(src('lexer.cc'), 're2c', src('lexer.in.cc'))
|
||||
else:
|
||||
print("warning: A compatible version of re2c (>= 0.11.3) was not found; "
|
||||
print("warning: A compatible version of re2c (>= 0.15.3) was not found; "
|
||||
"changes to src/*.in.cc will not affect your build.")
|
||||
n.newline()
|
||||
|
||||
n.comment('Core source files all build into ninja library.')
|
||||
cxxvariables = []
|
||||
if platform.is_msvc():
|
||||
cxxvariables = [('pdb', 'ninja.pdb')]
|
||||
|
||||
n.comment('Generate a library for `ninja-re2c`.')
|
||||
re2c_objs = []
|
||||
for name in ['depfile_parser', 'lexer']:
|
||||
re2c_objs += cxx(name, variables=cxxvariables)
|
||||
if platform.is_msvc():
|
||||
n.build(built('ninja-re2c.lib'), 'ar', re2c_objs)
|
||||
else:
|
||||
n.build(built('libninja-re2c.a'), 'ar', re2c_objs)
|
||||
n.newline()
|
||||
|
||||
n.comment('Core source files all build into ninja library.')
|
||||
objs.extend(re2c_objs)
|
||||
for name in ['build',
|
||||
'build_log',
|
||||
'clean',
|
||||
'clparser',
|
||||
'debug_flags',
|
||||
'depfile_parser',
|
||||
'deps_log',
|
||||
'disk_interface',
|
||||
'dyndep',
|
||||
@ -508,7 +529,6 @@ for name in ['build',
|
||||
'graph',
|
||||
'graphviz',
|
||||
'json',
|
||||
'lexer',
|
||||
'line_printer',
|
||||
'manifest_parser',
|
||||
'metrics',
|
||||
@ -562,44 +582,6 @@ if options.bootstrap:
|
||||
# build.ninja file.
|
||||
n = ninja_writer
|
||||
|
||||
n.comment('Tests all build into ninja_test executable.')
|
||||
|
||||
objs = []
|
||||
if platform.is_msvc():
|
||||
cxxvariables = [('pdb', 'ninja_test.pdb')]
|
||||
|
||||
for name in ['build_log_test',
|
||||
'build_test',
|
||||
'clean_test',
|
||||
'clparser_test',
|
||||
'depfile_parser_test',
|
||||
'deps_log_test',
|
||||
'dyndep_parser_test',
|
||||
'disk_interface_test',
|
||||
'edit_distance_test',
|
||||
'graph_test',
|
||||
'json_test',
|
||||
'lexer_test',
|
||||
'manifest_parser_test',
|
||||
'missing_deps_test',
|
||||
'ninja_test',
|
||||
'state_test',
|
||||
'status_test',
|
||||
'string_piece_util_test',
|
||||
'subprocess_test',
|
||||
'test',
|
||||
'util_test']:
|
||||
objs += cxx(name, variables=cxxvariables)
|
||||
if platform.is_windows():
|
||||
for name in ['includes_normalize_test', 'msvc_helper_test']:
|
||||
objs += cxx(name, variables=cxxvariables)
|
||||
|
||||
ninja_test = n.build(binary('ninja_test'), 'link', objs, implicit=ninja_lib,
|
||||
variables=[('libs', libs)])
|
||||
n.newline()
|
||||
all_targets += ninja_test
|
||||
|
||||
|
||||
n.comment('Ancillary executables.')
|
||||
|
||||
if platform.is_aix() and '-maix64' not in ldflags:
|
||||
|
@ -1,6 +1,6 @@
|
||||
The Ninja build system
|
||||
======================
|
||||
v1.11.0, Nov 2020
|
||||
v1.12.0, Apr 2024
|
||||
|
||||
|
||||
Introduction
|
||||
@ -25,7 +25,7 @@ Where other build systems are high-level languages, Ninja aims to be
|
||||
an assembler.
|
||||
|
||||
Build systems get slow when they need to make decisions. When you are
|
||||
in a edit-compile cycle you want it to be as fast as possible -- you
|
||||
in an edit-compile cycle you want it to be as fast as possible -- you
|
||||
want the build system to do the minimum work necessary to figure out
|
||||
what needs to be built immediately.
|
||||
|
||||
@ -205,7 +205,11 @@ Several placeholders are available:
|
||||
`%o`:: Overall rate of finished edges per second
|
||||
`%c`:: Current rate of finished edges per second (average over builds
|
||||
specified by `-j` or its default)
|
||||
`%e`:: Elapsed time in seconds. _(Available since Ninja 1.2.)_
|
||||
`%e`:: Elapsed time in seconds. _(Available since Ninja 1.2.)_
|
||||
`%E`:: Remaining time (ETA) in seconds. _(Available since Ninja 1.12.)_
|
||||
`%w`:: Elapsed time in [h:]mm:ss format. _(Available since Ninja 1.12.)_
|
||||
`%W`:: Remaining time (ETA) in [h:]mm:ss format. _(Available since Ninja 1.12.)_
|
||||
`%P`:: The percentage (in ppp% format) of time elapsed out of predicted total runtime. _(Available since Ninja 1.12.)_
|
||||
`%%`:: A plain `%` character.
|
||||
|
||||
The default progress status is `"[%f/%t] "` (note the trailing space
|
||||
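To make the new 1.12 placeholders concrete, here is a small, hedged sketch of driving a build with a custom progress status. It assumes the placeholders are consumed through the `NINJA_STATUS` environment variable and that a `ninja` binary and an existing `build/` directory are available:

```python
import os
import subprocess

env = dict(os.environ)
# finished/total edges, percent of predicted runtime, and remaining time (ETA)
env['NINJA_STATUS'] = '[%f/%t %P ETA %W] '
subprocess.run(['ninja', '-C', 'build'], env=env, check=True)
```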
@ -223,14 +227,14 @@ found useful during Ninja's development. The current tools are:
|
||||
|
||||
`browse`:: browse the dependency graph in a web browser. Clicking a
|
||||
file focuses the view on that file, showing inputs and outputs. This
|
||||
feature requires a Python installation. By default port 8000 is used
|
||||
feature requires a Python installation. By default, port 8000 is used
|
||||
and a web browser will be opened. This can be changed as follows:
|
||||
+
|
||||
----
|
||||
ninja -t browse --port=8000 --no-browser mytarget
|
||||
----
|
||||
+
|
||||
`graph`:: output a file in the syntax used by `graphviz`, a automatic
|
||||
`graph`:: output a file in the syntax used by `graphviz`, an automatic
|
||||
graph layout tool. Use it like:
|
||||
+
|
||||
----
|
||||
@ -262,7 +266,7 @@ output files are out of date.
|
||||
rebuild those targets.
|
||||
_Available since Ninja 1.11._
|
||||
|
||||
`clean`:: remove built files. By default it removes all built files
|
||||
`clean`:: remove built files. By default, it removes all built files
|
||||
except for those created by the generator. Adding the `-g` flag also
|
||||
removes built files created by the generator (see <<ref_rule,the rule
|
||||
reference for the +generator+ attribute>>). Additional arguments are
|
||||
@ -325,20 +329,19 @@ Where `ENVFILE` is a binary file that contains an environment block suitable
|
||||
for CreateProcessA() on Windows (i.e. a series of zero-terminated strings that
|
||||
look like NAME=VALUE, followed by an extra zero terminator). Note that this uses
|
||||
the local codepage encoding.
|
||||
|
||||
+
|
||||
This tool also supports a deprecated way of parsing the compiler's output when
|
||||
the `/showIncludes` flag is used, and generating a GCC-compatible depfile from it.
|
||||
the `/showIncludes` flag is used, and generating a GCC-compatible depfile from it:
|
||||
+
|
||||
---
|
||||
----
|
||||
ninja -t msvc -o DEPFILE [-p STRING] -- cl.exe /showIncludes <arguments>
|
||||
---
|
||||
----
|
||||
+
|
||||
|
||||
When using this option, `-p STRING` can be used to pass the localized line prefix
|
||||
that `cl.exe` uses to output dependency information. For English-speaking regions
|
||||
this is `"Note: including file: "` without the double quotes, but will be different
|
||||
for other regions.
|
||||
|
||||
+
|
||||
Note that Ninja supports this natively now, with the use of `deps = msvc` and
|
||||
`msvc_deps_prefix` in Ninja files. Native support also avoids launching an extra
|
||||
tool process each time the compiler must be called, which can speed up builds
|
||||
@ -675,14 +678,14 @@ Ninja supports this processing in two forms.
|
||||
as a temporary).
|
||||
|
||||
2. `deps = msvc` specifies that the tool outputs header dependencies
|
||||
in the form produced by Visual Studio's compiler's
|
||||
in the form produced by the Visual Studio compiler's
|
||||
http://msdn.microsoft.com/en-us/library/hdkef6tk(v=vs.90).aspx[`/showIncludes`
|
||||
flag]. Briefly, this means the tool outputs specially-formatted lines
|
||||
to its stdout. Ninja then filters these lines from the displayed
|
||||
output. No `depfile` attribute is necessary, but the localized string
|
||||
in front of the the header file path. For instance
|
||||
in front of the header file path should be globally defined. For instance,
|
||||
`msvc_deps_prefix = Note: including file:`
|
||||
for a English Visual Studio (the default). Should be globally defined.
|
||||
for an English Visual Studio (the default).
|
||||
+
|
||||
----
|
||||
msvc_deps_prefix = Note: including file:
|
||||
@ -965,14 +968,14 @@ Fundamentally, command lines behave differently on Unixes and Windows.
|
||||
|
||||
On Unixes, commands are arrays of arguments. The Ninja `command`
|
||||
variable is passed directly to `sh -c`, which is then responsible for
|
||||
interpreting that string into an argv array. Therefore the quoting
|
||||
interpreting that string into an argv array. Therefore, the quoting
|
||||
rules are those of the shell, and you can use all the normal shell
|
||||
operators, like `&&` to chain multiple commands, or `VAR=value cmd` to
|
||||
set environment variables.
|
||||
|
||||
On Windows, commands are strings, so Ninja passes the `command` string
|
||||
directly to `CreateProcess`. (In the common case of simply executing
|
||||
a compiler this means there is less overhead.) Consequently the
|
||||
a compiler this means there is less overhead.) Consequently, the
|
||||
quoting rules are determined by the called program, which on Windows
|
||||
are usually provided by the C library. If you need shell
|
||||
interpretation of the command (such as the use of `&&` to chain
|
||||
@ -1047,6 +1050,9 @@ relative path, pointing to the same file, are considered different by Ninja.
|
||||
[[validations]]
|
||||
Validations
|
||||
~~~~~~~~~~~
|
||||
|
||||
_Available since Ninja 1.11._
|
||||
|
||||
Validations listed on the build line cause the specified files to be
|
||||
added to the top level of the build graph (as if they were specified
|
||||
on the Ninja command line) whenever the build line is a transitive
|
||||
@ -1062,7 +1068,7 @@ A build edge can list another build edge as a validation even if the second
|
||||
edge depends on the first.
|
||||
|
||||
Validations are designed to handle rules that perform error checking but
|
||||
don't produce any artifacts needed by the build, for example static
|
||||
don't produce any artifacts needed by the build, for example, static
|
||||
analysis tools. Marking the static analysis rule as an implicit input
|
||||
of the main build rule of the source files or of the rules that depend
|
||||
on the main build rule would slow down the critical path of the build,
|
||||
|
@ -1,15 +1,22 @@
|
||||
:root {
|
||||
color-scheme: light dark;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 5ex 10ex;
|
||||
max-width: 80ex;
|
||||
line-height: 1.5;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
|
||||
h1, h2, h3 {
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
pre, code {
|
||||
font-family: x, monospace;
|
||||
}
|
||||
|
||||
pre {
|
||||
padding: 1ex;
|
||||
background: #eee;
|
||||
@ -17,13 +24,32 @@ pre {
|
||||
min-width: 0;
|
||||
font-size: 90%;
|
||||
}
|
||||
@media (prefers-color-scheme: dark) {
|
||||
pre {
|
||||
background: #333;
|
||||
border: solid 1px #444;
|
||||
}
|
||||
}
|
||||
|
||||
code {
|
||||
color: #007;
|
||||
}
|
||||
@media (prefers-color-scheme: dark) {
|
||||
code {
|
||||
color: #a7cec8;
|
||||
}
|
||||
}
|
||||
|
||||
div.chapter {
|
||||
margin-top: 4em;
|
||||
border-top: solid 2px black;
|
||||
}
|
||||
@media (prefers-color-scheme: dark) {
|
||||
div.chapter {
|
||||
border-top: solid 2px white;
|
||||
}
|
||||
}
|
||||
|
||||
p {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright 2011 Google Inc. All Rights Reserved.
|
||||
#
|
||||
@ -17,8 +17,6 @@
|
||||
"""measure the runtime of a command by repeatedly running it.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import time
|
||||
import subprocess
|
||||
import sys
|
||||
|
@ -19,16 +19,22 @@
|
||||
;;; Commentary:
|
||||
|
||||
;; Simple emacs mode for editing .ninja files.
|
||||
;; Just some syntax highlighting for now.
|
||||
|
||||
;;; Code:
|
||||
|
||||
(defcustom ninja-indent-offset 2
|
||||
"*Amount of offset per level of indentation."
|
||||
:type 'integer
|
||||
:safe 'natnump
|
||||
:group 'ninja)
|
||||
|
||||
(defconst ninja-keywords-re
|
||||
(concat "^" (regexp-opt '("rule" "build" "subninja" "include" "pool" "default")
|
||||
'words)))
|
||||
|
||||
(defvar ninja-keywords
|
||||
`((,(concat "^" (regexp-opt '("rule" "build" "subninja" "include"
|
||||
"pool" "default")
|
||||
'words))
|
||||
. font-lock-keyword-face)
|
||||
("\\([[:alnum:]_]+\\) =" 1 font-lock-variable-name-face)
|
||||
`((,ninja-keywords-re . font-lock-keyword-face)
|
||||
("^[[:space:]]*\\([[:alnum:]_]+\\)[[:space:]]*=" 1 font-lock-variable-name-face)
|
||||
;; Variable expansion.
|
||||
("$[[:alnum:]_]+" . font-lock-variable-name-face)
|
||||
("${[[:alnum:]._]+}" . font-lock-variable-name-face)
|
||||
@ -69,11 +75,30 @@
|
||||
(unless (= line-end (1+ (buffer-size)))
|
||||
(put-text-property line-end (1+ line-end) 'syntax-table '(12)))))))))
|
||||
|
||||
(defun ninja-compute-indentation ()
|
||||
"Calculate indentation for the current line."
|
||||
(save-excursion
|
||||
(beginning-of-line)
|
||||
(if (or (looking-at ninja-keywords-re)
|
||||
(= (line-number-at-pos) 1))
|
||||
0
|
||||
(forward-line -1)
|
||||
(if (looking-at ninja-keywords-re)
|
||||
ninja-indent-offset
|
||||
(current-indentation)))))
|
||||
|
||||
(defun ninja-indent-line ()
|
||||
"Indent the current line. Uses previous indentation level if
|
||||
available or `ninja-indent-offset'"
|
||||
(interactive "*")
|
||||
(indent-line-to (ninja-compute-indentation)))
|
||||
|
||||
;;;###autoload
|
||||
(define-derived-mode ninja-mode prog-mode "ninja"
|
||||
(set (make-local-variable 'comment-start) "#")
|
||||
(set (make-local-variable 'parse-sexp-lookup-properties) t)
|
||||
(set (make-local-variable 'syntax-propertize-function) #'ninja-syntax-propertize)
|
||||
(set (make-local-variable 'indent-line-function) 'ninja-indent-line)
|
||||
(setq font-lock-defaults '(ninja-keywords)))
|
||||
|
||||
;; Run ninja-mode for files ending in .ninja.
|
||||
|
@ -23,37 +23,54 @@ use Python.
|
||||
|
||||
import re
|
||||
import textwrap
|
||||
from io import TextIOWrapper
|
||||
from typing import Dict, List, Match, Optional, Tuple, Union
|
||||
|
||||
def escape_path(word):
|
||||
def escape_path(word: str) -> str:
|
||||
return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')
|
||||
|
||||
class Writer(object):
|
||||
def __init__(self, output, width=78):
|
||||
def __init__(self, output: TextIOWrapper, width: int = 78) -> None:
|
||||
self.output = output
|
||||
self.width = width
|
||||
|
||||
def newline(self):
|
||||
def newline(self) -> None:
|
||||
self.output.write('\n')
|
||||
|
||||
def comment(self, text):
|
||||
def comment(self, text: str) -> None:
|
||||
for line in textwrap.wrap(text, self.width - 2, break_long_words=False,
|
||||
break_on_hyphens=False):
|
||||
self.output.write('# ' + line + '\n')
|
||||
|
||||
def variable(self, key, value, indent=0):
|
||||
def variable(
|
||||
self,
|
||||
key: str,
|
||||
value: Optional[Union[bool, int, float, str, List[str]]],
|
||||
indent: int = 0,
|
||||
) -> None:
|
||||
if value is None:
|
||||
return
|
||||
if isinstance(value, list):
|
||||
value = ' '.join(filter(None, value)) # Filter out empty strings.
|
||||
self._line('%s = %s' % (key, value), indent)
|
||||
|
||||
def pool(self, name, depth):
|
||||
def pool(self, name: str, depth: int) -> None:
|
||||
self._line('pool %s' % name)
|
||||
self.variable('depth', depth, indent=1)
|
||||
|
||||
def rule(self, name, command, description=None, depfile=None,
|
||||
generator=False, pool=None, restat=False, rspfile=None,
|
||||
rspfile_content=None, deps=None):
|
||||
def rule(
|
||||
self,
|
||||
name: str,
|
||||
command: str,
|
||||
description: Optional[str] = None,
|
||||
depfile: Optional[str] = None,
|
||||
generator: bool = False,
|
||||
pool: Optional[str] = None,
|
||||
restat: bool = False,
|
||||
rspfile: Optional[str] = None,
|
||||
rspfile_content: Optional[str] = None,
|
||||
deps: Optional[Union[str, List[str]]] = None,
|
||||
) -> None:
|
||||
self._line('rule %s' % name)
|
||||
self.variable('command', command, indent=1)
|
||||
if description:
|
||||
@ -73,8 +90,23 @@ class Writer(object):
|
||||
if deps:
|
||||
self.variable('deps', deps, indent=1)
|
||||
|
||||
def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
|
||||
variables=None, implicit_outputs=None, pool=None, dyndep=None):
|
||||
def build(
|
||||
self,
|
||||
outputs: Union[str, List[str]],
|
||||
rule: str,
|
||||
inputs: Optional[Union[str, List[str]]] = None,
|
||||
implicit: Optional[Union[str, List[str]]] = None,
|
||||
order_only: Optional[Union[str, List[str]]] = None,
|
||||
variables: Optional[
|
||||
Union[
|
||||
List[Tuple[str, Optional[Union[str, List[str]]]]],
|
||||
Dict[str, Optional[Union[str, List[str]]]],
|
||||
]
|
||||
] = None,
|
||||
implicit_outputs: Optional[Union[str, List[str]]] = None,
|
||||
pool: Optional[str] = None,
|
||||
dyndep: Optional[str] = None,
|
||||
) -> List[str]:
|
||||
outputs = as_list(outputs)
|
||||
out_outputs = [escape_path(x) for x in outputs]
|
||||
all_inputs = [escape_path(x) for x in as_list(inputs)]
|
||||
@ -111,16 +143,16 @@ class Writer(object):
|
||||
|
||||
return outputs
|
||||
|
||||
def include(self, path):
|
||||
def include(self, path: str) -> None:
|
||||
self._line('include %s' % path)
|
||||
|
||||
def subninja(self, path):
|
||||
def subninja(self, path: str) -> None:
|
||||
self._line('subninja %s' % path)
|
||||
|
||||
def default(self, paths):
|
||||
def default(self, paths: Union[str, List[str]]) -> None:
|
||||
self._line('default %s' % ' '.join(as_list(paths)))
|
||||
|
||||
def _count_dollars_before_index(self, s, i):
|
||||
def _count_dollars_before_index(self, s: str, i: int) -> int:
|
||||
"""Returns the number of '$' characters right in front of s[i]."""
|
||||
dollar_count = 0
|
||||
dollar_index = i - 1
|
||||
@ -129,7 +161,7 @@ class Writer(object):
|
||||
dollar_index -= 1
|
||||
return dollar_count
|
||||
|
||||
def _line(self, text, indent=0):
|
||||
def _line(self, text: str, indent: int = 0) -> None:
|
||||
"""Write 'text' word-wrapped at self.width characters."""
|
||||
leading_space = ' ' * indent
|
||||
while len(leading_space) + len(text) > self.width:
|
||||
@ -165,11 +197,11 @@ class Writer(object):
|
||||
|
||||
self.output.write(leading_space + text + '\n')
|
||||
|
||||
def close(self):
|
||||
def close(self) -> None:
|
||||
self.output.close()
|
||||
|
||||
|
||||
def as_list(input):
|
||||
def as_list(input: Optional[Union[str, List[str]]]) -> List[str]:
|
||||
if input is None:
|
||||
return []
|
||||
if isinstance(input, list):
|
||||
@ -177,7 +209,7 @@ def as_list(input):
|
||||
return [input]
|
||||
|
||||
|
||||
def escape(string):
|
||||
def escape(string: str) -> str:
|
||||
"""Escape a string such that it can be embedded into a Ninja file without
|
||||
further interpretation."""
|
||||
assert '\n' not in string, 'Ninja syntax does not allow newlines'
|
||||
@ -185,13 +217,13 @@ def escape(string):
|
||||
return string.replace('$', '$$')
|
||||
|
||||
|
||||
def expand(string, vars, local_vars={}):
|
||||
def expand(string: str, vars: Dict[str, str], local_vars: Dict[str, str] = {}) -> str:
|
||||
"""Expand a string containing $vars as Ninja would.
|
||||
|
||||
Note: doesn't handle the full Ninja variable syntax, but it's enough
|
||||
to make configure.py's use of it work.
|
||||
"""
|
||||
def exp(m):
|
||||
def exp(m: Match[str]) -> str:
|
||||
var = m.group(1)
|
||||
if var == '$':
|
||||
return '$'
|
||||
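As a quick illustration of the Writer API annotated above, a minimal sketch; it assumes `misc/ninja_syntax.py` is importable as `ninja_syntax`, and the rule and file names are made up:

```python
import io

import ninja_syntax  # misc/ninja_syntax.py

buf = io.StringIO()
n = ninja_syntax.Writer(buf, width=78)
n.comment('toy manifest generated with ninja_syntax.Writer')
n.variable('cxx', 'g++')
n.rule('compile', command='$cxx -c $in -o $out', description='CXX $out')
n.build(['hello.o'], 'compile', inputs=['hello.cc'])
n.default(['hello.o'])
print(buf.getvalue())
```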
|
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright 2011 Google Inc. All Rights Reserved.
|
||||
#
|
||||
|
@ -1,4 +1,4 @@
|
||||
#!/bin/bash
|
||||
#!/bin/bash -eu
|
||||
# Copyright 2020 Google Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -15,7 +15,6 @@
|
||||
#
|
||||
################################################################################
|
||||
|
||||
set -eu
|
||||
cmake -Bbuild-cmake -H.
|
||||
cmake --build build-cmake
|
||||
|
||||
|
@ -13,29 +13,26 @@ import tempfile
|
||||
import unittest
|
||||
|
||||
default_env = dict(os.environ)
|
||||
if 'NINJA_STATUS' in default_env:
|
||||
del default_env['NINJA_STATUS']
|
||||
if 'CLICOLOR_FORCE' in default_env:
|
||||
del default_env['CLICOLOR_FORCE']
|
||||
default_env.pop('NINJA_STATUS', None)
|
||||
default_env.pop('CLICOLOR_FORCE', None)
|
||||
default_env['TERM'] = ''
|
||||
NINJA_PATH = os.path.abspath('./ninja')
|
||||
|
||||
def run(build_ninja, flags='', pipe=False, env=default_env):
|
||||
with tempfile.TemporaryDirectory() as d:
|
||||
os.chdir(d)
|
||||
with open('build.ninja', 'w') as f:
|
||||
with open(os.path.join(d, 'build.ninja'), 'w') as f:
|
||||
f.write(build_ninja)
|
||||
f.flush()
|
||||
ninja_cmd = '{} {}'.format(NINJA_PATH, flags)
|
||||
try:
|
||||
if pipe:
|
||||
output = subprocess.check_output([ninja_cmd], shell=True, env=env)
|
||||
output = subprocess.check_output([ninja_cmd], shell=True, cwd=d, env=env)
|
||||
elif platform.system() == 'Darwin':
|
||||
output = subprocess.check_output(['script', '-q', '/dev/null', 'bash', '-c', ninja_cmd],
|
||||
env=env)
|
||||
cwd=d, env=env)
|
||||
else:
|
||||
output = subprocess.check_output(['script', '-qfec', ninja_cmd, '/dev/null'],
|
||||
env=env)
|
||||
cwd=d, env=env)
|
||||
except subprocess.CalledProcessError as err:
|
||||
sys.stdout.buffer.write(err.output)
|
||||
raise err
|
||||
@ -112,14 +109,51 @@ red
|
||||
\x1b[31mred\x1b[0m
|
||||
''')
|
||||
|
||||
def test_issue_1966(self):
|
||||
self.assertEqual(run(
|
||||
'''rule cat
|
||||
command = cat $rspfile $rspfile > $out
|
||||
rspfile = cat.rsp
|
||||
rspfile_content = a b c
|
||||
|
||||
build a: cat
|
||||
''', '-j3'),
|
||||
'''[1/1] cat cat.rsp cat.rsp > a\x1b[K
|
||||
''')
|
||||
|
||||
|
||||
def test_pr_1685(self):
|
||||
# Running those tools without .ninja_deps and .ninja_log shouldn't fail.
|
||||
self.assertEqual(run('', flags='-t recompact'), '')
|
||||
self.assertEqual(run('', flags='-t restat'), '')
|
||||
|
||||
def test_issue_2048(self):
|
||||
with tempfile.TemporaryDirectory() as d:
|
||||
with open(os.path.join(d, 'build.ninja'), 'w'):
|
||||
pass
|
||||
|
||||
with open(os.path.join(d, '.ninja_log'), 'w') as f:
|
||||
f.write('# ninja log v4\n')
|
||||
|
||||
try:
|
||||
output = subprocess.check_output([NINJA_PATH, '-t', 'recompact'],
|
||||
cwd=d,
|
||||
env=default_env,
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
output.strip(),
|
||||
"ninja: warning: build log version is too old; starting over"
|
||||
)
|
||||
except subprocess.CalledProcessError as err:
|
||||
self.fail("non-zero exit code with: " + err.output)
|
||||
|
||||
def test_status(self):
|
||||
self.assertEqual(run(''), 'ninja: no work to do.\n')
|
||||
self.assertEqual(run('', pipe=True), 'ninja: no work to do.\n')
|
||||
self.assertEqual(run('', flags='--quiet'), '')
|
||||
|
||||
def test_ninja_status_default(self):
|
||||
'Do we show the default status by default?'
|
||||
|
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""Writes large manifest files, for manifest parser performance testing.
|
||||
|
||||
|
@ -16,7 +16,7 @@
|
||||
# Add the following to your .zshrc to tab-complete ninja targets
|
||||
# fpath=(path/to/ninja/misc/zsh-completion $fpath)
|
||||
|
||||
__get_targets() {
|
||||
(( $+functions[_ninja-get-targets] )) || _ninja-get-targets() {
|
||||
dir="."
|
||||
if [ -n "${opt_args[-C]}" ];
|
||||
then
|
||||
@ -31,42 +31,45 @@ __get_targets() {
|
||||
eval ${targets_command} 2>/dev/null | cut -d: -f1
|
||||
}
|
||||
|
||||
__get_tools() {
|
||||
ninja -t list 2>/dev/null | while read -r a b; do echo $a; done | tail -n +2
|
||||
(( $+functions[_ninja-get-tools] )) || _ninja-get-tools() {
|
||||
# remove the first line; remove the leading spaces; replace spaces with colon
|
||||
ninja -t list 2> /dev/null | sed -e '1d;s/^ *//;s/ \+/:/'
|
||||
}
|
||||
|
||||
__get_modes() {
|
||||
ninja -d list 2>/dev/null | while read -r a b; do echo $a; done | tail -n +2 | sed '$d'
|
||||
(( $+functions[_ninja-get-modes] )) || _ninja-get-modes() {
|
||||
# remove the first line; remove the last line; remove the leading spaces; replace spaces with colon
|
||||
ninja -d list 2> /dev/null | sed -e '1d;$d;s/^ *//;s/ \+/:/'
|
||||
}
|
||||
|
||||
__modes() {
|
||||
(( $+functions[_ninja-modes] )) || _ninja-modes() {
|
||||
local -a modes
|
||||
modes=(${(fo)"$(__get_modes)"})
|
||||
modes=(${(fo)"$(_ninja-get-modes)"})
|
||||
_describe 'modes' modes
|
||||
}
|
||||
|
||||
__tools() {
|
||||
(( $+functions[_ninja-tools] )) || _ninja-tools() {
|
||||
local -a tools
|
||||
tools=(${(fo)"$(__get_tools)"})
|
||||
tools=(${(fo)"$(_ninja-get-tools)"})
|
||||
_describe 'tools' tools
|
||||
}
|
||||
|
||||
__targets() {
|
||||
(( $+functions[_ninja-targets] )) || _ninja-targets() {
|
||||
local -a targets
|
||||
targets=(${(fo)"$(__get_targets)"})
|
||||
targets=(${(fo)"$(_ninja-get-targets)"})
|
||||
_describe 'targets' targets
|
||||
}
|
||||
|
||||
_arguments \
|
||||
{-h,--help}'[Show help]' \
|
||||
'--version[Print ninja version]' \
|
||||
'(- *)'{-h,--help}'[Show help]' \
|
||||
'(- *)--version[Print ninja version]' \
|
||||
'-C+[Change to directory before doing anything else]:directories:_directories' \
|
||||
'-f+[Specify input build file (default=build.ninja)]:files:_files' \
|
||||
'-j+[Run N jobs in parallel (default=number of CPUs available)]:number of jobs' \
|
||||
'-l+[Do not start new jobs if the load average is greater than N]:number of jobs' \
|
||||
'-k+[Keep going until N jobs fail (default=1)]:number of jobs' \
|
||||
'-n[Dry run (do not run commands but act like they succeeded)]' \
|
||||
'-v[Show all command lines while building]' \
|
||||
'-d+[Enable debugging (use -d list to list modes)]:modes:__modes' \
|
||||
'-t+[Run a subtool (use -t list to list subtools)]:tools:__tools' \
|
||||
'*::targets:__targets'
|
||||
'(-v --verbose --quiet)'{-v,--verbose}'[Show all command lines while building]' \
|
||||
"(-v --verbose --quiet)--quiet[Don't show progress status, just command output]" \
|
||||
'-d+[Enable debugging (use -d list to list modes)]:modes:_ninja-modes' \
|
||||
'-t+[Run a subtool (use -t list to list subtools)]:tools:_ninja-tools' \
|
||||
'*::targets:_ninja-targets'
|
||||
|
9
src/browse.cc
Normal file → Executable file
@ -71,8 +71,13 @@ void RunBrowsePython(State* state, const char* ninja_command,
|
||||
close(pipefd[0]);
|
||||
|
||||
// Write the script file into the stdin of the Python process.
|
||||
ssize_t len = write(pipefd[1], kBrowsePy, sizeof(kBrowsePy));
|
||||
if (len < (ssize_t)sizeof(kBrowsePy))
|
||||
// Only write n - 1 bytes, because Python 3.11 does not allow null
|
||||
// bytes in source code anymore, so avoid writing the null string
|
||||
// terminator.
|
||||
// See https://github.com/python/cpython/issues/96670
|
||||
auto kBrowsePyLength = sizeof(kBrowsePy) - 1;
|
||||
ssize_t len = write(pipefd[1], kBrowsePy, kBrowsePyLength);
|
||||
if (len < (ssize_t)kBrowsePyLength)
|
||||
perror("ninja: write");
|
||||
close(pipefd[1]);
|
||||
exit(0);
|
||||
|
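A standalone sketch of the failure the change above works around, assuming a `python3` interpreter on PATH: browse.cc pipes the embedded browse.py source into Python's stdin, and starting with Python 3.11 a stray NUL byte in that stream is rejected, which is why the trailing C-string terminator is no longer written:

```python
import subprocess

script_with_nul = b"print('hello from browse.py')\n\x00"
proc = subprocess.run(['python3', '-'], input=script_with_nul, capture_output=True)
print(proc.returncode)                 # non-zero on Python 3.11+
print(proc.stderr.decode().strip())    # e.g. a "source code ... null bytes" error
```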
0
src/browse.h
Normal file → Executable file
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright 2001 Google Inc. All Rights Reserved.
|
||||
#
|
||||
@ -20,8 +20,6 @@ This script is inlined into the final executable and spawned by
|
||||
it when needed.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
try:
|
||||
import http.server as httpserver
|
||||
import socketserver
|
||||
|
381
src/build.cc
Normal file → Executable file
@ -18,6 +18,8 @@
|
||||
#include <errno.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <climits>
|
||||
#include <stdint.h>
|
||||
#include <functional>
|
||||
|
||||
#if defined(__SVR4) && defined(__sun)
|
||||
@ -46,7 +48,7 @@ struct DryRunCommandRunner : public CommandRunner {
|
||||
virtual ~DryRunCommandRunner() {}
|
||||
|
||||
// Overridden from CommandRunner:
|
||||
virtual bool CanRunMore() const;
|
||||
virtual size_t CanRunMore() const;
|
||||
virtual bool StartCommand(Edge* edge);
|
||||
virtual bool WaitForCommand(Result* result);
|
||||
|
||||
@ -54,8 +56,8 @@ struct DryRunCommandRunner : public CommandRunner {
|
||||
queue<Edge*> finished_;
|
||||
};
|
||||
|
||||
bool DryRunCommandRunner::CanRunMore() const {
|
||||
return true;
|
||||
size_t DryRunCommandRunner::CanRunMore() const {
|
||||
return SIZE_MAX;
|
||||
}
|
||||
|
||||
bool DryRunCommandRunner::StartCommand(Edge* edge) {
|
||||
@ -95,15 +97,20 @@ bool Plan::AddTarget(const Node* target, string* err) {
|
||||
bool Plan::AddSubTarget(const Node* node, const Node* dependent, string* err,
|
||||
set<Edge*>* dyndep_walk) {
|
||||
Edge* edge = node->in_edge();
|
||||
if (!edge) { // Leaf node.
|
||||
if (node->dirty()) {
|
||||
string referenced;
|
||||
if (dependent)
|
||||
referenced = ", needed by '" + dependent->path() + "',";
|
||||
*err = "'" + node->path() + "'" + referenced + " missing "
|
||||
"and no known rule to make it";
|
||||
}
|
||||
return false;
|
||||
if (!edge) {
|
||||
// Leaf node, this can be either a regular input from the manifest
|
||||
// (e.g. a source file), or an implicit input from a depfile or dyndep
|
||||
// file. In the first case, a dirty flag means the file is missing,
|
||||
// and the build should stop. In the second, do not do anything here
|
||||
// since there is no producing edge to add to the plan.
|
||||
if (node->dirty() && !node->generated_by_dep_loader()) {
|
||||
string referenced;
|
||||
if (dependent)
|
||||
referenced = ", needed by '" + dependent->path() + "',";
|
||||
*err = "'" + node->path() + "'" + referenced +
|
||||
" missing and no known rule to make it";
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
if (edge->outputs_ready())
|
||||
@ -144,13 +151,17 @@ bool Plan::AddSubTarget(const Node* node, const Node* dependent, string* err,
|
||||
|
||||
void Plan::EdgeWanted(const Edge* edge) {
|
||||
++wanted_edges_;
|
||||
if (!edge->is_phony())
|
||||
if (!edge->is_phony()) {
|
||||
++command_edges_;
|
||||
if (builder_)
|
||||
builder_->status_->EdgeAddedToPlan(edge);
|
||||
}
|
||||
}
|
||||
|
||||
Edge* Plan::FindWork() {
|
||||
if (ready_.empty())
|
||||
return NULL;
|
||||
|
||||
EdgeSet::iterator e = ready_.begin();
|
||||
Edge* edge = *e;
|
||||
ready_.erase(e);
|
||||
@ -294,8 +305,11 @@ bool Plan::CleanNode(DependencyScan* scan, Node* node, string* err) {
|
||||
|
||||
want_e->second = kWantNothing;
|
||||
--wanted_edges_;
|
||||
if (!(*oe)->is_phony())
|
||||
if (!(*oe)->is_phony()) {
|
||||
--command_edges_;
|
||||
if (builder_)
|
||||
builder_->status_->EdgeRemovedFromPlan(*oe);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -450,7 +464,7 @@ void Plan::Dump() const {
|
||||
struct RealCommandRunner : public CommandRunner {
|
||||
explicit RealCommandRunner(const BuildConfig& config) : config_(config) {}
|
||||
virtual ~RealCommandRunner() {}
|
||||
virtual bool CanRunMore() const;
|
||||
virtual size_t CanRunMore() const;
|
||||
virtual bool StartCommand(Edge* edge);
|
||||
virtual bool WaitForCommand(Result* result);
|
||||
virtual vector<Edge*> GetActiveEdges();
|
||||
@ -473,12 +487,26 @@ void RealCommandRunner::Abort() {
|
||||
subprocs_.Clear();
|
||||
}
|
||||
|
||||
bool RealCommandRunner::CanRunMore() const {
|
||||
size_t RealCommandRunner::CanRunMore() const {
|
||||
size_t subproc_number =
|
||||
subprocs_.running_.size() + subprocs_.finished_.size();
|
||||
return (int)subproc_number < config_.parallelism
|
||||
&& ((subprocs_.running_.empty() || config_.max_load_average <= 0.0f)
|
||||
|| GetLoadAverage() < config_.max_load_average);
|
||||
|
||||
int64_t capacity = config_.parallelism - subproc_number;
|
||||
|
||||
if (config_.max_load_average > 0.0f) {
|
||||
int load_capacity = config_.max_load_average - GetLoadAverage();
|
||||
if (load_capacity < capacity)
|
||||
capacity = load_capacity;
|
||||
}
|
||||
|
||||
if (capacity < 0)
|
||||
capacity = 0;
|
||||
|
||||
if (capacity == 0 && subprocs_.running_.empty())
|
||||
// Ensure that we make progress.
|
||||
capacity = 1;
|
||||
|
||||
return capacity;
|
||||
}
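A standalone sketch of the capacity rule introduced above, with the configuration and load values passed in as plain parameters; this is an illustration only, not the RealCommandRunner implementation (the real code reads config_, the subprocess queues, and GetLoadAverage()):

#include <cstddef>
#include <cstdint>

// Returns how many new subprocesses may be started right now.
size_t Capacity(int parallelism, size_t running, size_t finished,
                double max_load_average, double current_load) {
  int64_t capacity =
      parallelism - static_cast<int64_t>(running + finished);

  if (max_load_average > 0.0) {
    int64_t load_capacity =
        static_cast<int64_t>(max_load_average - current_load);
    if (load_capacity < capacity)
      capacity = load_capacity;
  }

  if (capacity < 0)
    capacity = 0;

  if (capacity == 0 && running == 0)
    capacity = 1;  // always allow one job so the build keeps making progress

  return static_cast<size_t>(capacity);
}

For example, with parallelism 8, 5 jobs running, 1 finished but not yet reaped, and no load limit, this yields a capacity of 2 — which is what the reworked Builder::Build loop drains via repeated FindWork() calls.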
|
||||
|
||||
bool RealCommandRunner::StartCommand(Edge* edge) {
|
||||
@ -518,6 +546,10 @@ Builder::Builder(State* state, const BuildConfig& config,
|
||||
start_time_millis_(start_time_millis), disk_interface_(disk_interface),
|
||||
scan_(state, build_log, deps_log, disk_interface,
|
||||
&config_.depfile_parser_options) {
|
||||
lock_file_path_ = ".ninja_lock";
|
||||
string build_dir = state_->bindings_.LookupVariable("builddir");
|
||||
if (!build_dir.empty())
|
||||
lock_file_path_ = build_dir + "/" + lock_file_path_;
|
||||
}
|
||||
|
||||
Builder::~Builder() {
|
||||
@ -552,6 +584,10 @@ void Builder::Cleanup() {
|
||||
disk_interface_->RemoveFile(depfile);
|
||||
}
|
||||
}
|
||||
|
||||
string err;
|
||||
if (disk_interface_->Stat(lock_file_path_, &err) > 0)
|
||||
disk_interface_->RemoveFile(lock_file_path_);
|
||||
}
|
||||
|
||||
Node* Builder::AddTarget(const string& name, string* err) {
|
||||
@ -592,108 +628,6 @@ bool Builder::AddTarget(Node* target, string* err) {
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Builder::AlreadyUpToDate() const {
|
||||
return !plan_.more_to_do();
|
||||
}
|
||||
|
||||
bool Builder::Build(string* err) {
|
||||
assert(!AlreadyUpToDate());
|
||||
|
||||
status_->PlanHasTotalEdges(plan_.command_edge_count());
|
||||
int pending_commands = 0;
|
||||
int failures_allowed = config_.failures_allowed;
|
||||
|
||||
// Set up the command runner if we haven't done so already.
|
||||
if (!command_runner_.get()) {
|
||||
if (config_.dry_run)
|
||||
command_runner_.reset(new DryRunCommandRunner);
|
||||
else
|
||||
command_runner_.reset(new RealCommandRunner(config_));
|
||||
}
|
||||
|
||||
// We are about to start the build process.
|
||||
status_->BuildStarted();
|
||||
|
||||
// This main loop runs the entire build process.
|
||||
// It is structured like this:
|
||||
// First, we attempt to start as many commands as allowed by the
|
||||
// command runner.
|
||||
// Second, we attempt to wait for / reap the next finished command.
|
||||
while (plan_.more_to_do()) {
|
||||
// See if we can start any more commands.
|
||||
if (failures_allowed && command_runner_->CanRunMore()) {
|
||||
if (Edge* edge = plan_.FindWork()) {
|
||||
if (edge->GetBindingBool("generator")) {
|
||||
scan_.build_log()->Close();
|
||||
}
|
||||
|
||||
if (!StartEdge(edge, err)) {
|
||||
Cleanup();
|
||||
status_->BuildFinished();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (edge->is_phony()) {
|
||||
if (!plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, err)) {
|
||||
Cleanup();
|
||||
status_->BuildFinished();
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
++pending_commands;
|
||||
}
|
||||
|
||||
// We made some progress; go back to the main loop.
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// See if we can reap any finished commands.
|
||||
if (pending_commands) {
|
||||
CommandRunner::Result result;
|
||||
if (!command_runner_->WaitForCommand(&result) ||
|
||||
result.status == ExitInterrupted) {
|
||||
Cleanup();
|
||||
status_->BuildFinished();
|
||||
*err = "interrupted by user";
|
||||
return false;
|
||||
}
|
||||
|
||||
--pending_commands;
|
||||
if (!FinishCommand(&result, err)) {
|
||||
Cleanup();
|
||||
status_->BuildFinished();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!result.success()) {
|
||||
if (failures_allowed)
|
||||
failures_allowed--;
|
||||
}
|
||||
|
||||
// We made some progress; start the main loop over.
|
||||
continue;
|
||||
}
|
||||
|
||||
// If we get here, we cannot make any more progress.
|
||||
status_->BuildFinished();
|
||||
if (failures_allowed == 0) {
|
||||
if (config_.failures_allowed > 1)
|
||||
*err = "subcommands failed";
|
||||
else
|
||||
*err = "subcommand failed";
|
||||
} else if (failures_allowed < config_.failures_allowed)
|
||||
*err = "cannot make progress due to previous errors";
|
||||
else
|
||||
*err = "stuck [this is a bug]";
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
status_->BuildFinished();
|
||||
return true;
|
||||
}
|
||||
|
||||
static std::string &Trim(std::string &s)
|
||||
{
|
||||
if (s.empty()) {
|
||||
@ -769,6 +703,120 @@ std::string Builder::GetContent(Edge* edge) {
|
||||
}
|
||||
}
|
||||
|
||||
bool Builder::AlreadyUpToDate() const {
|
||||
return !plan_.more_to_do();
|
||||
}
|
||||
|
||||
bool Builder::Build(string* err) {
|
||||
assert(!AlreadyUpToDate());
|
||||
|
||||
int pending_commands = 0;
|
||||
int failures_allowed = config_.failures_allowed;
|
||||
|
||||
// Set up the command runner if we haven't done so already.
|
||||
if (!command_runner_.get()) {
|
||||
if (config_.dry_run)
|
||||
command_runner_.reset(new DryRunCommandRunner);
|
||||
else
|
||||
command_runner_.reset(new RealCommandRunner(config_));
|
||||
}
|
||||
|
||||
// We are about to start the build process.
|
||||
status_->BuildStarted();
|
||||
|
||||
// This main loop runs the entire build process.
|
||||
// It is structured like this:
|
||||
// First, we attempt to start as many commands as allowed by the
|
||||
// command runner.
|
||||
// Second, we attempt to wait for / reap the next finished command.
|
||||
while (plan_.more_to_do()) {
|
||||
// See if we can start any more commands.
|
||||
if (failures_allowed) {
|
||||
size_t capacity = command_runner_->CanRunMore();
|
||||
while (capacity > 0) {
|
||||
Edge* edge = plan_.FindWork();
|
||||
if (!edge)
|
||||
break;
|
||||
|
||||
if (edge->GetBindingBool("generator")) {
|
||||
scan_.build_log()->Close();
|
||||
}
|
||||
|
||||
if (!StartEdge(edge, err)) {
|
||||
Cleanup();
|
||||
status_->BuildFinished();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (edge->is_phony()) {
|
||||
if (!plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, err)) {
|
||||
Cleanup();
|
||||
status_->BuildFinished();
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
++pending_commands;
|
||||
|
||||
--capacity;
|
||||
|
||||
// Re-evaluate capacity.
|
||||
size_t current_capacity = command_runner_->CanRunMore();
|
||||
if (current_capacity < capacity)
|
||||
capacity = current_capacity;
|
||||
}
|
||||
}
|
||||
|
||||
// We are finished with all work items and have no pending
|
||||
// commands. Therefore, break out of the main loop.
|
||||
if (pending_commands == 0 && !plan_.more_to_do()) break;
|
||||
}
|
||||
|
||||
// See if we can reap any finished commands.
|
||||
if (pending_commands) {
|
||||
CommandRunner::Result result;
|
||||
if (!command_runner_->WaitForCommand(&result) ||
|
||||
result.status == ExitInterrupted) {
|
||||
Cleanup();
|
||||
status_->BuildFinished();
|
||||
*err = "interrupted by user";
|
||||
return false;
|
||||
}
|
||||
|
||||
--pending_commands;
|
||||
if (!FinishCommand(&result, err)) {
|
||||
Cleanup();
|
||||
status_->BuildFinished();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!result.success()) {
|
||||
if (failures_allowed)
|
||||
failures_allowed--;
|
||||
}
|
||||
|
||||
// We made some progress; start the main loop over.
|
||||
continue;
|
||||
}
|
||||
|
||||
// If we get here, we cannot make any more progress.
|
||||
status_->BuildFinished();
|
||||
if (failures_allowed == 0) {
|
||||
if (config_.failures_allowed > 1)
|
||||
*err = "subcommands failed";
|
||||
else
|
||||
*err = "subcommand failed";
|
||||
} else if (failures_allowed < config_.failures_allowed)
|
||||
*err = "cannot make progress due to previous errors";
|
||||
else
|
||||
*err = "stuck [this is a bug]";
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
status_->BuildFinished();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Builder::StartEdge(Edge* edge, string* err) {
|
||||
METRIC_RECORD("StartEdge");
|
||||
if (edge->is_phony())
|
||||
@ -779,19 +827,30 @@ bool Builder::StartEdge(Edge* edge, string* err) {
|
||||
|
||||
status_->BuildEdgeStarted(edge, start_time_millis);
|
||||
|
||||
// Create directories necessary for outputs.
|
||||
TimeStamp build_start = -1;
|
||||
|
||||
// Create directories necessary for outputs and remember the current
|
||||
// filesystem mtime to record later
|
||||
// XXX: this will block; do we care?
|
||||
for (vector<Node*>::iterator o = edge->outputs_.begin();
|
||||
o != edge->outputs_.end(); ++o) {
|
||||
if (!disk_interface_->MakeDirs((*o)->path()))
|
||||
return false;
|
||||
if (build_start == -1) {
|
||||
disk_interface_->WriteFile(lock_file_path_, "");
|
||||
build_start = disk_interface_->Stat(lock_file_path_, err);
|
||||
if (build_start == -1)
|
||||
build_start = 0;
|
||||
}
|
||||
}
|
||||
|
||||
edge->command_start_time_ = build_start;
|
||||
|
||||
// Create response file, if needed
|
||||
// XXX: this may also block; do we care?
|
||||
string rspfile = edge->GetUnescapedRspfile();
|
||||
if (!rspfile.empty()) {
|
||||
string content = GetContent(edge);
|
||||
string content = GetContent(edge);
|
||||
if (!disk_interface_->WriteFile(rspfile, content))
|
||||
return false;
|
||||
}
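The lock-file step above exists to capture the filesystem's notion of "now" at the moment a command starts, so the build log can later record that timestamp instead of the outputs' post-build mtimes. A standalone sketch of the same trick using POSIX stat directly (the path and API here are illustrative; the real code goes through DiskInterface and lock_file_path_):

#include <sys/stat.h>
#include <cstdio>

int main() {
  const char* lock_path = ".demo_lock";  // hypothetical path, not .ninja_lock

  // "Touch" the file, then stat it: the mtime the filesystem assigned is the
  // timestamp to associate with the command's start.
  FILE* f = std::fopen(lock_path, "wb");
  if (!f)
    return 1;
  std::fclose(f);

  struct stat st;
  if (stat(lock_path, &st) != 0)
    return 1;
  std::printf("command start mtime: %lld\n",
              static_cast<long long>(st.st_mtime));

  std::remove(lock_path);
  return 0;
}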
|
||||
@ -836,8 +895,8 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) {
|
||||
end_time_millis = GetTimeMillis() - start_time_millis_;
|
||||
running_edges_.erase(it);
|
||||
|
||||
status_->BuildEdgeFinished(edge, end_time_millis, result->success(),
|
||||
result->output);
|
||||
status_->BuildEdgeFinished(edge, start_time_millis, end_time_millis,
|
||||
result->success(), result->output);
|
||||
|
||||
// The rest of this function only applies to successful commands.
|
||||
if (!result->success()) {
|
||||
@ -845,55 +904,38 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) {
|
||||
}
|
||||
|
||||
// Restat the edge outputs
|
||||
TimeStamp output_mtime = 0;
|
||||
bool restat = edge->GetBindingBool("restat");
|
||||
TimeStamp record_mtime = 0;
|
||||
if (!config_.dry_run) {
|
||||
const bool restat = edge->GetBindingBool("restat");
|
||||
const bool generator = edge->GetBindingBool("generator");
|
||||
bool node_cleaned = false;
|
||||
record_mtime = edge->command_start_time_;
|
||||
|
||||
for (vector<Node*>::iterator o = edge->outputs_.begin();
|
||||
o != edge->outputs_.end(); ++o) {
|
||||
TimeStamp new_mtime = disk_interface_->Stat((*o)->path(), err);
|
||||
if (new_mtime == -1)
|
||||
return false;
|
||||
if (new_mtime > output_mtime)
|
||||
output_mtime = new_mtime;
|
||||
if ((*o)->mtime() == new_mtime && restat) {
|
||||
// The rule command did not change the output. Propagate the clean
|
||||
// state through the build graph.
|
||||
// Note that this also applies to nonexistent outputs (mtime == 0).
|
||||
if (!plan_.CleanNode(&scan_, *o, err))
|
||||
// restat and generator rules must restat the outputs after the build
|
||||
// has finished. if record_mtime == 0, then there was an error while
|
||||
// attempting to touch/stat the temp file when the edge started and
|
||||
// we should fall back to recording the outputs' current mtime in the
|
||||
// log.
|
||||
if (record_mtime == 0 || restat || generator) {
|
||||
for (vector<Node*>::iterator o = edge->outputs_.begin();
|
||||
o != edge->outputs_.end(); ++o) {
|
||||
TimeStamp new_mtime = disk_interface_->Stat((*o)->path(), err);
|
||||
if (new_mtime == -1)
|
||||
return false;
|
||||
node_cleaned = true;
|
||||
if (new_mtime > record_mtime)
|
||||
record_mtime = new_mtime;
|
||||
if ((*o)->mtime() == new_mtime && restat) {
|
||||
// The rule command did not change the output. Propagate the clean
|
||||
// state through the build graph.
|
||||
// Note that this also applies to nonexistent outputs (mtime == 0).
|
||||
if (!plan_.CleanNode(&scan_, *o, err))
|
||||
return false;
|
||||
node_cleaned = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (node_cleaned) {
|
||||
TimeStamp restat_mtime = 0;
|
||||
// If any output was cleaned, find the most recent mtime of any
|
||||
// (existing) non-order-only input or the depfile.
|
||||
for (vector<Node*>::iterator i = edge->inputs_.begin();
|
||||
i != edge->inputs_.end() - edge->order_only_deps_; ++i) {
|
||||
TimeStamp input_mtime = disk_interface_->Stat((*i)->path(), err);
|
||||
if (input_mtime == -1)
|
||||
return false;
|
||||
if (input_mtime > restat_mtime)
|
||||
restat_mtime = input_mtime;
|
||||
}
|
||||
|
||||
string depfile = edge->GetUnescapedDepfile();
|
||||
if (restat_mtime != 0 && deps_type.empty() && !depfile.empty()) {
|
||||
TimeStamp depfile_mtime = disk_interface_->Stat(depfile, err);
|
||||
if (depfile_mtime == -1)
|
||||
return false;
|
||||
if (depfile_mtime > restat_mtime)
|
||||
restat_mtime = depfile_mtime;
|
||||
}
|
||||
|
||||
// The total number of edges in the plan may have changed as a result
|
||||
// of a restat.
|
||||
status_->PlanHasTotalEdges(plan_.command_edge_count());
|
||||
|
||||
output_mtime = restat_mtime;
|
||||
record_mtime = edge->command_start_time_;
|
||||
}
|
||||
}
|
||||
|
||||
@ -907,7 +949,7 @@ bool Builder::FinishCommand(CommandRunner::Result* result, string* err) {
|
||||
|
||||
if (scan_.build_log()) {
|
||||
if (!scan_.build_log()->RecordCommand(edge, start_time_millis,
|
||||
end_time_millis, output_mtime)) {
|
||||
end_time_millis, record_mtime)) {
|
||||
*err = string("Error writing to build log: ") + strerror(errno);
|
||||
return false;
|
||||
}
|
||||
@ -1007,8 +1049,5 @@ bool Builder::LoadDyndeps(Node* node, string* err) {
|
||||
if (!plan_.DyndepsLoaded(&scan_, node, ddf, err))
|
||||
return false;
|
||||
|
||||
// New command edges may have been added to the plan.
|
||||
status_->PlanHasTotalEdges(plan_.command_edge_count());
|
||||
|
||||
return true;
|
||||
}
|
||||
|
17
src/build.h
Normal file → Executable file
@ -18,12 +18,13 @@
|
||||
#include <cstdio>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <queue>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <queue>
|
||||
|
||||
#include "depfile_parser.h"
|
||||
#include "graph.h" // XXX needed for DependencyScan; should rearrange.
|
||||
#include "graph.h"
|
||||
#include "exit_status.h"
|
||||
#include "util.h" // int64_t
|
||||
|
||||
@ -80,6 +81,7 @@ struct Plan {
|
||||
/// by information loaded from a dyndep file.
|
||||
bool DyndepsLoaded(DependencyScan* scan, const Node* node,
|
||||
const DyndepFile& ddf, std::string* err);
|
||||
|
||||
private:
|
||||
bool RefreshDyndepDependents(DependencyScan* scan, const Node* node, std::string* err);
|
||||
void UnmarkDependents(const Node* node, std::set<Node*>* dependents);
|
||||
@ -91,7 +93,7 @@ private:
|
||||
/// loads dynamic dependencies from the node's path.
|
||||
/// Returns 'false' if loading dyndep info fails and 'true' otherwise.
|
||||
bool NodeFinished(Node* node, std::string* err);
|
||||
|
||||
|
||||
/// Enumerate possible steps we want for an edge.
|
||||
enum Want
|
||||
{
|
||||
@ -135,7 +137,7 @@ private:
|
||||
/// RealCommandRunner is an implementation that actually runs commands.
|
||||
struct CommandRunner {
|
||||
virtual ~CommandRunner() {}
|
||||
virtual bool CanRunMore() const = 0;
|
||||
virtual size_t CanRunMore() const = 0;
|
||||
virtual bool StartCommand(Edge* edge) = 0;
|
||||
|
||||
/// The result of waiting for a command.
|
||||
@ -197,7 +199,7 @@ struct Builder {
|
||||
/// Run the build. Returns false on error.
|
||||
/// It is an error to call this function when AlreadyUpToDate() is true.
|
||||
bool Build(std::string* err);
|
||||
|
||||
|
||||
bool StartEdge(Edge* edge, std::string* err);
|
||||
|
||||
std::string GetContent(Edge* edge);
|
||||
@ -217,11 +219,7 @@ struct Builder {
|
||||
State* state_;
|
||||
const BuildConfig& config_;
|
||||
Plan plan_;
|
||||
#if __cplusplus < 201703L
|
||||
std::auto_ptr<CommandRunner> command_runner_;
|
||||
#else
|
||||
std::unique_ptr<CommandRunner> command_runner_; // auto_ptr was removed in C++17.
|
||||
#endif
|
||||
std::unique_ptr<CommandRunner> command_runner_;
|
||||
Status* status_;
|
||||
|
||||
private:
|
||||
@ -236,6 +234,7 @@ struct Builder {
|
||||
/// Time the build started.
|
||||
int64_t start_time_millis_;
|
||||
|
||||
std::string lock_file_path_;
|
||||
DiskInterface* disk_interface_;
|
||||
DependencyScan scan_;
|
||||
|
||||
|
40
src/build_log.cc
Normal file → Executable file
@ -53,8 +53,8 @@ using namespace std;
|
||||
namespace {
|
||||
|
||||
const char kFileSignature[] = "# ninja log v%d\n";
|
||||
const int kOldestSupportedVersion = 4;
|
||||
const int kCurrentVersion = 5;
|
||||
const int kOldestSupportedVersion = 6;
|
||||
const int kCurrentVersion = 6;
|
||||
|
||||
// 64bit MurmurHash2, by Austin Appleby
|
||||
#if defined(_MSC_VER)
|
||||
@ -116,9 +116,9 @@ BuildLog::LogEntry::LogEntry(const string& output)
|
||||
: output(output) {}
|
||||
|
||||
BuildLog::LogEntry::LogEntry(const string& output, uint64_t command_hash,
|
||||
int start_time, int end_time, TimeStamp restat_mtime)
|
||||
int start_time, int end_time, TimeStamp mtime)
|
||||
: output(output), command_hash(command_hash),
|
||||
start_time(start_time), end_time(end_time), mtime(restat_mtime)
|
||||
start_time(start_time), end_time(end_time), mtime(mtime)
|
||||
{}
|
||||
|
||||
BuildLog::BuildLog()
|
||||
@ -279,14 +279,21 @@ LoadStatus BuildLog::Load(const string& path, string* err) {
|
||||
if (!log_version) {
|
||||
sscanf(line_start, kFileSignature, &log_version);
|
||||
|
||||
bool invalid_log_version = false;
|
||||
if (log_version < kOldestSupportedVersion) {
|
||||
*err = ("build log version invalid, perhaps due to being too old; "
|
||||
"starting over");
|
||||
invalid_log_version = true;
|
||||
*err = "build log version is too old; starting over";
|
||||
|
||||
} else if (log_version > kCurrentVersion) {
|
||||
invalid_log_version = true;
|
||||
*err = "build log version is too new; starting over";
|
||||
}
|
||||
if (invalid_log_version) {
|
||||
fclose(file);
|
||||
unlink(path.c_str());
|
||||
// Don't report this as a failure. An empty build log will cause
|
||||
// Don't report this as a failure. A missing build log will cause
|
||||
// us to rebuild the outputs anyway.
|
||||
return LOAD_SUCCESS;
|
||||
return LOAD_NOT_FOUND;
|
||||
}
|
||||
}
|
||||
|
||||
@ -303,7 +310,7 @@ LoadStatus BuildLog::Load(const string& path, string* err) {
|
||||
*end = 0;
|
||||
|
||||
int start_time = 0, end_time = 0;
|
||||
TimeStamp restat_mtime = 0;
|
||||
TimeStamp mtime = 0;
|
||||
|
||||
start_time = atoi(start);
|
||||
start = end + 1;
|
||||
@ -319,7 +326,7 @@ LoadStatus BuildLog::Load(const string& path, string* err) {
|
||||
if (!end)
|
||||
continue;
|
||||
*end = 0;
|
||||
restat_mtime = strtoll(start, NULL, 10);
|
||||
mtime = strtoll(start, NULL, 10);
|
||||
start = end + 1;
|
||||
|
||||
end = (char*)memchr(start, kFieldSeparator, line_end - start);
|
||||
@ -343,15 +350,10 @@ LoadStatus BuildLog::Load(const string& path, string* err) {
|
||||
|
||||
entry->start_time = start_time;
|
||||
entry->end_time = end_time;
|
||||
entry->mtime = restat_mtime;
|
||||
if (log_version >= 5) {
|
||||
char c = *end; *end = '\0';
|
||||
entry->command_hash = (uint64_t)strtoull(start, NULL, 16);
|
||||
*end = c;
|
||||
} else {
|
||||
entry->command_hash = LogEntry::HashCommand(StringPiece(start,
|
||||
end - start));
|
||||
}
|
||||
entry->mtime = mtime;
|
||||
char c = *end; *end = '\0';
|
||||
entry->command_hash = (uint64_t)strtoull(start, NULL, 16);
|
||||
*end = c;
|
||||
}
|
||||
fclose(file);
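For reference, with the version bump to v6 each entry line carries five tab-separated fields — start time, end time, recorded mtime, output path, and the 64-bit command hash in hex — which is the layout the updated tests below exercise. A minimal sketch that writes one such line (the values and output path are made up):

#include <cinttypes>
#include <cstdint>
#include <cstdio>

int main() {
  FILE* f = std::fopen("demo_ninja_log", "wb");
  if (!f)
    return 1;
  std::fprintf(f, "# ninja log v6\n");

  // start_time, end_time, mtime, output, command hash (hex), tab-separated.
  uint64_t command_hash = 0x1122334455667788ULL;  // stands in for HashCommand()
  std::fprintf(f, "%d\t%d\t%" PRId64 "\t%s\t%" PRIx64 "\n",
               0, 100, static_cast<int64_t>(1234567), "out/foo.o",
               command_hash);

  std::fclose(f);
  return 0;
}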
|
||||
|
||||
|
2
src/build_log.h
Normal file → Executable file
@ -73,7 +73,7 @@ struct BuildLog {
|
||||
|
||||
explicit LogEntry(const std::string& output);
|
||||
LogEntry(const std::string& output, uint64_t command_hash,
|
||||
int start_time, int end_time, TimeStamp restat_mtime);
|
||||
int start_time, int end_time, TimeStamp mtime);
|
||||
};
|
||||
|
||||
/// Lookup a previously-run command by its output path.
|
||||
|
0
src/build_log_perftest.cc
Normal file → Executable file
38
src/build_log_test.cc
Normal file → Executable file
@ -104,9 +104,11 @@ TEST_F(BuildLogTest, FirstWriteAddsSignature) {
|
||||
|
||||
TEST_F(BuildLogTest, DoubleEntry) {
|
||||
FILE* f = fopen(kTestFilename, "wb");
|
||||
fprintf(f, "# ninja log v4\n");
|
||||
fprintf(f, "0\t1\t2\tout\tcommand abc\n");
|
||||
fprintf(f, "3\t4\t5\tout\tcommand def\n");
|
||||
fprintf(f, "# ninja log v6\n");
|
||||
fprintf(f, "0\t1\t2\tout\t%" PRIx64 "\n",
|
||||
BuildLog::LogEntry::HashCommand("command abc"));
|
||||
fprintf(f, "0\t1\t2\tout\t%" PRIx64 "\n",
|
||||
BuildLog::LogEntry::HashCommand("command def"));
|
||||
fclose(f);
|
||||
|
||||
string err;
|
||||
@ -133,9 +135,13 @@ TEST_F(BuildLogTest, Truncate) {
|
||||
log1.RecordCommand(state_.edges_[1], 20, 25);
|
||||
log1.Close();
|
||||
}
|
||||
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 statbuf;
|
||||
ASSERT_EQ(0, stat64(kTestFilename, &statbuf));
|
||||
#else
|
||||
struct stat statbuf;
|
||||
ASSERT_EQ(0, stat(kTestFilename, &statbuf));
|
||||
#endif
|
||||
ASSERT_GT(statbuf.st_size, 0);
|
||||
|
||||
// For all possible truncations of the input file, assert that we don't
|
||||
@ -169,10 +175,11 @@ TEST_F(BuildLogTest, ObsoleteOldVersion) {
|
||||
ASSERT_NE(err.find("version"), string::npos);
|
||||
}
|
||||
|
||||
TEST_F(BuildLogTest, SpacesInOutputV4) {
|
||||
TEST_F(BuildLogTest, SpacesInOutput) {
|
||||
FILE* f = fopen(kTestFilename, "wb");
|
||||
fprintf(f, "# ninja log v4\n");
|
||||
fprintf(f, "123\t456\t456\tout with space\tcommand\n");
|
||||
fprintf(f, "# ninja log v6\n");
|
||||
fprintf(f, "123\t456\t456\tout with space\t%" PRIx64 "\n",
|
||||
BuildLog::LogEntry::HashCommand("command"));
|
||||
fclose(f);
|
||||
|
||||
string err;
|
||||
@ -193,10 +200,12 @@ TEST_F(BuildLogTest, DuplicateVersionHeader) {
|
||||
// build log on Windows. This shouldn't crash, and the second version header
|
||||
// should be ignored.
|
||||
FILE* f = fopen(kTestFilename, "wb");
|
||||
fprintf(f, "# ninja log v4\n");
|
||||
fprintf(f, "123\t456\t456\tout\tcommand\n");
|
||||
fprintf(f, "# ninja log v4\n");
|
||||
fprintf(f, "456\t789\t789\tout2\tcommand2\n");
|
||||
fprintf(f, "# ninja log v6\n");
|
||||
fprintf(f, "123\t456\t456\tout\t%" PRIx64 "\n",
|
||||
BuildLog::LogEntry::HashCommand("command"));
|
||||
fprintf(f, "# ninja log v6\n");
|
||||
fprintf(f, "456\t789\t789\tout2\t%" PRIx64 "\n",
|
||||
BuildLog::LogEntry::HashCommand("command2"));
|
||||
fclose(f);
|
||||
|
||||
string err;
|
||||
@ -243,7 +252,7 @@ struct TestDiskInterface : public DiskInterface {
|
||||
|
||||
TEST_F(BuildLogTest, Restat) {
|
||||
FILE* f = fopen(kTestFilename, "wb");
|
||||
fprintf(f, "# ninja log v4\n"
|
||||
fprintf(f, "# ninja log v6\n"
|
||||
"1\t2\t3\tout\tcommand\n");
|
||||
fclose(f);
|
||||
std::string err;
|
||||
@ -271,12 +280,13 @@ TEST_F(BuildLogTest, VeryLongInputLine) {
|
||||
// Ninja's build log buffer is currently 256kB. Lines longer than that are
|
||||
// silently ignored, but don't affect parsing of other lines.
|
||||
FILE* f = fopen(kTestFilename, "wb");
|
||||
fprintf(f, "# ninja log v4\n");
|
||||
fprintf(f, "# ninja log v6\n");
|
||||
fprintf(f, "123\t456\t456\tout\tcommand start");
|
||||
for (size_t i = 0; i < (512 << 10) / strlen(" more_command"); ++i)
|
||||
fputs(" more_command", f);
|
||||
fprintf(f, "\n");
|
||||
fprintf(f, "456\t789\t789\tout2\tcommand2\n");
|
||||
fprintf(f, "456\t789\t789\tout2\t%" PRIx64 "\n",
|
||||
BuildLog::LogEntry::HashCommand("command2"));
|
||||
fclose(f);
|
||||
|
||||
string err;
|
||||
|
480
src/build_test.cc
Normal file → Executable file
@ -15,6 +15,8 @@
|
||||
#include "build.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include <climits>
|
||||
#include <stdint.h>
|
||||
|
||||
#include "build_log.h"
|
||||
#include "deps_log.h"
|
||||
@ -63,6 +65,7 @@ TEST_F(PlanTest, Basic) {
|
||||
EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err));
|
||||
ASSERT_EQ("", err);
|
||||
ASSERT_TRUE(plan_.more_to_do());
|
||||
PrepareForTarget("out");
|
||||
|
||||
Edge* edge = plan_.FindWork();
|
||||
ASSERT_TRUE(edge);
|
||||
@ -95,7 +98,6 @@ TEST_F(PlanTest, DoubleOutputDirect) {
|
||||
GetNode("mid1")->MarkDirty();
|
||||
GetNode("mid2")->MarkDirty();
|
||||
GetNode("out")->MarkDirty();
|
||||
|
||||
string err;
|
||||
EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err));
|
||||
ASSERT_EQ("", err);
|
||||
@ -104,6 +106,7 @@ TEST_F(PlanTest, DoubleOutputDirect) {
|
||||
Edge* edge;
|
||||
edge = plan_.FindWork();
|
||||
ASSERT_TRUE(edge); // cat in
|
||||
string err;
|
||||
plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err);
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
@ -169,7 +172,6 @@ TEST_F(PlanTest, DoubleDependent) {
|
||||
GetNode("a1")->MarkDirty();
|
||||
GetNode("a2")->MarkDirty();
|
||||
GetNode("out")->MarkDirty();
|
||||
|
||||
string err;
|
||||
EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err));
|
||||
ASSERT_EQ("", err);
|
||||
@ -284,7 +286,6 @@ TEST_F(PlanTest, PoolsWithDepthTwo) {
|
||||
GetNode("outb" + string(1, '1' + static_cast<char>(i)))->MarkDirty();
|
||||
}
|
||||
GetNode("allTheThings")->MarkDirty();
|
||||
|
||||
string err;
|
||||
EXPECT_TRUE(plan_.AddTarget(GetNode("allTheThings"), &err));
|
||||
ASSERT_EQ("", err);
|
||||
@ -473,7 +474,7 @@ struct FakeCommandRunner : public CommandRunner {
|
||||
max_active_edges_(1), fs_(fs) {}
|
||||
|
||||
// CommandRunner impl
|
||||
virtual bool CanRunMore() const;
|
||||
virtual size_t CanRunMore() const;
|
||||
virtual bool StartCommand(Edge* edge);
|
||||
virtual bool WaitForCommand(Result* result);
|
||||
virtual vector<Edge*> GetActiveEdges();
|
||||
@ -574,8 +575,11 @@ void BuildTest::RebuildTarget(const string& target, const char* manifest,
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
|
||||
bool FakeCommandRunner::CanRunMore() const {
|
||||
return active_edges_.size() < max_active_edges_;
|
||||
size_t FakeCommandRunner::CanRunMore() const {
|
||||
if (active_edges_.size() < max_active_edges_)
|
||||
return SIZE_MAX;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
bool FakeCommandRunner::StartCommand(Edge* edge) {
|
||||
@ -611,6 +615,7 @@ bool FakeCommandRunner::StartCommand(Edge* edge) {
|
||||
fs_->WriteFile(edge->outputs_[0]->path(), content);
|
||||
} else if (edge->rule().name() == "touch-implicit-dep-out") {
|
||||
string dep = edge->GetBinding("test_dependency");
|
||||
fs_->Tick();
|
||||
fs_->Create(dep, "");
|
||||
fs_->Tick();
|
||||
for (vector<Node*>::iterator out = edge->outputs_.begin();
|
||||
@ -627,7 +632,12 @@ bool FakeCommandRunner::StartCommand(Edge* edge) {
|
||||
fs_->Create(dep, "");
|
||||
} else if (edge->rule().name() == "generate-depfile") {
|
||||
string dep = edge->GetBinding("test_dependency");
|
||||
bool touch_dep = edge->GetBindingBool("touch_dependency");
|
||||
string depfile = edge->GetUnescapedDepfile();
|
||||
if (touch_dep) {
|
||||
fs_->Tick();
|
||||
fs_->Create(dep, "");
|
||||
}
|
||||
string contents;
|
||||
for (vector<Node*>::iterator out = edge->outputs_.begin();
|
||||
out != edge->outputs_.end(); ++out) {
|
||||
@ -635,6 +645,20 @@ bool FakeCommandRunner::StartCommand(Edge* edge) {
|
||||
fs_->Create((*out)->path(), "");
|
||||
}
|
||||
fs_->Create(depfile, contents);
|
||||
} else if (edge->rule().name() == "long-cc") {
|
||||
string dep = edge->GetBinding("test_dependency");
|
||||
string depfile = edge->GetUnescapedDepfile();
|
||||
string contents;
|
||||
for (vector<Node*>::iterator out = edge->outputs_.begin();
|
||||
out != edge->outputs_.end(); ++out) {
|
||||
fs_->Tick();
|
||||
fs_->Tick();
|
||||
fs_->Tick();
|
||||
fs_->Create((*out)->path(), "");
|
||||
contents += (*out)->path() + ": " + dep + "\n";
|
||||
}
|
||||
if (!dep.empty() && !depfile.empty())
|
||||
fs_->Create(depfile, contents);
|
||||
} else {
|
||||
printf("unknown command\n");
|
||||
return false;
|
||||
@ -690,6 +714,18 @@ bool FakeCommandRunner::WaitForCommand(Result* result) {
|
||||
else
|
||||
result->status = ExitSuccess;
|
||||
|
||||
// This rule simulates an external process modifying files while the build command runs.
|
||||
// See TestInputMtimeRaceCondition and TestInputMtimeRaceConditionWithDepFile.
|
||||
// Note: only the first and third time the rule is run per test is the file modified, so
|
||||
// the test can verify that subsequent runs without the race have no work to do.
|
||||
if (edge->rule().name() == "long-cc") {
|
||||
string dep = edge->GetBinding("test_dependency");
|
||||
if (fs_->now_ == 4)
|
||||
fs_->files_[dep].mtime = 3;
|
||||
if (fs_->now_ == 10)
|
||||
fs_->files_[dep].mtime = 9;
|
||||
}
|
||||
|
||||
// Provide a way for test cases to verify when an edge finishes that
|
||||
// some other edge is still active. This is useful for test cases
|
||||
// covering behavior involving multiple active edges.
|
||||
@ -954,9 +990,19 @@ TEST_F(BuildTest, DepFileOK) {
|
||||
ASSERT_EQ(1u, fs_.files_read_.size());
|
||||
EXPECT_EQ("foo.o.d", fs_.files_read_[0]);
|
||||
|
||||
// Expect three new edges: one generating foo.o, and two more from
|
||||
// loading the depfile.
|
||||
ASSERT_EQ(orig_edges + 3, (int)state_.edges_.size());
|
||||
// Expect one new edge generating foo.o. Loading the depfile should have
|
||||
// added nodes, but not phony edges to the graph.
|
||||
ASSERT_EQ(orig_edges + 1, (int)state_.edges_.size());
|
||||
|
||||
// Verify that nodes for blah.h and bar.h were added and that they
|
||||
// are marked as generated by a dep loader.
|
||||
ASSERT_FALSE(state_.LookupNode("foo.o")->generated_by_dep_loader());
|
||||
ASSERT_FALSE(state_.LookupNode("foo.c")->generated_by_dep_loader());
|
||||
ASSERT_TRUE(state_.LookupNode("blah.h"));
|
||||
ASSERT_TRUE(state_.LookupNode("blah.h")->generated_by_dep_loader());
|
||||
ASSERT_TRUE(state_.LookupNode("bar.h"));
|
||||
ASSERT_TRUE(state_.LookupNode("bar.h")->generated_by_dep_loader());
|
||||
|
||||
// Expect our edge to now have three inputs: foo.c and two headers.
|
||||
ASSERT_EQ(3u, edge->inputs_.size());
|
||||
|
||||
@ -1122,7 +1168,6 @@ TEST_F(BuildTest, DepFileCanonicalize) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
|
||||
"rule cc\n command = cc $in\n depfile = $out.d\n"
|
||||
"build gen/stuff\\things/foo.o: cc x\\y/z\\foo.c\n"));
|
||||
Edge* edge = state_.edges_.back();
|
||||
|
||||
fs_.Create("x/y/z/foo.c", "");
|
||||
GetNode("bar.h")->MarkDirty(); // Mark bar.h as missing.
|
||||
@ -1135,10 +1180,10 @@ TEST_F(BuildTest, DepFileCanonicalize) {
|
||||
// The depfile path does not get Canonicalize()d, as that seems unnecessary.
|
||||
EXPECT_EQ("gen/stuff\\things/foo.o.d", fs_.files_read_[0]);
|
||||
|
||||
// Expect three new edges: one generating foo.o, and two more from
|
||||
// loading the depfile.
|
||||
ASSERT_EQ(orig_edges + 3, (int)state_.edges_.size());
|
||||
// Expect one new edge generating foo.o.
|
||||
ASSERT_EQ(orig_edges + 1, (int)state_.edges_.size());
|
||||
// Expect our edge to now have three inputs: foo.c and two headers.
|
||||
Edge* edge = state_.edges_.back();
|
||||
ASSERT_EQ(3u, edge->inputs_.size());
|
||||
|
||||
// Expect the command line we generate to only use the original input, and
|
||||
@ -1471,7 +1516,7 @@ TEST_F(BuildWithLogTest, ImplicitGeneratedOutOfDate) {
|
||||
TEST_F(BuildWithLogTest, ImplicitGeneratedOutOfDate2) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
|
||||
"rule touch-implicit-dep-out\n"
|
||||
" command = touch $test_dependency ; sleep 1 ; touch $out\n"
|
||||
" command = sleep 1 ; touch $test_dependency ; sleep 1 ; touch $out\n"
|
||||
" generator = 1\n"
|
||||
"build out.imp: touch-implicit-dep-out | inimp inimp2\n"
|
||||
" test_dependency = inimp\n"));
|
||||
@ -1497,6 +1542,29 @@ TEST_F(BuildWithLogTest, ImplicitGeneratedOutOfDate2) {
|
||||
EXPECT_TRUE(builder_.AddTarget("out.imp", &err));
|
||||
EXPECT_TRUE(builder_.AlreadyUpToDate());
|
||||
EXPECT_FALSE(GetNode("out.imp")->dirty());
|
||||
|
||||
command_runner_.commands_ran_.clear();
|
||||
state_.Reset();
|
||||
builder_.Cleanup();
|
||||
builder_.plan_.Reset();
|
||||
|
||||
fs_.Tick();
|
||||
fs_.Create("inimp", "");
|
||||
|
||||
EXPECT_TRUE(builder_.AddTarget("out.imp", &err));
|
||||
EXPECT_FALSE(builder_.AlreadyUpToDate());
|
||||
|
||||
EXPECT_TRUE(builder_.Build(&err));
|
||||
EXPECT_TRUE(builder_.AlreadyUpToDate());
|
||||
|
||||
command_runner_.commands_ran_.clear();
|
||||
state_.Reset();
|
||||
builder_.Cleanup();
|
||||
builder_.plan_.Reset();
|
||||
|
||||
EXPECT_TRUE(builder_.AddTarget("out.imp", &err));
|
||||
EXPECT_TRUE(builder_.AlreadyUpToDate());
|
||||
EXPECT_FALSE(GetNode("out.imp")->dirty());
|
||||
}
|
||||
|
||||
TEST_F(BuildWithLogTest, NotInLogButOnDisk) {
|
||||
@ -1800,6 +1868,52 @@ TEST_F(BuildWithLogTest, RestatMissingInput) {
|
||||
ASSERT_EQ(restat_mtime, log_entry->mtime);
|
||||
}
|
||||
|
||||
TEST_F(BuildWithLogTest, RestatInputChangesDueToRule) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
|
||||
"rule generate-depfile\n"
|
||||
" command = sleep 1 ; touch $touch_dependency; touch $out ; echo \"$out: $test_dependency\" > $depfile\n"
|
||||
"build out1: generate-depfile || cat1\n"
|
||||
" test_dependency = in2\n"
|
||||
" touch_dependency = 1\n"
|
||||
" restat = 1\n"
|
||||
" depfile = out.d\n"));
|
||||
|
||||
// Perform the first build. out1 is a restat rule, so its recorded mtime in the build
|
||||
// log should be the time the command completes, not the time the command started. One
|
||||
// of out1's discovered dependencies will have a newer mtime than when out1 started
|
||||
// running, due to its command touching the dependency itself.
|
||||
string err;
|
||||
EXPECT_TRUE(builder_.AddTarget("out1", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(builder_.Build(&err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_EQ(2u, command_runner_.commands_ran_.size());
|
||||
EXPECT_EQ(2u, builder_.plan_.command_edge_count());
|
||||
BuildLog::LogEntry* log_entry = build_log_.LookupByOutput("out1");
|
||||
ASSERT_TRUE(NULL != log_entry);
|
||||
ASSERT_EQ(2u, log_entry->mtime);
|
||||
|
||||
command_runner_.commands_ran_.clear();
|
||||
state_.Reset();
|
||||
builder_.Cleanup();
|
||||
builder_.plan_.Reset();
|
||||
|
||||
fs_.Tick();
|
||||
fs_.Create("in1", "");
|
||||
|
||||
// Touching a dependency of an order-only dependency of out1 should not cause out1 to
|
||||
// rebuild. If out1 were not a restat rule, then it would rebuild here because its
|
||||
// recorded mtime would have been an earlier mtime than its most recent input's (in2)
|
||||
// mtime
|
||||
EXPECT_TRUE(builder_.AddTarget("out1", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(!state_.GetNode("out1", 0)->dirty());
|
||||
EXPECT_TRUE(builder_.Build(&err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
EXPECT_EQ(1u, builder_.plan_.command_edge_count());
|
||||
}
|
||||
|
||||
TEST_F(BuildWithLogTest, GeneratedPlainDepfileMtime) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
|
||||
"rule generate-depfile\n"
|
||||
@ -1904,10 +2018,11 @@ TEST_F(BuildTest, RspFileSuccess)
|
||||
EXPECT_TRUE(builder_.Build(&err));
|
||||
ASSERT_EQ(3u, command_runner_.commands_ran_.size());
|
||||
|
||||
// The RSP files were created
|
||||
ASSERT_EQ(files_created + 2, fs_.files_created_.size());
|
||||
// The RSP files and temp file to acquire output mtimes were created
|
||||
ASSERT_EQ(files_created + 3, fs_.files_created_.size());
|
||||
ASSERT_EQ(1u, fs_.files_created_.count("out 2.rsp"));
|
||||
ASSERT_EQ(1u, fs_.files_created_.count("out 3.rsp"));
|
||||
ASSERT_EQ(1u, fs_.files_created_.count(".ninja_lock"));
|
||||
|
||||
// The RSP files were removed
|
||||
ASSERT_EQ(files_removed + 2, fs_.files_removed_.size());
|
||||
@ -1941,9 +2056,10 @@ TEST_F(BuildTest, RspFileFailure) {
|
||||
ASSERT_EQ("subcommand failed", err);
|
||||
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
|
||||
// The RSP file was created
|
||||
ASSERT_EQ(files_created + 1, fs_.files_created_.size());
|
||||
// The RSP file and temp file to acquire output mtimes were created
|
||||
ASSERT_EQ(files_created + 2, fs_.files_created_.size());
|
||||
ASSERT_EQ(1u, fs_.files_created_.count("out.rsp"));
|
||||
ASSERT_EQ(1u, fs_.files_created_.count(".ninja_lock"));
|
||||
|
||||
// The RSP file was NOT removed
|
||||
ASSERT_EQ(files_removed, fs_.files_removed_.size());
|
||||
@ -2094,11 +2210,28 @@ TEST_F(BuildTest, DepsGccWithEmptyDepfileErrorsOut) {
|
||||
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
}
|
||||
|
||||
TEST_F(BuildTest, StatusFormatElapsed) {
|
||||
TEST_F(BuildTest, StatusFormatElapsed_e) {
|
||||
status_.BuildStarted();
|
||||
// Before any task is done, the elapsed time must be zero.
|
||||
EXPECT_EQ("[%/e0.000]",
|
||||
status_.FormatProgressStatus("[%%/e%e]", 0));
|
||||
EXPECT_EQ("[%/e0.000]", status_.FormatProgressStatus("[%%/e%e]", 0));
|
||||
}
|
||||
|
||||
TEST_F(BuildTest, StatusFormatElapsed_w) {
|
||||
status_.BuildStarted();
|
||||
// Before any task is done, the elapsed time must be zero.
|
||||
EXPECT_EQ("[%/e00:00]", status_.FormatProgressStatus("[%%/e%w]", 0));
|
||||
}
|
||||
|
||||
TEST_F(BuildTest, StatusFormatETA) {
|
||||
status_.BuildStarted();
|
||||
// Before any task is done, the ETA time must be unknown.
|
||||
EXPECT_EQ("[%/E?]", status_.FormatProgressStatus("[%%/E%E]", 0));
|
||||
}
|
||||
|
||||
TEST_F(BuildTest, StatusFormatTimeProgress) {
|
||||
status_.BuildStarted();
|
||||
// Before any task is done, the percentage of elapsed time must be zero.
|
||||
EXPECT_EQ("[%/p 0%]", status_.FormatProgressStatus("[%%/p%p]", 0));
|
||||
}
|
||||
|
||||
TEST_F(BuildTest, StatusFormatReplacePlaceholder) {
|
||||
@ -2125,8 +2258,8 @@ TEST_F(BuildTest, FailedDepsParse) {
|
||||
}
|
||||
|
||||
struct BuildWithQueryDepsLogTest : public BuildTest {
|
||||
BuildWithQueryDepsLogTest() : BuildTest(&log_) {
|
||||
}
|
||||
BuildWithQueryDepsLogTest()
|
||||
: BuildTest(&log_), deps_log_file_("ninja_deps") {}
|
||||
|
||||
~BuildWithQueryDepsLogTest() {
|
||||
log_.Close();
|
||||
@ -2138,12 +2271,13 @@ struct BuildWithQueryDepsLogTest : public BuildTest {
|
||||
temp_dir_.CreateAndEnter("BuildWithQueryDepsLogTest");
|
||||
|
||||
std::string err;
|
||||
ASSERT_TRUE(log_.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(log_.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
}
|
||||
|
||||
ScopedTempDir temp_dir_;
|
||||
|
||||
ScopedFilePath deps_log_file_;
|
||||
DepsLog log_;
|
||||
};
|
||||
|
||||
@ -2337,7 +2471,8 @@ TEST_F(BuildWithQueryDepsLogTest, TwoOutputsDepFileGCCOnlySecondaryOutput) {
|
||||
/// builder_ it sets up, because we want pristine objects for
|
||||
/// each build.
|
||||
struct BuildWithDepsLogTest : public BuildTest {
|
||||
BuildWithDepsLogTest() {}
|
||||
BuildWithDepsLogTest()
|
||||
: build_log_file_("build_log"), deps_log_file_("ninja_deps") {}
|
||||
|
||||
virtual void SetUp() {
|
||||
BuildTest::SetUp();
|
||||
@ -2350,6 +2485,8 @@ struct BuildWithDepsLogTest : public BuildTest {
|
||||
}
|
||||
|
||||
ScopedTempDir temp_dir_;
|
||||
ScopedFilePath build_log_file_;
|
||||
ScopedFilePath deps_log_file_;
|
||||
|
||||
/// Shadow parent class builder_ so we don't accidentally use it.
|
||||
void* builder_;
|
||||
@ -2363,6 +2500,7 @@ TEST_F(BuildWithDepsLogTest, Straightforward) {
|
||||
"build out: cat in1\n"
|
||||
" deps = gcc\n"
|
||||
" depfile = in1.d\n";
|
||||
|
||||
{
|
||||
State state;
|
||||
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
|
||||
@ -2370,7 +2508,7 @@ TEST_F(BuildWithDepsLogTest, Straightforward) {
|
||||
|
||||
// Run the build once, everything should be ok.
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
@ -2400,8 +2538,8 @@ TEST_F(BuildWithDepsLogTest, Straightforward) {
|
||||
|
||||
// Run the build again.
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
@ -2441,7 +2579,7 @@ TEST_F(BuildWithDepsLogTest, ObsoleteDeps) {
|
||||
|
||||
// Run the build once, everything should be ok.
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
@ -2470,8 +2608,8 @@ TEST_F(BuildWithDepsLogTest, ObsoleteDeps) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
@ -2522,6 +2660,210 @@ TEST_F(BuildWithDepsLogTest, DepsIgnoredInDryRun) {
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
|
||||
TEST_F(BuildWithDepsLogTest, TestInputMtimeRaceCondition) {
|
||||
string err;
|
||||
const char* manifest =
|
||||
"rule long-cc\n"
|
||||
" command = long-cc\n"
|
||||
"build out: long-cc in1\n"
|
||||
" test_dependency = in1\n";
|
||||
|
||||
State state;
|
||||
ASSERT_NO_FATAL_FAILURE(AddCatRule(&state));
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
BuildLog build_log;
|
||||
ASSERT_TRUE(build_log.Load(build_log_file_.path(), &err));
|
||||
ASSERT_TRUE(build_log.OpenForWrite(build_log_file_.path(), *this, &err));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
|
||||
BuildLog::LogEntry* log_entry = NULL;
|
||||
{
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
command_runner_.commands_ran_.clear();
|
||||
|
||||
// Run the build, out gets built, dep file is created
|
||||
EXPECT_TRUE(builder.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(builder.Build(&err));
|
||||
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
|
||||
// See that an entry in the logfile is created. The input_mtime is 1 since that was
|
||||
// the mtime of in1 when the command was started
|
||||
log_entry = build_log.LookupByOutput("out");
|
||||
ASSERT_TRUE(NULL != log_entry);
|
||||
ASSERT_EQ(1u, log_entry->mtime);
|
||||
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
|
||||
{
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
command_runner_.commands_ran_.clear();
|
||||
|
||||
// Trigger the build again - "out" should rebuild despite having a newer mtime than
|
||||
// "in1", since "in1" was touched during the build of out (simulated by changing its
|
||||
// mtime in the test builder's WaitForCommand(), which runs before FinishCommand())
|
||||
command_runner_.commands_ran_.clear();
|
||||
state.Reset();
|
||||
EXPECT_TRUE(builder.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(builder.Build(&err));
|
||||
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
|
||||
// Check that the logfile entry is still correct
|
||||
log_entry = build_log.LookupByOutput("out");
|
||||
ASSERT_TRUE(NULL != log_entry);
|
||||
ASSERT_TRUE(fs_.files_["in1"].mtime < log_entry->mtime);
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
|
||||
{
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
command_runner_.commands_ran_.clear();
|
||||
|
||||
// And a subsequent run should not have any work to do
|
||||
command_runner_.commands_ran_.clear();
|
||||
state.Reset();
|
||||
EXPECT_TRUE(builder.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(builder.AlreadyUpToDate());
|
||||
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
}
|
||||
|
||||
TEST_F(BuildWithDepsLogTest, TestInputMtimeRaceConditionWithDepFile) {
|
||||
string err;
|
||||
const char* manifest =
|
||||
"rule long-cc\n"
|
||||
" command = long-cc\n"
|
||||
"build out: long-cc\n"
|
||||
" deps = gcc\n"
|
||||
" depfile = out.d\n"
|
||||
" test_dependency = header.h\n";
|
||||
|
||||
fs_.Create("header.h", "");
|
||||
|
||||
State state;
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
BuildLog build_log;
|
||||
ASSERT_TRUE(build_log.Load(build_log_file_.path(), &err));
|
||||
ASSERT_TRUE(build_log.OpenForWrite(build_log_file_.path(), *this, &err));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
|
||||
{
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
|
||||
// Run the build, out gets built, dep file is created
|
||||
EXPECT_TRUE(builder.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(builder.Build(&err));
|
||||
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
|
||||
// See that an entry in the logfile is created. The mtime is 1 due to the command
|
||||
// starting when the file system's mtime was 1.
|
||||
BuildLog::LogEntry* log_entry = build_log.LookupByOutput("out");
|
||||
ASSERT_TRUE(NULL != log_entry);
|
||||
ASSERT_EQ(1u, log_entry->mtime);
|
||||
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
|
||||
{
|
||||
// Trigger the build again - "out" will rebuild since its newest input mtime (header.h)
|
||||
// is newer than the recorded mtime of out in the build log
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
command_runner_.commands_ran_.clear();
|
||||
|
||||
state.Reset();
|
||||
EXPECT_TRUE(builder.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(builder.Build(&err));
|
||||
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
|
||||
{
|
||||
// Trigger the build again - "out" won't rebuild since the file wasn't updated during
|
||||
// the previous build
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
command_runner_.commands_ran_.clear();
|
||||
|
||||
state.Reset();
|
||||
EXPECT_TRUE(builder.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
ASSERT_TRUE(builder.AlreadyUpToDate());
|
||||
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
|
||||
// touch the header to trigger a rebuild
|
||||
fs_.Create("header.h", "");
|
||||
ASSERT_EQ(fs_.now_, 7);
|
||||
|
||||
{
|
||||
// Rebuild. This time, long-cc will cause header.h to be updated while the build is
|
||||
// in progress
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
command_runner_.commands_ran_.clear();
|
||||
|
||||
state.Reset();
|
||||
EXPECT_TRUE(builder.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(builder.Build(&err));
|
||||
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
|
||||
{
|
||||
// Rebuild. Because header.h is now in the deplog for out, it should be detectable as
|
||||
// a change-while-in-progress and should cause a rebuild of out.
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
command_runner_.commands_ran_.clear();
|
||||
|
||||
state.Reset();
|
||||
EXPECT_TRUE(builder.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(builder.Build(&err));
|
||||
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
|
||||
{
|
||||
// This time, the header.h file was not updated during the build, so the target should
|
||||
// not be considered dirty.
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
command_runner_.commands_ran_.clear();
|
||||
|
||||
state.Reset();
|
||||
EXPECT_TRUE(builder.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
EXPECT_TRUE(builder.AlreadyUpToDate());
|
||||
|
||||
builder.command_runner_.release();
|
||||
}
|
||||
}
|
||||
|
||||
/// Check that a restat rule generating a header cancels compilations correctly.
|
||||
TEST_F(BuildTest, RestatDepfileDependency) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_,
|
||||
@ -2564,7 +2906,7 @@ TEST_F(BuildWithDepsLogTest, RestatDepfileDependencyDepsLog) {
|
||||
|
||||
// Run the build once, everything should be ok.
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
@ -2590,8 +2932,8 @@ TEST_F(BuildWithDepsLogTest, RestatDepfileDependencyDepsLog) {
|
||||
|
||||
// Run the build again.
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
@ -2623,7 +2965,7 @@ TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) {
|
||||
|
||||
// Run the build once, everything should be ok.
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
@ -2643,8 +2985,8 @@ TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
@ -2656,9 +2998,9 @@ TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) {
|
||||
EXPECT_TRUE(builder.AddTarget("fo o.o", &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
// Expect three new edges: one generating fo o.o, and two more from
|
||||
// loading the depfile.
|
||||
ASSERT_EQ(3u, state.edges_.size());
|
||||
// Expect one new edge generating fo o.o, loading the depfile should
|
||||
// not generate new edges.
|
||||
ASSERT_EQ(1u, state.edges_.size());
|
||||
// Expect our edge to now have three inputs: foo.c and two headers.
|
||||
ASSERT_EQ(3u, edge->inputs_.size());
|
||||
|
||||
@ -2694,7 +3036,7 @@ TEST_F(BuildWithDepsLogTest, DiscoveredDepDuringBuildChanged) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
@ -2717,8 +3059,8 @@ TEST_F(BuildWithDepsLogTest, DiscoveredDepDuringBuildChanged) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
@ -2740,8 +3082,8 @@ TEST_F(BuildWithDepsLogTest, DiscoveredDepDuringBuildChanged) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, &build_log, &deps_log, &fs_, &status_, 0);
|
||||
@ -2769,7 +3111,7 @@ TEST_F(BuildWithDepsLogTest, DepFileDepsLogCanonicalize) {
|
||||
|
||||
// Run the build once, everything should be ok.
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
@ -2791,23 +3133,21 @@ TEST_F(BuildWithDepsLogTest, DepFileDepsLogCanonicalize) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
builder.command_runner_.reset(&command_runner_);
|
||||
|
||||
Edge* edge = state.edges_.back();
|
||||
|
||||
state.GetNode("bar.h", 0)->MarkDirty(); // Mark bar.h as missing.
|
||||
EXPECT_TRUE(builder.AddTarget("a/b/c/d/e/fo o.o", &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
// Expect three new edges: one generating fo o.o, and two more from
|
||||
// loading the depfile.
|
||||
ASSERT_EQ(3u, state.edges_.size());
|
||||
// Expect one new edge generating fo o.o.
|
||||
ASSERT_EQ(1u, state.edges_.size());
|
||||
// Expect our edge to now have three inputs: foo.c and two headers.
|
||||
Edge* edge = state.edges_.back();
|
||||
ASSERT_EQ(3u, edge->inputs_.size());
|
||||
|
||||
// Expect the command line we generate to only use the original input.
|
||||
@ -2862,11 +3202,13 @@ TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) {
|
||||
fs_.Create("out.d", "out: header.h");
|
||||
fs_.Create("header.h", "");
|
||||
|
||||
RebuildTarget("out", manifest, "build_log", "ninja_deps");
|
||||
RebuildTarget("out", manifest, build_log_file_.c_str(),
|
||||
deps_log_file_.c_str());
|
||||
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
|
||||
|
||||
// Sanity: this rebuild should be NOOP
|
||||
RebuildTarget("out", manifest, "build_log", "ninja_deps");
|
||||
RebuildTarget("out", manifest, build_log_file_.c_str(),
|
||||
deps_log_file_.c_str());
|
||||
ASSERT_EQ(0u, command_runner_.commands_ran_.size());
|
||||
|
||||
// Touch 'header.in', blank dependencies log (create a different one).
|
||||
@ -2875,12 +3217,14 @@ TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) {
|
||||
fs_.Tick();
|
||||
fs_.Create("header.in", "");
|
||||
|
||||
ScopedFilePath deps2_file_("ninja_deps2");
|
||||
|
||||
// (switch to a new blank deps_log "ninja_deps2")
|
||||
RebuildTarget("out", manifest, "build_log", "ninja_deps2");
|
||||
RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str());
|
||||
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
|
||||
|
||||
// Sanity: this build should be NOOP
|
||||
RebuildTarget("out", manifest, "build_log", "ninja_deps2");
|
||||
RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str());
|
||||
ASSERT_EQ(0u, command_runner_.commands_ran_.size());
|
||||
|
||||
// Check that invalidating deps by target timestamp also works here
|
||||
@ -2888,11 +3232,11 @@ TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) {
|
||||
fs_.Tick();
|
||||
fs_.Create("header.in", "");
|
||||
fs_.Create("out", "");
|
||||
RebuildTarget("out", manifest, "build_log", "ninja_deps2");
|
||||
RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str());
|
||||
ASSERT_EQ(2u, command_runner_.commands_ran_.size());
|
||||
|
||||
// And this build should be NOOP again
|
||||
RebuildTarget("out", manifest, "build_log", "ninja_deps2");
|
||||
RebuildTarget("out", manifest, build_log_file_.c_str(), deps2_file_.c_str());
|
||||
ASSERT_EQ(0u, command_runner_.commands_ran_.size());
|
||||
}
|
||||
|
||||
@ -2909,7 +3253,10 @@ TEST_F(BuildTest, WrongOutputInDepfileCausesRebuild) {
|
||||
fs_.Create("header.h", "");
|
||||
fs_.Create("foo.o.d", "bar.o.d: header.h\n");
|
||||
|
||||
RebuildTarget("foo.o", manifest, "build_log", "ninja_deps");
|
||||
ScopedFilePath build_log("build_log");
|
||||
ScopedFilePath deps_file("ninja_deps");
|
||||
|
||||
RebuildTarget("foo.o", manifest, build_log.c_str(), deps_file.c_str());
|
||||
ASSERT_EQ(1u, command_runner_.commands_ran_.size());
|
||||
}
|
||||
|
||||
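These build_test hunks switch from hard-coded "build_log"/"ninja_deps" paths to ScopedFilePath objects so each test cleans up its temporary files. As a rough sketch of what such a helper plausibly does, here is an RAII wrapper that removes its file on scope exit; the exact interface (c_str(), a Release() escape hatch) is an assumption for illustration, not a quote of ninja's test utilities.

#include <cstdio>
#include <string>

// Hypothetical RAII path holder: deletes the named file when it goes out of
// scope so every test starts from a clean slate.
struct ScopedFilePathSketch {
  explicit ScopedFilePathSketch(const std::string& path) : path_(path) {}
  ~ScopedFilePathSketch() {
    if (!released_)
      std::remove(path_.c_str());  // best-effort cleanup; errors ignored in tests
  }
  const char* c_str() const { return path_.c_str(); }
  void Release() { released_ = true; }  // keep the file around for debugging

 private:
  std::string path_;
  bool released_ = false;
};

int main() {
  {
    ScopedFilePathSketch log("build_log_tmp");
    if (std::FILE* f = std::fopen(log.c_str(), "w"))
      std::fclose(f);
  }  // "build_log_tmp" is removed here, before the next test would run
  return 0;
}

Tests can then pass log.c_str() wherever a plain path string is expected, as the rewritten RebuildTarget calls above do.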
@ -3042,9 +3389,10 @@ TEST_F(BuildTest, DyndepBuild) {
|
||||
ASSERT_EQ(2u, fs_.files_read_.size());
|
||||
EXPECT_EQ("dd-in", fs_.files_read_[0]);
|
||||
EXPECT_EQ("dd", fs_.files_read_[1]);
|
||||
ASSERT_EQ(2u + files_created, fs_.files_created_.size());
|
||||
ASSERT_EQ(3u + files_created, fs_.files_created_.size());
|
||||
EXPECT_EQ(1u, fs_.files_created_.count("dd"));
|
||||
EXPECT_EQ(1u, fs_.files_created_.count("out"));
|
||||
EXPECT_EQ(1u, fs_.files_created_.count(".ninja_lock"));
|
||||
}
|
||||
|
||||
TEST_F(BuildTest, DyndepBuildSyntaxError) {
|
||||
@ -3355,8 +3703,8 @@ TEST_F(BuildTest, DyndepBuildDiscoverOutputAndDepfileInput) {
|
||||
EXPECT_TRUE(builder_.AddTarget("out", &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
// Loading the depfile gave tmp.imp a phony input edge.
|
||||
ASSERT_TRUE(GetNode("tmp.imp")->in_edge()->is_phony());
|
||||
// Loading the depfile did not give tmp.imp a phony input edge.
|
||||
ASSERT_FALSE(GetNode("tmp.imp")->in_edge());
|
||||
|
||||
EXPECT_TRUE(builder_.Build(&err));
|
||||
EXPECT_EQ("", err);
|
||||
@ -3865,7 +4213,7 @@ TEST_F(BuildWithDepsLogTest, ValidationThroughDepfile) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
@ -3900,8 +4248,8 @@ TEST_F(BuildWithDepsLogTest, ValidationThroughDepfile) {
|
||||
ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest));
|
||||
|
||||
DepsLog deps_log;
|
||||
ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err));
|
||||
ASSERT_TRUE(deps_log.Load(deps_log_file_.path(), &state, &err));
|
||||
ASSERT_TRUE(deps_log.OpenForWrite(deps_log_file_.path(), &err));
|
||||
ASSERT_EQ("", err);
|
||||
|
||||
Builder builder(&state, config_, NULL, &deps_log, &fs_, &status_, 0);
|
||||
|
0
src/canon_perftest.cc
Normal file → Executable file
4
src/clean.cc
Normal file → Executable file
@ -127,6 +127,7 @@ int Cleaner::CleanAll(bool generator) {
|
||||
int Cleaner::CleanDead(const BuildLog::Entries& entries) {
|
||||
Reset();
|
||||
PrintHeader();
|
||||
LoadDyndeps();
|
||||
for (BuildLog::Entries::const_iterator i = entries.begin(); i != entries.end(); ++i) {
|
||||
Node* n = state_->LookupNode(i->first);
|
||||
// Detecting stale outputs works as follows:
|
||||
@ -292,7 +293,8 @@ void Cleaner::LoadDyndeps() {
|
||||
// Load dyndep files that exist, before they are cleaned.
|
||||
for (vector<Edge*>::iterator e = state_->edges_.begin();
|
||||
e != state_->edges_.end(); ++e) {
|
||||
if (Node* dyndep = (*e)->dyndep_) {
|
||||
Node* dyndep;
|
||||
if ((dyndep = (*e)->dyndep_) && dyndep->dyndep_pending()) {
|
||||
// Capture and ignore errors loading the dyndep file.
|
||||
// We clean as much of the graph as we know.
|
||||
std::string err;
|
||||
|
0
src/clean.h
Normal file → Executable file
0
src/clean_test.cc
Normal file → Executable file
0
src/clparser.cc
Normal file → Executable file
0
src/clparser.h
Normal file → Executable file
0
src/clparser_perftest.cc
Normal file → Executable file
0
src/clparser_test.cc
Normal file → Executable file
0
src/debug_flags.cc
Normal file → Executable file
0
src/debug_flags.h
Normal file → Executable file
4
src/depfile_parser.cc
Normal file → Executable file
@ -54,6 +54,7 @@ bool DepfileParser::Parse(string* content, string* err) {
|
||||
bool have_target = false;
|
||||
bool parsing_targets = true;
|
||||
bool poisoned_input = false;
|
||||
bool is_empty = true;
|
||||
while (in < end) {
|
||||
bool have_newline = false;
|
||||
// out: current output point (typically same as in, but can fall behind
|
||||
@ -335,6 +336,7 @@ yy32:
|
||||
}
|
||||
|
||||
if (len > 0) {
|
||||
is_empty = false;
|
||||
StringPiece piece = StringPiece(filename, len);
|
||||
// If we've seen this as an input before, skip it.
|
||||
std::vector<StringPiece>::iterator pos = std::find(ins_.begin(), ins_.end(), piece);
|
||||
@ -363,7 +365,7 @@ yy32:
|
||||
poisoned_input = false;
|
||||
}
|
||||
}
|
||||
if (!have_target) {
|
||||
if (!have_target && !is_empty) {
|
||||
*err = "expected ':' in depfile";
|
||||
return false;
|
||||
}
|
||||
|
0
src/depfile_parser.h
Normal file → Executable file
4
src/depfile_parser.in.cc
Normal file → Executable file
@ -53,6 +53,7 @@ bool DepfileParser::Parse(string* content, string* err) {
|
||||
bool have_target = false;
|
||||
bool parsing_targets = true;
|
||||
bool poisoned_input = false;
|
||||
bool is_empty = true;
|
||||
while (in < end) {
|
||||
bool have_newline = false;
|
||||
// out: current output point (typically same as in, but can fall behind
|
||||
@ -171,6 +172,7 @@ bool DepfileParser::Parse(string* content, string* err) {
|
||||
}
|
||||
|
||||
if (len > 0) {
|
||||
is_empty = false;
|
||||
StringPiece piece = StringPiece(filename, len);
|
||||
// If we've seen this as an input before, skip it.
|
||||
std::vector<StringPiece>::iterator pos = std::find(ins_.begin(), ins_.end(), piece);
|
||||
@ -199,7 +201,7 @@ bool DepfileParser::Parse(string* content, string* err) {
|
||||
poisoned_input = false;
|
||||
}
|
||||
}
|
||||
if (!have_target) {
|
||||
if (!have_target && !is_empty) {
|
||||
*err = "expected ':' in depfile";
|
||||
return false;
|
||||
}
|
||||
|
0
src/depfile_parser_perftest.cc
Normal file → Executable file
21
src/depfile_parser_test.cc
Normal file → Executable file
@ -378,3 +378,24 @@ TEST_F(DepfileParserTest, BuggyMP) {
|
||||
"z:\n", &err));
|
||||
ASSERT_EQ("inputs may not also have inputs", err);
|
||||
}
|
||||
|
||||
TEST_F(DepfileParserTest, EmptyFile) {
|
||||
std::string err;
|
||||
EXPECT_TRUE(Parse("", &err));
|
||||
ASSERT_EQ(0u, parser_.outs_.size());
|
||||
ASSERT_EQ(0u, parser_.ins_.size());
|
||||
}
|
||||
|
||||
TEST_F(DepfileParserTest, EmptyLines) {
|
||||
std::string err;
|
||||
EXPECT_TRUE(Parse("\n\n", &err));
|
||||
ASSERT_EQ(0u, parser_.outs_.size());
|
||||
ASSERT_EQ(0u, parser_.ins_.size());
|
||||
}
|
||||
|
||||
TEST_F(DepfileParserTest, MissingColon) {
|
||||
// The file is not empty but is missing a colon separator.
|
||||
std::string err;
|
||||
EXPECT_FALSE(Parse("foo.o foo.c\n", &err));
|
||||
EXPECT_EQ("expected ':' in depfile", err);
|
||||
}
|
||||
|
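The depfile_parser hunks above add an is_empty flag so that a depfile containing no tokens at all parses cleanly, while a non-empty depfile without a ':' separator is still rejected; the EmptyFile, EmptyLines and MissingColon tests just above pin that behaviour down. A minimal standalone sketch of the rule, assuming a hypothetical CheckDepfileShape helper rather than ninja's real parser:

#include <cassert>
#include <cctype>
#include <string>

// Hypothetical helper mirroring the is_empty/have_target checks: an empty
// (or whitespace-only) depfile is accepted, a non-empty depfile with no ':'
// separator is an error.
bool CheckDepfileShape(const std::string& content, std::string* err) {
  bool is_empty = true;
  bool have_target = false;
  for (char c : content) {
    if (!std::isspace(static_cast<unsigned char>(c)))
      is_empty = false;
    if (c == ':')
      have_target = true;
  }
  if (!have_target && !is_empty) {
    *err = "expected ':' in depfile";
    return false;
  }
  return true;
}

int main() {
  std::string err;
  assert(CheckDepfileShape("", &err));                // EmptyFile
  assert(CheckDepfileShape("\n\n", &err));            // EmptyLines
  assert(!CheckDepfileShape("foo.o foo.c\n", &err));  // MissingColon
  assert(err == "expected ':' in depfile");
  return 0;
}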
2
src/deps_log.cc
Normal file → Executable file
@ -361,7 +361,7 @@ bool DepsLog::Recompact(const string& path, string* err) {
|
||||
return true;
|
||||
}
|
||||
|
||||
bool DepsLog::IsDepsEntryLiveFor(Node* node) {
|
||||
bool DepsLog::IsDepsEntryLiveFor(const Node* node) {
|
||||
// Skip entries that don't have in-edges or whose edges don't have a
|
||||
// "deps" attribute. They were in the deps log from previous builds, but
|
||||
// the files they were for were removed from the build and their deps
|
||||
|
2
src/deps_log.h
Normal file → Executable file
@ -97,7 +97,7 @@ struct DepsLog {
|
||||
/// past but are no longer part of the manifest. This function returns if
|
||||
/// this is the case for a given node. This function is slow, don't call
|
||||
/// it from code that runs on every build.
|
||||
bool IsDepsEntryLiveFor(Node* node);
|
||||
static bool IsDepsEntryLiveFor(const Node* node);
|
||||
|
||||
/// Used for tests.
|
||||
const std::vector<Node*>& nodes() const { return nodes_; }
|
||||
|
43
src/deps_log_test.cc
Normal file → Executable file
@ -138,9 +138,13 @@ TEST_F(DepsLogTest, DoubleEntry) {
|
||||
deps.push_back(state.GetNode("bar.h", 0));
|
||||
log.RecordDeps(state.GetNode("out.o", 0), 1, deps);
|
||||
log.Close();
|
||||
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 st;
|
||||
ASSERT_EQ(0, stat64(kTestFilename, &st));
|
||||
#else
|
||||
struct stat st;
|
||||
ASSERT_EQ(0, stat(kTestFilename, &st));
|
||||
#endif
|
||||
file_size = (int)st.st_size;
|
||||
ASSERT_GT(file_size, 0);
|
||||
}
|
||||
@ -160,9 +164,13 @@ TEST_F(DepsLogTest, DoubleEntry) {
|
||||
deps.push_back(state.GetNode("bar.h", 0));
|
||||
log.RecordDeps(state.GetNode("out.o", 0), 1, deps);
|
||||
log.Close();
|
||||
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 st;
|
||||
ASSERT_EQ(0, stat64(kTestFilename, &st));
|
||||
#else
|
||||
struct stat st;
|
||||
ASSERT_EQ(0, stat(kTestFilename, &st));
|
||||
#endif
|
||||
int file_size_2 = (int)st.st_size;
|
||||
ASSERT_EQ(file_size, file_size_2);
|
||||
}
|
||||
@ -198,9 +206,13 @@ TEST_F(DepsLogTest, Recompact) {
|
||||
log.RecordDeps(state.GetNode("other_out.o", 0), 1, deps);
|
||||
|
||||
log.Close();
|
||||
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 st;
|
||||
ASSERT_EQ(0, stat64(kTestFilename, &st));
|
||||
#else
|
||||
struct stat st;
|
||||
ASSERT_EQ(0, stat(kTestFilename, &st));
|
||||
#endif
|
||||
file_size = (int)st.st_size;
|
||||
ASSERT_GT(file_size, 0);
|
||||
}
|
||||
@ -222,8 +234,13 @@ TEST_F(DepsLogTest, Recompact) {
|
||||
log.RecordDeps(state.GetNode("out.o", 0), 1, deps);
|
||||
log.Close();
|
||||
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 st;
|
||||
ASSERT_EQ(0, stat64(kTestFilename, &st));
|
||||
#else
|
||||
struct stat st;
|
||||
ASSERT_EQ(0, stat(kTestFilename, &st));
|
||||
#endif
|
||||
file_size_2 = (int)st.st_size;
|
||||
// The file should grow to record the new deps.
|
||||
ASSERT_GT(file_size_2, file_size);
|
||||
@ -273,8 +290,13 @@ TEST_F(DepsLogTest, Recompact) {
|
||||
ASSERT_EQ(other_out, log.nodes()[other_out->id()]);
|
||||
|
||||
// The file should have shrunk a bit for the smaller deps.
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 st;
|
||||
ASSERT_EQ(0, stat64(kTestFilename, &st));
|
||||
#else
|
||||
struct stat st;
|
||||
ASSERT_EQ(0, stat(kTestFilename, &st));
|
||||
#endif
|
||||
file_size_3 = (int)st.st_size;
|
||||
ASSERT_LT(file_size_3, file_size_2);
|
||||
}
|
||||
@ -317,8 +339,13 @@ TEST_F(DepsLogTest, Recompact) {
|
||||
ASSERT_EQ(-1, state.LookupNode("baz.h")->id());
|
||||
|
||||
// The file should have shrunk more.
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 st;
|
||||
ASSERT_EQ(0, stat64(kTestFilename, &st));
|
||||
#else
|
||||
struct stat st;
|
||||
ASSERT_EQ(0, stat(kTestFilename, &st));
|
||||
#endif
|
||||
int file_size_4 = (int)st.st_size;
|
||||
ASSERT_LT(file_size_4, file_size_3);
|
||||
}
|
||||
@ -374,8 +401,13 @@ TEST_F(DepsLogTest, Truncated) {
|
||||
}
|
||||
|
||||
// Get the file size.
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 st;
|
||||
ASSERT_EQ(0, stat64(kTestFilename, &st));
|
||||
#else
|
||||
struct stat st;
|
||||
ASSERT_EQ(0, stat(kTestFilename, &st));
|
||||
#endif
|
||||
|
||||
// Try reloading at truncated sizes.
|
||||
// Track how many nodes/deps were found; they should decrease with
|
||||
@ -434,8 +466,13 @@ TEST_F(DepsLogTest, TruncatedRecovery) {
|
||||
|
||||
// Shorten the file, corrupting the last record.
|
||||
{
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 st;
|
||||
ASSERT_EQ(0, stat64(kTestFilename, &st));
|
||||
#else
|
||||
struct stat st;
|
||||
ASSERT_EQ(0, stat(kTestFilename, &st));
|
||||
#endif
|
||||
string err;
|
||||
ASSERT_TRUE(Truncate(kTestFilename, st.st_size - 2, &err));
|
||||
}
|
||||
|
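The deps_log_test hunks above repeat the same #ifdef __USE_LARGEFILE64 stat()/stat64() block before every file-size assertion. A sketch of how that could be factored into a single helper; GetFileSizeForTest is a hypothetical name, not something this patch adds:

#include <sys/stat.h>
#include <cstdint>
#include <cstdio>
#include <string>

// Hypothetical helper collapsing the repeated stat()/stat64() blocks:
// returns the file size in bytes, or -1 if the file cannot be stat'ed.
int64_t GetFileSizeForTest(const std::string& path) {
#ifdef __USE_LARGEFILE64
  struct stat64 st;
  if (stat64(path.c_str(), &st) < 0)
    return -1;
#else
  struct stat st;
  if (stat(path.c_str(), &st) < 0)
    return -1;
#endif
  return static_cast<int64_t>(st.st_size);
}

int main(int argc, char** argv) {
  if (argc > 1)
    std::printf("%s: %lld bytes\n", argv[1],
                static_cast<long long>(GetFileSizeForTest(argv[1])));
  return 0;
}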
49
src/disk_interface.cc
Normal file → Executable file
@ -23,9 +23,10 @@
|
||||
#include <sys/types.h>
|
||||
|
||||
#ifdef _WIN32
|
||||
#include <sstream>
|
||||
#include <windows.h>
|
||||
#include <direct.h> // _mkdir
|
||||
#include <windows.h>
|
||||
|
||||
#include <sstream>
|
||||
#else
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
@ -110,7 +111,8 @@ bool StatAllFilesInDir(const string& dir, map<string, TimeStamp>* stamps,
|
||||
|
||||
if (find_handle == INVALID_HANDLE_VALUE) {
|
||||
DWORD win_err = GetLastError();
|
||||
if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND)
|
||||
if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND ||
|
||||
win_err == ERROR_DIRECTORY)
|
||||
return true;
|
||||
*err = "FindFirstFileExA(" + dir + "): " + GetLastErrorString();
|
||||
return false;
|
||||
@ -156,13 +158,33 @@ bool DiskInterface::MakeDirs(const string& path) {
|
||||
}
|
||||
|
||||
// RealDiskInterface -----------------------------------------------------------
|
||||
RealDiskInterface::RealDiskInterface()
|
||||
#ifdef _WIN32
|
||||
: use_cache_(false), long_paths_enabled_(false) {
|
||||
setlocale(LC_ALL, "");
|
||||
|
||||
// Probe ntdll.dll for RtlAreLongPathsEnabled, and call it if it exists.
|
||||
HINSTANCE ntdll_lib = ::GetModuleHandleW(L"ntdll");
|
||||
if (ntdll_lib) {
|
||||
typedef BOOLEAN(WINAPI FunctionType)();
|
||||
auto* func_ptr = reinterpret_cast<FunctionType*>(
|
||||
::GetProcAddress(ntdll_lib, "RtlAreLongPathsEnabled"));
|
||||
if (func_ptr) {
|
||||
long_paths_enabled_ = (*func_ptr)();
|
||||
}
|
||||
}
|
||||
}
|
||||
#else
|
||||
{}
|
||||
#endif
|
||||
|
||||
TimeStamp RealDiskInterface::Stat(const string& path, string* err) const {
|
||||
METRIC_RECORD("node stat");
|
||||
#ifdef _WIN32
|
||||
// MSDN: "Naming Files, Paths, and Namespaces"
|
||||
// http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
|
||||
if (!path.empty() && path[0] != '\\' && path.size() > MAX_PATH) {
|
||||
if (!path.empty() && !AreLongPathsEnabled() && path[0] != '\\' &&
|
||||
path.size() > MAX_PATH) {
|
||||
ostringstream err_stream;
|
||||
err_stream << "Stat(" << path << "): Filename longer than " << MAX_PATH
|
||||
<< " characters";
|
||||
@ -194,9 +216,14 @@ TimeStamp RealDiskInterface::Stat(const string& path, string* err) const {
|
||||
}
|
||||
DirCache::iterator di = ci->second.find(base);
|
||||
return di != ci->second.end() ? di->second : 0;
|
||||
#else
|
||||
#ifdef __USE_LARGEFILE64
|
||||
struct stat64 st;
|
||||
if (stat64(path.c_str(), &st) < 0) {
|
||||
#else
|
||||
struct stat st;
|
||||
if (stat(path.c_str(), &st) < 0) {
|
||||
#endif
|
||||
if (errno == ENOENT || errno == ENOTDIR)
|
||||
return 0;
|
||||
*err = "stat(" + path + "): " + strerror(errno);
|
||||
@ -267,7 +294,7 @@ FileReader::Status RealDiskInterface::ReadFile(const string& path,
|
||||
|
||||
int RealDiskInterface::RemoveFile(const string& path) {
|
||||
#ifdef _WIN32
|
||||
DWORD attributes = GetFileAttributes(path.c_str());
|
||||
DWORD attributes = GetFileAttributesA(path.c_str());
|
||||
if (attributes == INVALID_FILE_ATTRIBUTES) {
|
||||
DWORD win_err = GetLastError();
|
||||
if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) {
|
||||
@ -278,7 +305,7 @@ int RealDiskInterface::RemoveFile(const string& path) {
|
||||
// On Windows Ninja should behave the same:
|
||||
// https://github.com/ninja-build/ninja/issues/1886
|
||||
// Skip error checking. If this fails, accept whatever happens below.
|
||||
SetFileAttributes(path.c_str(), attributes & ~FILE_ATTRIBUTE_READONLY);
|
||||
SetFileAttributesA(path.c_str(), attributes & ~FILE_ATTRIBUTE_READONLY);
|
||||
}
|
||||
if (attributes & FILE_ATTRIBUTE_DIRECTORY) {
|
||||
// remove() deletes both files and directories. On Windows we have to
|
||||
@ -286,7 +313,7 @@ int RealDiskInterface::RemoveFile(const string& path) {
|
||||
// used on a directory)
|
||||
// This fixes the behavior of ninja -t clean in some cases
|
||||
// https://github.com/ninja-build/ninja/issues/828
|
||||
if (!RemoveDirectory(path.c_str())) {
|
||||
if (!RemoveDirectoryA(path.c_str())) {
|
||||
DWORD win_err = GetLastError();
|
||||
if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) {
|
||||
return 1;
|
||||
@ -296,7 +323,7 @@ int RealDiskInterface::RemoveFile(const string& path) {
|
||||
return -1;
|
||||
}
|
||||
} else {
|
||||
if (!DeleteFile(path.c_str())) {
|
||||
if (!DeleteFileA(path.c_str())) {
|
||||
DWORD win_err = GetLastError();
|
||||
if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) {
|
||||
return 1;
|
||||
@ -327,3 +354,9 @@ void RealDiskInterface::AllowStatCache(bool allow) {
|
||||
cache_.clear();
|
||||
#endif
|
||||
}
|
||||
|
||||
#ifdef _WIN32
|
||||
bool RealDiskInterface::AreLongPathsEnabled(void) const {
|
||||
return long_paths_enabled_;
|
||||
}
|
||||
#endif
|
||||
|
14
src/disk_interface.h
Normal file → Executable file
@ -69,11 +69,7 @@ struct DiskInterface: public FileReader {
|
||||
|
||||
/// Implementation of DiskInterface that actually hits the disk.
|
||||
struct RealDiskInterface : public DiskInterface {
|
||||
RealDiskInterface()
|
||||
#ifdef _WIN32
|
||||
: use_cache_(false)
|
||||
#endif
|
||||
{}
|
||||
RealDiskInterface();
|
||||
virtual ~RealDiskInterface() {}
|
||||
virtual TimeStamp Stat(const std::string& path, std::string* err) const;
|
||||
virtual bool MakeDir(const std::string& path);
|
||||
@ -85,11 +81,19 @@ struct RealDiskInterface : public DiskInterface {
|
||||
/// Whether stat information can be cached. Only has an effect on Windows.
|
||||
void AllowStatCache(bool allow);
|
||||
|
||||
#ifdef _WIN32
|
||||
/// Whether long paths are enabled. Only has an effect on Windows.
|
||||
bool AreLongPathsEnabled() const;
|
||||
#endif
|
||||
|
||||
private:
|
||||
#ifdef _WIN32
|
||||
/// Whether stat information can be cached.
|
||||
bool use_cache_;
|
||||
|
||||
/// Whether long paths are enabled.
|
||||
bool long_paths_enabled_;
|
||||
|
||||
typedef std::map<std::string, TimeStamp> DirCache;
|
||||
// TODO: Neither a map nor a hashmap seems ideal here. If the statcache
|
||||
// works out, come up with a better data structure.
|
||||
|
32
src/disk_interface_test.cc
Normal file → Executable file
@ -17,6 +17,7 @@
|
||||
#ifdef _WIN32
|
||||
#include <io.h>
|
||||
#include <windows.h>
|
||||
#include <direct.h>
|
||||
#endif
|
||||
|
||||
#include "disk_interface.h"
|
||||
@ -65,6 +66,17 @@ TEST_F(DiskInterfaceTest, StatMissingFile) {
|
||||
EXPECT_EQ("", err);
|
||||
}
|
||||
|
||||
TEST_F(DiskInterfaceTest, StatMissingFileWithCache) {
|
||||
disk_.AllowStatCache(true);
|
||||
string err;
|
||||
|
||||
// On Windows, the errno for FindFirstFileExA, which is used when the stat
|
||||
// cache is enabled, is different when the directory name is not a directory.
|
||||
ASSERT_TRUE(Touch("notadir"));
|
||||
EXPECT_EQ(0, disk_.Stat("notadir/nosuchfile", &err));
|
||||
EXPECT_EQ("", err);
|
||||
}
|
||||
|
||||
TEST_F(DiskInterfaceTest, StatBadPath) {
|
||||
string err;
|
||||
#ifdef _WIN32
|
||||
@ -85,6 +97,24 @@ TEST_F(DiskInterfaceTest, StatExistingFile) {
|
||||
EXPECT_EQ("", err);
|
||||
}
|
||||
|
||||
#ifdef _WIN32
|
||||
TEST_F(DiskInterfaceTest, StatExistingFileWithLongPath) {
|
||||
string err;
|
||||
char currentdir[32767];
|
||||
_getcwd(currentdir, sizeof(currentdir));
|
||||
const string filename = string(currentdir) +
|
||||
"\\filename_with_256_characters_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
|
||||
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
|
||||
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
|
||||
xxxxxxxxxxxxxxxxxxxxx";
|
||||
const string prefixed = "\\\\?\\" + filename;
|
||||
ASSERT_TRUE(Touch(prefixed.c_str()));
|
||||
EXPECT_GT(disk_.Stat(disk_.AreLongPathsEnabled() ?
|
||||
filename : prefixed, &err), 1);
|
||||
EXPECT_EQ("", err);
|
||||
}
|
||||
#endif
|
||||
|
||||
TEST_F(DiskInterfaceTest, StatExistingDir) {
|
||||
string err;
|
||||
ASSERT_TRUE(disk_.MakeDir("subdir"));
|
||||
@ -198,7 +228,7 @@ TEST_F(DiskInterfaceTest, MakeDirs) {
|
||||
EXPECT_EQ(0, fclose(f));
|
||||
#ifdef _WIN32
|
||||
string path2 = "another\\with\\back\\\\slashes\\";
|
||||
EXPECT_TRUE(disk_.MakeDirs(path2.c_str()));
|
||||
EXPECT_TRUE(disk_.MakeDirs(path2));
|
||||
FILE* f2 = fopen((path2 + "a_file").c_str(), "w");
|
||||
EXPECT_TRUE(f2);
|
||||
EXPECT_EQ(0, fclose(f2));
|
||||
|
13
src/dyndep.cc
Normal file → Executable file
@ -97,15 +97,10 @@ bool DyndepLoader::UpdateEdge(Edge* edge, Dyndeps const* dyndeps,
|
||||
for (std::vector<Node*>::const_iterator i =
|
||||
dyndeps->implicit_outputs_.begin();
|
||||
i != dyndeps->implicit_outputs_.end(); ++i) {
|
||||
if (Edge* old_in_edge = (*i)->in_edge()) {
|
||||
// This node already has an edge producing it. Fail with an error
|
||||
// unless the edge was generated by ImplicitDepLoader, in which
|
||||
// case we can replace it with the now-known real producer.
|
||||
if (!old_in_edge->generated_by_dep_loader_) {
|
||||
*err = "multiple rules generate " + (*i)->path();
|
||||
return false;
|
||||
}
|
||||
old_in_edge->outputs_.clear();
|
||||
if ((*i)->in_edge()) {
|
||||
// This node already has an edge producing it.
|
||||
*err = "multiple rules generate " + (*i)->path();
|
||||
return false;
|
||||
}
|
||||
(*i)->set_in_edge(edge);
|
||||
}
|
||||
|
0
src/dyndep.h
Normal file → Executable file
0
src/dyndep_parser.cc
Normal file → Executable file
0
src/dyndep_parser.h
Normal file → Executable file
0
src/dyndep_parser_test.cc
Normal file → Executable file
0
src/edit_distance.cc
Normal file → Executable file
0
src/edit_distance.h
Normal file → Executable file
0
src/edit_distance_test.cc
Normal file → Executable file
0
src/eval_env.cc
Normal file → Executable file
0
src/eval_env.h
Normal file → Executable file
0
src/exit_status.h
Normal file → Executable file
0
src/getopt.c
Normal file → Executable file
0
src/getopt.h
Normal file → Executable file
123
src/graph.cc
Normal file → Executable file
@ -32,7 +32,6 @@
|
||||
using namespace std;
|
||||
|
||||
bool Node::Stat(DiskInterface* disk_interface, string* err) {
|
||||
METRIC_RECORD("node stat");
|
||||
mtime_ = disk_interface->Stat(path_, err);
|
||||
if (mtime_ == -1) {
|
||||
return false;
|
||||
@ -298,37 +297,34 @@ bool DependencyScan::RecomputeOutputDirty(const Edge* edge,
|
||||
return false;
|
||||
}
|
||||
|
||||
BuildLog::LogEntry* entry = 0;
|
||||
|
||||
// Dirty if we're missing the output.
|
||||
if (!output->exists()) {
|
||||
EXPLAIN("output %s doesn't exist", output->path().c_str());
|
||||
return true;
|
||||
}
|
||||
|
||||
BuildLog::LogEntry* entry = 0;
|
||||
|
||||
// If this is a restat rule, we may have cleaned the output in a
|
||||
// previous run and stored the command start time in the build log.
|
||||
// We don't want to consider a restat rule's outputs as dirty unless
|
||||
// an input changed since the last run, so we'll skip checking the
|
||||
// output file's actual mtime and simply check the recorded mtime from
|
||||
// the log against the most recent input's mtime (see below)
|
||||
bool used_restat = false;
|
||||
if (edge->GetBindingBool("restat") && build_log() &&
|
||||
(entry = build_log()->LookupByOutput(output->path()))) {
|
||||
used_restat = true;
|
||||
}
|
||||
|
||||
// Dirty if the output is older than the input.
|
||||
if (most_recent_input && output->mtime() < most_recent_input->mtime()) {
|
||||
TimeStamp output_mtime = output->mtime();
|
||||
|
||||
// If this is a restat rule, we may have cleaned the output with a restat
|
||||
// rule in a previous run and stored the most recent input mtime in the
|
||||
// build log. Use that mtime instead, so that the file will only be
|
||||
// considered dirty if an input was modified since the previous run.
|
||||
bool used_restat = false;
|
||||
if (edge->GetBindingBool("restat") && build_log() &&
|
||||
(entry = build_log()->LookupByOutput(output->path()))) {
|
||||
output_mtime = entry->mtime;
|
||||
used_restat = true;
|
||||
}
|
||||
|
||||
if (output_mtime < most_recent_input->mtime()) {
|
||||
EXPLAIN("%soutput %s older than most recent input %s "
|
||||
"(%" PRId64 " vs %" PRId64 ")",
|
||||
used_restat ? "restat of " : "", output->path().c_str(),
|
||||
most_recent_input->path().c_str(),
|
||||
output_mtime, most_recent_input->mtime());
|
||||
return true;
|
||||
}
|
||||
if (!used_restat && most_recent_input && output->mtime() < most_recent_input->mtime()) {
|
||||
EXPLAIN("output %s older than most recent input %s "
|
||||
"(%" PRId64 " vs %" PRId64 ")",
|
||||
output->path().c_str(),
|
||||
most_recent_input->path().c_str(),
|
||||
output->mtime(), most_recent_input->mtime());
|
||||
return true;
|
||||
}
|
||||
|
||||
if (build_log()) {
|
||||
@ -346,7 +342,9 @@ bool DependencyScan::RecomputeOutputDirty(const Edge* edge,
|
||||
// May also be dirty due to the mtime in the log being older than the
|
||||
// mtime of the most recent input. This can occur even when the mtime
|
||||
// on disk is newer if a previous run wrote to the output file but
|
||||
// exited with an error or was interrupted.
|
||||
// exited with an error or was interrupted. If this was a restat rule,
|
||||
// then we only check the recorded mtime against the most recent input
|
||||
// mtime and ignore the actual output's mtime above.
|
||||
EXPLAIN("recorded mtime of %s older than most recent input %s (%" PRId64 " vs %" PRId64 ")",
|
||||
output->path().c_str(), most_recent_input->path().c_str(),
|
||||
entry->mtime, most_recent_input->mtime());
|
||||
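The RecomputeOutputDirty hunks above compute used_restat up front: for a restat rule with a build-log entry, the on-disk output mtime is no longer compared against the newest input, and the recorded-mtime check further down decides instead. A simplified standalone model of that decision with flat boolean/integer parameters (an assumption for illustration; ninja's real function works on Edge/Node/BuildLog objects and also compares the command hash, which is omitted here):

#include <cstdint>
#include <cstdio>

// Simplified model of the dirtiness decision for one output.
bool OutputIsDirty(bool output_exists, int64_t output_mtime,
                   int64_t newest_input_mtime, bool restat,
                   bool has_log_entry, int64_t logged_mtime) {
  if (!output_exists)
    return true;  // a missing output is always dirty
  // For restat rules with a build-log entry, skip the on-disk comparison;
  // the recorded-mtime check below decides instead.
  bool used_restat = restat && has_log_entry;
  if (!used_restat && output_mtime < newest_input_mtime)
    return true;  // an input is newer than the output on disk
  // A recorded mtime older than the newest input also means dirty; this
  // covers interrupted writes and, for restat rules, replaces the check above.
  if (has_log_entry && logged_mtime < newest_input_mtime)
    return true;
  return false;
}

int main() {
  // Restat rule whose output was cleaned: the on-disk mtime (5) is older than
  // the newest input (8), but the log recorded mtime 10, so it stays clean.
  std::printf("%d\n", OutputIsDirty(true, 5, 8, true, true, 10));   // 0
  // Ordinary rule with a stale output and no log entry: dirty.
  std::printf("%d\n", OutputIsDirty(true, 5, 8, false, false, 0));  // 1
  return 0;
}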
@ -393,7 +391,7 @@ struct EdgeEnv : public Env {
|
||||
std::string MakePathList(const Node* const* span, size_t size, char sep) const;
|
||||
|
||||
private:
|
||||
vector<string> lookups_;
|
||||
std::vector<std::string> lookups_;
|
||||
const Edge* const edge_;
|
||||
EscapeKind escape_in_out_;
|
||||
bool recursive_;
|
||||
@ -403,21 +401,50 @@ string EdgeEnv::LookupVariable(const string& var) {
|
||||
if (var == "in" || var == "in_newline") {
|
||||
int explicit_deps_count = edge_->inputs_.size() - edge_->implicit_deps_ -
|
||||
edge_->order_only_deps_;
|
||||
#if __cplusplus >= 201103L
|
||||
return MakePathList(edge_->inputs_.data(), explicit_deps_count,
|
||||
#else
|
||||
return MakePathList(&edge_->inputs_[0], explicit_deps_count,
|
||||
#endif
|
||||
var == "in" ? ' ' : '\n');
|
||||
} else if (var == "out") {
|
||||
int explicit_outs_count = edge_->outputs_.size() - edge_->implicit_outs_;
|
||||
return MakePathList(&edge_->outputs_[0], explicit_outs_count, ' ');
|
||||
}
|
||||
|
||||
// Technical note about the lookups_ vector.
|
||||
//
|
||||
// This is used to detect cycles during recursive variable expansion
|
||||
// which can be seen as a graph traversal problem. Consider the following
|
||||
// example:
|
||||
//
|
||||
// rule something
|
||||
// command = $foo $foo $var1
|
||||
// var1 = $var2
|
||||
// var2 = $var3
|
||||
// var3 = $var1
|
||||
// foo = FOO
|
||||
//
|
||||
// Each variable definition can be seen as a node in a graph that looks
|
||||
// like the following:
|
||||
//
|
||||
// command --> foo
|
||||
// |
|
||||
// v
|
||||
// var1 <-----.
|
||||
// | |
|
||||
// v |
|
||||
// var2 ---> var3
|
||||
//
|
||||
// The lookups_ vector is used as a stack of visited nodes/variables
|
||||
// during recursive expansion. Entering a node adds an item to the
|
||||
// stack, leaving the node removes it.
|
||||
//
|
||||
// The recursive_ flag is used as a small performance optimization
|
||||
// to never record the starting node in the stack when beginning a new
|
||||
// expansion, since in most cases, expansions are not recursive
|
||||
// at all.
|
||||
//
|
||||
if (recursive_) {
|
||||
vector<string>::const_iterator it;
|
||||
if ((it = find(lookups_.begin(), lookups_.end(), var)) != lookups_.end()) {
|
||||
string cycle;
|
||||
auto it = std::find(lookups_.begin(), lookups_.end(), var);
|
||||
if (it != lookups_.end()) {
|
||||
std::string cycle;
|
||||
for (; it != lookups_.end(); ++it)
|
||||
cycle.append(*it + " -> ");
|
||||
cycle.append(var);
|
||||
@ -427,13 +454,17 @@ string EdgeEnv::LookupVariable(const string& var) {
|
||||
|
||||
// See notes on BindingEnv::LookupWithFallback.
|
||||
const EvalString* eval = edge_->rule_->GetBinding(var);
|
||||
if (recursive_ && eval)
|
||||
bool record_varname = recursive_ && eval;
|
||||
if (record_varname)
|
||||
lookups_.push_back(var);
|
||||
|
||||
// In practice, variables defined on rules never use another rule variable.
|
||||
// For performance, only start checking for cycles after the first lookup.
|
||||
recursive_ = true;
|
||||
return edge_->env_->LookupWithFallback(var, eval, this);
|
||||
std::string result = edge_->env_->LookupWithFallback(var, eval, this);
|
||||
if (record_varname)
|
||||
lookups_.pop_back();
|
||||
return result;
|
||||
}
|
||||
|
||||
std::string EdgeEnv::MakePathList(const Node* const* const span,
|
||||
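The technical note above explains that lookups_ acts as a stack of variables currently being expanded, so recursive expansion can report cycles such as var1 -> var2 -> var3 -> var1 instead of looping forever. A self-contained sketch of that idea over a plain map of bindings with a minimal "$name" syntax; the Expand/Bindings names are illustrative, and unlike ninja's evaluator this version also records the starting variable:

#include <algorithm>
#include <cctype>
#include <cstdio>
#include <map>
#include <string>
#include <vector>

using Bindings = std::map<std::string, std::string>;

// Expand $-references in env[var], keeping a stack of variables currently
// being expanded; revisiting one of them reports a cycle instead of recursing
// forever.
std::string Expand(const std::string& var, const Bindings& env,
                   std::vector<std::string>* stack, std::string* err) {
  if (std::find(stack->begin(), stack->end(), var) != stack->end()) {
    *err = "cycle in rule variables: ";
    for (const std::string& v : *stack)
      *err += v + " -> ";
    *err += var;
    return "";
  }
  Bindings::const_iterator it = env.find(var);
  if (it == env.end())
    return "";  // undefined variables expand to the empty string
  stack->push_back(var);
  std::string result;
  const std::string& value = it->second;
  for (size_t i = 0; i < value.size() && err->empty(); ++i) {
    if (value[i] == '$') {
      size_t j = i + 1;
      while (j < value.size() &&
             (std::isalnum(static_cast<unsigned char>(value[j])) || value[j] == '_'))
        ++j;
      result += Expand(value.substr(i + 1, j - i - 1), env, stack, err);
      i = j - 1;
    } else {
      result += value[i];
    }
  }
  stack->pop_back();
  return result;
}

int main() {
  Bindings env = { { "command", "$foo $var1" }, { "var1", "$var2" },
                   { "var2", "$var3" },         { "var3", "$var1" },
                   { "foo", "FOO" } };
  std::string err;
  std::vector<std::string> stack;
  Expand("command", env, &stack, &err);
  std::printf("%s\n", err.c_str());  // command -> var1 -> var2 -> var3 -> var1
  return 0;
}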
@ -696,7 +727,6 @@ bool ImplicitDepLoader::ProcessDepfileDeps(
|
||||
Node* node = state_->GetNode(*i, slash_bits);
|
||||
*implicit_dep = node;
|
||||
node->AddOutEdge(edge);
|
||||
CreatePhonyInEdge(node);
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -724,7 +754,6 @@ bool ImplicitDepLoader::LoadDepsFromLog(Edge* edge, string* err) {
|
||||
Node* node = deps->nodes[i];
|
||||
*implicit_dep = node;
|
||||
node->AddOutEdge(edge);
|
||||
CreatePhonyInEdge(node);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@ -736,21 +765,3 @@ vector<Node*>::iterator ImplicitDepLoader::PreallocateSpace(Edge* edge,
|
||||
edge->implicit_deps_ += count;
|
||||
return edge->inputs_.end() - edge->order_only_deps_ - count;
|
||||
}
|
||||
|
||||
void ImplicitDepLoader::CreatePhonyInEdge(Node* node) {
|
||||
if (node->in_edge())
|
||||
return;
|
||||
|
||||
Edge* phony_edge = state_->AddEdge(&State::kPhonyRule);
|
||||
phony_edge->generated_by_dep_loader_ = true;
|
||||
node->set_in_edge(phony_edge);
|
||||
phony_edge->outputs_.push_back(node);
|
||||
|
||||
// RecomputeDirty might not be called for phony_edge if a previous call
|
||||
// to RecomputeDirty had caused the file to be stat'ed. Because previous
|
||||
// invocations of RecomputeDirty would have seen this node without an
|
||||
// input edge (and therefore ready), we have to set outputs_ready_ to true
|
||||
// to avoid a potential stuck build. If we do call RecomputeDirty for
|
||||
// this node, it will simply set outputs_ready_ to the correct value.
|
||||
phony_edge->outputs_ready_ = true;
|
||||
}
|
||||
|
77
src/graph.h
Normal file → Executable file
@ -38,14 +38,7 @@ struct State;
|
||||
/// it's dirty, mtime, etc.
|
||||
struct Node {
|
||||
Node(const std::string& path, uint64_t slash_bits)
|
||||
: path_(path),
|
||||
slash_bits_(slash_bits),
|
||||
mtime_(-1),
|
||||
exists_(ExistenceStatusUnknown),
|
||||
dirty_(false),
|
||||
dyndep_pending_(false),
|
||||
in_edge_(NULL),
|
||||
id_(-1) {}
|
||||
: path_(path), slash_bits_(slash_bits) {}
|
||||
|
||||
/// Return false on error.
|
||||
bool Stat(DiskInterface* disk_interface, std::string* err);
|
||||
@ -104,6 +97,14 @@ struct Node {
|
||||
Edge* in_edge() const { return in_edge_; }
|
||||
void set_in_edge(Edge* edge) { in_edge_ = edge; }
|
||||
|
||||
/// Indicates whether this node was generated from a depfile or dyndep file,
|
||||
/// instead of being a regular input or output from the Ninja manifest.
|
||||
bool generated_by_dep_loader() const { return generated_by_dep_loader_; }
|
||||
|
||||
void set_generated_by_dep_loader(bool value) {
|
||||
generated_by_dep_loader_ = value;
|
||||
}
|
||||
|
||||
int id() const { return id_; }
|
||||
void set_id(int id) { id_ = id; }
|
||||
|
||||
@ -119,13 +120,13 @@ private:
|
||||
|
||||
/// Set bits starting from lowest for backslashes that were normalized to
|
||||
/// forward slashes by CanonicalizePath. See |PathDecanonicalized|.
|
||||
uint64_t slash_bits_;
|
||||
uint64_t slash_bits_ = 0;
|
||||
|
||||
/// Possible values of mtime_:
|
||||
/// -1: file hasn't been examined
|
||||
/// 0: we looked, and file doesn't exist
|
||||
/// >0: actual file's mtime, or the latest mtime of its dependencies if it doesn't exist
|
||||
TimeStamp mtime_;
|
||||
TimeStamp mtime_ = -1;
|
||||
|
||||
enum ExistenceStatus {
|
||||
/// The file hasn't been examined.
|
||||
@ -135,20 +136,27 @@ private:
|
||||
/// The path is an actual file. mtime_ will be the file's mtime.
|
||||
ExistenceStatusExists
|
||||
};
|
||||
ExistenceStatus exists_;
|
||||
ExistenceStatus exists_ = ExistenceStatusUnknown;
|
||||
|
||||
/// Dirty is true when the underlying file is out-of-date.
|
||||
/// But note that Edge::outputs_ready_ is also used in judging which
|
||||
/// edges to build.
|
||||
bool dirty_;
|
||||
bool dirty_ = false;
|
||||
|
||||
/// Store whether dyndep information is expected from this node but
|
||||
/// has not yet been loaded.
|
||||
bool dyndep_pending_;
|
||||
bool dyndep_pending_ = false;
|
||||
|
||||
/// Set to true when this node comes from a depfile, a dyndep file or the
|
||||
/// deps log. If it does not have a producing edge, the build should not
|
||||
/// abort if it is missing (as for regular source inputs). By default
|
||||
/// all nodes have this flag set to true, since the deps and build logs
|
||||
/// can be loaded before the manifest.
|
||||
bool generated_by_dep_loader_ = true;
|
||||
|
||||
/// The Edge that produces this Node, or NULL when there is no
|
||||
/// known edge to produce it.
|
||||
Edge* in_edge_;
|
||||
Edge* in_edge_ = nullptr;
|
||||
|
||||
/// All Edges that use this Node as an input.
|
||||
std::vector<Edge*> out_edges_;
|
||||
@ -157,7 +165,7 @@ private:
|
||||
std::vector<Edge*> validation_out_edges_;
|
||||
|
||||
/// A dense integer id for the node, assigned and used by DepsLog.
|
||||
int id_;
|
||||
int id_ = -1;
|
||||
};
|
||||
|
||||
/// An edge in the dependency graph; links between Nodes using Rules.
|
||||
@ -172,7 +180,8 @@ struct Edge {
|
||||
: rule_(NULL), pool_(NULL), dyndep_(NULL), env_(NULL), mark_(VisitNone),
|
||||
id_(0), outputs_ready_(false), deps_loaded_(false),
|
||||
deps_missing_(false), generated_by_dep_loader_(false),
|
||||
implicit_deps_(0), order_only_deps_(0), implicit_outs_(0) {}
|
||||
command_start_time_(0), implicit_deps_(0), order_only_deps_(0),
|
||||
implicit_outs_(0) {}
|
||||
|
||||
/// Return true if all inputs' in-edges are ready.
|
||||
bool AllInputsReady() const;
|
||||
@ -198,19 +207,20 @@ struct Edge {
|
||||
// Append all edge explicit inputs to |*out|. Possibly with shell escaping.
|
||||
void CollectInputs(bool shell_escape, std::vector<std::string>* out) const;
|
||||
|
||||
const Rule* rule_;
|
||||
Pool* pool_;
|
||||
const Rule* rule_ = nullptr;
|
||||
Pool* pool_ = nullptr;
|
||||
std::vector<Node*> inputs_;
|
||||
std::vector<Node*> outputs_;
|
||||
std::vector<Node*> validations_;
|
||||
Node* dyndep_;
|
||||
BindingEnv* env_;
|
||||
VisitMark mark_;
|
||||
size_t id_;
|
||||
bool outputs_ready_;
|
||||
bool deps_loaded_;
|
||||
bool deps_missing_;
|
||||
bool generated_by_dep_loader_;
|
||||
Node* dyndep_ = nullptr;
|
||||
BindingEnv* env_ = nullptr;
|
||||
VisitMark mark_ = VisitNone;
|
||||
size_t id_ = 0;
|
||||
bool outputs_ready_ = false;
|
||||
bool deps_loaded_ = false;
|
||||
bool deps_missing_ = false;
|
||||
bool generated_by_dep_loader_ = false;
|
||||
TimeStamp command_start_time_ = 0;
|
||||
|
||||
const Rule& rule() const { return *rule_; }
|
||||
Pool* pool() const { return pool_; }
|
||||
@ -225,8 +235,8 @@ struct Edge {
|
||||
// don't cause the target to rebuild.
|
||||
// These are stored in inputs_ in that order, and we keep counts of
|
||||
// #2 and #3 when we need to access the various subsets.
|
||||
int implicit_deps_;
|
||||
int order_only_deps_;
|
||||
int implicit_deps_ = 0;
|
||||
int order_only_deps_ = 0;
|
||||
bool is_implicit(size_t index) {
|
||||
return index >= inputs_.size() - order_only_deps_ - implicit_deps_ &&
|
||||
!is_order_only(index);
|
||||
@ -240,7 +250,7 @@ struct Edge {
|
||||
// 2) implicit outs, which the target generates but are not part of $out.
|
||||
// These are stored in outputs_ in that order, and we keep a count of
|
||||
// #2 to use when we need to access the various subsets.
|
||||
int implicit_outs_;
|
||||
int implicit_outs_ = 0;
|
||||
bool is_implicit_out(size_t index) const {
|
||||
return index >= outputs_.size() - implicit_outs_;
|
||||
}
|
||||
@ -248,6 +258,10 @@ struct Edge {
|
||||
bool is_phony() const;
|
||||
bool use_console() const;
|
||||
bool maybe_phonycycle_diagnostic() const;
|
||||
|
||||
// Historical info: how long did this edge take last time,
|
||||
// as per .ninja_log, if known? Defaults to -1 if unknown.
|
||||
int64_t prev_elapsed_time_millis = -1;
|
||||
};
|
||||
|
||||
struct EdgeCmp {
|
||||
@ -295,11 +309,6 @@ struct ImplicitDepLoader {
|
||||
/// an iterator pointing at the first new space.
|
||||
std::vector<Node*>::iterator PreallocateSpace(Edge* edge, int count);
|
||||
|
||||
/// If we don't have a edge that generates this input already,
|
||||
/// create one; this makes us not abort if the input is missing,
|
||||
/// but instead will rebuild in that circumstance.
|
||||
void CreatePhonyInEdge(Node* node);
|
||||
|
||||
State* state_;
|
||||
DiskInterface* disk_interface_;
|
||||
DepsLog* deps_log_;
|
||||
|
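The graph.h hunks above add the per-node generated_by_dep_loader_ flag described in the comment: a missing file with no producing edge should only abort the build when it is a regular manifest input, not when it was discovered through a depfile, a dyndep file or the deps log. A toy decision function capturing that rule; the free function and its boolean-only signature are illustrative assumptions:

#include <cstdio>

// Toy version of the rule: a missing file with no producing edge is fatal
// only when it is a plain manifest input; nodes known solely from a depfile,
// dyndep file or the deps log are tolerated. Hypothetical free function, not
// ninja's API.
bool MissingInputIsFatal(bool exists, bool has_in_edge,
                         bool generated_by_dep_loader) {
  if (exists || has_in_edge)
    return false;                   // present, or something will build it
  return !generated_by_dep_loader;  // plain source inputs must exist
}

int main() {
  std::printf("%d\n", MissingInputIsFatal(false, false, false));  // 1: source listed in the manifest
  std::printf("%d\n", MissingInputIsFatal(false, false, true));   // 0: stale header from an old depfile
  return 0;
}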
1
src/graph_test.cc
Normal file → Executable file
@ -977,3 +977,4 @@ TEST_F(GraphTest, PhonyDepsMtimes) {
|
||||
EXPECT_EQ(out1->mtime(), out1Mtime1);
|
||||
EXPECT_TRUE(out1->dirty());
|
||||
}
|
||||
|
||||
|
0
src/graphviz.cc
Normal file → Executable file
0
src/graphviz.h
Normal file → Executable file
0
src/hash_collision_bench.cc
Normal file → Executable file
44
src/hash_map.h
Normal file → Executable file
@ -53,7 +53,6 @@ unsigned int MurmurHash2(const void* key, size_t len) {
|
||||
return h;
|
||||
}
|
||||
|
||||
#if (__cplusplus >= 201103L) || (_MSC_VER >= 1900)
|
||||
#include <unordered_map>
|
||||
|
||||
namespace std {
|
||||
@ -68,56 +67,13 @@ struct hash<StringPiece> {
|
||||
};
|
||||
}
|
||||
|
||||
#elif defined(_MSC_VER)
|
||||
#include <hash_map>
|
||||
|
||||
using stdext::hash_map;
|
||||
using stdext::hash_compare;
|
||||
|
||||
struct StringPieceCmp : public hash_compare<StringPiece> {
|
||||
size_t operator()(const StringPiece& key) const {
|
||||
return MurmurHash2(key.str_, key.len_);
|
||||
}
|
||||
bool operator()(const StringPiece& a, const StringPiece& b) const {
|
||||
int cmp = memcmp(a.str_, b.str_, min(a.len_, b.len_));
|
||||
if (cmp < 0) {
|
||||
return true;
|
||||
} else if (cmp > 0) {
|
||||
return false;
|
||||
} else {
|
||||
return a.len_ < b.len_;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
#else
|
||||
#include <ext/hash_map>
|
||||
|
||||
using __gnu_cxx::hash_map;
|
||||
|
||||
namespace __gnu_cxx {
|
||||
template<>
|
||||
struct hash<StringPiece> {
|
||||
size_t operator()(StringPiece key) const {
|
||||
return MurmurHash2(key.str_, key.len_);
|
||||
}
|
||||
};
|
||||
}
|
||||
#endif
|
||||
|
||||
/// A template for hash_maps keyed by a StringPiece whose string is
|
||||
/// owned externally (typically by the values). Use like:
|
||||
/// ExternalStringHash<Foo*>::Type foos; to make foos into a hash
|
||||
/// mapping StringPiece => Foo*.
|
||||
template<typename V>
|
||||
struct ExternalStringHashMap {
|
||||
#if (__cplusplus >= 201103L) || (_MSC_VER >= 1900)
|
||||
typedef std::unordered_map<StringPiece, V> Type;
|
||||
#elif defined(_MSC_VER)
|
||||
typedef hash_map<StringPiece, V, StringPieceCmp> Type;
|
||||
#else
|
||||
typedef hash_map<StringPiece, V> Type;
|
||||
#endif
|
||||
};
|
||||
|
||||
#endif // NINJA_MAP_H_
|
||||
|
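The hash_map.h hunk above drops the pre-C++11 hash_map fallbacks and keeps only std::unordered_map keyed by StringPiece, with a MurmurHash2-based std::hash specialization. A small sketch of the same externally-owned-key pattern, with std::string_view and the standard hash standing in for StringPiece and its custom hasher:

#include <cstdio>
#include <string>
#include <string_view>
#include <unordered_map>

// Keys are non-owning views into strings owned elsewhere (here, by a static
// array), which is the StringPiece pattern the header keeps.
int main() {
  static const std::string owned[] = { "foo.o", "bar.o" };
  std::unordered_map<std::string_view, const std::string*> index;
  for (const std::string& s : owned)
    index.emplace(std::string_view(s), &s);  // key points into the owned string
  std::printf("%zu entries, foo.o -> %s\n", index.size(),
              index.at("foo.o")->c_str());
  return 0;
}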
0
src/includes_normalize-win32.cc
Normal file → Executable file
5
src/includes_normalize.h
Normal file → Executable file
@ -12,6 +12,9 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef INCLUDES_NORMALIZE_H_
|
||||
#define INCLUDES_NORMALIZE_H_
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
@ -38,3 +41,5 @@ struct IncludesNormalize {
|
||||
std::string relative_to_;
|
||||
std::vector<StringPiece> split_relative_to_;
|
||||
};
|
||||
|
||||
#endif // INCLUDES_NORMALIZE_H_
|
||||
|
4
src/includes_normalize_test.cc
Normal file → Executable file
@ -117,10 +117,10 @@ TEST(IncludesNormalize, LongInvalidPath) {
|
||||
// Construct max size path having cwd prefix.
|
||||
// kExactlyMaxPath = "$cwd\\a\\aaaa...aaaa\0";
|
||||
char kExactlyMaxPath[_MAX_PATH + 1];
|
||||
ASSERT_NE(_getcwd(kExactlyMaxPath, sizeof kExactlyMaxPath), NULL);
|
||||
ASSERT_STRNE(_getcwd(kExactlyMaxPath, sizeof kExactlyMaxPath), NULL);
|
||||
|
||||
int cwd_len = strlen(kExactlyMaxPath);
|
||||
ASSERT_LE(cwd_len + 3 + 1, _MAX_PATH)
|
||||
ASSERT_LE(cwd_len + 3 + 1, _MAX_PATH);
|
||||
kExactlyMaxPath[cwd_len] = '\\';
|
||||
kExactlyMaxPath[cwd_len + 1] = 'a';
|
||||
kExactlyMaxPath[cwd_len + 2] = '\\';
|
||||
|
@ -18,7 +18,6 @@
|
||||
# It expects the name of the variable as its first argument, and reads
|
||||
# stdin and writes stdout.
|
||||
|
||||
set -e
|
||||
varname="$1"
|
||||
|
||||
# 'od' and 'sed' may not be available on all platforms, and may not support the
|
||||
|
0
src/json.cc
Normal file → Executable file
0
src/json.h
Normal file → Executable file
0
src/json_test.cc
Normal file → Executable file
0
src/lexer.cc
Normal file → Executable file
0
src/lexer.h
Normal file → Executable file
0
src/lexer.in.cc
Normal file → Executable file
0
src/lexer_test.cc
Normal file → Executable file
9
src/line_printer.cc
Normal file → Executable file
@ -46,10 +46,6 @@ LinePrinter::LinePrinter() : have_blank_line_(true), console_locked_(false) {
|
||||
}
|
||||
#endif
|
||||
supports_color_ = smart_terminal_;
|
||||
if (!supports_color_) {
|
||||
const char* clicolor_force = getenv("CLICOLOR_FORCE");
|
||||
supports_color_ = clicolor_force && string(clicolor_force) != "0";
|
||||
}
|
||||
#ifdef _WIN32
|
||||
// Try enabling ANSI escape sequence support on Windows 10 terminals.
|
||||
if (supports_color_) {
|
||||
@ -61,6 +57,10 @@ LinePrinter::LinePrinter() : have_blank_line_(true), console_locked_(false) {
|
||||
}
|
||||
}
|
||||
#endif
|
||||
if (!supports_color_) {
|
||||
const char* clicolor_force = getenv("CLICOLOR_FORCE");
|
||||
supports_color_ = clicolor_force && std::string(clicolor_force) != "0";
|
||||
}
|
||||
}
|
||||
|
||||
void LinePrinter::Print(string to_print, LineType type) {
|
||||
@ -118,6 +118,7 @@ void LinePrinter::Print(string to_print, LineType type) {
|
||||
have_blank_line_ = false;
|
||||
} else {
|
||||
printf("%s\n", to_print.c_str());
|
||||
fflush(stdout);
|
||||
}
|
||||
}
|
||||
|
||||
|
0
src/line_printer.h
Normal file → Executable file
0
src/load_status.h
Normal file → Executable file
3
src/manifest_parser.cc
Normal file → Executable file
@ -14,8 +14,10 @@
|
||||
|
||||
#include "manifest_parser.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "graph.h"
|
||||
@ -416,6 +418,7 @@ bool ManifestParser::ParseEdge(string* err) {
|
||||
if (dgi == edge->inputs_.end()) {
|
||||
return lexer_.Error("dyndep '" + dyndep + "' is not an input", err);
|
||||
}
|
||||
assert(!edge->dyndep_->generated_by_dep_loader());
|
||||
}
|
||||
|
||||
return true;
|
||||
|
0
src/manifest_parser.h
Normal file → Executable file
0
src/manifest_parser_perftest.cc
Normal file → Executable file
0
src/manifest_parser_test.cc
Normal file → Executable file
79
src/metrics.cc
Normal file → Executable file
@ -18,13 +18,8 @@
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
#ifndef _WIN32
|
||||
#include <sys/time.h>
|
||||
#else
|
||||
#include <windows.h>
|
||||
#endif
|
||||
|
||||
#include <algorithm>
|
||||
#include <chrono>
|
||||
|
||||
#include "util.h"
|
||||
|
||||
@ -34,49 +29,40 @@ Metrics* g_metrics = NULL;
|
||||
|
||||
namespace {
|
||||
|
||||
#ifndef _WIN32
|
||||
/// Compute a platform-specific high-res timer value that fits into an int64.
|
||||
int64_t HighResTimer() {
|
||||
timeval tv;
|
||||
if (gettimeofday(&tv, NULL) < 0)
|
||||
Fatal("gettimeofday: %s", strerror(errno));
|
||||
return (int64_t)tv.tv_sec * 1000*1000 + tv.tv_usec;
|
||||
auto now = chrono::steady_clock::now();
|
||||
return chrono::duration_cast<chrono::steady_clock::duration>(
|
||||
now.time_since_epoch())
|
||||
.count();
|
||||
}
|
||||
|
||||
/// Convert a delta of HighResTimer() values to microseconds.
|
||||
int64_t TimerToMicros(int64_t dt) {
|
||||
// No conversion necessary.
|
||||
return dt;
|
||||
}
|
||||
#else
|
||||
int64_t LargeIntegerToInt64(const LARGE_INTEGER& i) {
|
||||
return ((int64_t)i.HighPart) << 32 | i.LowPart;
|
||||
}
|
||||
|
||||
int64_t HighResTimer() {
|
||||
LARGE_INTEGER counter;
|
||||
if (!QueryPerformanceCounter(&counter))
|
||||
Fatal("QueryPerformanceCounter: %s", GetLastErrorString().c_str());
|
||||
return LargeIntegerToInt64(counter);
|
||||
constexpr int64_t GetFrequency() {
|
||||
// If numerator isn't 1 then we lose precision and that will need to be
|
||||
// assessed.
|
||||
static_assert(std::chrono::steady_clock::period::num == 1,
|
||||
"Numerator must be 1");
|
||||
return std::chrono::steady_clock::period::den /
|
||||
std::chrono::steady_clock::period::num;
|
||||
}
|
||||
|
||||
int64_t TimerToMicros(int64_t dt) {
|
||||
static int64_t ticks_per_sec = 0;
|
||||
if (!ticks_per_sec) {
|
||||
LARGE_INTEGER freq;
|
||||
if (!QueryPerformanceFrequency(&freq))
|
||||
Fatal("QueryPerformanceFrequency: %s", GetLastErrorString().c_str());
|
||||
ticks_per_sec = LargeIntegerToInt64(freq);
|
||||
}
|
||||
|
||||
// dt is in ticks. We want microseconds.
|
||||
return (dt * 1000000) / ticks_per_sec;
|
||||
return chrono::duration_cast<chrono::microseconds>(
|
||||
std::chrono::steady_clock::duration{ dt })
|
||||
.count();
|
||||
}
|
||||
|
||||
int64_t TimerToMicros(double dt) {
|
||||
// dt is in ticks. We want microseconds.
|
||||
using DoubleSteadyClock =
|
||||
std::chrono::duration<double, std::chrono::steady_clock::period>;
|
||||
return chrono::duration_cast<chrono::microseconds>(DoubleSteadyClock{ dt })
|
||||
.count();
|
||||
}
|
||||
#endif
|
||||
|
||||
} // anonymous namespace
|
||||
|
||||
|
||||
ScopedMetric::ScopedMetric(Metric* metric) {
|
||||
metric_ = metric;
|
||||
if (!metric_)
|
||||
@ -87,7 +73,9 @@ ScopedMetric::~ScopedMetric() {
|
||||
if (!metric_)
|
||||
return;
|
||||
metric_->count++;
|
||||
int64_t dt = TimerToMicros(HighResTimer() - start_);
|
||||
// Leave in the timer's natural frequency to avoid paying the conversion cost
|
||||
// on every measurement.
|
||||
int64_t dt = HighResTimer() - start_;
|
||||
metric_->sum += dt;
|
||||
}
|
||||
|
||||
@ -112,18 +100,23 @@ void Metrics::Report() {
|
||||
for (vector<Metric*>::iterator i = metrics_.begin();
|
||||
i != metrics_.end(); ++i) {
|
||||
Metric* metric = *i;
|
||||
double total = metric->sum / (double)1000;
|
||||
double avg = metric->sum / (double)metric->count;
|
||||
uint64_t micros = TimerToMicros(metric->sum);
|
||||
double total = micros / (double)1000;
|
||||
double avg = micros / (double)metric->count;
|
||||
printf("%-*s\t%-6d\t%-8.1f\t%.1f\n", width, metric->name.c_str(),
|
||||
metric->count, avg, total);
|
||||
}
|
||||
}
|
||||
|
||||
uint64_t Stopwatch::Now() const {
|
||||
return TimerToMicros(HighResTimer());
|
||||
double Stopwatch::Elapsed() const {
|
||||
// Convert to micros after converting to double to minimize error.
|
||||
return 1e-6 * TimerToMicros(static_cast<double>(NowRaw() - started_));
|
||||
}
|
||||
|
||||
uint64_t Stopwatch::NowRaw() const {
|
||||
return HighResTimer();
|
||||
}
|
||||
|
||||
int64_t GetTimeMillis() {
|
||||
return TimerToMicros(HighResTimer()) / 1000;
|
||||
}
|
||||
|
||||
|
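The metrics.cc hunk above replaces gettimeofday/QueryPerformanceCounter with std::chrono::steady_clock, keeps each sample in the clock's native tick period, and converts to microseconds only when a value is reported. A compact sketch of that scheme; the two helper names mirror the patch, but this is a standalone illustration rather than the patched file:

#include <chrono>
#include <cstdint>
#include <cstdio>

// Sample the steady clock in its native tick period...
static int64_t HighResTimer() {
  return std::chrono::steady_clock::now().time_since_epoch().count();
}

// ...and convert tick deltas to microseconds only at reporting time.
static int64_t TimerToMicros(int64_t dt) {
  return std::chrono::duration_cast<std::chrono::microseconds>(
             std::chrono::steady_clock::duration(dt))
      .count();
}

int main() {
  int64_t start = HighResTimer();
  volatile long sink = 0;
  for (long i = 0; i < 1000000; ++i)
    sink = sink + i;  // some work to time
  int64_t ticks = HighResTimer() - start;  // kept in native ticks
  std::printf("elapsed: %lld us\n",
              static_cast<long long>(TimerToMicros(ticks)));
  return 0;
}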
20
src/metrics.h
Normal file → Executable file
@ -28,11 +28,10 @@ struct Metric {
  std::string name;
  /// Number of times we've hit the code path.
  int count;
  /// Total time (in micros) we've spent on the code path.
  /// Total time (in platform-dependent units) we've spent on the code path.
  int64_t sum;
};

/// A scoped object for recording a metric across the body of a function.
/// Used by the METRIC_RECORD macro.
struct ScopedMetric {
@ -68,15 +67,15 @@ struct Stopwatch {
  Stopwatch() : started_(0) {}

  /// Seconds since Restart() call.
  double Elapsed() const {
    return 1e-6 * static_cast<double>(Now() - started_);
  }
  double Elapsed() const;

  void Restart() { started_ = Now(); }
  void Restart() { started_ = NowRaw(); }

 private:
  uint64_t started_;
  uint64_t Now() const;
  // Return the current time using the native frequency of the high resolution
  // timer.
  uint64_t NowRaw() const;
};
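For reference, a hedged usage sketch of the Stopwatch interface declared above; it assumes compilation inside the ninja source tree, and DoWork() is a made-up stand-in workload.

#include <cstdio>
#include "metrics.h"  // assumes the ninja source tree

// Stand-in workload; anything being timed would go here.
static void DoWork() {
  volatile long long x = 0;
  for (int i = 0; i < 1000000; ++i)
    x += i;
}

int main() {
  Stopwatch sw;
  sw.Restart();  // records the raw start tick via NowRaw()
  DoWork();
  // Elapsed() converts from raw ticks to seconds only when queried.
  std::printf("took %.6f s\n", sw.Elapsed());
  return 0;
}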

/// The primary interface to metrics. Use METRIC_RECORD("foobar") at the top
@ -86,6 +85,13 @@ struct Stopwatch {
      g_metrics ? g_metrics->NewMetric(name) : NULL; \
  ScopedMetric metrics_h_scoped(metrics_h_metric);

/// A variant of METRIC_RECORD that doesn't record anything if |condition|
/// is false.
#define METRIC_RECORD_IF(name, condition) \
  static Metric* metrics_h_metric = \
      g_metrics ? g_metrics->NewMetric(name) : NULL; \
ScopedMetric metrics_h_scoped((condition) ? metrics_h_metric : NULL);
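For reference, a hedged usage sketch of the new METRIC_RECORD_IF macro; it assumes the ninja source tree, and the function name and 4096-byte threshold are illustrative, not taken from ninja.

#include <cstddef>
#include "metrics.h"  // assumes the ninja source tree

// Only accounts the metric for inputs big enough to be interesting.
void ProcessInput(const char* data, size_t len) {
  METRIC_RECORD_IF("process large input", len > 4096);
  // ... work on data would go here ...
  (void)data;
}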

extern Metrics* g_metrics;

#endif  // NINJA_METRICS_H_
0
src/minidump-win32.cc
Normal file → Executable file
0
src/missing_deps.cc
Normal file → Executable file
7
src/missing_deps.h
Normal file → Executable file
@ -19,9 +19,7 @@
#include <set>
#include <string>

#if __cplusplus >= 201103L
#include <unordered_map>
#endif

struct DepsLog;
struct DiskInterface;
@ -68,13 +66,8 @@ struct MissingDependencyScanner {
  int missing_dep_path_count_;

 private:
#if __cplusplus >= 201103L
  using InnerAdjacencyMap = std::unordered_map<Edge*, bool>;
  using AdjacencyMap = std::unordered_map<Edge*, InnerAdjacencyMap>;
#else
  typedef std::map<Edge*, bool> InnerAdjacencyMap;
  typedef std::map<Edge*, InnerAdjacencyMap> AdjacencyMap;
#endif
  AdjacencyMap adjacency_map_;
};
9
src/missing_deps_test.cc
Normal file → Executable file
@ -33,7 +33,12 @@ struct MissingDependencyScannerTest : public testing::Test {
      scanner_(&delegate_, &deps_log_, &state_, &filesystem_) {
    std::string err;
    deps_log_.OpenForWrite(kTestDepsLogFilename, &err);
    ASSERT_EQ("", err);
    EXPECT_EQ("", err);
  }

  ~MissingDependencyScannerTest() {
    // Remove test file.
    deps_log_.Close();
  }

  MissingDependencyScanner& scanner() { return scanner_; }
@ -79,6 +84,7 @@ struct MissingDependencyScannerTest : public testing::Test {
    ASSERT_EQ(1u, scanner().generator_rules_.count(rule));
  }

  ScopedFilePath scoped_file_path_ = kTestDepsLogFilename;
  MissingDependencyTestDelegate delegate_;
  Rule generator_rule_;
  Rule compile_rule_;
@ -159,4 +165,3 @@ TEST_F(MissingDependencyScannerTest, CycleInGraph) {
  std::vector<Node*> nodes = state_.RootNodes(&err);
  ASSERT_NE("", err);
}
0
src/msvc_helper-win32.cc
Normal file → Executable file
5
src/msvc_helper.h
Normal file → Executable file
@ -12,6 +12,9 @@
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef MSVC_HELPER_H_
#define MSVC_HELPER_H_

#include <string>

std::string EscapeForDepfile(const std::string& path);
@ -30,3 +33,5 @@ struct CLWrapper {

  void* env_block_;
};

#endif  // MSVC_HELPER_H_
0
src/msvc_helper_main-win32.cc
Normal file → Executable file
Some files were not shown because too many files have changed in this diff.