xemu/tests/Makefile.include
Paolo Bonzini 09e93326e4 build: replace ninjatool with ninja
Now that the build is done entirely by Meson, there is no need
to keep the Makefile conversion.  Instead, we can ask Ninja about
the targets it exposes and forward them.

The main advantages are, from smallest to largest:

- reducing the possible namespace pollution within the Makefile

- removal of a relatively large Python program

- faster builds, because parsing Makefile.ninja is slower than
parsing build.ninja, and faster builds after Meson runs, because
we do not have to generate Makefile.ninja.

- tracking of command lines, which provides more accurate rebuilds

In addition, the change removes the requirement for GNU make 3.82, which
was annoying on Mac, and avoids bugs on Windows due to ninjatool not
knowing how to convert Windows escapes to POSIX escapes.

Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
2020-10-17 10:45:51 -04:00
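
The commit message describes the approach only at a high level. As a rough
illustration (not the code QEMU actually uses), a wrapper Makefile can ask
Ninja which targets it knows about and forward any such goal to it; the
sketch below assumes ninja is on the PATH and that make is invoked from the
build directory containing build.ninja:

# Illustrative sketch only -- not taken from the QEMU tree.
# "ninja -t targets all" prints "target: rule" pairs; strip the rule names
# and declare a phony forwarding rule for every Ninja target.
ninja-targets := $(shell ninja -t targets all 2>/dev/null | sed 's/:.*//')

.PHONY: $(ninja-targets)
$(ninja-targets):
	ninja $@

The Makefile shown below consumes such a list only indirectly, through
$(filter qemu-system-%, $(ninja-targets)) in its check-block rule.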

Makefile, 159 lines, 5.5 KiB

# -*- Mode: makefile -*-
.PHONY: check-help
check-help:
@echo "Regression testing targets:"
@echo
@echo " $(MAKE) check Run block, qapi-schema, unit, softfloat, qtest and decodetree tests"
@echo
@echo " $(MAKE) check-qtest-TARGET Run qtest tests for given target"
@echo " $(MAKE) check-qtest Run qtest tests"
@echo " $(MAKE) check-unit Run qobject tests"
@echo " $(MAKE) check-speed Run qobject speed tests"
@echo " $(MAKE) check-qapi-schema Run QAPI schema tests"
@echo " $(MAKE) check-block Run block tests"
ifeq ($(CONFIG_TCG),y)
@echo " $(MAKE) check-tcg Run TCG tests"
@echo " $(MAKE) check-softfloat Run FPU emulation tests"
endif
@echo " $(MAKE) check-acceptance Run all acceptance (functional) tests"
@echo
@echo " $(MAKE) check-report.tap Generates an aggregated TAP test report"
@echo " $(MAKE) check-venv Creates a Python venv for tests"
@echo " $(MAKE) check-clean Clean the tests and related data"
@echo
@echo "The following are useful for CI builds"
@echo " $(MAKE) check-build Build most test binaris"
@echo " $(MAKE) get-vm-images Downloads all images used by acceptance tests, according to configured targets (~350 MB each, 1.5 GB max)"
@echo
@echo
@echo "The variable SPEED can be set to control the gtester speed setting."
@echo "Default options are -k and (for $(MAKE) V=1) --verbose; they can be"
@echo "changed with variable GTESTER_OPTIONS."
ifneq ($(wildcard config-host.mak),)
export SRC_PATH
# Get the list of all supported sysemu targets
SYSEMU_TARGET_LIST := $(subst -softmmu.mak,,$(notdir \
   $(wildcard $(SRC_PATH)/default-configs/*-softmmu.mak)))
SPEED = quick
# Per guest TCG tests
BUILD_TCG_TARGET_RULES=$(patsubst %,build-tcg-tests-%, $(TARGET_DIRS))
CLEAN_TCG_TARGET_RULES=$(patsubst %,clean-tcg-tests-%, $(TARGET_DIRS))
RUN_TCG_TARGET_RULES=$(patsubst %,run-tcg-tests-%, $(TARGET_DIRS))
# Probe for the Docker Builds needed for each build
$(foreach PROBE_TARGET,$(TARGET_DIRS), \
	$(eval -include $(SRC_PATH)/tests/tcg/Makefile.prereqs))
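
# build-tcg-tests-<target> builds the guest tests for one emulation target;
# when TCG plugins are configured, the plugin tests are built first.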
$(BUILD_TCG_TARGET_RULES): build-tcg-tests-%: $(if $(CONFIG_PLUGIN),test-plugins)
	$(call quiet-command,$(MAKE) $(SUBDIR_MAKEFLAGS) \
		-f $(SRC_PATH)/tests/tcg/Makefile.qemu \
		SRC_PATH=$(SRC_PATH) \
		V="$(V)" TARGET="$*" guest-tests, \
		"BUILD", "TCG tests for $*")

$(RUN_TCG_TARGET_RULES): run-tcg-tests-%: build-tcg-tests-% all
	$(call quiet-command,$(MAKE) $(SUBDIR_MAKEFLAGS) \
		-f $(SRC_PATH)/tests/tcg/Makefile.qemu \
		SRC_PATH=$(SRC_PATH) SPEED="$(SPEED)" \
		V="$(V)" TARGET="$*" run-guest-tests, \
		"RUN", "TCG tests for $*")

$(CLEAN_TCG_TARGET_RULES): clean-tcg-tests-%:
	$(call quiet-command,$(MAKE) $(SUBDIR_MAKEFLAGS) \
		-f $(SRC_PATH)/tests/tcg/Makefile.qemu \
		SRC_PATH=$(SRC_PATH) TARGET="$*" clean-guest-tests, \
		"CLEAN", "TCG tests for $*")

.PHONY: build-tcg
build-tcg: $(BUILD_TCG_TARGET_RULES)
.PHONY: check-tcg
check-tcg: $(RUN_TCG_TARGET_RULES)
.PHONY: clean-tcg
clean-tcg: $(CLEAN_TCG_TARGET_RULES)
# Python venv for running tests
.PHONY: check-venv check-acceptance
TESTS_VENV_DIR=$(BUILD_DIR)/tests/venv
TESTS_VENV_REQ=$(SRC_PATH)/tests/requirements.txt
TESTS_RESULTS_DIR=$(BUILD_DIR)/tests/results
# Controls the output generated by Avocado when running tests.
# Any number of comma-separated loggers are accepted. For more
# information please refer to "avocado --help".
AVOCADO_SHOW=app
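# Turn each configured softmmu target into an Avocado "-t arch:<target>" tag
# filter, so that only tests for the built targets are selected.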
AVOCADO_TAGS=$(patsubst %-softmmu,-t arch:%, $(filter %-softmmu,$(TARGET_DIRS)))
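
# The venv directory itself acts as the stamp file: it is touched after a
# successful pip install so that it stays newer than requirements.txt.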
$(TESTS_VENV_DIR): $(TESTS_VENV_REQ)
	$(call quiet-command, \
            $(PYTHON) -m venv --system-site-packages $@, \
            VENV, $@)
	$(call quiet-command, \
            $(TESTS_VENV_DIR)/bin/python -m pip -q install -r $(TESTS_VENV_REQ), \
            PIP, $(TESTS_VENV_REQ))
	$(call quiet-command, touch $@)

$(TESTS_RESULTS_DIR):
	$(call quiet-command, mkdir -p $@, \
            MKDIR, $@)

check-venv: $(TESTS_VENV_DIR)
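
# Select which Fedora 31 images to download: ppc64 is mapped to ppc64le and
# only the configured targets that appear in FEDORA_31_ARCHES are fetched.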
FEDORA_31_ARCHES_CANDIDATES=$(patsubst ppc64,ppc64le,$(TARGETS))
FEDORA_31_ARCHES := x86_64 aarch64 ppc64le s390x
FEDORA_31_DOWNLOAD=$(filter $(FEDORA_31_ARCHES),$(FEDORA_31_ARCHES_CANDIDATES))
# download one specific Fedora 31 image
get-vm-image-fedora-31-%: check-venv
	$(call quiet-command, \
             $(TESTS_VENV_DIR)/bin/python -m avocado vmimage get \
             --distro=fedora --distro-version=31 --arch=$*, \
             "AVOCADO", "Downloading acceptance tests VM image for $*")

# download all vm images, according to defined targets
get-vm-images: check-venv $(patsubst %,get-vm-image-fedora-31-%, $(FEDORA_31_DOWNLOAD))
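
# Run the acceptance tests from the venv, limited by AVOCADO_TAGS to the
# configured targets; --failfast is passed only when GITLAB_CI is not set.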
check-acceptance: check-venv $(TESTS_RESULTS_DIR) get-vm-images
	$(call quiet-command, \
            $(TESTS_VENV_DIR)/bin/python -m avocado \
            --show=$(AVOCADO_SHOW) run --job-results-dir=$(TESTS_RESULTS_DIR) \
            --filter-by-tags-include-empty --filter-by-tags-include-empty-key \
            $(AVOCADO_TAGS) \
            $(if $(GITLAB_CI),,--failfast) tests/acceptance, \
            "AVOCADO", "tests/acceptance")

# Consolidated targets
.PHONY: check-block check check-clean get-vm-images
check:
ifeq ($(CONFIG_TOOLS)$(CONFIG_POSIX),yy)
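# socket_scm_helper is only built, and only needed, when CONFIG_LINUX is set.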
QEMU_IOTESTS_HELPERS-$(CONFIG_LINUX) = tests/qemu-iotests/socket_scm_helper$(EXESUF)
check: check-block
check-block: $(SRC_PATH)/tests/check-block.sh qemu-img$(EXESUF) \
		qemu-io$(EXESUF) qemu-nbd$(EXESUF) $(QEMU_IOTESTS_HELPERS-y) \
		$(filter qemu-system-%, $(ninja-targets))
	@$<
endif
check-build: $(QEMU_IOTESTS_HELPERS-y)
check-clean:
	rm -rf $(TESTS_VENV_DIR) $(TESTS_RESULTS_DIR)

clean: check-clean
# For backwards compatibility
check-speed: bench-speed
endif