mirror of https://gitee.com/openharmony/third_party_vulkan-loader, synced 2024-11-27 09:21:56 +00:00
layers: Automated checking of validation layer documentation
Add the vk_layer_documentation_generate.py script to check the hand-written vk_validation_layer_details.md doc against the layer source code. Currently this verifies documented validation checks against the checks declared in the layer source headers, and verifies that API calls named in the document are actual API calls. This change also includes many documentation and layer updates to get everything in sync and allow the script to pass.
parent 50469450fd
commit 2545973ec4
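For context, the new checker is driven from the command line; a minimal invocation (assuming it is run from the project root, as the comment added to the shell test runner below notes) would look something like:

    python3 vk_layer_documentation_generate.py --validate
    python3 vk_layer_documentation_generate.py --print_doc_checks

The --validate, --print_doc_checks, --print_structs, --layer_doc, --in_headers, and --in_source options are all defined by the script's own argument parser; --layer_doc defaults to layers/vk_validation_layer_details.md.
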
@@ -87,6 +87,7 @@ if (NOT WIN32)
    COMMAND ln -sf ${CMAKE_CURRENT_SOURCE_DIR}/run_all_tests_with_layers.sh
    COMMAND ln -sf ${CMAKE_CURRENT_SOURCE_DIR}/layer_test_suite.py
    COMMAND ln -sf ${CMAKE_CURRENT_SOURCE_DIR}/vktracereplay.sh
    COMMAND ln -sf ${CMAKE_CURRENT_SOURCE_DIR}/vkvalidatelayerdoc.sh
    VERBATIM
)
endif()
@@ -96,10 +97,12 @@ else()
    FILE(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/_run_all_tests.ps1 RUN_ALL)
    FILE(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/_run_all_tests_with_layers.ps1 RUN_ALL_WITH_LAYERS)
    FILE(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/_vktracereplay.ps1 VKTRACEREPLAY)
    FILE(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/_vkvalidatelayerdoc.ps1 VKVALIDATELAYERDOC)
    add_custom_target(binary-dir-symlinks ALL
        COMMAND ${CMAKE_COMMAND} -E copy_if_different ${RUN_ALL} run_all_tests.ps1
        COMMAND ${CMAKE_COMMAND} -E copy_if_different ${RUN_ALL_WITH_LAYERS} run_all_tests_with_layers.ps1
        COMMAND ${CMAKE_COMMAND} -E copy_if_different ${VKTRACEREPLAY} vktracereplay.ps1
        COMMAND ${CMAKE_COMMAND} -E copy_if_different ${VKVALIDATELAYERDOC} vkvalidatelayerdoc.ps1
        COMMAND IF NOT EXIST golden mklink /J golden ${GOLDEN}
        VERBATIM
    )
@@ -18,3 +18,4 @@ $env:VK_LAYER_PATH = "..\layers\$dPath"
& $dPath\vk_render_tests
& $dPath\vk_layer_validation_tests
.\vktracereplay.ps1 "-$dPath"
.\vkvalidatelayerdoc.ps1
@@ -27,3 +27,9 @@ set -e

# vktracereplay.sh tests vktrace trace and replay
./vktracereplay.sh

# vk_layer_documentation_generate.py is run from the project root dir
# the --validate option will verify that the checks declared in the layer header
# files match those documented in layers/vk_validation_layer_details.md
./vkvalidatelayerdoc.sh
vk_layer_documentation_generate.py (new executable file, 321 lines)
@@ -0,0 +1,321 @@
#!/usr/bin/env python3
#
# VK
#
# Copyright (C) 2015 LunarG, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

import argparse
import os
import sys
import vulkan
import platform

# vk_layer_documentation_generate.py overview
# This script is intended to generate documentation based on the vulkan validation layers
# It parses known validation layer headers for details of the validation checks
# It parses validation layer source files for the specific code where checks are implemented

# NOTE : Initially the script is performing validation of a hand-written document
#  Right now it does 3 checks:
#  1. Verify ENUM codes declared in source are documented
#  2. Verify ENUM codes in document are declared in source
#  3. Verify API function names in document are in the actual API header (vulkan.py)
#  Currently the script will flag errors in all of these cases

# TODO : Need a formal specification of the syntax for doc generation
#  Initially, these are the basics:
#  1. Validation checks have unique ENUM values defined in validation layer header
#  2. ENUM includes comments for 1-line overview of check and more detailed description
#  3. Actual code implementing checks includes ENUM value in callback
#  4. Code to test checks should include reference to ENUM

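# Illustrative sketch (assumed layout, not an actual excerpt from a layer header) of the
# kind of enum block that LayerParser.parse() below scans for, using the DRAW_STATE_ERROR
# name registered in layer_inputs:
#   typedef enum _DRAW_STATE_ERROR
#   {
#       DRAWSTATE_NONE,              // one unique ENUM value per validation check
#       DRAWSTATE_EXAMPLE_CHECK,     // 1-line overview of the check goes in this comment
#   } DRAW_STATE_ERROR;
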
# TODO : Need list of known validation layers to use as default input
#  Just a couple of flat lists right now, but may need to make this an input file
#  or at least a more dynamic data structure
layer_inputs = { 'draw_state' : {'header' : 'layers/draw_state.h',
                                 'source' : 'layers/draw_state.cpp',
                                 'generated' : False,
                                 'error_enum' : 'DRAW_STATE_ERROR'},
                 'mem_tracker' : {'header' : 'layers/mem_tracker.h',
                                  'source' : 'layers/mem_tracker.cpp',
                                  'generated' : False,
                                  'error_enum' : 'MEM_TRACK_ERROR'},
                 'shader_checker' : {'header' : 'layers/shader_checker.h',
                                     'source' : 'layers/shader_checker.cpp',
                                     'generated' : False,
                                     'error_enum' : 'SHADER_CHECKER_ERROR'},
                 'threading' : {'header' : 'layers/threading.h',
                                'source' : 'dbuild/layers/threading.cpp',
                                'generated' : True,
                                'error_enum' : 'THREADING_CHECKER_ERROR'},
                 'object_track' : {'header' : 'layers/object_track.h',
                                   'source' : 'dbuild/layers/object_track.cpp',
                                   'generated' : True,
                                   'error_enum' : 'OBJECT_TRACK_ERROR'},
               }

builtin_headers = [layer_inputs[ln]['header'] for ln in layer_inputs]
builtin_source = [layer_inputs[ln]['source'] for ln in layer_inputs]

# List of extension functions in layers that are included in documentation, but not in the vulkan.py API set
layer_extension_functions = ['objTrackGetObjects', 'objTrackGetObjectsOfType']

def handle_args():
    parser = argparse.ArgumentParser(description='Generate layer documentation from source.')
    parser.add_argument('--in_headers', required=False, default=builtin_headers, help='The input layer header files from which code will be generated.')
    parser.add_argument('--in_source', required=False, default=builtin_source, help='The input layer source files from which code will be generated.')
    parser.add_argument('--layer_doc', required=False, default='layers/vk_validation_layer_details.md', help='Existing layer document to be validated against actual layers.')
    parser.add_argument('--validate', action='store_true', default=False, help='Validate that there are no mismatches between layer documentation and source. This includes cross-checking the validation checks, and making sure documented Vulkan API calls exist.')
    parser.add_argument('--print_structs', action='store_true', default=False, help='Primarily a debug option that prints out internal data structs used to generate layer docs.')
    parser.add_argument('--print_doc_checks', action='store_true', default=False, help='Primarily a debug option that prints out all of the checks that are documented.')
    return parser.parse_args()

# Little helper class for coloring cmd line output
class bcolors:

    def __init__(self):
        self.GREEN = '\033[0;32m'
        self.RED = '\033[0;31m'
        self.ENDC = '\033[0m'
        if 'Linux' != platform.system():
            self.GREEN = ''
            self.RED = ''
            self.ENDC = ''

    def green(self):
        return self.GREEN

    def red(self):
        return self.RED

    def endc(self):
        return self.ENDC

# Class to parse the layer source code and store details in internal data structs
class LayerParser:
    def __init__(self, header_file_list, source_file_list):
        self.header_files = header_file_list
        self.source_files = source_file_list
        self.layer_dict = {}
        self.api_dict = {}

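    # Illustrative sketch (assumed, not captured from an actual run) of the shape that
    # parse() gives self.layer_dict: keyed by layer name, each holding the list of error
    # ENUM names found in that layer's header:
    #   {'draw_state': {'CHECKS': ['DRAWSTATE_NONE', 'DRAWSTATE_EXAMPLE_CHECK', ...]},
    #    'mem_tracker': {'CHECKS': [...]},
    #    ...}
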
    # Parse layer header files into internal dict data structs
    def parse(self):
        # For each header file, parse details into dicts
        # TODO : Should have a global dict element to track overall list of checks
        store_enum = False
        for hf in self.header_files:
            layer_name = os.path.basename(hf).split('.')[0]
            self.layer_dict[layer_name] = {} # initialize a new dict for this layer
            self.layer_dict[layer_name]['CHECKS'] = [] # enum of checks is stored in a list
            #print('Parsing header file %s as layer name %s' % (hf, layer_name))
            with open(hf) as f:
                for line in f:
                    if True in [line.strip().startswith(comment) for comment in ['//', '/*']]:
                        #print("Skipping comment line: %s" % line)
                        # For now skipping lines starting w/ comment, may use these to capture
                        #  documentation in the future
                        continue

                    # Find enums
                    if store_enum:
                        if '}' in line: # we're done with enum definition
                            store_enum = False
                            continue
                        # grab the enum name as a unique check
                        if ',' in line:
                            # TODO : When documentation for a check is contained in the source,
                            #  this is where we should also capture that documentation so that
                            #  it can then be transformed into the desired doc format
                            enum_name = line.split(',')[0].strip()
                            # Flag an error if we have already seen this enum
                            if enum_name in self.layer_dict[layer_name]['CHECKS']:
                                print('ERROR : %s layer has duplicate error enum: %s' % (layer_name, enum_name))
                            self.layer_dict[layer_name]['CHECKS'].append(enum_name)
                    # If the line includes 'typedef', 'enum', and the expected enum name, start capturing enums
                    if False not in [ex in line for ex in ['typedef', 'enum', layer_inputs[layer_name]['error_enum']]]:
                        store_enum = True

        # For each source file, parse into dicts
        for sf in self.source_files:
            #print('Parsing source file %s' % sf)
            pass
            # TODO : In the source file we want to see where checks actually occur
            #  Need to build a function tree of checks so that we know all of the
            #  checks that occur under a top-level Vulkan API call
            #  Eventually in the validation we can flag ENUMs that aren't being
            #  used in the source, and we can document source code lines as well
            #  as Vulkan API calls where each specific ENUM check is made

    def print_structs(self):
        print('This is where I print the data structs')
        for layer in self.layer_dict:
            print('Layer %s has %i checks:\n%s' % (layer, len(self.layer_dict[layer]['CHECKS'])-1, "\n\t".join(self.layer_dict[layer]['CHECKS'])))

# Class to parse the hand-written md layer documentation into a dict and then validate its contents
class LayerDoc:
    def __init__(self, source_file):
        self.layer_doc_filename = source_file
        self.txt_color = bcolors()
        # Main data struct to store info from the layer doc
        self.layer_doc_dict = {}
        # Comprehensive list of all validation checks recorded in the doc
        self.enum_list = []

    # Parse the contents of the doc into the data struct
    def parse(self):
        layer_name = 'INIT'
        parse_layer_details = False
        detail_trigger = '| Check | '
        parse_pending_work = False
        pending_trigger = ' Pending Work'
        parse_overview = False
        overview_trigger = ' Overview'
        enum_prefix = ''

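        # Illustrative sketch of the markdown shape this parser expects (inferred from the
        # triggers above; not copied from the actual doc):
        #   ## <LayerName> Overview        -> starts a new layer section; following lines are
        #                                     accumulated into that layer's 'overview' text
        #   | Check | ... | PREFIX_* | ... -> details table header; a '*'-marked token in the
        #                                     third column supplies a common ENUM prefix
        #   | summary | details | ENUM | relevant APIs | tests | notes |
        #                                  -> one details row per check
        #   ## <LayerName> Pending Work    -> numbered items below it are collected as 'pending'
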
        with open(self.layer_doc_filename) as f:
            for line in f:
                if parse_pending_work:
                    if '.' in line and line.strip()[0].isdigit():
                        todo_item = line.split('.')[1].strip()
                        self.layer_doc_dict[layer_name]['pending'].append(todo_item)
                if pending_trigger in line and '##' in line:
                    parse_layer_details = False
                    parse_pending_work = True
                    parse_overview = False
                    self.layer_doc_dict[layer_name]['pending'] = []
                if parse_layer_details:
                    # Grab details but skip the format line with a bunch of '-' chars
                    if '|' in line and line.count('-') < 20:
                        detail_sections = line.split('|')
                        #print("Details elements from line %s: %s" % (line, detail_sections))
                        check_name = '%s%s' % (enum_prefix, detail_sections[3].strip())
                        if '_NA' in check_name:
                            # TODO : Should clean up these NA checks in the doc, skipping them for now
                            continue
                        self.enum_list.append(check_name)
                        self.layer_doc_dict[layer_name][check_name] = {}
                        self.layer_doc_dict[layer_name][check_name]['summary_txt'] = detail_sections[1].strip()
                        self.layer_doc_dict[layer_name][check_name]['details_txt'] = detail_sections[2].strip()
                        self.layer_doc_dict[layer_name][check_name]['api_list'] = detail_sections[4].split()
                        self.layer_doc_dict[layer_name][check_name]['tests'] = detail_sections[5].split()
                        self.layer_doc_dict[layer_name][check_name]['notes'] = detail_sections[6].strip()
                        # strip any unwanted commas from api and test names
                        self.layer_doc_dict[layer_name][check_name]['api_list'] = [a.strip(',') for a in self.layer_doc_dict[layer_name][check_name]['api_list']]
                        self.layer_doc_dict[layer_name][check_name]['tests'] = [a.strip(',') for a in self.layer_doc_dict[layer_name][check_name]['tests']]
                # Trigger details parsing when we have the table header
                if detail_trigger in line:
                    parse_layer_details = True
                    parse_pending_work = False
                    parse_overview = False
                    enum_txt = line.split('|')[3]
                    if '*' in enum_txt:
                        enum_prefix = enum_txt.split()[-1].strip('*').strip()
                        #print('prefix: %s' % enum_prefix)
                if parse_overview:
                    self.layer_doc_dict[layer_name]['overview'] += line
                if overview_trigger in line and '##' in line:
                    parse_layer_details = False
                    parse_pending_work = False
                    parse_overview = True
                    layer_name = line.split()[1]
                    self.layer_doc_dict[layer_name] = {}
                    self.layer_doc_dict[layer_name]['overview'] = ''

    # Verify that checks and api references in the layer doc match reality
    #  Report API calls from the doc that are not found in the API
    #  Report checks from the doc that are not in the actual layers
    #  Report checks from the layers that are not captured in the doc
    def validate(self, layer_dict):
        # Count the number of errors found and return it
        errors_found = 0
        # First we'll go through the doc data structures and flag any issues
        for chk in self.enum_list:
            doc_layer_found = False
            for real_layer in layer_dict:
                if chk in layer_dict[real_layer]['CHECKS']:
                    #print('Found actual layer check %s in doc' % (chk))
                    doc_layer_found = True
                    continue
            if not doc_layer_found:
                print(self.txt_color.red() + 'Actual layers do not contain documented check: %s' % (chk) + self.txt_color.endc())
                errors_found += 1
        # Now go through the API names in the doc and verify they're real
        #  First we're going to transform proto names from vulkan.py into a single list
        core_api_names = [p.name for p in vulkan.core.protos]
        wsi_s_names = [p.name for p in vulkan.wsi_swapchain.protos]
        wsi_ds_names = [p.name for p in vulkan.wsi_device_swapchain.protos]
        dbg_rpt_names = [p.name for p in vulkan.debug_report_lunarg.protos]
        dbg_mrk_names = [p.name for p in vulkan.debug_marker_lunarg.protos]
        api_names = core_api_names + wsi_s_names + wsi_ds_names + dbg_rpt_names + dbg_mrk_names
        for ln in self.layer_doc_dict:
            for chk in self.layer_doc_dict[ln]:
                if chk in ['overview', 'pending']:
                    continue
                for api in self.layer_doc_dict[ln][chk]['api_list']:
                    # Doc entries use full 'vkFoo' names while the vulkan.py proto names drop
                    #  the leading 'vk', hence the api[2:] comparison
                    if api[2:] not in api_names and api not in layer_extension_functions:
                        print(self.txt_color.red() + 'Doc references invalid function: %s' % (api) + self.txt_color.endc())
                        errors_found += 1
        # Now go through all of the actual checks in the layers and make sure they're covered in the doc
        for ln in layer_dict:
            for chk in layer_dict[ln]['CHECKS']:
                if chk not in self.enum_list:
                    print(self.txt_color.red() + 'Doc is missing check: %s' % (chk) + self.txt_color.endc())
                    errors_found += 1

        return errors_found

    # Print all of the checks captured in the doc
    def print_checks(self):
        print('Checks captured in doc:\n%s' % ('\n\t'.join(self.enum_list)))

def main(argv=None):
    # Parse args
    opts = handle_args()
    # Create parser for layer files
    layer_parser = LayerParser(opts.in_headers, opts.in_source)
    # Parse files into internal data structs
    layer_parser.parse()

    # Generate requested types of output
    if opts.print_structs: # Print details of internal data structs
        layer_parser.print_structs()

    layer_doc = LayerDoc(opts.layer_doc)
    layer_doc.parse()
    if opts.print_doc_checks:
        layer_doc.print_checks()

    if opts.validate:
        num_errors = layer_doc.validate(layer_parser.layer_dict)
        if (0 == num_errors):
            txt_color = bcolors()
            print(txt_color.green() + 'No mismatches found between %s and implementation' % (os.path.basename(opts.layer_doc)) + txt_color.endc())
        else:
            return num_errors
    return 0

if __name__ == "__main__":
    sys.exit(main())