!64 Fix the issue where the application parsing tool cannot parse a single file

Merge pull request !64 from 姜前进/1115
openharmony_ci 2023-11-16 02:05:35 +00:00 committed by Gitee
commit 0c3aac4975
3 changed files with 288 additions and 760 deletions


@@ -14,9 +14,11 @@
 # limitations under the License.
 import json
+import os
 from typedef.check.check import ApiResultInfo, FileDocInfo, OutputTxt
 from coreImpl.check.check_doc import process_comment, process_file_doc_info
 from coreImpl.check.check_name import check_file_name, check_ndk_name
+from coreImpl.parser.parser import parser_include_ast
 def process_api_json(api_info, file_doc_info: FileDocInfo, api_result_info_list):
@@ -44,6 +46,7 @@ def process_file_json(file_info, api_result_info_list):
     api_result_info_list.extend(check_file_name(file_info['name']))
     apis = file_info['children']
     file_doc_info = FileDocInfo()
+    api_result_info_list.extend(process_comment(file_info["comment"], file_doc_info, file_info))
     for api in apis:
         process_api_json(api, file_doc_info, api_result_info_list)
     api_result_info_list.extend(process_file_doc_info(file_doc_info, file_info))
@@ -58,23 +61,28 @@ def process_all_json(python_obj) -> list[ApiResultInfo]:
 def write_in_txt(check_result):
     txtResul = []
-    for result in check_result:
-        location = '{}(line:{}, col:{})'.format(result.location, result.locationLine, result.locationColumn)
-        message = 'API check error of [{}]:{}'.format(result.errorType['description'], result.errorInfo)
-        txtResul.append(OutputTxt(result.errorType['id'], result.level, location, result.fileName, message))
-    txtResul.append('api_check: false')
+    if len(check_result) == 0:
+        txtResul.append('api_check: false')
+    else:
+        for result in check_result:
+            location = '{}(line:{}, col:{})'.format(result.location, result.locationLine, result.locationColumn)
+            message = 'API check error of [{}]:{}'.format(result.errorType['description'], result.errorInfo)
+            txtResul.append(OutputTxt(result.errorType['id'], result.level, location, result.fileName, message))
+        txtResul.append('api_check: false')
     result_json = json.dumps(txtResul, default=lambda obj: obj.__dict__, indent=4)
     fs = open(r'./Error.txt', 'w', encoding='utf-8')
     fs.write(result_json)
     fs.close()
-def curr_entry(file_path):
-    with open('./src/coreImpl/check/data.json') as json_file:
-        python_obj = json.load(json_file)
-    check_result = process_all_json(python_obj)
-    if len(check_result) == 0:
-        return
+def curr_entry(pr_id):
+    file_path = os.path.abspath(os.path.join(os.getcwd(), "..{}..{}all_files.txt".format(os.sep, os.sep)))
+    file_list = get_md_files(file_path)
+    check_result = []
+    for file in file_list:
+        root_path = file.split('sdk_c')[0] + 'sdk_c'
+        python_obj = parser_include_ast(root_path, file)
+        check_result.extend(process_all_json(python_obj))
     write_in_txt(check_result)
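
Note: the reworked curr_entry no longer loads a pre-generated data.json; it reads the header paths listed in all_files.txt and parses each file individually via parser_include_ast, which is what fixes the single-file case. A minimal sketch of the root-path derivation, using a made-up path:

# Hypothetical header path as it would appear in all_files.txt.
file = '/home/ci/sdk_c/ohos/native_buffer.h'

# Everything up to and including the 'sdk_c' directory is used as the include
# root that parser_include_ast needs to parse this one header on its own.
root_path = file.split('sdk_c')[0] + 'sdk_c'
print(root_path)  # /home/ci/sdk_c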
@@ -83,7 +91,7 @@ def get_md_files(url) -> list[str]:
     file_list = []
     line = file.readline()
     while line:
-        file_list.append(line)
+        file_list.append(line.splitlines()[0])
         line = file.readline()
     file.close()
     return file_list
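
Note: readline() keeps the trailing line terminator, so appending the raw line would hand the parser a path ending in '\n'; splitlines()[0] strips it. A small illustration with a made-up path:

line = '/home/ci/sdk_c/ohos/native_buffer.h\n'  # hypothetical readline() result
print(repr(line))                  # '/home/ci/sdk_c/ohos/native_buffer.h\n'
print(repr(line.splitlines()[0]))  # '/home/ci/sdk_c/ohos/native_buffer.h'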


@@ -47,6 +47,8 @@ def check_file_name(file_path):
         error_info = ErrorMessage.TRANSLATION_UNIT.value
         api_result_info = ApiResultInfo(ErrorType.NAMING_ERRORS.value, error_info, '')
         api_result_info.set_type(LogType.LOG_FILE.value)
+        api_result_info.set_file_name(file_path)
+        api_result_info.set_location(file_path)
         api_result_info.set_level(2)
         api_result_info_list.append(api_result_info)
     return api_result_info_list
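
Note: with set_file_name and set_location now populated, a file-naming error is reported against the offending header itself. A self-contained stand-in (not the real ApiResultInfo class) showing how that path surfaces in the location string built by write_in_txt above; the line/column values are placeholders:

class FakeResult:
    # Only the attributes that write_in_txt reads are modelled here; the real
    # ApiResultInfo lives in typedef.check.check.
    def __init__(self, location, line, col):
        self.location = location        # the file path passed to set_location
        self.locationLine = line
        self.locationColumn = col

result = FakeResult('ohos/Native_buffer.h', 0, 0)  # hypothetical header name
location = '{}(line:{}, col:{})'.format(
    result.location, result.locationLine, result.locationColumn)
print(location)  # ohos/Native_buffer.h(line:0, col:0)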
@@ -70,8 +72,8 @@ def processing_check_data(function_type, api_info):
 class CheckName(enum.Enum):
-    LARGE_HUMP = r'^([A-Z][a-z0-9]*)*$'
-    SMALL_HUMP = r'^([a-z][A-Z0-9]*)*$'
+    LARGE_HUMP = r'^([A-Z][a-z0-9]*)*([\_]([A-Z][a-z0-9]*)*)*$'
+    SMALL_HUMP = r'^([a-z][A-Z0-9]*)*([\_]([a-z][A-Z0-9]*)*)*$'
     ALL_UPPERCASE_HUMP = r'^[A-Z]+[0-9]*([\_][A-Z0-9]+)*$'
     GLOBAL_VARIABLE = r'^g_([a-z][A-Z0-9]*)*$'
     FILE_NAME = r'^[a-z]+[a-z0-9]+([\_][a-z0-9]+)*\.h$'
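
Note: to make the relaxed naming rules concrete, the small check below (not part of the patch) shows that the widened LARGE_HUMP pattern now accepts underscore-separated segments such as OH_NativeBuffer, which the previous pattern rejected:

import re

OLD_LARGE_HUMP = r'^([A-Z][a-z0-9]*)*$'
NEW_LARGE_HUMP = r'^([A-Z][a-z0-9]*)*([\_]([A-Z][a-z0-9]*)*)*$'

for name in ('NativeBuffer', 'OH_NativeBuffer'):
    print(name,
          bool(re.match(OLD_LARGE_HUMP, name)),
          bool(re.match(NEW_LARGE_HUMP, name)))
# NativeBuffer    True  True
# OH_NativeBuffer False True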

File diff suppressed because it is too large.