mirror of https://gitee.com/openharmony/interface_sdk_c, synced 2024-11-24 07:09:59 +00:00
!80 Basic framework for CAPI automated testing
Merge pull request !80 from 王曹宇/feature_capi_test_case
This commit is contained in: commit 0eb87a2611
@@ -1,10 +1,13 @@
1. Before using this tool, you need to modify [constants.py](./src/utils/constants.py)
## 1. Modify the environment variable configuration

Before using this tool, you need to modify [constants.py](./src/utils/constants.py)
and set StringConstant.LIB_CLG_PATH, StringConstant.REPLACE_WAREHOUSE, and StringConstant.INCLUDE_LIB under that file;
StringConstant.LIB_CLG_PATH: the shared library (local)
REPLACE_WAREHOUSE: directory of the cloned interface_sdk_c repository (local path) -- e.g. (drop the drive prefix) \\interface_sdk_c
StringConstant.INCLUDE_LIB: # path to the third-party library pulled into the local repository

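The actual contents of constants.py are not part of this change; the following is only a minimal sketch of what the three settings described above might look like. The class layout and every path value below are assumptions, not the repository's real code:

```python
# src/utils/constants.py -- hypothetical sketch, not the actual file
class StringConstant:
    # local libclang shared library used by the clang-based parser (placeholder value)
    LIB_CLG_PATH = r'D:\tools\llvm\bin\libclang.dll'
    # local directory of the cloned interface_sdk_c repository, drive prefix dropped (placeholder value)
    REPLACE_WAREHOUSE = r'\interface_sdk_c'
    # third-party library pulled into the local repository (placeholder value)
    INCLUDE_LIB = r'third_party\musl\ndk_musl_include'
```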
2. Environment:
## 2. Environment:

1) python-3.11.4-amd64

2) PyCharm Community Edition 2023.2
@@ -15,7 +18,8 @@ StringConstant.INCLUDE_LIB: # path to the third-party library pulled into the local repository

5) From the interface_sdk_c directory, run the main.py file under the src directory

3. Terminal commands
## 3. Terminal commands

options:
  -h, --help            show this help message and exit
  -N {collect,diff}, --tool-name {collect,diff}
@@ -23,4 +27,14 @@ options:
  -P PARSER_PATH, --parser-path PARSER_PATH
                        path to parse

For example, to use the statistics tool, the terminal command is: py -N collect -P <directory path> --- this invokes the CAPI statistics tool

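The options listed above come from the tool's command-line parser. A minimal sketch of how such a parser could be wired up with argparse follows; the function name and the description string are assumptions, not the repository's actual main.py code:

```python
# hypothetical sketch of the CLI described above; the real wiring in main.py may differ
import argparse


def build_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description='CAPI parser tools')
    # -N selects which tool to run: 'collect' (statistics) or 'diff'
    parser.add_argument('-N', '--tool-name', choices=['collect', 'diff'], required=True)
    # -P points the selected tool at the directory or file to parse
    parser.add_argument('-P', '--parser-path', required=True, help='path to parse')
    return parser


if __name__ == '__main__':
    args = build_parser().parse_args()
    print(args.tool_name, args.parser_path)
```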
## 4. Automated testing

1) Add test cases for the corresponding tool under test/ut

2) Add the expected results for the corresponding tool under test/expect

3) In test/testCase/run_main.py, add a method that runs the test cases through the parser, writes the results to test/output, and compares them against the expected results in test/expect (a sketch follows this list)

Added methods must start with test_

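A minimal sketch of adding such a method, following the test_ naming convention described above. The directory name my_tool and the helper my_tool_entry are hypothetical stand-ins for whichever tool is being tested; the real run_main.py added by this change is shown further below:

```python
# hypothetical sketch of a new test method in test/testCase/run_main.py
import os
import unittest


def my_tool_entry(path: str) -> str:
    """Placeholder standing in for the tool entry point under test."""
    return 'result for ' + path


class TestMethods(unittest.TestCase):
    def test_my_tool(self):  # added methods must start with test_
        test_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
        case_dir = os.path.join(test_root, 'ut', 'my_tool')        # input cases under test/ut
        expect_dir = os.path.join(test_root, 'expect', 'my_tool')  # expected results under test/expect
        for name in os.listdir(case_dir):
            actual = my_tool_entry(os.path.join(case_dir, name))
            with open(os.path.join(expect_dir, name + '.txt')) as expected_file:
                self.assertEqual(actual, expected_file.read().strip(),
                                 '{} case is error'.format(name))


if __name__ == '__main__':
    unittest.main()
```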
@@ -17,16 +17,18 @@
import json
import re
import subprocess
import os
from clang.cindex import CursorKind
from typedef.check.check import ApiResultInfo, DocInfo, ErrorType, ErrorMessage, FileDocInfo, LogType, TAGS
from typedef.check.check import ApiResultInfo, DocInfo, ErrorType, ErrorMessage, FileDocInfo, LogType, TAGS, ErrorLevel

current_file = os.path.dirname(__file__)
# permission data comes from https://gitee.com/openharmony/utils_system_resources/raw/master/systemres/main/config.json
permission_tag_rules = ['ohos.permission.HEALTH_DATA', 'ohos.permission.HEART_RATE', 'ohos.permission.ACCELERATION']
with open('./capi_parser/src/coreImpl/check/rules/perssion_rule.json') as json_file:
with open(os.path.abspath(os.path.join(current_file, "rules/perssion_rule.json"))) as json_file:
    permission_file_content = json.load(json_file)
permission_tag_rules.extend([item['name'] for item in permission_file_content['module']['definePermissions']])
syscap_tag_rules: list = []
with open('./capi_parser/src/coreImpl/check/rules/syscap_rule.json') as json_file:
with open(os.path.abspath(os.path.join(current_file, "rules/syscap_rule.json"))) as json_file:
    syscap_tag_rules = json.load(json_file)

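The replaced open() calls above switch from paths relative to the process working directory to paths resolved against the module's own location, so the checker no longer depends on where the interpreter was launched. A minimal sketch of the same pattern, with a made-up rules file name:

```python
import json
import os

# Resolve data files relative to this module rather than the current working directory,
# so the load succeeds no matter which directory the tool is started from.
current_file = os.path.dirname(__file__)
rules_path = os.path.abspath(os.path.join(current_file, "rules/example_rule.json"))  # hypothetical file
with open(rules_path) as json_file:
    rules = json.load(json_file)
```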
@@ -36,26 +38,18 @@ def create_api_result_info_by_doc(error_type: ErrorType, error: ErrorMessage, pa
        error_info = error_info.replace('$$', str(param), 1)
    api_result_info = ApiResultInfo(error_type.value, error_info, api_info['name'])
    api_result_info.set_type(LogType.LOG_JSDOC.value)
    api_result_info.set_level(ErrorLevel.MIDDLE.value)
    if 'location' in api_info.keys():
        api_result_info.set_location(api_info['location']['location_path'])
        api_result_info.set_location_line(api_info['location']['location_line'])
        api_result_info.set_location_column(api_info['location']['location_column'])
        api_result_info.set_file_name(api_info['location'])
        api_result_info.set_file_name(api_info['location']['location_path'])
    else:
        api_result_info.set_file_name(api_info['name'])
        api_result_info.set_location(api_info['name'])
    return api_result_info


def create_api_result_info_by_file(error_type: ErrorType, error: ErrorMessage, params: list, file_info):
    error_info = str(error.value)
    for param in params:
        error_info = error_info.replace('$$', str(param), 1)
    api_result_info = ApiResultInfo(error_type.value, error_info, file_info['name'])
    api_result_info.set_type(LogType.LOG_FILE.value)
    return api_result_info


def process_tag_addtogroup(tag_info, file_doc_info: FileDocInfo, api_info) -> list:
    api_result_info_list = []
    group_name = tag_info['name']
@@ -110,12 +104,19 @@ def process_tag_library(tag_info, file_doc_info: FileDocInfo, api_info) -> list:


def process_tag_param(tag_info, file_doc_info: FileDocInfo, api_info) -> list:
    file_doc_info.curr_doc_info.param_index += 1
    api_result_info_list = []
    if api_info['kind'] != CursorKind.FUNCTION_DECL.name:
        return api_result_info_list

    if 'parm' not in api_info.keys():
        api_result_info = create_api_result_info_by_doc(
            ErrorType.WRONG_VALUE, ErrorMessage.ERROR_INFO_COUNT_PARAM, [], api_info)
        api_result_info_list.append(api_result_info)
        return api_result_info_list
    index = file_doc_info.curr_doc_info.param_index
    params = api_info['parm']
    if (len(params) < index+1):
    if (len(params) < index + 1):
        return api_result_info_list
    param = api_info['parm'][index]
    if tag_info['name'] != param['name']:
@@ -234,8 +235,6 @@ def process_each_tags(tag_info, file_doc_info: FileDocInfo, api_info) -> list:
    if tag not in process_tag_function.keys():
        return []
    tag_process = process_tag_function[tag]
    if tag == TAGS['PARAM'].value:
        doc_info.param_index += 1
    api_result_info_list.extend(tag_process(tag_info, file_doc_info, api_info))
    return api_result_info_list

@@ -303,7 +302,8 @@ def process_each_comment(comment_object, file_doc_info: FileDocInfo, api_info) -
        api_result_info_list.extend(process_each_tags(item, file_doc_info, api_info))
    # check whether the number of @param tags matches the number of function parameters
    param_tag_count = file_doc_info.curr_doc_info.param_index + 1
    if api_info['kind'] == CursorKind.FUNCTION_DECL.name and len(api_info['parm']) != param_tag_count:
    if api_info['kind'] == CursorKind.FUNCTION_DECL.name and \
            'parm' in api_info.keys() and len(api_info['parm']) != param_tag_count:
        api_result_info = create_api_result_info_by_doc(
            ErrorType.WRONG_SCENE, ErrorMessage.ERROR_INFO_COUNT_PARAM, [], api_info)
        api_result_info_list.append(api_result_info)
@@ -339,7 +339,7 @@ def process_comment(comment: str, file_doc_info: FileDocInfo, api_info) -> list:
    if comment == "none_comment":
        return api_result_info_list
    result = subprocess.check_output(
        ['node', './capi_parser/src/coreImpl/check/comment_parser.js', comment])  # parse the comment
        ['node', os.path.abspath(os.path.join(current_file, "comment_parser.js")), comment])  # parse the comment
    result_json = json.loads(result.decode('utf-8'))
    for item in result_json:
        api_result_info_list.extend(process_each_comment(item, file_doc_info, api_info))
@@ -355,19 +355,19 @@ def process_file_doc_info(file_doc_info: FileDocInfo, file_info) -> list:
    api_result_info_list = []
    # handle the group description
    if file_doc_info.group_name is None:
        api_result_info = create_api_result_info_by_file(
        api_result_info = create_api_result_info_by_doc(
            ErrorType.WRONG_SCENE, ErrorMessage.ERROR_FILE_LOSE_ONE, ['group doc'], file_info)
        api_result_info_list.append(api_result_info)
    else:
        # check for the end of the group tag
        if not file_doc_info.has_group_end:
            api_result_info = create_api_result_info_by_file(
            api_result_info = create_api_result_info_by_doc(
                ErrorType.WRONG_SCENE, ErrorMessage.ERROR_FILE_HAS_ONE_LOSE_OTHER,
                ['group tag', 'end tag }'], file_info)
            api_result_info_list.append(api_result_info)
    # handle the file description
    if file_doc_info.file_name is None:
        api_result_info = create_api_result_info_by_file(
        api_result_info = create_api_result_info_by_doc(
            ErrorType.WRONG_SCENE, ErrorMessage.ERROR_FILE_LOSE_ONE, ['file doc'], file_info)
        api_result_info_list.append(api_result_info)

0
capi_parser/test/expect/check/.gitkeep
Normal file
0
capi_parser/test/output/check/.gitkeep
Normal file
30
capi_parser/test/testCase/run_main.py
Normal file
@@ -0,0 +1,30 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os
import json
import unittest
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../src")))
from coreImpl.check.check import get_check_result, write_in_txt


class TestMethods(unittest.TestCase):
    def test_check(self):
        test_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..\\"))
        test_case_path = os.path.join(test_path, "ut\\check")
        output_path = os.path.join(test_path, "output\\check")
        expect_path = os.path.join(test_path, "expect\\check")
        for dirpath, dirnames, filenames in os.walk(test_case_path):
            for item in filenames:
                file_name = item.split('.')[0]
                check_result = get_check_result([os.path.join(dirpath, item)])
                write_in_txt(check_result, os.path.join(output_path, "{}.txt".format(file_name)))
                with open(os.path.join(expect_path, "{}.txt".format(file_name))) as json_file:
                    permission_file_content = json.load(json_file)
                result_json = json.dumps(permission_file_content, default=lambda obj: obj.__dict__, indent=4)
                self.assertEqual(result_json, "result_json", "{} case is error".format(os.path.join(dirpath, item)))


if __name__ == '__main__':
    unittest.main()
0
capi_parser/test/ut/check/.gitkeep
Normal file