Merge pull request !58 from zc/master
This commit is contained in:
openharmony_ci 2023-08-18 07:33:51 +00:00 committed by Gitee
commit d204fcc366
6 changed files with 54 additions and 50 deletions

View File

@@ -796,7 +796,7 @@ def main():
partition_file, signing_algorithm, hash_algorithm, private_key = get_args()
if not_l2:
no_zip = True
# Unpack updater package
unpack_package_processing()

View File

@@ -58,6 +58,7 @@ HASH_INFO_FMT = "<3HI"
HASH_DATA_HEADER_FMT = "<32sHI"
HASH_DATA_ADDR_FMT = "<2I"
class CreateHash(object):
"""
Create the component hash data
@@ -128,7 +129,8 @@ class CreateHash(object):
component_len = os.path.getsize(component.file_path)
block_num = component_len // HASH_BLOCK_SIZE
component_name = component.component_addr.decode().ljust(COMPONENT_NAME_SIZE, "\0")
UPDATE_LOGGER.print_log("calc component hash component name:%s %d" % (component_name,len(component_name)))
UPDATE_LOGGER.print_log(
"calc component hash component name:%s %d" % (component_name, len(component_name)))
total_block = block_num + 1 if component_len % HASH_BLOCK_SIZE > 0 else block_num
self.hashdata += struct.pack(HASH_DATA_HEADER_FMT, component_name.encode(),
total_block, component_len)
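
The hunk above only rewraps the log call; the surrounding code packs a fixed-width hash header per component. For reference, a minimal standalone sketch of that packing (the HASH_BLOCK_SIZE and COMPONENT_NAME_SIZE values below are assumed for illustration; the real constants are defined in this file):

import struct

HASH_DATA_HEADER_FMT = "<32sHI"   # 32-byte NUL-padded name, uint16 block count, uint32 length
COMPONENT_NAME_SIZE = 32          # assumed to match the 32s field above
HASH_BLOCK_SIZE = 4096            # assumed block size, for illustration only

def pack_hash_header(component_addr: bytes, component_len: int) -> bytes:
    # Pad the component name to the fixed field width with NUL bytes.
    component_name = component_addr.decode().ljust(COMPONENT_NAME_SIZE, "\0")
    # Round the block count up when the length is not a multiple of the block size.
    block_num = component_len // HASH_BLOCK_SIZE
    total_block = block_num + 1 if component_len % HASH_BLOCK_SIZE > 0 else block_num
    return struct.pack(HASH_DATA_HEADER_FMT, component_name.encode(), total_block, component_len)

# A 10000-byte component spans two full 4096-byte blocks plus one partial block.
header = pack_hash_header(b"/system", 10000)
assert len(header) == struct.calcsize(HASH_DATA_HEADER_FMT)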
@@ -193,25 +195,27 @@ class CreateHash(object):
return True
def parse_print_hashdata(self, save_path):
hash_check_file_p = open(os.path.join(save_path + "hash_check_file_parse"), "wb+")
hash_check_file_p.write(("hash info:").encode())
hash_check_file_p.write((HashType(self.hash_type.value).name + ' ' + str(self.hash_digest_size) + \
' ' + str(self.component_num) + ' ' + str(self.block_size) + '\n').encode())
hash_check_fd = os.open(os.path.join(save_path + "hash_check_file_parse"), os.O_RDWR | os.O_CREAT, 0o755)
with os.fdopen(hash_check_fd, "wb+") as hash_check_file_p:
hash_check_file_p.write(("hash info:").encode())
hash_check_file_p.write(("%s %s %s %s\n" % (
HashType(self.hash_type.value).name, str(self.hash_digest_size),
str(self.component_num), str(self.block_size))).encode())
offset = 0
for i in range(0, self.component_num):
hash_check_file_p.write((self.hashdata_list[offset][0] + '\n').encode())
hash_check_file_p.write((str(self.hashdata_list[offset][1]) + ' ' + \
str(self.hashdata_list[offset][2]) + '\n').encode())
for j in range(0, self.hashdata_list[offset][1]):
index = offset + 1
hashdata_hexstr = "".join("%02x" % b for b in self.hashdata_list[j + index][2])
hash_check_file_p.write((str(self.hashdata_list[j + index][0]) + ' ' + \
str(self.hashdata_list[j + index][1]) + ' ' + hashdata_hexstr + \
'\n').encode())
offset += (1 + self.hashdata_list[offset][1])
offset = 0
for i in range(0, self.component_num):
hash_check_file_p.write(("%s\n" % (self.hashdata_list[offset][0])).encode())
hash_check_file_p.write(("%s %s\n" % (
str(self.hashdata_list[offset][1]), str(self.hashdata_list[offset][2]))).encode())
for j in range(0, self.hashdata_list[offset][1]):
index = offset + 1
hashdata_hexstr = "".join("%02x" % b for b in self.hashdata_list[j + index][2])
hash_check_file_p.write(("%s" % (
str(self.hashdata_list[j + index][0]), str(self.hashdata_list[j + index][1]),
hashdata_hexstr)).encode())
signdata_hexstr = "".join("%02x" % b for b in self.signdata)
hash_check_file_p.write(("hash sign:").encode())
hash_check_file_p.write(signdata_hexstr.encode())
hash_check_file_p.close()
offset += (1 + self.hashdata_list[offset][1])
signdata_hexstr = "".join("%02x" % b for b in self.signdata)
hash_check_file_p.write(("hash sign:").encode())
hash_check_file_p.write(signdata_hexstr.encode())
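
The rewrite above replaces the bare open()/close() pair with the os.open plus os.fdopen idiom, so the parse file is created with explicit flags and a permission mode, and is closed by the with-block even on error. A minimal sketch of that pattern outside the class (path and payload are placeholders):

import os

def write_parse_file(save_path: str, payload: bytes) -> None:
    # os.open takes creation flags and an explicit permission mode (0o755 here).
    fd = os.open(os.path.join(save_path, "hash_check_file_parse"), os.O_RDWR | os.O_CREAT, 0o755)
    # os.fdopen wraps the raw descriptor; leaving the with-block closes it.
    with os.fdopen(fd, "wb+") as parse_file:
        parse_file.write(payload)

write_parse_file("/tmp", b"hash info: sha256 32 1 4096\n")

The context manager is also why the explicit close() from the old version disappears in the new code.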

View File

@@ -231,7 +231,7 @@ class VerseScript(Script):
"""
cmd = 'update_partitions("/%s");\n' % PARTITION_FILE
return cmd
def full_image_update(self, update_file_name):
cmd = 'update_from_bin("%s");\n' % update_file_name
return cmd
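
The methods touched in this hunk only format statements for the updater script as plain strings. A trimmed standalone sketch of the same idea (the constant value is assumed for illustration):

PARTITION_FILE = "partition_file"  # assumed value, for illustration

def update_partitions_cmd() -> str:
    # Statement that tells the updater to apply the new partition table.
    return 'update_partitions("/%s");\n' % PARTITION_FILE

def full_image_update_cmd(update_file_name: str) -> str:
    # Statement that flashes the full image from the packaged bin file.
    return 'update_from_bin("%s");\n' % update_file_name

script_body = update_partitions_cmd() + full_image_update_cmd("update.bin")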

View File

@@ -110,7 +110,7 @@ class UnpackPackage(object):
package_file.seek(self.addr_offset)
component_addr = package_file.read(self.addr_size)
component_addr = component_addr.split(b"\x00")[0].decode('utf-8')
package_file.seek(self.type_offset)
component_type_buffer = package_file.read(COMPONENT_TYPE_SIZE)
component_type = struct.unpack(COMPONENT_TYPE_FMT, component_type_buffer)
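
This hunk reads fixed-width fields from the package header: the address field is NUL-padded text, while the type field is a binary value decoded with struct. A self-contained sketch against an in-memory buffer (the field sizes and COMPONENT_TYPE_FMT below are assumptions, not the module's real constants):

import io
import struct

ADDR_SIZE = 32                      # assumed width of the component address field
COMPONENT_TYPE_FMT = "B"            # assumed: one unsigned byte
COMPONENT_TYPE_SIZE = struct.calcsize(COMPONENT_TYPE_FMT)

def read_component_fields(package_file, addr_offset, type_offset):
    # Keep only the bytes before the first NUL, then decode the address as UTF-8.
    package_file.seek(addr_offset)
    component_addr = package_file.read(ADDR_SIZE).split(b"\x00")[0].decode("utf-8")
    # Decode the fixed-size type field with struct.unpack.
    package_file.seek(type_offset)
    component_type, = struct.unpack(COMPONENT_TYPE_FMT, package_file.read(COMPONENT_TYPE_SIZE))
    return component_addr, component_type

buf = io.BytesIO(b"/vendor".ljust(32, b"\x00") + struct.pack("B", 1))
print(read_component_fields(buf, 0, 32))   # ('/vendor', 1)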

View File

@@ -131,7 +131,7 @@ def create_update_bin():
sign_algo = SIGN_ALGO_PSS
else:
sign_algo = SIGN_ALGO_RSA
# create bin
package = CreatePackage(head_list, component_list, save_patch, OPTIONS_MANAGER.private_key)
if not package.create_package():
@@ -460,7 +460,7 @@ def build_update_package(no_zip, update_package, prelude_script,
return False
update_file_name = get_update_file_name()
if not no_zip:
update_package_path = os.path.join(
update_package, '%s_unsigned.zip' % update_file_name)
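
The second hunk only changes how the output path of the not-yet-signed archive is composed. A small sketch of that naming scheme (directory and base name are placeholders):

import os

def unsigned_package_path(update_package_dir: str, update_file_name: str) -> str:
    # The intermediate archive carries an "_unsigned" suffix until it is signed.
    return os.path.join(update_package_dir, '%s_unsigned.zip' % update_file_name)

print(unsigned_package_path("./output", "updater_full"))   # e.g. ./output/updater_full_unsigned.zip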

View File

@@ -25,11 +25,11 @@ from collections import OrderedDict
import xmltodict
import zipfile
from cryptography.hazmat.primitives import hashes
from log_exception import UPDATE_LOGGER
from build_pkcs7 import sign_ota_package
from copy import copy
from ctypes import cdll
from cryptography.hazmat.primitives import hashes
from log_exception import UPDATE_LOGGER
operation_path = os.path.dirname(os.path.realpath(__file__))
PRODUCT = 'hi3516'
@@ -118,19 +118,8 @@ class ExtInit:
return False
@singleton
class OptionsManager:
"""
Options management class
"""
class BaseOptionsManager:
def __init__(self):
self.init = ExtInit()
self.parser = argparse.ArgumentParser()
# Own parameters
self.product = None
# Entry parameters
self.source_package = None
self.target_package = None
@@ -148,6 +137,23 @@ class OptionsManager:
self.make_dir_path = None
@singleton
class OptionsManager(BaseOptionsManager):
"""
Options management class
"""
def __init__(self):
super().__init__()
self.init = ExtInit()
self.parser = argparse.ArgumentParser()
# Own parameters
self.product = None
# Parsed package parameters
self.target_package_dir = None
self.target_package_config_dir = None
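
The refactor above moves the shared option fields into BaseOptionsManager and keeps the @singleton decoration on the derived OptionsManager only. The decorator itself is not shown in this diff; the sketch below uses an illustrative instance-caching version just to show the shape of the pattern:

import argparse

def singleton(cls):
    # Illustrative decorator: cache and reuse a single instance per decorated class.
    instances = {}

    def get_instance(*args, **kwargs):
        if cls not in instances:
            instances[cls] = cls(*args, **kwargs)
        return instances[cls]
    return get_instance

class BaseOptionsManager:
    # Shared entry parameters live in the base class.
    def __init__(self):
        self.parser = argparse.ArgumentParser()
        self.source_package = None
        self.target_package = None

@singleton
class OptionsManager(BaseOptionsManager):
    # Only the concrete manager is a singleton; the base stays freely instantiable.
    def __init__(self):
        super().__init__()
        self.product = None

assert OptionsManager() is OptionsManager()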
@@ -329,7 +335,6 @@ def parse_update_config(xml_path):
difference_list.append(component['@compAddr'])
OPTIONS_MANAGER.incremental_img_name_list.append(split_img_name(component['#text']))
UPDATE_LOGGER.print_log('XML file parsing completed!')
ret_params = [head_list, comp_dict, whole_list, difference_list, package_version, full_image_path_list]
return ret_params
@@ -584,9 +589,7 @@ def get_update_info():
os.path.join(OPTIONS_MANAGER.target_package_config_dir,
VERSION_MBN_PATH)))
if version_mbn_content is False:
UPDATE_LOGGER.print_log(
"Get version mbn content failed!",
log_type=UPDATE_LOGGER.ERROR_LOG)
UPDATE_LOGGER.print_log("Get version mbn content failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
return False
OPTIONS_MANAGER.version_mbn_content = version_mbn_content
OPTIONS_MANAGER.board_list_file_path = os.path.join(
@@ -597,9 +600,7 @@ def get_update_info():
os.path.join(OPTIONS_MANAGER.target_package_config_dir,
BOARD_LIST_PATH)))
if board_list_content is False:
UPDATE_LOGGER.print_log(
"Get board list content failed!",
log_type=UPDATE_LOGGER.ERROR_LOG)
UPDATE_LOGGER.print_log("Get board list content failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
return False
OPTIONS_MANAGER.board_list_content = board_list_content
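
Both of these hunks collapse a multi-line error log into a single print_log call; the read-then-validate flow is unchanged. A standalone sketch of that shape (the reader helper, logger, and file name here are placeholders, not the module's real API):

import logging
import os

UPDATE_LOGGER = logging.getLogger("update")

def get_file_content(path):
    # Placeholder reader: return False on failure, mirroring the tool's convention.
    try:
        with open(path, "r") as content_file:
            return content_file.read()
    except OSError:
        return False

def load_version_mbn(config_dir, mbn_name="VERSION.mbn"):
    version_mbn_content = get_file_content(os.path.join(config_dir, mbn_name))
    if version_mbn_content is False:
        # One-line error log, as in the updated code, then bail out.
        UPDATE_LOGGER.error("Get version mbn content failed!")
        return False
    return version_mbn_content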
@@ -615,11 +616,10 @@ def get_update_info():
OPTIONS_MANAGER.target_package_version, \
OPTIONS_MANAGER.full_image_path_list = \
parse_update_config(xml_file_path)
UPDATE_LOGGER.print_log("XML file parsing completed!")
if head_info_list is False or component_info_dict is False or \
full_img_list is False or incremental_img_list is False:
UPDATE_LOGGER.print_log(
"Get parse update config xml failed!",
log_type=UPDATE_LOGGER.ERROR_LOG)
UPDATE_LOGGER.print_log("Get parse update config xml failed!", log_type=UPDATE_LOGGER.ERROR_LOG)
return False
OPTIONS_MANAGER.head_info_list, OPTIONS_MANAGER.component_info_dict, \
OPTIONS_MANAGER.full_img_list, OPTIONS_MANAGER.incremental_img_list = \