fix isNeedLatency bug

Signed-off-by: wang-yangsong <wangyangsong1@h-partners.com>
wang-yangsong 2024-06-28 16:55:10 +08:00
parent 334f2bf8b0
commit 8e1d43e297
2 changed files with 12 additions and 6 deletions

@@ -1329,7 +1329,7 @@ OH_NN_ReturnCode ExecutorPrepare(Executor** executor, Compilation** compilation)
     std::vector<char> vechiaiModelId(hiaiModelIdStr.begin(), hiaiModelIdStr.end());
     configMap["hiaiModelId"] = vechiaiModelId;
-    std::vector<char> vecNeedLatency(static_cast<char>(compilationImpl->isNeedModelLatency));
+    std::vector<char> vecNeedLatency = { static_cast<char>(compilationImpl->isNeedModelLatency) };
     configMap["isNeedModelLatency"] = vecNeedLatency;
     executorImpl->SetExtensionConfig(configMap);
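
Note on the change above: the original declaration uses the std::vector count
constructor, so the flag value becomes the element count instead of the element
itself; a true flag produces one zero byte and a false flag produces an empty
vector, losing the value either way. The braced initializer stores the flag as
the single element. A minimal standalone sketch of the difference (not code
from this repository):

    #include <cassert>
    #include <vector>

    int main()
    {
        bool isNeedModelLatency = true;

        // Old form: count constructor. static_cast<char>(true) == 1, so this is
        // one value-initialized (zero) byte; with false it would be an empty vector.
        std::vector<char> oldForm(static_cast<char>(isNeedModelLatency));
        assert(oldForm.size() == 1 && oldForm[0] == 0);   // flag value lost

        // Fixed form: list initialization stores the flag itself as the element.
        std::vector<char> newForm = { static_cast<char>(isNeedModelLatency) };
        assert(newForm.size() == 1 && newForm[0] == 1);   // flag value preserved
        return 0;
    }
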
@@ -1400,7 +1400,7 @@ OH_NN_ReturnCode Unload(const ExecutorConfig* config)
     int ret = nnrtService.Unload(config->hiaiModelId);
     if (ret != static_cast<int>(OH_NN_SUCCESS)) {
-        LOGE("Unload failed, nnrtService is not exist, jump over Unload.");
+        LOGE("Unload failed, some error happen when unload hiaiModelId.");
         return static_cast<OH_NN_ReturnCode>(ret);
     }
@@ -1645,7 +1645,7 @@ OH_NN_ReturnCode RunSync(Executor *executor,
     configPtr->isNeedModelLatency = false;
     std::unordered_map<std::string, std::vector<char>> configMap;
-    std::vector<char> vecNeedLatency(static_cast<char>(configPtr->isNeedModelLatency));
+    std::vector<char> vecNeedLatency = { static_cast<char>(configPtr->isNeedModelLatency) };
    configMap["isNeedModelLatency"] = vecNeedLatency;
     ret = executor->SetExtensionConfig(configMap);
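
Note on the RunSync change: the flag is cleared and forwarded through the same
extension-config map, so with the old count-constructor form a false flag
produced an empty vector whose data() pointer may be null on the receiving
side; after the fix the map entry always carries exactly one byte. A standalone
sketch of that round trip (the map and key names mirror the diff, the rest is
illustrative):

    #include <cstdio>
    #include <string>
    #include <unordered_map>
    #include <vector>

    int main()
    {
        bool isNeedModelLatency = false;

        // Producer side (as in RunSync after the fix): one byte holding the flag.
        std::unordered_map<std::string, std::vector<char>> configMap;
        configMap["isNeedModelLatency"] = { static_cast<char>(isNeedModelLatency) };

        // Consumer side: one byte is always present, even for a false flag.
        const std::vector<char>& value = configMap["isNeedModelLatency"];
        bool readBack = !value.empty() && value[0] != 0;
        std::printf("isNeedModelLatency round trip: %d\n", static_cast<int>(readBack));
        return 0;
    }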

@@ -194,18 +194,24 @@ OH_NN_ReturnCode NNExecutor::SetExtensionConfig(const std::unordered_map<std::st
     }
     for (auto config : configs) {
+        char* configData = reinterpret_cast<char*>(config.second.data());
+        if (configData == nullptr) {
+            LOGD("[NNExecutor] SetExtensionConfig, key: %s, configData is nullptr.", config.first.c_str());
+            return OH_NN_FAILED;
+        }
         if (!config.first.compare("callingPid")) {
-            m_executorConfig->callingPid = std::atoi(reinterpret_cast<char*>(config.second.data()));
+            m_executorConfig->callingPid = std::atoi(configData);
             LOGD("[NNExecutor] SetExtensionConfig, callingPid: %{public}d.", m_executorConfig->callingPid);
         }
         if (!config.first.compare("hiaiModelId")) {
-            m_executorConfig->hiaiModelId = std::atoi(reinterpret_cast<char*>(config.second.data()));
+            m_executorConfig->hiaiModelId = std::atoi(configData);
             LOGD("[NNExecutor] SetExtensionConfig, hiaiModelId: %{public}d.", m_executorConfig->hiaiModelId);
         }
         if (!config.first.compare("isNeedModelLatency")) {
-            m_executorConfig->isNeedModelLatency = static_cast<bool>(reinterpret_cast<char*>(config.second.data()));
+            m_executorConfig->isNeedModelLatency = static_cast<bool>(configData);
             LOGD("[NNExecutor] SetExtensionConfig, isNeedModelLatency: %{public}d.",
                 m_executorConfig->isNeedModelLatency);
         }
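
Note on the NNExecutor::SetExtensionConfig change: the data() pointer is hoisted
into configData, checked against nullptr once, and then reused for every key
instead of repeating the reinterpret_cast in each branch. One caveat when reusing
this pattern elsewhere: std::atoi expects a NUL-terminated buffer, and a
std::vector<char> built from a string's begin()/end() carries no terminator, so
a defensive reader can rebuild a std::string before parsing. A hedged sketch of
such a reader (the helper name and its signature are assumptions, not the
repository's API):

    #include <cstdlib>
    #include <string>
    #include <unordered_map>
    #include <vector>

    // Illustrative reader for an extension-config map like the one above.
    bool ReadIntConfig(const std::unordered_map<std::string, std::vector<char>>& configs,
                       const std::string& key, int& out)
    {
        auto it = configs.find(key);
        if (it == configs.end() || it->second.empty()) {
            return false;   // missing key or empty payload
        }
        // Copy into a std::string so the buffer handed to std::atoi is guaranteed
        // to be NUL-terminated.
        std::string text(it->second.begin(), it->second.end());
        out = std::atoi(text.c_str());
        return true;
    }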