mirror of
https://gitee.com/openharmony/ai_neural_network_runtime
synced 2024-12-04 14:11:01 +00:00
fix isNeedLatency bug
Signed-off-by: wang-yangsong <wangyangsong1@h-partners.com>
This commit is contained in:
parent
334f2bf8b0
commit
8e1d43e297
@@ -1329,7 +1329,7 @@ OH_NN_ReturnCode ExecutorPrepare(Executor** executor, Compilation** compilation)
|
||||
std::vector<char> vechiaiModelId(hiaiModelIdStr.begin(), hiaiModelIdStr.end());
|
||||
configMap["hiaiModelId"] = vechiaiModelId;
|
||||
|
||||
std::vector<char> vecNeedLatency(static_cast<char>(compilationImpl->isNeedModelLatency));
|
||||
std::vector<char> vecNeedLatency = { static_cast<char>(compilationImpl->isNeedModelLatency) };
|
||||
configMap["isNeedModelLatency"] = vecNeedLatency;
|
||||
|
||||
executorImpl->SetExtensionConfig(configMap);
|
||||
@@ -1400,7 +1400,7 @@ OH_NN_ReturnCode Unload(const ExecutorConfig* config)
|
||||
|
||||
int ret = nnrtService.Unload(config->hiaiModelId);
|
||||
if (ret != static_cast<int>(OH_NN_SUCCESS)) {
|
||||
LOGE("Unload failed, nnrtService is not exist, jump over Unload.");
|
||||
LOGE("Unload failed, some error happen when unload hiaiModelId.");
|
||||
return static_cast<OH_NN_ReturnCode>(ret);
|
||||
}
|
||||
|
||||
@@ -1645,7 +1645,7 @@ OH_NN_ReturnCode RunSync(Executor *executor,
|
||||
|
||||
configPtr->isNeedModelLatency = false;
|
||||
std::unordered_map<std::string, std::vector<char>> configMap;
|
||||
std::vector<char> vecNeedLatency(static_cast<char>(configPtr->isNeedModelLatency));
|
||||
std::vector<char> vecNeedLatency = { static_cast<char>(configPtr->isNeedModelLatency) };
|
||||
configMap["isNeedModelLatency"] = vecNeedLatency;
|
||||
|
||||
ret = executor->SetExtensionConfig(configMap);
|
||||
|
@@ -194,18 +194,24 @@ OH_NN_ReturnCode NNExecutor::SetExtensionConfig(const std::unordered_map<std::st
|
||||
}
|
||||
|
||||
for (auto config : configs) {
|
||||
char* configData = reinterpret_cast<char*>(config.second.data());
|
||||
if (configData == nullptr) {
|
||||
LOGD("[NNExecutor] SetExtensionConfig, key: %s, configData is nullptr.", config.first.c_str());
|
||||
return OH_NN_FAILED;
|
||||
}
|
||||
|
||||
if (!config.first.compare("callingPid")) {
|
||||
m_executorConfig->callingPid = std::atoi(reinterpret_cast<char*>(config.second.data()));
|
||||
m_executorConfig->callingPid = std::atoi(configData);
|
||||
LOGD("[NNExecutor] SetExtensionConfig, callingPid: %{public}d.", m_executorConfig->callingPid);
|
||||
}
|
||||
|
||||
if (!config.first.compare("hiaiModelId")) {
|
||||
m_executorConfig->hiaiModelId = std::atoi(reinterpret_cast<char*>(config.second.data()));
|
||||
m_executorConfig->hiaiModelId = std::atoi(configData);
|
||||
LOGD("[NNExecutor] SetExtensionConfig, hiaiModelId: %{public}d.", m_executorConfig->hiaiModelId);
|
||||
}
|
||||
|
||||
if (!config.first.compare("isNeedModelLatency")) {
|
||||
m_executorConfig->isNeedModelLatency = static_cast<bool>(reinterpret_cast<char*>(config.second.data()));
|
||||
m_executorConfig->isNeedModelLatency = static_cast<bool>(configData);
|
||||
LOGD("[NNExecutor] SetExtensionConfig, isNeedModelLatency: %{public}d.",
|
||||
m_executorConfig->isNeedModelLatency);
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user