mirror of https://gitee.com/openharmony/commonlibrary_ets_utils
synced 2025-02-10 03:53:22 +00:00
!1027 [Bug]: Calling taskpool.Task.sendData in a loop inside a task causes a crash
Merge pull request !1027 from 羽诺/task-increaseRefSignal
This commit is contained in:
commit 9fb9818652
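The patch removes the per-task increaseRefSignal_ uv_async handle. A plausible root cause consistent with this patch: libuv documents that uv_async_send may coalesce several calls into a single callback invocation, so a napi reference taken once per IncreaseTaskRef wakeup cannot balance an unref taken once per delivered message when sendData is called in a tight loop. A minimal standalone sketch (not from this repository, using only the documented libuv API) of that coalescing behavior:

// coalesce_demo.cpp: the five uv_async_send() calls below are typically
// delivered as a single OnSignal() invocation, because libuv is allowed
// to coalesce pending async sends into one callback.
#include <cstdio>
#include <uv.h>

static int g_callbackRuns = 0;

static void OnSignal(uv_async_t* handle)
{
    g_callbackRuns++;
    // Close the handle so uv_run() can return once no work remains.
    uv_close(reinterpret_cast<uv_handle_t*>(handle), nullptr);
}

int main()
{
    uv_loop_t* loop = uv_default_loop();
    uv_async_t signal;
    uv_async_init(loop, &signal, OnSignal);
    for (int i = 0; i < 5; i++) {
        uv_async_send(&signal); // five sends before the loop runs
    }
    uv_run(loop, UV_RUN_DEFAULT);
    printf("sends: 5, callback runs: %d\n", g_callbackRuns); // usually 1
    return 0;
}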
@@ -113,7 +113,6 @@ Task* Task::GenerateTask(napi_env env, napi_value napiTask, napi_value func,
     task->taskId_ = reinterpret_cast<uint64_t>(task);
     uv_loop_t* loop = NapiHelper::GetLibUV(env);
     ConcurrentHelper::UvHandleInit(loop, task->onResultSignal_, TaskPool::HandleTaskResult, task);
-    ConcurrentHelper::UvHandleInit(loop, task->increaseRefSignal_, IncreaseTaskRef, task);
     napi_value taskId = NapiHelper::CreateUint64(env, task->taskId_);
     napi_value napiTrue = NapiHelper::CreateBooleanValue(env, true);
     napi_value napiFalse = NapiHelper::CreateBooleanValue(env, false);
@@ -421,14 +420,8 @@ napi_value Task::SendData(napi_env env, napi_callback_info cbinfo)
         ErrorHelper::ThrowError(env, ErrorHelper::ERR_WORKER_SERIALIZATION, errMessage.c_str());
         return nullptr;
     }
-    if (task->increaseRefSignal_ == nullptr) {
-        std::string errMessage = "taskpool:: only task can support SendData";
-        HILOG_ERROR("taskpool:: %{public}s", errMessage.c_str());
-        ErrorHelper::ThrowError(env, ErrorHelper::TYPE_ERROR, errMessage.c_str());
-        return nullptr;
-    }
-    uv_async_send(task->increaseRefSignal_);
-    TaskResultInfo* resultInfo = new TaskResultInfo(task->env_, task->taskId_, serializationArgs);
+    TaskResultInfo* resultInfo = new TaskResultInfo(task->env_, env, task->taskId_, serializationArgs);
    return TaskManager::GetInstance().NotifyCallbackExecute(env, resultInfo, task);
 }
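Reading of this hunk: SendData no longer signals a per-task ref increase. Instead the worker's napi_env travels with each TaskResultInfo (the new second constructor argument), and lifetime is accounted once per message via the engine's listening counter; see the NotifyCallbackExecute and CallbackScope hunks below.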
@@ -1123,16 +1116,6 @@ bool Task::CanExecuteDelayed(napi_env env)
     return true;
 }
 
-void Task::IncreaseTaskRef(const uv_async_t* req)
-{
-    auto task = static_cast<Task*>(req->data);
-    if (task == nullptr) {
-        HILOG_FATAL("taskpool:: IncreaseTaskRef task is nullptr");
-        return;
-    }
-    napi_reference_ref(task->env_, task->taskRef_, nullptr);
-}
-
 void Task::SetHasDependency(bool hasDependency)
 {
     hasDependency_ = hasDependency;
@@ -88,8 +88,6 @@ public:
                               bool defaultTransfer = true, bool defaultCloneSendable = false);
     static void TaskDestructor(napi_env env, void* data, void* hint);
 
-    static void IncreaseTaskRef(const uv_async_t* req);
-
     static void ThrowNoDependencyError(napi_env env);
     static void ExecuteListenerCallback(const uv_async_t* req);
     static void ExecuteListenerCallback(ListenerCallBackInfo* listenerCallBackInfo);
@@ -142,7 +140,6 @@ public:
     std::list<TaskInfo*> pendingTaskInfos_ {}; // for a common task executes multiple times
     void* result_ = nullptr;
     uv_async_t* onResultSignal_ = nullptr;
-    uv_async_t* increaseRefSignal_ = nullptr;
     std::atomic<bool> success_ {true};
     std::atomic<uint64_t> startTime_ {};
     std::atomic<uint64_t> cpuTime_ {};
@@ -179,11 +176,12 @@ struct CallbackInfo {
 };
 
 struct TaskResultInfo {
-    TaskResultInfo(napi_env env, uint64_t id, void* args) : hostEnv(env),
+    TaskResultInfo(napi_env env, napi_env curEnv, uint64_t id, void* args) : hostEnv(env), workerEnv(curEnv),
         taskId(id), serializationArgs(args) {}
     ~TaskResultInfo() = default;
 
     napi_env hostEnv;
+    napi_env workerEnv;
     uint64_t taskId;
     void* serializationArgs;
 };
@@ -761,6 +761,8 @@ napi_value TaskManager::NotifyCallbackExecute(napi_env env, TaskResultInfo* resu
     callbackInfo->refCount++;
     callbackInfo->onCallbackSignal->data = callbackInfo.get();
     callbackInfo->worker = worker;
+    auto workerEngine = reinterpret_cast<NativeEngine*>(env);
+    workerEngine->IncreaseListeningCounter();
     uv_async_send(callbackInfo->onCallbackSignal);
     return nullptr;
 }
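The increment now rides with every queued message rather than with an async wakeup, which the event loop may batch. A distilled sketch of that pairing (all names here are invented for illustration; only IncreaseListeningCounter/DecreaseListeningCounter mirror calls visible in the diff):

// pairing_demo.cpp: one increment per enqueued message, one RAII-guarded
// decrement per consumed message, so the balance is exact no matter how
// the event loop batches its wakeups.
#include <atomic>
#include <cstdio>

struct FakeEngine { // stand-in for NativeEngine
    std::atomic<int> listening {0};
    void IncreaseListeningCounter() { listening++; }
    void DecreaseListeningCounter() { listening--; }
};

class MessageScope { // analogous to CallbackScope in this patch
public:
    explicit MessageScope(FakeEngine* engine) : engine_(engine) {}
    ~MessageScope()
    {
        if (engine_ != nullptr) {
            engine_->DecreaseListeningCounter(); // runs on every exit path
        }
    }
private:
    FakeEngine* engine_;
};

int main()
{
    FakeEngine engine;
    for (int i = 0; i < 5; i++) {
        engine.IncreaseListeningCounter(); // per message, at enqueue time
    }
    for (int i = 0; i < 5; i++) {
        MessageScope scope(&engine);       // per message, at consume time
    }
    printf("balance: %d\n", engine.listening.load()); // always 0
    return 0;
}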
@@ -112,7 +112,7 @@ void TaskPool::ExecuteCallbackInner(MsgQueue& msgQueue)
     auto resultInfo = msgQueue.DeQueue();
     ObjectScope<TaskResultInfo> resultInfoScope(resultInfo, false);
     napi_status status = napi_ok;
-    CallbackScope callbackScope(resultInfo->hostEnv, resultInfo->taskId, status);
+    CallbackScope callbackScope(resultInfo->hostEnv, resultInfo->workerEnv, resultInfo->taskId, status);
     if (status != napi_ok) {
         HILOG_ERROR("napi_open_handle_scope failed");
         return;
@@ -146,12 +146,6 @@ void TaskPool::ExecuteCallbackInner(MsgQueue& msgQueue)
             napi_get_and_clear_last_exception(env, &exception);
             HILOG_ERROR("taskpool:: an exception has occurred in napi_call_function");
         }
-        auto task = TaskManager::GetInstance().GetTask(resultInfo->taskId);
-        if (task == nullptr) {
-            HILOG_DEBUG("taskpool:: task has been released or cancelled");
-            continue;
-        }
-        napi_reference_unref(task->env_, task->taskRef_, nullptr);
     }
 }
 // ---------------------------------- SendData ---------------------------------------
@@ -64,19 +64,26 @@ private:
 
 class CallbackScope {
 public:
-    CallbackScope(napi_env env, uint64_t taskId, napi_status& status): env_(env), taskId_(taskId)
+    CallbackScope(napi_env env, napi_env workerEnv, uint64_t taskId, napi_status& status): env_(env),
+        workerEnv_(workerEnv), taskId_(taskId)
     {
         status = napi_open_handle_scope(env_, &scope_);
     }
     ~CallbackScope()
     {
         TaskManager::GetInstance().DecreaseRefCount(env_, taskId_);
+        if (workerEnv_ != nullptr) {
+            auto workerEngine = reinterpret_cast<NativeEngine*>(workerEnv_);
+            workerEngine->DecreaseListeningCounter();
+        }
+
         if (scope_ != nullptr) {
             napi_close_handle_scope(env_, scope_);
         }
     }
private:
     napi_env env_;
+    napi_env workerEnv_;
     uint64_t taskId_;
     napi_handle_scope scope_ = nullptr;
 };
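A note on the design choice: putting DecreaseListeningCounter in the CallbackScope destructor means the counter is released even when napi_open_handle_scope fails or the JS callback throws, since the scope unwinds on every path out of ExecuteCallbackInner.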