lavfi/dnn: Fill Task using Common Function

This commit adds a common function for filling the TaskItems
in all three backends.

Signed-off-by: Shubhanshu Saxena <shubhanshu.e01@gmail.com>
This commit is contained in:
Shubhanshu Saxena 2021-06-05 23:38:06 +05:30 committed by Guo Yejun
parent 6b961f7409
commit 5509235818
3 changed files with 42 additions and 16 deletions

View File

@ -49,3 +49,23 @@ int ff_check_exec_params(void *ctx, DNNBackendType backend, DNNFunctionType func
return 0;
}
/**
 * Fill a TaskItem with the fields shared by all backends.
 * Should be called after the execution parameters have been
 * validated with ff_check_exec_params.
 *
 * @param task          pointer to the allocated task
 * @param exec_params   pointer to execution parameters
 * @param backend_model void pointer to the backend model
 * @param async         flag for async execution, must be 0 or 1
 * @param do_ioproc     flag for IO processing, must be 0 or 1
 *
 * @retval DNN_SUCCESS if successful
 * @retval DNN_ERROR if flags are invalid or any parameter is NULL
 */
DNNReturnType ff_dnn_fill_task(TaskItem *task, DNNExecBaseParams *exec_params, void *backend_model, int async, int do_ioproc) {
    if (task == NULL || exec_params == NULL || backend_model == NULL)
        return DNN_ERROR;
    if (do_ioproc != 0 && do_ioproc != 1)
        return DNN_ERROR;
    if (async != 0 && async != 1)
        return DNN_ERROR;

    task->do_ioproc = do_ioproc;
    task->async = async;
    task->input_name = exec_params->input_name;
    task->in_frame = exec_params->in_frame;
    /* Preserve the behavior of the per-backend code this replaces:
     * when no out_frame is supplied, fall back to in_frame so the
     * result is written in place (see the removed OpenVINO lines
     * "task.out_frame = exec_params->out_frame ? ... : in_frame"). */
    task->out_frame = exec_params->out_frame ? exec_params->out_frame : exec_params->in_frame;
    task->model = backend_model;
    task->nb_output = exec_params->nb_output;
    task->output_names = exec_params->output_names;

    return DNN_SUCCESS;
}

View File

@ -48,4 +48,19 @@ typedef struct InferenceItem {
int ff_check_exec_params(void *ctx, DNNBackendType backend, DNNFunctionType func_type, DNNExecBaseParams *exec_params);
/**
 * Fill a TaskItem with the fields shared by all backends.
 * It should be called after checking the execution parameters
 * using ff_check_exec_params.
 *
 * @param task pointer to the allocated task
 * @param exec_params pointer to execution parameters
 * @param backend_model void pointer to the backend model
 * @param async flag for async execution. Must be 0 or 1
 * @param do_ioproc flag for IO processing. Must be 0 or 1
 *
 * @retval DNN_SUCCESS if successful
 * @retval DNN_ERROR if flags are invalid or any parameter is NULL
 */
DNNReturnType ff_dnn_fill_task(TaskItem *task, DNNExecBaseParams *exec_params, void *backend_model, int async, int do_ioproc);
#endif

View File

@ -793,14 +793,9 @@ DNNReturnType ff_dnn_execute_model_ov(const DNNModel *model, DNNExecBaseParams *
}
}
task.do_ioproc = 1;
task.async = 0;
task.input_name = exec_params->input_name;
task.in_frame = exec_params->in_frame;
task.output_names = &exec_params->output_names[0];
task.out_frame = exec_params->out_frame ? exec_params->out_frame : exec_params->in_frame;
task.nb_output = exec_params->nb_output;
task.model = ov_model;
if (ff_dnn_fill_task(&task, exec_params, ov_model, 0, 1) != DNN_SUCCESS) {
return DNN_ERROR;
}
if (extract_inference_from_task(ov_model->model->func_type, &task, ov_model->inference_queue, exec_params) != DNN_SUCCESS) {
av_log(ctx, AV_LOG_ERROR, "unable to extract inference from task.\n");
@ -841,14 +836,10 @@ DNNReturnType ff_dnn_execute_model_async_ov(const DNNModel *model, DNNExecBasePa
return DNN_ERROR;
}
task->do_ioproc = 1;
task->async = 1;
task->input_name = exec_params->input_name;
task->in_frame = exec_params->in_frame;
task->output_names = &exec_params->output_names[0];
task->out_frame = exec_params->out_frame ? exec_params->out_frame : exec_params->in_frame;
task->nb_output = exec_params->nb_output;
task->model = ov_model;
if (ff_dnn_fill_task(task, exec_params, ov_model, 1, 1) != DNN_SUCCESS) {
return DNN_ERROR;
}
if (ff_queue_push_back(ov_model->task_queue, task) < 0) {
av_freep(&task);
av_log(ctx, AV_LOG_ERROR, "unable to push back task_queue.\n");