author    Shubhanshu Saxena <shubhanshu.e01@gmail.com>   2021-08-08 16:25:31 +0530
committer Guo Yejun <yejun.guo@intel.com>                2021-08-10 22:27:27 +0800
commit    86f0a4f9deb702528b914a194fac727f08e76c20 (patch)
tree      c5af2d6c7a9d9889ea68588b11d332876961d6b4
parent    903c5d58f0311b12bd8127a545f1bf8549307f5c (diff)
lavfi/dnn: Add Async Execution Mechanism and Documentation
This commit adds an async execution mechanism for common use in the
TensorFlow and Native backends.

This commit also adds the documentation of typedefs and functions in
the async module for common use in DNN backends.

Signed-off-by: Shubhanshu Saxena <shubhanshu.e01@gmail.com>
-rw-r--r--  libavfilter/dnn/dnn_backend_common.c | 53
-rw-r--r--  libavfilter/dnn/dnn_backend_common.h | 56
2 files changed, 109 insertions(+), 0 deletions(-)
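
Usage sketch (not part of this commit): a backend embeds a DNNAsyncExecModule in its per-request item, points start_inference/callback/args at its own routines, and hands the module to ff_dnn_start_inference_async(). The names MyRequestItem, my_start_inference, my_completion_callback and my_execute_async below are hypothetical; only the DNNAsyncExecModule type and ff_dnn_start_inference_async() come from this patch.

/* Hypothetical backend glue around the new async module. */
#include "dnn_backend_common.h"

typedef struct MyRequestItem {
    TaskItem *task;                  /* task this request works on */
    DNNAsyncExecModule exec_module;  /* per-request async state */
} MyRequestItem;

/* Runs on the worker thread: perform the actual (blocking) inference. */
static DNNReturnType my_start_inference(void *args)
{
    MyRequestItem *request = args;
    /* ... run the backend's session for request->task ... */
    return DNN_SUCCESS;
}

/* Runs after inference: copy outputs and mark the task as done. */
static void my_completion_callback(void *args)
{
    MyRequestItem *request = args;
    /* ... fill the output frame of request->task, update counters ... */
}

static DNNReturnType my_execute_async(void *ctx, MyRequestItem *request)
{
    request->exec_module.start_inference = my_start_inference;
    request->exec_module.callback        = my_completion_callback;
    request->exec_module.args            = request;
    return ff_dnn_start_inference_async(ctx, &request->exec_module);
}

In practice the request item would come from and return to the backend's request queue; that bookkeeping is omitted here.
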
diff --git a/libavfilter/dnn/dnn_backend_common.c b/libavfilter/dnn/dnn_backend_common.c
index 4d9d3f79b1..4c9045501f 100644
--- a/libavfilter/dnn/dnn_backend_common.c
+++ b/libavfilter/dnn/dnn_backend_common.c
@@ -69,3 +69,56 @@ DNNReturnType ff_dnn_fill_task(TaskItem *task, DNNExecBaseParams *exec_params, v
return DNN_SUCCESS;
}
+
+/**
+ * Thread routine for async execution.
+ * @param args pointer to DNNAsyncExecModule module
+ */
+static void *async_thread_routine(void *args)
+{
+ DNNAsyncExecModule *async_module = args;
+ void *request = async_module->args;
+
+ async_module->start_inference(request);
+ async_module->callback(request);
+ return NULL;
+}
+
+DNNReturnType ff_dnn_async_module_cleanup(DNNAsyncExecModule *async_module)
+{
+ if (!async_module) {
+ return DNN_ERROR;
+ }
+#if HAVE_PTHREAD_CANCEL
+ pthread_join(async_module->thread_id, NULL);
+#endif
+ async_module->start_inference = NULL;
+ async_module->callback = NULL;
+ async_module->args = NULL;
+ return DNN_SUCCESS;
+}
+
+DNNReturnType ff_dnn_start_inference_async(void *ctx, DNNAsyncExecModule *async_module)
+{
+ int ret;
+
+ if (!async_module) {
+ av_log(ctx, AV_LOG_ERROR, "async_module is null when starting async inference.\n");
+ return DNN_ERROR;
+ }
+
+#if HAVE_PTHREAD_CANCEL
+ pthread_join(async_module->thread_id, NULL);
+ ret = pthread_create(&async_module->thread_id, NULL, async_thread_routine, async_module);
+ if (ret != 0) {
+ av_log(ctx, AV_LOG_ERROR, "Unable to start async inference.\n");
+ return DNN_ERROR;
+ }
+#else
+ if (async_module->start_inference(async_module->args) != DNN_SUCCESS) {
+ return DNN_ERROR;
+ }
+ async_module->callback(async_module->args);
+#endif
+ return DNN_SUCCESS;
+}
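
For illustration only, the control flow of async_thread_routine() and ff_dnn_start_inference_async() can be reduced to the following standalone sketch with plain POSIX threads and no FFmpeg types: the worker thread runs the inference function and then the completion callback, and the creator later joins it, which is what ff_dnn_async_module_cleanup() does.

/* Standalone sketch of the worker-thread control flow; not FFmpeg code. */
#include <pthread.h>
#include <stdio.h>

typedef struct ExecModule {
    int  (*start_inference)(void *request);  /* blocking inference step */
    void (*callback)(void *request);         /* completion callback */
    void *args;                              /* request item */
    pthread_t thread_id;
} ExecModule;

static void *thread_routine(void *arg)
{
    ExecModule *m = arg;
    m->start_inference(m->args);  /* run inference ... */
    m->callback(m->args);         /* ... then signal completion */
    return NULL;
}

static int fake_inference(void *request)
{
    printf("inference for %s\n", (const char *)request);
    return 0;
}

static void fake_callback(void *request)
{
    printf("completion callback for %s\n", (const char *)request);
}

int main(void)
{
    ExecModule m = { .start_inference = fake_inference,
                     .callback        = fake_callback,
                     .args            = "request-0" };

    if (pthread_create(&m.thread_id, NULL, thread_routine, &m))
        return 1;
    pthread_join(m.thread_id, NULL);  /* the join that the cleanup helper performs */
    return 0;
}
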
diff --git a/libavfilter/dnn/dnn_backend_common.h b/libavfilter/dnn/dnn_backend_common.h
index 5281fdfed1..96e4df9676 100644
--- a/libavfilter/dnn/dnn_backend_common.h
+++ b/libavfilter/dnn/dnn_backend_common.h
@@ -25,6 +25,7 @@
#define AVFILTER_DNN_DNN_BACKEND_COMMON_H
#include "../dnn_interface.h"
+#include "libavutil/thread.h"
#define DNN_BACKEND_COMMON_OPTIONS \
{ "nireq", "number of request", OFFSET(options.nireq), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, FLAGS },
@@ -49,6 +50,34 @@ typedef struct InferenceItem {
uint32_t bbox_index;
} InferenceItem;
+/**
+ * Common Async Execution Mechanism for the DNN Backends.
+ */
+typedef struct DNNAsyncExecModule {
+ /**
+ * Synchronous inference function for the backend
+ * with corresponding request item as the argument.
+ */
+ DNNReturnType (*start_inference)(void *request);
+
+ /**
+ * Completion Callback for the backend.
+ * Expected argument type of callback must match that
+ * of the inference function.
+ */
+ void (*callback)(void *args);
+
+ /**
+ * Argument for the execution functions.
+ * i.e. Request item for the backend.
+ */
+ void *args;
+#if HAVE_PTHREAD_CANCEL
+ pthread_t thread_id;
+ pthread_attr_t thread_attr;
+#endif
+} DNNAsyncExecModule;
+
int ff_check_exec_params(void *ctx, DNNBackendType backend, DNNFunctionType func_type, DNNExecBaseParams *exec_params);
/**
@@ -66,4 +95,31 @@ int ff_check_exec_params(void *ctx, DNNBackendType backend, DNNFunctionType func
*/
DNNReturnType ff_dnn_fill_task(TaskItem *task, DNNExecBaseParams *exec_params, void *backend_model, int async, int do_ioproc);
+/**
+ * Join the Async Execution thread and set module pointers to NULL.
+ *
+ * @param async_module pointer to DNNAsyncExecModule module
+ *
+ * @retval DNN_SUCCESS if successful
+ * @retval DNN_ERROR if async_module is NULL
+ */
+DNNReturnType ff_dnn_async_module_cleanup(DNNAsyncExecModule *async_module);
+
+/**
+ * Start asynchronous inference routine for the TensorFlow
+ * model on a detached thread. It calls the completion callback
+ * after the inference completes. Completion callback and inference
+ * function must be set before calling this function.
+ *
+ * If POSIX threads aren't supported, the execution rolls back
+ * to synchronous mode, calling completion callback after inference.
+ *
+ * @param ctx pointer to the backend context
+ * @param async_module pointer to DNNAsyncExecModule module
+ *
+ * @retval DNN_SUCCESS on the start of async inference.
+ * @retval DNN_ERROR in case async inference cannot be started
+ */
+DNNReturnType ff_dnn_start_inference_async(void *ctx, DNNAsyncExecModule *async_module);
+
#endif
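
A hypothetical teardown sketch (names are illustrative and assume the MyRequestItem layout from the earlier sketch): the backend joins any in-flight worker through ff_dnn_async_module_cleanup() before freeing a request item.

/* Hypothetical free path for a backend's request item. */
#include "libavutil/log.h"
#include "libavutil/mem.h"

static void my_free_request_item(void *ctx, MyRequestItem **request)
{
    if (!request || !*request)
        return;
    /* Wait for a possibly running worker thread and reset the module. */
    if (ff_dnn_async_module_cleanup(&(*request)->exec_module) != DNN_SUCCESS)
        av_log(ctx, AV_LOG_ERROR, "Failed to clean up the async execution module.\n");
    av_freep(request);
}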