FFmpeg
|
#include "libavformat/avio.h"
#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/cpu.h"
#include "libavutil/opt.h"
#include "libavcodec/defs.h"
#include "../internal.h"
#include "dnn_io_proc.h"
#include "dnn_backend_common.h"
#include "safe_queue.h"
#include <tensorflow/c/c_api.h>
Go to the source code of this file.
Data Structures | |
struct | TFOptions |
struct | TFContext |
struct | TFModel |
struct | TFInferRequest |
Stores execution parameters for a single call to the TensorFlow C API. More... | |
struct | TFRequestItem |
Macros | |
#define | OFFSET(x) offsetof(TFContext, x) |
#define | FLAGS AV_OPT_FLAG_FILTERING_PARAM |
#define | SPACE_CHARS " \t\r\n" |
Functions | |
AVFILTER_DEFINE_CLASS (dnn_tensorflow) | |
static int | execute_model_tf (TFRequestItem *request, Queue *lltask_queue) |
static void | infer_completion_callback (void *args) |
static void | destroy_request_item (TFRequestItem **arg) |
Free the TFRequestItem completely. More... | |
static void | free_buffer (void *data, size_t length) |
static void | tf_free_request (TFInferRequest *request) |
Free the contents of TensorFlow inference request. More... | |
static TFInferRequest * | tf_create_inference_request (void) |
Create a TensorFlow inference request. More... | |
static int | tf_start_inference (void *args) |
Start synchronous inference for the TensorFlow model. More... | |
static int | extract_lltask_from_task (TaskItem *task, Queue *lltask_queue) |
static TF_Buffer * | read_graph (const char *model_filename) |
static TF_Tensor * | allocate_input_tensor (const DNNData *input) |
static int | get_input_tf (void *model, DNNData *input, const char *input_name) |
static int | get_output_tf (void *model, const char *input_name, int input_width, int input_height, const char *output_name, int *output_width, int *output_height) |
static int | hex_to_data (uint8_t *data, const char *p) |
static int | load_tf_model (TFModel *tf_model, const char *model_filename) |
static void | dnn_free_model_tf (DNNModel **model) |
static DNNModel * | dnn_load_model_tf (const char *model_filename, DNNFunctionType func_type, const char *options, AVFilterContext *filter_ctx) |
static int | fill_model_input_tf (TFModel *tf_model, TFRequestItem *request) |
static int | dnn_execute_model_tf (const DNNModel *model, DNNExecBaseParams *exec_params) |
static DNNAsyncStatusType | dnn_get_result_tf (const DNNModel *model, AVFrame **in, AVFrame **out) |
static int | dnn_flush_tf (const DNNModel *model) |
Variables | |
static const AVOption | dnn_tensorflow_options [] |
const DNNModule | ff_dnn_backend_tf |
DNN TensorFlow backend implementation.
Definition in file dnn_backend_tf.c.
#define OFFSET | ( | x | ) | offsetof(TFContext, x) |
Definition at line 78 of file dnn_backend_tf.c.
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM |
Definition at line 79 of file dnn_backend_tf.c.
#define SPACE_CHARS " \t\r\n" |
Definition at line 369 of file dnn_backend_tf.c.
AVFILTER_DEFINE_CLASS | ( | dnn_tensorflow | ) |
|
static |
Definition at line 786 of file dnn_backend_tf.c.
Referenced by dnn_execute_model_tf(), and get_output_tf().
|
static |
Definition at line 723 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf(), and execute_model_tf().
|
inlinestatic |
Free the TFRequestItem completely.
arg | Address of the TFRequestItem instance. |
Definition at line 183 of file dnn_backend_tf.c.
Referenced by dnn_flush_tf(), dnn_free_model_tf(), dnn_load_model_tf(), execute_model_tf(), and infer_completion_callback().
|
static |
Definition at line 92 of file dnn_backend_tf.c.
Referenced by read_graph().
|
static |
Free the contents of TensorFlow inference request.
It does not free the TFInferRequest instance.
request | pointer to TFInferRequest instance. NULL pointer is allowed. |
Definition at line 104 of file dnn_backend_tf.c.
Referenced by destroy_request_item(), execute_model_tf(), fill_model_input_tf(), and infer_completion_callback().
|
static |
Create a TensorFlow inference request.
All properties are initially unallocated and set to NULL.
Definition at line 132 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf().
|
static |
Start synchronous inference for the TensorFlow model.
request | pointer to the TFRequestItem for inference |
0 | if execution is successful |
AVERROR(EINVAL) | if request is NULL |
DNN_GENERIC_ERROR | if execution fails |
Definition at line 153 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf(), and execute_model_tf().
Definition at line 197 of file dnn_backend_tf.c.
Referenced by dnn_execute_model_tf(), and get_output_tf().
|
static |
Definition at line 217 of file dnn_backend_tf.c.
Referenced by load_tf_model().
|
static |
Definition at line 250 of file dnn_backend_tf.c.
Referenced by fill_model_input_tf().
Definition at line 277 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf(), and fill_model_input_tf().
|
static |
Definition at line 325 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf().
|
static |
Definition at line 370 of file dnn_backend_tf.c.
Referenced by load_tf_model().
Definition at line 399 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf().
|
static |
Definition at line 494 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf(), and execute_model_tf().
|
static |
Definition at line 535 of file dnn_backend_tf.c.
|
static |
Definition at line 629 of file dnn_backend_tf.c.
Referenced by dnn_flush_tf(), and execute_model_tf().
|
static |
Definition at line 832 of file dnn_backend_tf.c.
|
static |
Definition at line 880 of file dnn_backend_tf.c.
Definition at line 886 of file dnn_backend_tf.c.
|
static |
Definition at line 80 of file dnn_backend_tf.c.
const DNNModule ff_dnn_backend_tf |
Definition at line 916 of file dnn_backend_tf.c.
Referenced by ff_get_dnn_module().