Core API

Low-level access to CVEDIA-RT functionality.

Classes

class cvedia::rt::module::AmbaOutCore
class cvedia::rt::module::AmbaReader
    Native implementation of the VideoReader plugin.
class cvedia::rt::module::ARMnnCore
class cvedia::rt::module::HailoCore
class cvedia::rt::module::MNNCore
class cvedia::rt::module::OnnxCore
class cvedia::rt::module::OpenVinoCore
class cvedia::rt::module::PaddleCore
class cvedia::rt::module::RknnCore
class cvedia::rt::module::SigmaStarCore
class cvedia::rt::module::SnpeCore
class cvedia::rt::module::TrtCore
class cvedia::rt::module::GStreamerReaderCore
    Native implementation of the VideoReader plugin.
class cvedia::rt::module::GStreamerWriterCore
    Native implementation of the VideoReader plugin.
class cvedia::rt::module::ImageReaderCore
    Native implementation of the ImageReader plugin.
class cvedia::rt::Inference
struct cvedia::rt::Inference::config
struct cvedia::rt::Inference::stats
class cvedia::module::JetsonUtils
class cvedia::module::JetsonVideoReader
    Native implementation of the VideoReader plugin.
class cvedia::rt::module::MotionCore
class cvedia::rt::module::MQTTCore
class cvedia::rt::module::REST
class cvedia::rt::module::Screencap
class cvedia::rt::module::TrackerCore
class cvedia::rt::module::TripwireCore
class cvedia::rt::module::VideoReaderCore
    Native implementation of the VideoReader plugin.
class cvedia::rt::module::WriteDataCore
class cvedia::rt::module::ZMQCore

Functions

Inference()
~Inference() =default
void initialize()
expected< void > loadBackend() const
NODISCARD expected< void > loadModel(std::string const & path)
expected< pCValue > getCapabilities() const
bool setModelVariant(std::string const & variant) const
NODISCARD expected< void > setDevice(std::string const & device) const
NODISCARD expected< std::vector< std::string > > getActiveDevices() const
NODISCARD expected< std::vector< std::vector< xt::xarray< float > > > > runInference(std::vector< std::vector< Tensor > > & input) const
void createBackends(module::createInferenceHandler func, std::string const & name, int poolSize)
void clearBackends()
expected< std::shared_ptr< iface::InferenceHandler > > getActiveHandler() const
static std::string shapeToString(std::vector< int > shape)
std::map< std::string, internal::ResourceUsage > getResourceUsages() const

Attributes

bool enabled
std::string model_file
int inference_count
bool modelLoaded_
std::string modelUri_
bool runStats_
unsigned int idCounter_
ssize_t inputBatchSize_
ssize_t inputWidth_
ssize_t inputHeight_
ssize_t inputChannels_
std::string channelLayout_
std::vector< int > inputShape_
std::vector< int > outputShape_
std::string loadedModel_
config pluginConf
stats pluginStats
pCValue modelConf_
pCValue backendConfig_

Functions Documentation

function Inference

Inference()

function ~Inference

~Inference() =default

function initialize

void initialize()

function loadBackend

expected< void > loadBackend() const
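
Illustrative usage: a minimal setup sketch, assuming the CVEDIA-RT SDK headers are available and that initialize() and loadBackend() are called before any model is loaded; the include path and the exact ordering contract are assumptions and should be confirmed against the plugin sources.

#include <cvedia/rt/Inference.h>   // illustrative include path

cvedia::rt::Inference inf;
inf.initialize();

// loadBackend() returns expected< void >, so the failure case must be handled.
if (auto backend = inf.loadBackend(); !backend) {
    // No usable inference backend is available on this system.
}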

function loadModel

NODISCARD expected< void > loadModel(
    std::string const & path
)
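
Illustrative usage: assuming inf is an initialized Inference instance, the NODISCARD expected< void > returned by loadModel must be checked; the model path below is a placeholder.

auto loaded = inf.loadModel("models/detector.onnx");   // placeholder path
if (!loaded) {
    // Model could not be loaded (missing file, unsupported format, backend error, ...).
}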

function getCapabilities

expected< pCValue > getCapabilities() const
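
Illustrative usage: querying the active backend's capabilities; pCValue is CVEDIA-RT's generic value container, and only the presence of a result is checked here because its accessor API is documented elsewhere.

if (auto caps = inf.getCapabilities()) {
    // *caps holds the capability description as a pCValue.
}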

function setModelVariant

bool setModelVariant(
    std::string const & variant
) const
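
Illustrative usage: selecting a named variant of the loaded model; the variant name "fp16" is a placeholder, and valid names depend on the model package.

if (!inf.setModelVariant("fp16")) {   // placeholder variant name
    // Variant is not available for this model.
}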

function setDevice

NODISCARD expected< void > setDevice(
    std::string const & device
) const

function getActiveDevices

NODISCARD expected< std::vector< std::string > > getActiveDevices() const
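
Illustrative usage: choosing an inference device and listing the devices currently in use; the device identifier "CPU" is a placeholder, since valid identifiers are backend-specific.

#include <iostream>

if (auto set = inf.setDevice("CPU"); !set) {   // placeholder device id
    // Device not available for the loaded backend.
}

if (auto devices = inf.getActiveDevices()) {
    for (auto const& device : *devices) {
        std::cout << device << "\n";
    }
}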

function runInference

NODISCARD expected< std::vector< std::vector< xt::xarray< float > > > > runInference(
    std::vector< std::vector< Tensor > > & input
) const
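
Illustrative usage: running a batch through the loaded model; frameTensors is assumed to be a std::vector< Tensor > produced by the application's own preprocessing, since Tensor construction is outside the scope of this page.

std::vector<std::vector<Tensor>> batch;
batch.push_back(frameTensors);   // assumed to be prepared elsewhere

auto outputs = inf.runInference(batch);
if (outputs) {
    // One result set per batch entry; each set holds one xt::xarray<float>
    // per model output.
    for (auto const& resultSet : *outputs) {
        for (auto const& output : resultSet) {
            // output.shape() gives the dimensions of this output tensor.
        }
    }
}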

function createBackends

void createBackends(
    module::createInferenceHandler func,
    std::string const & name,
    int poolSize
)
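
Illustrative usage: registering a pool of backend handlers; backendFactory stands for a module::createInferenceHandler entry point provided by an inference plugin, and both the backend name and the pool size below are placeholders.

// backendFactory: a module::createInferenceHandler obtained from an inference
// plugin (e.g. one of the *Core backends listed above); illustrative only.
inf.createBackends(backendFactory, "trt", 4);   // placeholder name and pool size

// clearBackends() releases the handlers again, e.g. before reconfiguring.
inf.clearBackends();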

function clearBackends

void clearBackends()

function getActiveHandler

expected< std::shared_ptr< iface::InferenceHandler > > getActiveHandler() const

function shapeToString

static std::string shapeToString(
    std::vector< int > shape
)
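
Illustrative usage: formatting a tensor shape for logging; the shape values are placeholders and the exact output format is determined by the implementation.

std::string shape = cvedia::rt::Inference::shapeToString({1, 3, 224, 224});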

function getResourceUsages

std::map< std::string, internal::ResourceUsage > getResourceUsages() const
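
Illustrative usage: listing the resources tracked by the inference module; only the map keys are printed, because the fields of internal::ResourceUsage are documented elsewhere.

#include <iostream>

for (auto const& entry : inf.getResourceUsages()) {
    std::cout << entry.first << "\n";
}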

Attributes Documentation

variable enabled

bool enabled = true;

variable model_file

std::string model_file;

variable inference_count

int inference_count = 0;

variable modelLoaded_

bool modelLoaded_ = false;

variable modelUri_

std::string modelUri_;

variable runStats_

bool runStats_ = false;

variable idCounter_

unsigned int idCounter_ = 0;

variable inputBatchSize_

ssize_t inputBatchSize_ = 0;

variable inputWidth_

ssize_t inputWidth_ = 0;

variable inputHeight_

ssize_t inputHeight_ = 0;

variable inputChannels_

ssize_t inputChannels_ = 0;

variable channelLayout_

std::string channelLayout_ {};

variable inputShape_

std::vector< int > inputShape_ {};

variable outputShape_

std::vector< int > outputShape_ {};

variable loadedModel_

std::string loadedModel_;

variable pluginConf

config pluginConf;

variable pluginStats

stats pluginStats;

variable modelConf_

pCValue modelConf_;

variable backendConfig_

pCValue backendConfig_;

Updated on 2023-06-02 at 17:01:24 +0000