package nvdsinferserver.config

Index of message and enum definitions in the nvdsinferserver Protocol Buffers schema
(nvdsinferserver_config.proto and nvdsinferserver_plugin.proto).

message BackendParams

nvdsinferserver_config.proto:294

* Network backend Settings

Used in: InferenceConfig

message ClassificationParams

nvdsinferserver_config.proto:160

* Deepstream Classification settings

Used in: PostProcessParams

message CustomLib

nvdsinferserver_config.proto:46

* Custom lib for preload

Used in: InferenceConfig

message DetectionParams

nvdsinferserver_config.proto:90

* Deepstream Detection settings

Used in: PostProcessParams

message DetectionParams.DbScan

nvdsinferserver_config.proto:103

* DBScan object clustering

Used in: DetectionParams

message DetectionParams.GroupRectangle

nvdsinferserver_config.proto:116

* cluster method based on grouping rectangles

Used in: DetectionParams

message DetectionParams.Nms

nvdsinferserver_config.proto:92

* non-maximum-suppression cluster method

Used in: DetectionParams

message DetectionParams.PerClassParams

nvdsinferserver_config.proto:133

* specific parameters controlled per class

Used in: DetectionParams

message DetectionParams.SimpleCluster

nvdsinferserver_config.proto:126

* simple cluster method for confidence filter

Used in: DetectionParams

message ExtraControl

nvdsinferserver_config.proto:309

* extra controls

Used in: InferenceConfig

enum FrameScalingHW

nvdsinferserver_config.proto:39

Used in: PreProcessParams

message InferenceConfig

nvdsinferserver_config.proto:319

* Inference configuration

Used in: PluginControl

message InputLayer

nvdsinferserver_config.proto:214

* Network Input layer information

Used in: BackendParams

message LstmParams

nvdsinferserver_config.proto:269

* Network LSTM Parameters

Used in: InferenceConfig

message LstmParams.InitConst

nvdsinferserver_config.proto:271

* init constant value for lstm input tensors, usually zero or one

Used in: LstmLoop

message LstmParams.LstmLoop

nvdsinferserver_config.proto:276

* LSTM loop information

Used in: LstmParams

enum MediaFormat

nvdsinferserver_config.proto:14

Used in: PreProcessParams

message OtherNetworkParams

nvdsinferserver_config.proto:174

* Other Network settings, need application to do postprocessing

Used in: PostProcessParams

message OutputLayer

nvdsinferserver_config.proto:224

* Network Output layer information

Used in: BackendParams

message PluginControl

nvdsinferserver_plugin.proto:17

* Plugin Control settings for input / inference / output

message PluginControl.BBoxFilter

nvdsinferserver_plugin.proto:32

* Bounding box filter

Used in: DetectClassFilter, InputObjectControl

message PluginControl.Color

nvdsinferserver_plugin.proto:20

* Color values for Red/Green/Blue/Alpha, all values are in range [0, 1]

Used in: DetectClassFilter

message PluginControl.DetectClassFilter

nvdsinferserver_plugin.proto:44

* Detection of classes filter

Used in: OutputDetectionControl

message PluginControl.InputControl

nvdsinferserver_plugin.proto:85

* Plugin input data control policy

Used in: PluginControl

message PluginControl.InputObjectControl

nvdsinferserver_plugin.proto:69

* Input objects control

Used in: InputControl

message PluginControl.OutputControl

nvdsinferserver_plugin.proto:110

* Plugin output data control policy

Used in: PluginControl

message PluginControl.OutputDetectionControl

nvdsinferserver_plugin.proto:61

* Output detection results control

Used in: OutputControl

enum PluginControl.ProcessMode

nvdsinferserver_plugin.proto:75

* Processing Mode

Used in: InputControl

message PostProcessParams

nvdsinferserver_config.proto:191

* Post-processing settings

Used in: InferenceConfig

message PreProcessParams

nvdsinferserver_config.proto:52

* preprocessing settings

Used in: InferenceConfig

message PreProcessParams.ScaleNormalize

nvdsinferserver_config.proto:54

* Input data normalization settings

Used in: PreProcessParams

message SegmentationParams

nvdsinferserver_config.proto:168

* Deepstream segmentation settings

Used in: PostProcessParams

enum TensorDataType

nvdsinferserver_config.proto:27

Used in: InputLayer

enum TensorOrder

nvdsinferserver_config.proto:21

Used in: PreProcessParams

message TrtISParams

nvdsinferserver_config.proto:230

* TRTIS inference backend parameters

Used in: BackendParams

message TrtISParams.ModelRepo

nvdsinferserver_config.proto:232

* TRTIS models repo settings

Used in: TrtISParams

message TrtIsClassifyParams

nvdsinferserver_config.proto:180

* TRTIS classification settings

Used in: PostProcessParams