package nvdsinferserver.config

Index of Protocol Buffers message and enum definitions (viewable/editable with a
binary Protocol Buffers message viewer).

message BackendParams

nvdsinferserver_config.proto:147

* Network backend Settings

Used in: InferenceConfig

message ClassificationParams

nvdsinferserver_common.proto:201

* Deepstream Classification settings

Used in: PostProcessParams

message CustomLib

nvdsinferserver_common.proto:65

* Custom lib for preload

Used in: InferenceConfig

message DetectionParams

nvdsinferserver_common.proto:131

* Deepstream Detection settings

Used in: PostProcessParams

message DetectionParams.DbScan

nvdsinferserver_common.proto:144

* DBScan object clustering

Used in: DetectionParams

message DetectionParams.GroupRectangle

nvdsinferserver_common.proto:157

* cluster method based on grouping rectangles

Used in: DetectionParams

message DetectionParams.Nms

nvdsinferserver_common.proto:133

* non-maximum-suppression cluster method

Used in: DetectionParams

message DetectionParams.PerClassParams

nvdsinferserver_common.proto:174

* specific parameters controlled per class

Used in: DetectionParams

message DetectionParams.SimpleCluster

nvdsinferserver_common.proto:167

* simple cluster method for confidence filter

Used in: DetectionParams

message ExtraControl

nvdsinferserver_config.proto:172

* Extra controls

Used in: InferenceConfig

enum FrameScalingHW

nvdsinferserver_common.proto:50

Used in: PreProcessParams

message InferenceConfig

nvdsinferserver_config.proto:192

* Inference configuration

Used in: PluginControl

message InputLayer

nvdsinferserver_common.proto:71

* Network Input layer information

Used in: BackendParams

message InputTensorFromMeta

nvdsinferserver_config.proto:185

* Input tensor is preprocessed

Used in: InferenceConfig

message LstmParams

nvdsinferserver_common.proto:236

* Network LSTM Parameters

Used in: InferenceConfig

message LstmParams.InitConst

nvdsinferserver_common.proto:238

* init constant value for lstm input tensors, usually zero or one

Used in: LstmLoop

message LstmParams.LstmLoop

nvdsinferserver_common.proto:243

* LSTM loop information

Used in: LstmParams

enum MediaFormat

nvdsinferserver_common.proto:21

Used in: PreProcessParams

enum MemoryType

nvdsinferserver_common.proto:58

* Tensor memory type

Used in: BackendParams

message OtherNetworkParams

nvdsinferserver_common.proto:219

* Other Network settings, need application to do postprocessing

Used in: PostProcessParams

message OutputLayer

nvdsinferserver_common.proto:81

* Network Output layer information

Used in: BackendParams

message PluginControl

nvdsinferserver_plugin.proto:24

* Plugin Control settings for input / inference / output

message PluginControl.BBoxFilter

nvdsinferserver_plugin.proto:39

* Bounding-box filter

Used in: DetectClassFilter, InputObjectControl

message PluginControl.Color

nvdsinferserver_plugin.proto:27

* Color values for Red/Green/Blue/Alpha, all values are in range [0, 1]

Used in: DetectClassFilter

message PluginControl.DetectClassFilter

nvdsinferserver_plugin.proto:51

* Filter for detected classes

Used in: OutputDetectionControl

message PluginControl.InputControl

nvdsinferserver_plugin.proto:92

* Plugin input data control policy

Used in: PluginControl

message PluginControl.InputObjectControl

nvdsinferserver_plugin.proto:76

* Input objects control

Used in: InputControl

message PluginControl.OutputControl

nvdsinferserver_plugin.proto:122

* Plugin output data control policy

Used in: PluginControl

message PluginControl.OutputDetectionControl

nvdsinferserver_plugin.proto:68

* Output detection results control

Used in: OutputControl

enum PluginControl.ProcessMode

nvdsinferserver_plugin.proto:82

* Processing Mode

Used in: InputControl

message PostProcessParams

nvdsinferserver_config.proto:25

* Post-processing settings

Used in: InferenceConfig

message PreProcessParams

nvdsinferserver_common.proto:90

* preprocessing settings

Used in: InferenceConfig

message PreProcessParams.ScaleNormalize

nvdsinferserver_common.proto:92

* Input data normalization settings

Used in: PreProcessParams

message SegmentationParams

nvdsinferserver_common.proto:209

* Deepstream segmentation settings

Used in: PostProcessParams

enum TensorDataType

nvdsinferserver_common.proto:34

Used in: InputLayer

enum TensorOrder

nvdsinferserver_common.proto:28

Used in: PreProcessParams

message TritonClassifyParams

nvdsinferserver_common.proto:225

* Triton classification settings

Used in: PostProcessParams, TritonParams

message TritonGrpcParams

nvdsinferserver_config.proto:118

Used in: TritonParams

message TritonModelRepo

nvdsinferserver_config.proto:50

* Triton models repo settings

Used in: TritonParams

message TritonModelRepo.BackendConfig

nvdsinferserver_config.proto:53

* Triton backend config settings

Used in: TritonModelRepo

message TritonModelRepo.CudaDeviceMem

nvdsinferserver_config.proto:63

* Cuda Memory settings for GPU device

Used in: TritonModelRepo

message TritonParams

nvdsinferserver_config.proto:131

* Triton inference backend parameters

Used in: BackendParams