Package inference
Class GrpcService.InferStatistics.Builder
java.lang.Object
com.google.protobuf.AbstractMessageLite.Builder
com.google.protobuf.AbstractMessage.Builder<GrpcService.InferStatistics.Builder>
com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
inference.GrpcService.InferStatistics.Builder
- All Implemented Interfaces:
com.google.protobuf.Message.Builder, com.google.protobuf.MessageLite.Builder, com.google.protobuf.MessageLiteOrBuilder, com.google.protobuf.MessageOrBuilder, GrpcService.InferStatisticsOrBuilder, Cloneable
- Enclosing class:
GrpcService.InferStatistics
public static final class GrpcService.InferStatistics.Builder
extends com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
implements GrpcService.InferStatisticsOrBuilder
@@ .. cpp:var:: message InferStatistics @@ @@ Inference statistics. @@
Protobuf type inference.InferStatistics
Method Summary
Modifier and TypeMethodDescriptionaddRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value) build()clear()@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..clearField(com.google.protobuf.Descriptors.FieldDescriptor field) clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) @@ ..@@ ..clone()@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..static final com.google.protobuf.Descriptors.Descriptorcom.google.protobuf.Descriptors.DescriptorgetFail()@@ ..@@ ..@@ ..getQueue()@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..boolean@@ ..boolean@@ ..boolean@@ ..boolean@@ ..boolean@@ ..booleanhasFail()@@ ..booleanhasQueue()@@ ..boolean@@ ..protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTablefinal boolean@@ ..@@ ..@@ ..@@ ..@@ ..@@ ..mergeFrom(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) mergeFrom(com.google.protobuf.Message other) @@ ..@@ ..mergeUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields) @@ ..setCacheHit(GrpcService.StatisticDuration.Builder builderForValue) @@ ..@@ ..setCacheMiss(GrpcService.StatisticDuration.Builder builderForValue) @@ ..@@ ..setComputeInfer(GrpcService.StatisticDuration.Builder builderForValue) @@ ..@@ ..setComputeInput(GrpcService.StatisticDuration.Builder builderForValue) @@ ..@@ ..setComputeOutput(GrpcService.StatisticDuration.Builder builderForValue) @@ ..@@ ..setFail(GrpcService.StatisticDuration.Builder builderForValue) @@ ..@@ ..setQueue(GrpcService.StatisticDuration.Builder builderForValue) @@ ..setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) @@ ..setSuccess(GrpcService.StatisticDuration.Builder builderForValue) @@ ..setUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields) Methods inherited from class com.google.protobuf.GeneratedMessageV3.Builder
getAllFields, getField, getFieldBuilder, getOneofFieldDescriptor, getParentForChildren, getRepeatedField, getRepeatedFieldBuilder, getRepeatedFieldCount, getUnknownFields, getUnknownFieldSetBuilder, hasField, hasOneof, internalGetMapField, internalGetMapFieldReflection, internalGetMutableMapField, internalGetMutableMapFieldReflection, isClean, markClean, mergeUnknownLengthDelimitedField, mergeUnknownVarintField, newBuilderForField, onBuilt, onChanged, parseUnknownField, setUnknownFieldSetBuilder, setUnknownFieldsProto3
Methods inherited from class com.google.protobuf.AbstractMessage.Builder
findInitializationErrors, getInitializationErrorString, internalMergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, newUninitializedMessageException, toString
Methods inherited from class com.google.protobuf.AbstractMessageLite.Builder
addAll, addAll, mergeDelimitedFrom, mergeDelimitedFrom, mergeFrom, newUninitializedMessageException
Methods inherited from class java.lang.Object
equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
Methods inherited from interface com.google.protobuf.Message.Builder
mergeDelimitedFrom, mergeDelimitedFrom
Methods inherited from interface com.google.protobuf.MessageLite.Builder
mergeFrom
Methods inherited from interface com.google.protobuf.MessageOrBuilder
findInitializationErrors, getAllFields, getField, getInitializationErrorString, getOneofFieldDescriptor, getRepeatedField, getRepeatedFieldCount, getUnknownFields, hasField, hasOneof
-
Method Details
-
getDescriptor
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() -
internalGetFieldAccessorTable
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
- Specified by:
internalGetFieldAccessorTable in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
clear
- Specified by:
clear in interface com.google.protobuf.Message.Builder
- Specified by:
clear in interface com.google.protobuf.MessageLite.Builder
- Overrides:
clear in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
getDescriptorForType
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()
- Specified by:
getDescriptorForType in interface com.google.protobuf.Message.Builder
- Specified by:
getDescriptorForType in interface com.google.protobuf.MessageOrBuilder
- Overrides:
getDescriptorForType in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
getDefaultInstanceForType
- Specified by:
getDefaultInstanceForType in interface com.google.protobuf.MessageLiteOrBuilder
- Specified by:
getDefaultInstanceForType in interface com.google.protobuf.MessageOrBuilder
-
build
- Specified by:
build in interface com.google.protobuf.Message.Builder
- Specified by:
build in interface com.google.protobuf.MessageLite.Builder
-
buildPartial
- Specified by:
buildPartial in interface com.google.protobuf.Message.Builder
- Specified by:
buildPartial in interface com.google.protobuf.MessageLite.Builder
-
clone
- Specified by:
clone in interface com.google.protobuf.Message.Builder
- Specified by:
clone in interface com.google.protobuf.MessageLite.Builder
- Overrides:
clone in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
setField
public GrpcService.InferStatistics.Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value)
- Specified by:
setField in interface com.google.protobuf.Message.Builder
- Overrides:
setField in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
clearField
public GrpcService.InferStatistics.Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field)
- Specified by:
clearField in interface com.google.protobuf.Message.Builder
- Overrides:
clearField in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
clearOneof
public GrpcService.InferStatistics.Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)
- Specified by:
clearOneof in interface com.google.protobuf.Message.Builder
- Overrides:
clearOneof in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
setRepeatedField
public GrpcService.InferStatistics.Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value)
- Specified by:
setRepeatedField in interface com.google.protobuf.Message.Builder
- Overrides:
setRepeatedField in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
addRepeatedField
public GrpcService.InferStatistics.Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value)
- Specified by:
addRepeatedField in interface com.google.protobuf.Message.Builder
- Overrides:
addRepeatedField in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
mergeFrom
- Specified by:
mergeFrom in interface com.google.protobuf.Message.Builder
- Overrides:
mergeFrom in class com.google.protobuf.AbstractMessage.Builder<GrpcService.InferStatistics.Builder>
-
mergeFrom
-
isInitialized
public final boolean isInitialized()
- Specified by:
isInitialized in interface com.google.protobuf.MessageLiteOrBuilder
- Overrides:
isInitialized in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
mergeFrom
public GrpcService.InferStatistics.Builder mergeFrom(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws IOException
- Specified by:
mergeFrom in interface com.google.protobuf.Message.Builder
- Specified by:
mergeFrom in interface com.google.protobuf.MessageLite.Builder
- Overrides:
mergeFrom in class com.google.protobuf.AbstractMessage.Builder<GrpcService.InferStatistics.Builder>
- Throws:
IOException
-
hasSuccess
public boolean hasSuccess()@@ .. cpp:var:: StatisticDuration success @@ @@ Cumulative count and duration for successful inference @@ request. The "success" count and cumulative duration includes @@ cache hits. @@
.inference.StatisticDuration success = 1;
- Specified by:
hasSuccess in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- Whether the success field is set.
-
getSuccess
@@ .. cpp:var:: StatisticDuration success @@ @@ Cumulative count and duration for successful inference @@ request. The "success" count and cumulative duration includes @@ cache hits. @@
.inference.StatisticDuration success = 1;
- Specified by:
getSuccess in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- The success.
-
setSuccess
@@ .. cpp:var:: StatisticDuration success @@ @@ Cumulative count and duration for successful inference @@ request. The "success" count and cumulative duration includes @@ cache hits. @@
.inference.StatisticDuration success = 1; -
setSuccess
public GrpcService.InferStatistics.Builder setSuccess(GrpcService.StatisticDuration.Builder builderForValue) @@ .. cpp:var:: StatisticDuration success @@ @@ Cumulative count and duration for successful inference @@ request. The "success" count and cumulative duration includes @@ cache hits. @@
.inference.StatisticDuration success = 1; -
mergeSuccess
@@ .. cpp:var:: StatisticDuration success @@ @@ Cumulative count and duration for successful inference @@ request. The "success" count and cumulative duration includes @@ cache hits. @@
.inference.StatisticDuration success = 1; -
clearSuccess
@@ .. cpp:var:: StatisticDuration success @@ @@ Cumulative count and duration for successful inference @@ request. The "success" count and cumulative duration includes @@ cache hits. @@
.inference.StatisticDuration success = 1; -
getSuccessBuilder
@@ .. cpp:var:: StatisticDuration success @@ @@ Cumulative count and duration for successful inference @@ request. The "success" count and cumulative duration includes @@ cache hits. @@
.inference.StatisticDuration success = 1; -
getSuccessOrBuilder
@@ .. cpp:var:: StatisticDuration success @@ @@ Cumulative count and duration for successful inference @@ request. The "success" count and cumulative duration includes @@ cache hits. @@
.inference.StatisticDuration success = 1;
- Specified by:
getSuccessOrBuilder in interface GrpcService.InferStatisticsOrBuilder
-
hasFail
public boolean hasFail()@@ .. cpp:var:: StatisticDuration fail @@ @@ Cumulative count and duration for failed inference @@ request. @@
.inference.StatisticDuration fail = 2;
- Specified by:
hasFail in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- Whether the fail field is set.
-
getFail
@@ .. cpp:var:: StatisticDuration fail @@ @@ Cumulative count and duration for failed inference @@ request. @@
.inference.StatisticDuration fail = 2;
- Specified by:
getFail in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- The fail.
-
setFail
@@ .. cpp:var:: StatisticDuration fail @@ @@ Cumulative count and duration for failed inference @@ request. @@
.inference.StatisticDuration fail = 2; -
setFail
public GrpcService.InferStatistics.Builder setFail(GrpcService.StatisticDuration.Builder builderForValue) @@ .. cpp:var:: StatisticDuration fail @@ @@ Cumulative count and duration for failed inference @@ request. @@
.inference.StatisticDuration fail = 2; -
mergeFail
@@ .. cpp:var:: StatisticDuration fail @@ @@ Cumulative count and duration for failed inference @@ request. @@
.inference.StatisticDuration fail = 2; -
clearFail
@@ .. cpp:var:: StatisticDuration fail @@ @@ Cumulative count and duration for failed inference @@ request. @@
.inference.StatisticDuration fail = 2; -
getFailBuilder
@@ .. cpp:var:: StatisticDuration fail @@ @@ Cumulative count and duration for failed inference @@ request. @@
.inference.StatisticDuration fail = 2; -
getFailOrBuilder
@@ .. cpp:var:: StatisticDuration fail @@ @@ Cumulative count and duration for failed inference @@ request. @@
.inference.StatisticDuration fail = 2;
- Specified by:
getFailOrBuilder in interface GrpcService.InferStatisticsOrBuilder
-
hasQueue
public boolean hasQueue()@@ .. cpp:var:: StatisticDuration queue @@ @@ The count and cumulative duration that inference requests wait in @@ scheduling or other queues. The "queue" count and cumulative @@ duration includes cache hits. @@
.inference.StatisticDuration queue = 3;
- Specified by:
hasQueue in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- Whether the queue field is set.
-
getQueue
@@ .. cpp:var:: StatisticDuration queue @@ @@ The count and cumulative duration that inference requests wait in @@ scheduling or other queues. The "queue" count and cumulative @@ duration includes cache hits. @@
.inference.StatisticDuration queue = 3;
- Specified by:
getQueue in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- The queue.
-
setQueue
@@ .. cpp:var:: StatisticDuration queue @@ @@ The count and cumulative duration that inference requests wait in @@ scheduling or other queues. The "queue" count and cumulative @@ duration includes cache hits. @@
.inference.StatisticDuration queue = 3; -
setQueue
public GrpcService.InferStatistics.Builder setQueue(GrpcService.StatisticDuration.Builder builderForValue) @@ .. cpp:var:: StatisticDuration queue @@ @@ The count and cumulative duration that inference requests wait in @@ scheduling or other queues. The "queue" count and cumulative @@ duration includes cache hits. @@
.inference.StatisticDuration queue = 3; -
mergeQueue
@@ .. cpp:var:: StatisticDuration queue @@ @@ The count and cumulative duration that inference requests wait in @@ scheduling or other queues. The "queue" count and cumulative @@ duration includes cache hits. @@
.inference.StatisticDuration queue = 3; -
clearQueue
@@ .. cpp:var:: StatisticDuration queue @@ @@ The count and cumulative duration that inference requests wait in @@ scheduling or other queues. The "queue" count and cumulative @@ duration includes cache hits. @@
.inference.StatisticDuration queue = 3; -
getQueueBuilder
@@ .. cpp:var:: StatisticDuration queue @@ @@ The count and cumulative duration that inference requests wait in @@ scheduling or other queues. The "queue" count and cumulative @@ duration includes cache hits. @@
.inference.StatisticDuration queue = 3; -
getQueueOrBuilder
@@ .. cpp:var:: StatisticDuration queue @@ @@ The count and cumulative duration that inference requests wait in @@ scheduling or other queues. The "queue" count and cumulative @@ duration includes cache hits. @@
.inference.StatisticDuration queue = 3;
- Specified by:
getQueueOrBuilder in interface GrpcService.InferStatisticsOrBuilder
-
hasComputeInput
public boolean hasComputeInput()@@ .. cpp:var:: StatisticDuration compute_input @@ @@ The count and cumulative duration to prepare input tensor data as @@ required by the model framework / backend. For example, this duration @@ should include the time to copy input tensor data to the GPU. @@ The "compute_input" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_input = 4;
- Specified by:
hasComputeInput in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- Whether the computeInput field is set.
-
getComputeInput
@@ .. cpp:var:: StatisticDuration compute_input @@ @@ The count and cumulative duration to prepare input tensor data as @@ required by the model framework / backend. For example, this duration @@ should include the time to copy input tensor data to the GPU. @@ The "compute_input" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_input = 4;
- Specified by:
getComputeInput in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- The computeInput.
-
setComputeInput
@@ .. cpp:var:: StatisticDuration compute_input @@ @@ The count and cumulative duration to prepare input tensor data as @@ required by the model framework / backend. For example, this duration @@ should include the time to copy input tensor data to the GPU. @@ The "compute_input" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_input = 4; -
setComputeInput
public GrpcService.InferStatistics.Builder setComputeInput(GrpcService.StatisticDuration.Builder builderForValue) @@ .. cpp:var:: StatisticDuration compute_input @@ @@ The count and cumulative duration to prepare input tensor data as @@ required by the model framework / backend. For example, this duration @@ should include the time to copy input tensor data to the GPU. @@ The "compute_input" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_input = 4; -
mergeComputeInput
@@ .. cpp:var:: StatisticDuration compute_input @@ @@ The count and cumulative duration to prepare input tensor data as @@ required by the model framework / backend. For example, this duration @@ should include the time to copy input tensor data to the GPU. @@ The "compute_input" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_input = 4; -
clearComputeInput
@@ .. cpp:var:: StatisticDuration compute_input @@ @@ The count and cumulative duration to prepare input tensor data as @@ required by the model framework / backend. For example, this duration @@ should include the time to copy input tensor data to the GPU. @@ The "compute_input" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_input = 4; -
getComputeInputBuilder
@@ .. cpp:var:: StatisticDuration compute_input @@ @@ The count and cumulative duration to prepare input tensor data as @@ required by the model framework / backend. For example, this duration @@ should include the time to copy input tensor data to the GPU. @@ The "compute_input" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_input = 4; -
getComputeInputOrBuilder
@@ .. cpp:var:: StatisticDuration compute_input @@ @@ The count and cumulative duration to prepare input tensor data as @@ required by the model framework / backend. For example, this duration @@ should include the time to copy input tensor data to the GPU. @@ The "compute_input" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_input = 4;
- Specified by:
getComputeInputOrBuilder in interface GrpcService.InferStatisticsOrBuilder
-
hasComputeInfer
public boolean hasComputeInfer()@@ .. cpp:var:: StatisticDuration compute_infer @@ @@ The count and cumulative duration to execute the model. @@ The "compute_infer" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_infer = 5;
- Specified by:
hasComputeInfer in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- Whether the computeInfer field is set.
-
getComputeInfer
@@ .. cpp:var:: StatisticDuration compute_infer @@ @@ The count and cumulative duration to execute the model. @@ The "compute_infer" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_infer = 5;
- Specified by:
getComputeInfer in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- The computeInfer.
-
setComputeInfer
@@ .. cpp:var:: StatisticDuration compute_infer @@ @@ The count and cumulative duration to execute the model. @@ The "compute_infer" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_infer = 5; -
setComputeInfer
public GrpcService.InferStatistics.Builder setComputeInfer(GrpcService.StatisticDuration.Builder builderForValue) @@ .. cpp:var:: StatisticDuration compute_infer @@ @@ The count and cumulative duration to execute the model. @@ The "compute_infer" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_infer = 5; -
mergeComputeInfer
@@ .. cpp:var:: StatisticDuration compute_infer @@ @@ The count and cumulative duration to execute the model. @@ The "compute_infer" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_infer = 5; -
clearComputeInfer
@@ .. cpp:var:: StatisticDuration compute_infer @@ @@ The count and cumulative duration to execute the model. @@ The "compute_infer" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_infer = 5; -
getComputeInferBuilder
@@ .. cpp:var:: StatisticDuration compute_infer @@ @@ The count and cumulative duration to execute the model. @@ The "compute_infer" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_infer = 5; -
getComputeInferOrBuilder
@@ .. cpp:var:: StatisticDuration compute_infer @@ @@ The count and cumulative duration to execute the model. @@ The "compute_infer" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_infer = 5;
- Specified by:
getComputeInferOrBuilder in interface GrpcService.InferStatisticsOrBuilder
-
hasComputeOutput
public boolean hasComputeOutput()@@ .. cpp:var:: StatisticDuration compute_output @@ @@ The count and cumulative duration to extract output tensor data @@ produced by the model framework / backend. For example, this duration @@ should include the time to copy output tensor data from the GPU. @@ The "compute_output" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_output = 6;
- Specified by:
hasComputeOutput in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- Whether the computeOutput field is set.
-
getComputeOutput
@@ .. cpp:var:: StatisticDuration compute_output @@ @@ The count and cumulative duration to extract output tensor data @@ produced by the model framework / backend. For example, this duration @@ should include the time to copy output tensor data from the GPU. @@ The "compute_output" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_output = 6;
- Specified by:
getComputeOutput in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- The computeOutput.
-
setComputeOutput
@@ .. cpp:var:: StatisticDuration compute_output @@ @@ The count and cumulative duration to extract output tensor data @@ produced by the model framework / backend. For example, this duration @@ should include the time to copy output tensor data from the GPU. @@ The "compute_output" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_output = 6; -
setComputeOutput
public GrpcService.InferStatistics.Builder setComputeOutput(GrpcService.StatisticDuration.Builder builderForValue) @@ .. cpp:var:: StatisticDuration compute_output @@ @@ The count and cumulative duration to extract output tensor data @@ produced by the model framework / backend. For example, this duration @@ should include the time to copy output tensor data from the GPU. @@ The "compute_output" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_output = 6; -
mergeComputeOutput
@@ .. cpp:var:: StatisticDuration compute_output @@ @@ The count and cumulative duration to extract output tensor data @@ produced by the model framework / backend. For example, this duration @@ should include the time to copy output tensor data from the GPU. @@ The "compute_output" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_output = 6; -
clearComputeOutput
@@ .. cpp:var:: StatisticDuration compute_output @@ @@ The count and cumulative duration to extract output tensor data @@ produced by the model framework / backend. For example, this duration @@ should include the time to copy output tensor data from the GPU. @@ The "compute_output" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_output = 6; -
getComputeOutputBuilder
@@ .. cpp:var:: StatisticDuration compute_output @@ @@ The count and cumulative duration to extract output tensor data @@ produced by the model framework / backend. For example, this duration @@ should include the time to copy output tensor data from the GPU. @@ The "compute_output" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_output = 6; -
getComputeOutputOrBuilder
@@ .. cpp:var:: StatisticDuration compute_output @@ @@ The count and cumulative duration to extract output tensor data @@ produced by the model framework / backend. For example, this duration @@ should include the time to copy output tensor data from the GPU. @@ The "compute_output" count and cumulative duration do not account for @@ requests that were a cache hit. See the "cache_hit" field for more @@ info. @@
.inference.StatisticDuration compute_output = 6;
- Specified by:
getComputeOutputOrBuilder in interface GrpcService.InferStatisticsOrBuilder
-
hasCacheHit
public boolean hasCacheHit()@@ .. cpp:var:: StatisticDuration cache_hit @@ @@ The count of response cache hits and cumulative duration to lookup @@ and extract output tensor data from the Response Cache on a cache @@ hit. For example, this duration should include the time to copy @@ output tensor data from the Response Cache to the response object. @@ On cache hits, triton does not need to go to the model/backend @@ for the output tensor data, so the "compute_input", "compute_infer", @@ and "compute_output" fields are not updated. Assuming the response @@ cache is enabled for a given model, a cache hit occurs for a @@ request to that model when the request metadata (model name, @@ model version, model inputs) hashes to an existing entry in the @@ cache. On a cache miss, the request hash and response output tensor @@ data is added to the cache. See response cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_hit = 7;
- Specified by:
hasCacheHit in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- Whether the cacheHit field is set.
-
getCacheHit
@@ .. cpp:var:: StatisticDuration cache_hit @@ @@ The count of response cache hits and cumulative duration to lookup @@ and extract output tensor data from the Response Cache on a cache @@ hit. For example, this duration should include the time to copy @@ output tensor data from the Response Cache to the response object. @@ On cache hits, triton does not need to go to the model/backend @@ for the output tensor data, so the "compute_input", "compute_infer", @@ and "compute_output" fields are not updated. Assuming the response @@ cache is enabled for a given model, a cache hit occurs for a @@ request to that model when the request metadata (model name, @@ model version, model inputs) hashes to an existing entry in the @@ cache. On a cache miss, the request hash and response output tensor @@ data is added to the cache. See response cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_hit = 7;
- Specified by:
getCacheHit in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- The cacheHit.
-
setCacheHit
@@ .. cpp:var:: StatisticDuration cache_hit @@ @@ The count of response cache hits and cumulative duration to lookup @@ and extract output tensor data from the Response Cache on a cache @@ hit. For example, this duration should include the time to copy @@ output tensor data from the Response Cache to the response object. @@ On cache hits, triton does not need to go to the model/backend @@ for the output tensor data, so the "compute_input", "compute_infer", @@ and "compute_output" fields are not updated. Assuming the response @@ cache is enabled for a given model, a cache hit occurs for a @@ request to that model when the request metadata (model name, @@ model version, model inputs) hashes to an existing entry in the @@ cache. On a cache miss, the request hash and response output tensor @@ data is added to the cache. See response cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_hit = 7; -
setCacheHit
public GrpcService.InferStatistics.Builder setCacheHit(GrpcService.StatisticDuration.Builder builderForValue) @@ .. cpp:var:: StatisticDuration cache_hit @@ @@ The count of response cache hits and cumulative duration to lookup @@ and extract output tensor data from the Response Cache on a cache @@ hit. For example, this duration should include the time to copy @@ output tensor data from the Response Cache to the response object. @@ On cache hits, triton does not need to go to the model/backend @@ for the output tensor data, so the "compute_input", "compute_infer", @@ and "compute_output" fields are not updated. Assuming the response @@ cache is enabled for a given model, a cache hit occurs for a @@ request to that model when the request metadata (model name, @@ model version, model inputs) hashes to an existing entry in the @@ cache. On a cache miss, the request hash and response output tensor @@ data is added to the cache. See response cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_hit = 7; -
mergeCacheHit
@@ .. cpp:var:: StatisticDuration cache_hit @@ @@ The count of response cache hits and cumulative duration to lookup @@ and extract output tensor data from the Response Cache on a cache @@ hit. For example, this duration should include the time to copy @@ output tensor data from the Response Cache to the response object. @@ On cache hits, triton does not need to go to the model/backend @@ for the output tensor data, so the "compute_input", "compute_infer", @@ and "compute_output" fields are not updated. Assuming the response @@ cache is enabled for a given model, a cache hit occurs for a @@ request to that model when the request metadata (model name, @@ model version, model inputs) hashes to an existing entry in the @@ cache. On a cache miss, the request hash and response output tensor @@ data is added to the cache. See response cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_hit = 7; -
clearCacheHit
@@ .. cpp:var:: StatisticDuration cache_hit @@ @@ The count of response cache hits and cumulative duration to lookup @@ and extract output tensor data from the Response Cache on a cache @@ hit. For example, this duration should include the time to copy @@ output tensor data from the Response Cache to the response object. @@ On cache hits, triton does not need to go to the model/backend @@ for the output tensor data, so the "compute_input", "compute_infer", @@ and "compute_output" fields are not updated. Assuming the response @@ cache is enabled for a given model, a cache hit occurs for a @@ request to that model when the request metadata (model name, @@ model version, model inputs) hashes to an existing entry in the @@ cache. On a cache miss, the request hash and response output tensor @@ data is added to the cache. See response cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_hit = 7; -
getCacheHitBuilder
@@ .. cpp:var:: StatisticDuration cache_hit @@ @@ The count of response cache hits and cumulative duration to lookup @@ and extract output tensor data from the Response Cache on a cache @@ hit. For example, this duration should include the time to copy @@ output tensor data from the Response Cache to the response object. @@ On cache hits, triton does not need to go to the model/backend @@ for the output tensor data, so the "compute_input", "compute_infer", @@ and "compute_output" fields are not updated. Assuming the response @@ cache is enabled for a given model, a cache hit occurs for a @@ request to that model when the request metadata (model name, @@ model version, model inputs) hashes to an existing entry in the @@ cache. On a cache miss, the request hash and response output tensor @@ data is added to the cache. See response cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_hit = 7; -
getCacheHitOrBuilder
@@ .. cpp:var:: StatisticDuration cache_hit @@ @@ The count of response cache hits and cumulative duration to lookup @@ and extract output tensor data from the Response Cache on a cache @@ hit. For example, this duration should include the time to copy @@ output tensor data from the Response Cache to the response object. @@ On cache hits, triton does not need to go to the model/backend @@ for the output tensor data, so the "compute_input", "compute_infer", @@ and "compute_output" fields are not updated. Assuming the response @@ cache is enabled for a given model, a cache hit occurs for a @@ request to that model when the request metadata (model name, @@ model version, model inputs) hashes to an existing entry in the @@ cache. On a cache miss, the request hash and response output tensor @@ data is added to the cache. See response cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_hit = 7;
- Specified by:
getCacheHitOrBuilder in interface GrpcService.InferStatisticsOrBuilder
-
hasCacheMiss
public boolean hasCacheMiss()@@ .. cpp:var:: StatisticDuration cache_miss @@ @@ The count of response cache misses and cumulative duration to lookup @@ and insert output tensor data from the computed response to the cache. @@ For example, this duration should include the time to copy @@ output tensor data from the response object to the Response Cache. @@ Assuming the response cache is enabled for a given model, a cache @@ miss occurs for a request to that model when the request metadata @@ does NOT hash to an existing entry in the cache. See the response @@ cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_miss = 8;
- Specified by:
hasCacheMiss in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- Whether the cacheMiss field is set.
-
getCacheMiss
@@ .. cpp:var:: StatisticDuration cache_miss @@ @@ The count of response cache misses and cumulative duration to lookup @@ and insert output tensor data from the computed response to the cache. @@ For example, this duration should include the time to copy @@ output tensor data from the response object to the Response Cache. @@ Assuming the response cache is enabled for a given model, a cache @@ miss occurs for a request to that model when the request metadata @@ does NOT hash to an existing entry in the cache. See the response @@ cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_miss = 8;
- Specified by:
getCacheMiss in interface GrpcService.InferStatisticsOrBuilder
- Returns:
- The cacheMiss.
-
setCacheMiss
@@ .. cpp:var:: StatisticDuration cache_miss @@ @@ The count of response cache misses and cumulative duration to lookup @@ and insert output tensor data from the computed response to the cache. @@ For example, this duration should include the time to copy @@ output tensor data from the response object to the Response Cache. @@ Assuming the response cache is enabled for a given model, a cache @@ miss occurs for a request to that model when the request metadata @@ does NOT hash to an existing entry in the cache. See the response @@ cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_miss = 8; -
setCacheMiss
public GrpcService.InferStatistics.Builder setCacheMiss(GrpcService.StatisticDuration.Builder builderForValue) @@ .. cpp:var:: StatisticDuration cache_miss @@ @@ The count of response cache misses and cumulative duration to lookup @@ and insert output tensor data from the computed response to the cache. @@ For example, this duration should include the time to copy @@ output tensor data from the response object to the Response Cache. @@ Assuming the response cache is enabled for a given model, a cache @@ miss occurs for a request to that model when the request metadata @@ does NOT hash to an existing entry in the cache. See the response @@ cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_miss = 8; -
mergeCacheMiss
@@ .. cpp:var:: StatisticDuration cache_miss @@ @@ The count of response cache misses and cumulative duration to lookup @@ and insert output tensor data from the computed response to the cache. @@ For example, this duration should include the time to copy @@ output tensor data from the response object to the Response Cache. @@ Assuming the response cache is enabled for a given model, a cache @@ miss occurs for a request to that model when the request metadata @@ does NOT hash to an existing entry in the cache. See the response @@ cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_miss = 8; -
clearCacheMiss
@@ .. cpp:var:: StatisticDuration cache_miss @@ @@ The count of response cache misses and cumulative duration to lookup @@ and insert output tensor data from the computed response to the cache. @@ For example, this duration should include the time to copy @@ output tensor data from the response object to the Response Cache. @@ Assuming the response cache is enabled for a given model, a cache @@ miss occurs for a request to that model when the request metadata @@ does NOT hash to an existing entry in the cache. See the response @@ cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_miss = 8; -
getCacheMissBuilder
@@ .. cpp:var:: StatisticDuration cache_miss @@ @@ The count of response cache misses and cumulative duration to lookup @@ and insert output tensor data from the computed response to the cache. @@ For example, this duration should include the time to copy @@ output tensor data from the response object to the Response Cache. @@ Assuming the response cache is enabled for a given model, a cache @@ miss occurs for a request to that model when the request metadata @@ does NOT hash to an existing entry in the cache. See the response @@ cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_miss = 8; -
getCacheMissOrBuilder
@@ .. cpp:var:: StatisticDuration cache_miss @@ @@ The count of response cache misses and cumulative duration to lookup @@ and insert output tensor data from the computed response to the cache. @@ For example, this duration should include the time to copy @@ output tensor data from the response object to the Response Cache. @@ Assuming the response cache is enabled for a given model, a cache @@ miss occurs for a request to that model when the request metadata @@ does NOT hash to an existing entry in the cache. See the response @@ cache docs for more info: @@ https://github.com/triton-inference-server/server/blob/main/docs/response_cache.md @@
.inference.StatisticDuration cache_miss = 8;
- Specified by:
getCacheMissOrBuilder in interface GrpcService.InferStatisticsOrBuilder
-
setUnknownFields
public final GrpcService.InferStatistics.Builder setUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
- Specified by:
setUnknownFields in interface com.google.protobuf.Message.Builder
- Overrides:
setUnknownFields in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-
mergeUnknownFields
public final GrpcService.InferStatistics.Builder mergeUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
- Specified by:
mergeUnknownFields in interface com.google.protobuf.Message.Builder
- Overrides:
mergeUnknownFields in class com.google.protobuf.GeneratedMessageV3.Builder<GrpcService.InferStatistics.Builder>
-