Class LlmCall.TokenCount.Builder (0.94.0)

public static final class LlmCall.TokenCount.Builder extends GeneratedMessage.Builder<LlmCall.TokenCount.Builder> implements LlmCall.TokenCountOrBuilder

Stores token counts of the LLM call.

Protobuf type google.cloud.dialogflow.cx.v3beta1.LlmCall.TokenCount
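
A minimal usage sketch, assuming the standard generated newBuilder() factory on LlmCall.TokenCount and the usual Java package com.google.cloud.dialogflow.cx.v3beta1 for these messages (neither is listed on this Builder page); the setters, getters, and build() used here are the methods documented below.

import com.google.cloud.dialogflow.cx.v3beta1.LlmCall;

public class TokenCountBuilderExample {
  public static void main(String[] args) {
    // Populate each documented int64 field, then freeze the message.
    LlmCall.TokenCount tokenCount =
        LlmCall.TokenCount.newBuilder()
            .setTotalInputTokenCount(1200L)          // total_input_token_count = 1
            .setConversationContextTokenCount(800L)  // conversation_context_token_count = 3
            .setExampleTokenCount(150L)              // example_token_count = 4
            .setTotalOutputTokenCount(96L)           // total_output_token_count = 5
            .build();

    System.out.println(tokenCount.getTotalOutputTokenCount()); // 96
  }
}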

Static Methods

getDescriptor()

public static final Descriptors.Descriptor getDescriptor()
Returns
Type Description
Descriptor

Methods

build()

public LlmCall.TokenCount build()
Returns
Type Description
LlmCall.TokenCount

buildPartial()

public LlmCall.TokenCount buildPartial()
Returns
Type Description
LlmCall.TokenCount
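
The two build methods behave alike for this proto3 message; a hedged sketch, assuming standard protobuf-java semantics where buildPartial() simply skips the isInitialized() check that build() performs.

LlmCall.TokenCount.Builder builder =
    LlmCall.TokenCount.newBuilder().setTotalInputTokenCount(42L);

// build() verifies isInitialized() before returning the immutable message;
// buildPartial() returns it without that check. With no required fields in
// proto3, both calls succeed and produce equal messages here.
LlmCall.TokenCount built = builder.build();
LlmCall.TokenCount partial = builder.buildPartial();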

clear()

public LlmCall.TokenCount.Builder clear()
Returns
Type Description
LlmCall.TokenCount.Builder
Overrides
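
A short sketch of clear(), again assuming the standard newBuilder() factory; after the call every field reads back as the int64 default of 0.

LlmCall.TokenCount.Builder builder =
    LlmCall.TokenCount.newBuilder().setExampleTokenCount(10L);

builder.clear();                                // reset all fields to their defaults
long example = builder.getExampleTokenCount();  // 0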

clearConversationContextTokenCount()

public LlmCall.TokenCount.Builder clearConversationContextTokenCount()

The number of tokens used for the conversation history in the prompt.

int64 conversation_context_token_count = 3;

Returns
Type Description
LlmCall.TokenCount.Builder This builder for chaining.

clearExampleTokenCount()

public LlmCall.TokenCount.Builder clearExampleTokenCount()

The number of tokens used for the retrieved examples in the prompt.

int64 example_token_count = 4;

Returns
Type Description
LlmCall.TokenCount.Builder This builder for chaining.

clearTotalInputTokenCount()

public LlmCall.TokenCount.Builder clearTotalInputTokenCount()

The total number of tokens used for the input to the LLM call.

int64 total_input_token_count = 1;

Returns
Type Description
LlmCall.TokenCount.Builder This builder for chaining.

clearTotalOutputTokenCount()

public LlmCall.TokenCount.Builder clearTotalOutputTokenCount()

The total number of tokens used for the output of the LLM call.

int64 total_output_token_count = 5;

Returns
Type Description
LlmCall.TokenCount.Builder This builder for chaining.
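
Each per-field clear*() resets only that field to 0 and returns the builder, so the calls chain; a sketch under the same newBuilder() assumption as above.

LlmCall.TokenCount.Builder builder = LlmCall.TokenCount.newBuilder()
    .setTotalInputTokenCount(300L)
    .setTotalOutputTokenCount(55L);

// Clear two fields in one chained expression; any other fields keep their values.
builder.clearTotalInputTokenCount()
       .clearTotalOutputTokenCount();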

getConversationContextTokenCount()

public long getConversationContextTokenCount()

The number of tokens used for the conversation history in the prompt.

int64 conversation_context_token_count = 3;

Returns
Type Description
long The conversationContextTokenCount.

getDefaultInstanceForType()

public LlmCall.TokenCount getDefaultInstanceForType()
Returns
Type Description
LlmCall.TokenCount

getDescriptorForType()

public Descriptors.Descriptor getDescriptorForType()
Returns
Type Description
Descriptor
Overrides

getExampleTokenCount()

public long getExampleTokenCount()

The number of tokens used for the retrieved examples in the prompt.

int64 example_token_count = 4;

Returns
Type Description
long The exampleTokenCount.

getTotalInputTokenCount()

public long getTotalInputTokenCount()

The total number of tokens used for the input to the LLM call.

int64 total_input_token_count = 1;

Returns
Type Description
long The totalInputTokenCount.

getTotalOutputTokenCount()

public long getTotalOutputTokenCount()

The total number of tokens used for the output of the LLM call.

int64 total_output_token_count = 5;

Returns
Type Description
long The totalOutputTokenCount.
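
The getters above come from the LlmCall.TokenCountOrBuilder interface, so the same reads work on the builder and on the built message; a brief sketch:

LlmCall.TokenCount counts = LlmCall.TokenCount.newBuilder()
    .setTotalInputTokenCount(900L)
    .setTotalOutputTokenCount(120L)
    .build();

long promptTokens = counts.getTotalInputTokenCount();   // 900
long outputTokens = counts.getTotalOutputTokenCount();  // 120
long exampleTokens = counts.getExampleTokenCount();     // 0 (never set)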

internalGetFieldAccessorTable()

protected GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()
Returns
Type Description
FieldAccessorTable
Overrides

isInitialized()

public final boolean isInitialized()
Returns
Type Description
boolean
Overrides

mergeFrom(LlmCall.TokenCount other)

public LlmCall.TokenCount.Builder mergeFrom(LlmCall.TokenCount other)
Parameter
Name Description
other LlmCall.TokenCount
Returns
Type Description
LlmCall.TokenCount.Builder
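
A sketch of merging one TokenCount into another, assuming standard protobuf merge semantics (non-default scalar fields in the argument overwrite the builder's values) and the generated toBuilder() copy method on the message:

LlmCall.TokenCount base = LlmCall.TokenCount.newBuilder()
    .setTotalInputTokenCount(500L)
    .build();
LlmCall.TokenCount overlay = LlmCall.TokenCount.newBuilder()
    .setTotalOutputTokenCount(80L)
    .build();

// Fields left at 0 in `overlay` do not clobber values already in the builder.
LlmCall.TokenCount merged = base.toBuilder().mergeFrom(overlay).build();
// merged.getTotalInputTokenCount() == 500, merged.getTotalOutputTokenCount() == 80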

mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)

public LlmCall.TokenCount.Builder mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
Parameters
Name Description
input CodedInputStream
extensionRegistry ExtensionRegistryLite
Returns
Type Description
LlmCall.TokenCount.Builder
Overrides
Exceptions
Type Description
IOException
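
A sketch of feeding serialized bytes through this overload; CodedInputStream.newInstance, ExtensionRegistryLite.getEmptyRegistry, and toByteArray are standard protobuf-java calls assumed here, and the IOException listed above must be handled.

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.ExtensionRegistryLite;
import java.io.IOException;

// Serialize a TokenCount so there is something to parse back.
byte[] wireBytes = LlmCall.TokenCount.newBuilder()
    .setTotalInputTokenCount(700L)
    .build()
    .toByteArray();

LlmCall.TokenCount.Builder builder = LlmCall.TokenCount.newBuilder();
try {
  builder.mergeFrom(
      CodedInputStream.newInstance(wireBytes),
      ExtensionRegistryLite.getEmptyRegistry());
} catch (IOException e) {
  // Malformed or truncated input ends up here.
}
LlmCall.TokenCount parsed = builder.build();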

mergeFrom(Message other)

public LlmCall.TokenCount.Builder mergeFrom(Message other)
Parameter
Name Description
other Message
Returns
Type Description
LlmCall.TokenCount.Builder
Overrides

setConversationContextTokenCount(long value)

public LlmCall.TokenCount.Builder setConversationContextTokenCount(long value)

The number of tokens used for the conversation history in the prompt.

int64 conversation_context_token_count = 3;

Parameter
Name Description
value long The conversationContextTokenCount to set.

Returns
Type Description
LlmCall.TokenCount.Builder This builder for chaining.

setExampleTokenCount(long value)

public LlmCall.TokenCount.Builder setExampleTokenCount(long value)

The number of tokens used for the retrieved examples in the prompt.

int64 example_token_count = 4;

Parameter
Name Description
value long The exampleTokenCount to set.

Returns
Type Description
LlmCall.TokenCount.Builder This builder for chaining.

setTotalInputTokenCount(long value)

public LlmCall.TokenCount.Builder setTotalInputTokenCount(long value)

The total number of tokens used for the input to the LLM call.

int64 total_input_token_count = 1;

Parameter
Name Description
value long The totalInputTokenCount to set.

Returns
Type Description
LlmCall.TokenCount.Builder This builder for chaining.

setTotalOutputTokenCount(long value)

public LlmCall.TokenCount.Builder setTotalOutputTokenCount(long value)

The total number of tokens used for the output of the LLM call.

int64 total_output_token_count = 5;

Parameter
Name Description
value long The totalOutputTokenCount to set.

Returns
Type Description
LlmCall.TokenCount.Builder This builder for chaining.
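
Because every setter returns the builder ("This builder for chaining"), updating an existing message is a copy-modify-rebuild, sketched here with the generated toBuilder() copy assumed as above:

LlmCall.TokenCount original = LlmCall.TokenCount.newBuilder()
    .setTotalInputTokenCount(900L)
    .build();

// Messages are immutable; copy to a builder, overwrite one field, rebuild.
LlmCall.TokenCount updated = original.toBuilder()
    .setExampleTokenCount(12L)
    .build();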