public static final class LlmCall.TokenCount.Builder extends GeneratedMessage.Builder<LlmCall.TokenCount.Builder> implements LlmCall.TokenCountOrBuilder

Stores token counts of the LLM call.

Protobuf type google.cloud.dialogflow.cx.v3beta1.LlmCall.TokenCount
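A minimal usage sketch, not taken from the reference itself: it assumes the standard generated factory method LlmCall.TokenCount.newBuilder() and the Java package com.google.cloud.dialogflow.cx.v3beta1, and the field values and class name are invented for illustration.

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmCall;

public class TokenCountBuilderExample {
  public static void main(String[] args) {
    // Obtain a builder from the generated factory method and populate the
    // documented int64 fields; each setter returns the builder for chaining.
    LlmCall.TokenCount tokenCount =
        LlmCall.TokenCount.newBuilder()
            .setTotalInputTokenCount(1200L)
            .setConversationContextTokenCount(800L)
            .setExampleTokenCount(150L)
            .setTotalOutputTokenCount(300L)
            .build();

    // Read the values back from the built message.
    System.out.println("input tokens:  " + tokenCount.getTotalInputTokenCount());
    System.out.println("output tokens: " + tokenCount.getTotalOutputTokenCount());
  }
}
```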
Inheritance

java.lang.Object > AbstractMessageLite.Builder<MessageType,BuilderType> > AbstractMessage.Builder<BuilderType> > GeneratedMessage.Builder > LlmCall.TokenCount.Builder

Implements

LlmCall.TokenCountOrBuilder

Static Methods
getDescriptor()
public static final Descriptors.Descriptor getDescriptor()

| Returns | |
|---|---|
| Type | Description |
| Descriptor | |
Methods
build()
public LlmCall.TokenCount build()

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount | |
buildPartial()
public LlmCall.TokenCount buildPartial()

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount | |
clear()
public LlmCall.TokenCount.Builder clear()

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | |
clearConversationContextTokenCount()
public LlmCall.TokenCount.Builder clearConversationContextTokenCount()

The number of tokens used for the conversation history in the prompt.

int64 conversation_context_token_count = 3;

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | This builder for chaining. |
clearExampleTokenCount()
public LlmCall.TokenCount.Builder clearExampleTokenCount()

The number of tokens used for the retrieved examples in the prompt.

int64 example_token_count = 4;

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | This builder for chaining. |
clearTotalInputTokenCount()
public LlmCall.TokenCount.Builder clearTotalInputTokenCount()

The total number of tokens used for the input to the LLM call.

int64 total_input_token_count = 1;

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | This builder for chaining. |
clearTotalOutputTokenCount()
public LlmCall.TokenCount.Builder clearTotalOutputTokenCount()

The total number of tokens used for the output of the LLM call.

int64 total_output_token_count = 5;

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | This builder for chaining. |
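An illustrative sketch of the clear* methods, with made-up values: since these fields are proto3 int64 scalars, clearing resets them to the default of 0, and each call returns the same builder so calls can be chained.

```java
// Sketch: reset individual counters on an existing builder.
LlmCall.TokenCount.Builder builder =
    LlmCall.TokenCount.newBuilder()
        .setConversationContextTokenCount(800L)
        .setExampleTokenCount(150L);

builder
    .clearConversationContextTokenCount()
    .clearExampleTokenCount();

// Cleared int64 fields fall back to the proto3 default of 0.
long context = builder.getConversationContextTokenCount(); // 0
long examples = builder.getExampleTokenCount();            // 0
```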
getConversationContextTokenCount()
public long getConversationContextTokenCount()

The number of tokens used for the conversation history in the prompt.

int64 conversation_context_token_count = 3;

| Returns | |
|---|---|
| Type | Description |
| long | The conversationContextTokenCount. |
getDefaultInstanceForType()
public LlmCall.TokenCount getDefaultInstanceForType()

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount | |
getDescriptorForType()
public Descriptors.Descriptor getDescriptorForType()

| Returns | |
|---|---|
| Type | Description |
| Descriptor | |
getExampleTokenCount()
public long getExampleTokenCount()

The number of tokens used for the retrieved examples in the prompt.

int64 example_token_count = 4;

| Returns | |
|---|---|
| Type | Description |
| long | The exampleTokenCount. |
getTotalInputTokenCount()
public long getTotalInputTokenCount()

The total number of tokens used for the input to the LLM call.

int64 total_input_token_count = 1;

| Returns | |
|---|---|
| Type | Description |
| long | The totalInputTokenCount. |
getTotalOutputTokenCount()
public long getTotalOutputTokenCount()

The total number of tokens used for the output of the LLM call.

int64 total_output_token_count = 5;

| Returns | |
|---|---|
| Type | Description |
| long | The totalOutputTokenCount. |
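The getters can also be read directly off the builder, not only off the built message. The sketch below, with made-up numbers, just aggregates the documented counters for a log line.

```java
// Sketch: inspect the builder's current state without building the message.
LlmCall.TokenCount.Builder builder =
    LlmCall.TokenCount.newBuilder()
        .setTotalInputTokenCount(1200L)
        .setTotalOutputTokenCount(300L);

long totalTokens =
    builder.getTotalInputTokenCount() + builder.getTotalOutputTokenCount();
System.out.println("LLM call used " + totalTokens + " tokens in total");
```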
internalGetFieldAccessorTable()
protected GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()

| Returns | |
|---|---|
| Type | Description |
| FieldAccessorTable | |
isInitialized()
public final boolean isInitialized()

| Returns | |
|---|---|
| Type | Description |
| boolean | |
mergeFrom(LlmCall.TokenCount other)
public LlmCall.TokenCount.Builder mergeFrom(LlmCall.TokenCount other)

| Parameter | |
|---|---|
| Name | Description |
| other | LlmCall.TokenCount |

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | |
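A hedged sketch of the message-level merge, using invented values: under standard protobuf merge semantics, fields set to a non-default value in other overwrite the corresponding fields on the builder.

```java
// Sketch: merge one TokenCount into a partially populated builder.
LlmCall.TokenCount other =
    LlmCall.TokenCount.newBuilder()
        .setTotalOutputTokenCount(300L)
        .build();

LlmCall.TokenCount merged =
    LlmCall.TokenCount.newBuilder()
        .setTotalInputTokenCount(1200L)
        .mergeFrom(other) // copies the non-default fields from `other`
        .build();
// `merged` now carries both the input and the output counts.
```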
mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
public LlmCall.TokenCount.Builder mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)

| Parameters | |
|---|---|
| Name | Description |
| input | CodedInputStream |
| extensionRegistry | ExtensionRegistryLite |

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | |

| Exceptions | |
|---|---|
| Type | Description |
| IOException | |
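A sketch of the stream overload: parsing previously serialized bytes back into a builder. CodedInputStream.newInstance and ExtensionRegistryLite.getEmptyRegistry() are standard protobuf runtime calls; the byte source here is invented for the example.

```java
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.ExtensionRegistryLite;
import java.io.IOException;

// Sketch: round-trip a TokenCount through its wire format.
LlmCall.TokenCount original =
    LlmCall.TokenCount.newBuilder().setTotalInputTokenCount(1200L).build();
byte[] wireBytes = original.toByteArray();

LlmCall.TokenCount.Builder builder = LlmCall.TokenCount.newBuilder();
try {
  builder.mergeFrom(
      CodedInputStream.newInstance(wireBytes),
      ExtensionRegistryLite.getEmptyRegistry());
} catch (IOException e) {
  // This overload declares IOException; handle or rethrow as appropriate.
  throw new RuntimeException(e);
}
```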
mergeFrom(Message other)
public LlmCall.TokenCount.Builder mergeFrom(Message other)

| Parameter | |
|---|---|
| Name | Description |
| other | Message |

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | |
setConversationContextTokenCount(long value)
public LlmCall.TokenCount.Builder setConversationContextTokenCount(long value)

The number of tokens used for the conversation history in the prompt.

int64 conversation_context_token_count = 3;

| Parameter | |
|---|---|
| Name | Description |
| value | long. The conversationContextTokenCount to set. |

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | This builder for chaining. |
setExampleTokenCount(long value)
public LlmCall.TokenCount.Builder setExampleTokenCount(long value)

The number of tokens used for the retrieved examples in the prompt.

int64 example_token_count = 4;

| Parameter | |
|---|---|
| Name | Description |
| value | long. The exampleTokenCount to set. |

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | This builder for chaining. |
setTotalInputTokenCount(long value)
public LlmCall.TokenCount.Builder setTotalInputTokenCount(long value)

The total number of tokens used for the input to the LLM call.

int64 total_input_token_count = 1;

| Parameter | |
|---|---|
| Name | Description |
| value | long. The totalInputTokenCount to set. |

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | This builder for chaining. |
setTotalOutputTokenCount(long value)
public LlmCall.TokenCount.Builder setTotalOutputTokenCount(long value)

The total number of tokens used for the output of the LLM call.

int64 total_output_token_count = 5;

| Parameter | |
|---|---|
| Name | Description |
| value | long. The totalOutputTokenCount to set. |

| Returns | |
|---|---|
| Type | Description |
| LlmCall.TokenCount.Builder | This builder for chaining. |