public static final class LlmModelSettings.Parameters extends GeneratedMessage implements LlmModelSettings.ParametersOrBuilder
Generative model parameters that control the model's behavior.
Protobuf type google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters
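A minimal construction sketch using the builder documented below; setTemperature(float) on the generated Builder is assumed from the standard protobuf codegen for the optional float field and is not itself listed on this page.

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmModelSettings;

public class ParametersExample {
  public static void main(String[] args) {
    // Build a Parameters message with a low sampling temperature (valid range [0.0, 1.0]).
    LlmModelSettings.Parameters parameters =
        LlmModelSettings.Parameters.newBuilder()
            .setTemperature(0.2f) // assumed generated setter for the temperature field
            .build();

    System.out.println(parameters);
  }
}
```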
Inherited Members
com.google.protobuf.GeneratedMessage.<ContainingT,T>newFileScopedGeneratedExtension(java.lang.Class<?>,com.google.protobuf.Message)
com.google.protobuf.GeneratedMessage.<ContainingT,T>newMessageScopedGeneratedExtension(com.google.protobuf.Message,int,java.lang.Class<?>,com.google.protobuf.Message)
com.google.protobuf.GeneratedMessage.<ListT>makeMutableCopy(ListT)
com.google.protobuf.GeneratedMessage.<ListT>makeMutableCopy(ListT,int)
com.google.protobuf.GeneratedMessage.<T>emptyList(java.lang.Class<T>)
com.google.protobuf.GeneratedMessage.<V>serializeBooleanMapTo(com.google.protobuf.CodedOutputStream,com.google.protobuf.MapField<java.lang.Boolean,V>,com.google.protobuf.MapEntry<java.lang.Boolean,V>,int)
com.google.protobuf.GeneratedMessage.<V>serializeIntegerMapTo(com.google.protobuf.CodedOutputStream,com.google.protobuf.MapField<java.lang.Integer,V>,com.google.protobuf.MapEntry<java.lang.Integer,V>,int)
com.google.protobuf.GeneratedMessage.<V>serializeLongMapTo(com.google.protobuf.CodedOutputStream,com.google.protobuf.MapField<java.lang.Long,V>,com.google.protobuf.MapEntry<java.lang.Long,V>,int)
com.google.protobuf.GeneratedMessage.<V>serializeStringMapTo(com.google.protobuf.CodedOutputStream,com.google.protobuf.MapField<java.lang.String,V>,com.google.protobuf.MapEntry<java.lang.String,V>,int)
com.google.protobuf.GeneratedMessage.canUseUnsafe()
com.google.protobuf.GeneratedMessage.emptyBooleanList()
com.google.protobuf.GeneratedMessage.emptyDoubleList()
com.google.protobuf.GeneratedMessage.emptyFloatList()
com.google.protobuf.GeneratedMessage.emptyIntList()
com.google.protobuf.GeneratedMessage.emptyLongList()
com.google.protobuf.GeneratedMessage.internalGetMapFieldReflection(int)
com.google.protobuf.GeneratedMessage.isStringEmpty(java.lang.Object)
com.google.protobuf.GeneratedMessage.mergeFromAndMakeImmutableInternal(com.google.protobuf.CodedInputStream,com.google.protobuf.ExtensionRegistryLite)
com.google.protobuf.GeneratedMessage.newInstance(com.google.protobuf.GeneratedMessage.UnusedPrivateParameter)
com.google.protobuf.GeneratedMessage.parseUnknownFieldProto3(com.google.protobuf.CodedInputStream,com.google.protobuf.UnknownFieldSet.Builder,com.google.protobuf.ExtensionRegistryLite,int)
Static Fields
INPUT_TOKEN_LIMIT_FIELD_NUMBER
public static final int INPUT_TOKEN_LIMIT_FIELD_NUMBER
Field Value
| Type | Description |
| --- | --- |
| int | |
OUTPUT_TOKEN_LIMIT_FIELD_NUMBER
public static final int OUTPUT_TOKEN_LIMIT_FIELD_NUMBER
Field Value
| Type | Description |
| --- | --- |
| int | |
TEMPERATURE_FIELD_NUMBER
public static final int TEMPERATURE_FIELD_NUMBER
Field Value
| Type | Description |
| --- | --- |
| int | |
Static Methods
getDefaultInstance()
public static LlmModelSettings.Parameters getDefaultInstance()
getDescriptor()
public static final Descriptors.Descriptor getDescriptor()
newBuilder()
public static LlmModelSettings.Parameters.Builder newBuilder()
newBuilder(LlmModelSettings.Parameters prototype)
public static LlmModelSettings.Parameters.Builder newBuilder(LlmModelSettings.Parameters prototype)
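newBuilder(prototype) seeds the new builder with the field values of an existing message, which is convenient for deriving a variant configuration. A short sketch (setTemperature is again the assumed generated setter):

```java
// Copy an existing configuration and change only the temperature.
LlmModelSettings.Parameters base =
    LlmModelSettings.Parameters.newBuilder().setTemperature(0.2f).build();

LlmModelSettings.Parameters variant =
    LlmModelSettings.Parameters.newBuilder(base)
        .setTemperature(0.9f)
        .build();
```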
parseDelimitedFrom(InputStream input)
public static LlmModelSettings.Parameters parseDelimitedFrom(InputStream input)
parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static LlmModelSettings.Parameters parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
parseFrom(byte[] data)
public static LlmModelSettings.Parameters parseFrom(byte[] data)
Parameter
| Name | Description |
| --- | --- |
| data | byte[] |
parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
public static LlmModelSettings.Parameters parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
parseFrom(ByteString data)
public static LlmModelSettings.Parameters parseFrom(ByteString data)
parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
public static LlmModelSettings.Parameters parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
parseFrom(CodedInputStream input)
public static LlmModelSettings.Parameters parseFrom(CodedInputStream input)
parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
public static LlmModelSettings.Parameters parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
parseFrom(InputStream input)
public static LlmModelSettings.Parameters parseFrom(InputStream input)
parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
public static LlmModelSettings.Parameters parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
parseFrom(ByteBuffer data)
public static LlmModelSettings.Parameters parseFrom(ByteBuffer data)
parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
public static LlmModelSettings.Parameters parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry)
parser()
public static Parser<LlmModelSettings.Parameters> parser()
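The parseFrom and parseDelimitedFrom overloads above deserialize a Parameters message from byte arrays, ByteStrings, ByteBuffers, or streams. A round-trip sketch for the byte[] overload (toByteArray is inherited from the protobuf message base class; setTemperature is the assumed generated setter):

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmModelSettings;
import com.google.protobuf.InvalidProtocolBufferException;

public class ParseParametersExample {
  public static void main(String[] args) {
    byte[] data = LlmModelSettings.Parameters.newBuilder()
        .setTemperature(0.5f)
        .build()
        .toByteArray();

    try {
      LlmModelSettings.Parameters parsed = LlmModelSettings.Parameters.parseFrom(data);
      System.out.println(parsed.getTemperature());
    } catch (InvalidProtocolBufferException e) {
      // The bytes did not encode a valid Parameters message.
      throw new IllegalArgumentException("Could not parse Parameters", e);
    }
  }
}
```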
Methods
equals(Object obj)
public boolean equals(Object obj)
Parameter
| Name | Description |
| --- | --- |
| obj | Object |
Overrides
getDefaultInstanceForType()
public LlmModelSettings.Parameters getDefaultInstanceForType()
getInputTokenLimit()
public LlmModelSettings.Parameters.InputTokenLimit getInputTokenLimit()
The input token limit.
This setting is currently only supported by playbooks.
optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;
getInputTokenLimitValue()
public int getInputTokenLimitValue()
The input token limit.
This setting is currently only supported by playbooks.
optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;
Returns
| Type | Description |
| --- | --- |
| int | The enum numeric value on the wire for inputTokenLimit. |
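Like other generated enum fields, input_token_limit has two readers: getInputTokenLimit() returns the typed enum (unknown wire values surface as UNRECOGNIZED in proto3 generated enums), while getInputTokenLimitValue() returns the raw integer exactly as encoded on the wire. A brief sketch:

```java
LlmModelSettings.Parameters parameters = LlmModelSettings.Parameters.getDefaultInstance();

// Typed accessor: the generated InputTokenLimit enum.
LlmModelSettings.Parameters.InputTokenLimit limit = parameters.getInputTokenLimit();

// Wire accessor: the numeric value, useful when the enum constant is not known to this client.
int limitValue = parameters.getInputTokenLimitValue();
```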
getOutputTokenLimit()
public LlmModelSettings.Parameters.OutputTokenLimit getOutputTokenLimit()
The output token limit.
This setting is currently only supported by playbooks.
Only one of output_token_limit and max_output_tokens is allowed to be
set.
optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;
getOutputTokenLimitValue()
public int getOutputTokenLimitValue()
The output token limit.
This setting is currently only supported by playbooks.
Only one of output_token_limit and max_output_tokens is allowed to be
set.
optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;
Returns
| Type | Description |
| --- | --- |
| int | The enum numeric value on the wire for outputTokenLimit. |
getParserForType()
public Parser<LlmModelSettings.Parameters> getParserForType()
Overrides
getSerializedSize()
public int getSerializedSize()
Returns
| Type | Description |
| --- | --- |
| int | |
Overrides
getTemperature()
public float getTemperature()
The temperature used for sampling during response generation. Value
ranges from 0 to 1. Temperature controls the degree of randomness in
token selection. Lower temperature means less randomness, while higher
temperature means more randomness.
Valid range: [0.0, 1.0]
optional float temperature = 1;
Returns
| Type | Description |
| --- | --- |
| float | The temperature. |
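Because the field is declared optional, getTemperature() returns the proto default (0.0f) when the field was never set; hasTemperature(), documented further down, distinguishes an explicit 0.0 from an unset field. A sketch (setTemperature is the assumed generated setter):

```java
LlmModelSettings.Parameters parameters =
    LlmModelSettings.Parameters.newBuilder().setTemperature(0.0f).build();

// hasTemperature() is true here even though the value equals the proto default.
if (parameters.hasTemperature()) {
  System.out.println("temperature = " + parameters.getTemperature());
}
```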
hasInputTokenLimit()
public boolean hasInputTokenLimit()
The input token limit.
This setting is currently only supported by playbooks.
optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.InputTokenLimit input_token_limit = 2;
Returns
| Type | Description |
| --- | --- |
| boolean | Whether the inputTokenLimit field is set. |
hasOutputTokenLimit()
public boolean hasOutputTokenLimit()
The output token limit.
This setting is currently only supported by playbooks.
Only one of output_token_limit and max_output_tokens is allowed to be
set.
optional .google.cloud.dialogflow.cx.v3beta1.LlmModelSettings.Parameters.OutputTokenLimit output_token_limit = 3;
Returns
| Type | Description |
| --- | --- |
| boolean | Whether the outputTokenLimit field is set. |
hasTemperature()
public boolean hasTemperature()
The temperature used for sampling during response generation. Value
ranges from 0 to 1. Temperature controls the degree of randomness in
token selection. Lower temperature means less randomness, while higher
temperature means more randomness.
Valid range: [0.0, 1.0]
optional float temperature = 1;
Returns
| Type | Description |
| --- | --- |
| boolean | Whether the temperature field is set. |
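The hasX accessors make it straightforward to fall back to application-level defaults when a field was never configured; the fallback temperature below is hypothetical:

```java
LlmModelSettings.Parameters parameters = LlmModelSettings.Parameters.getDefaultInstance();

float temperature =
    parameters.hasTemperature() ? parameters.getTemperature() : 0.7f; // hypothetical default
boolean limitsConfigured =
    parameters.hasInputTokenLimit() && parameters.hasOutputTokenLimit();
```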
hashCode()
public int hashCode()
Returns
| Type | Description |
| --- | --- |
| int | |
Overrides
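Generated messages compare by field value, so equals and hashCode are consistent for messages built from the same settings; a small sketch (setTemperature is the assumed generated setter):

```java
LlmModelSettings.Parameters a =
    LlmModelSettings.Parameters.newBuilder().setTemperature(0.3f).build();
LlmModelSettings.Parameters b =
    LlmModelSettings.Parameters.newBuilder().setTemperature(0.3f).build();

System.out.println(a.equals(b));                  // true: same field values
System.out.println(a.hashCode() == b.hashCode()); // true for equal messages
```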
internalGetFieldAccessorTable()
protected GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()
Overrides
isInitialized()
public final boolean isInitialized()
Overrides
newBuilderForType()
public LlmModelSettings.Parameters.Builder newBuilderForType()
newBuilderForType(AbstractMessage.BuilderParent parent)
protected LlmModelSettings.Parameters.Builder newBuilderForType(AbstractMessage.BuilderParent parent)
Overrides
toBuilder()
public LlmModelSettings.Parameters.Builder toBuilder()
writeTo(CodedOutputStream output)
public void writeTo(CodedOutputStream output)
Overrides
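A serialization sketch using writeTo(CodedOutputStream); the stream buffers internally, so it must be flushed before the bytes are read back and parsed (setTemperature is the assumed generated setter):

```java
import com.google.cloud.dialogflow.cx.v3beta1.LlmModelSettings;
import com.google.protobuf.CodedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class WriteParametersExample {
  public static void main(String[] args) throws IOException {
    LlmModelSettings.Parameters parameters =
        LlmModelSettings.Parameters.newBuilder().setTemperature(0.4f).build();

    // writeTo serializes onto a CodedOutputStream; flush before using the buffered bytes.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    CodedOutputStream output = CodedOutputStream.newInstance(bytes);
    parameters.writeTo(output);
    output.flush();

    // Parsing the bytes back yields an equal message.
    LlmModelSettings.Parameters decoded =
        LlmModelSettings.Parameters.parseFrom(bytes.toByteArray());
    System.out.println(parameters.equals(decoded)); // true
  }
}
```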