Prompt Model Inference Configuration Args
data class PromptModelInferenceConfigurationArgs(
    val maxTokens: Output<Double>? = null,
    val stopSequences: Output<List<String>>? = null,
    val temperature: Output<Double>? = null,
    val topP: Output<Double>? = null,
) : ConvertibleToJava<PromptModelInferenceConfigurationArgs>
Inference configuration for a prompt's model: the maximum number of tokens to generate (maxTokens), sequences at which generation stops (stopSequences), the sampling temperature (temperature), and the nucleus-sampling cutoff (topP). All fields are optional and wrapped in Output, so they can be supplied as plain values or derived from other resources.
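A minimal construction sketch, assuming the class is imported from the generated Pulumi AWS Bedrock inputs package and using Output.of from com.pulumi.core.Output; the numeric values and stop sequences are illustrative only.

import com.pulumi.core.Output
// Import of PromptModelInferenceConfigurationArgs from the generated
// Pulumi AWS Bedrock package is assumed.

// Illustrative inference settings; parameter names match the signature above.
val inferenceConfig = PromptModelInferenceConfigurationArgs(
    maxTokens = Output.of(512.0),                       // cap on generated tokens
    stopSequences = Output.of(listOf("Human:")),        // stop generation at this string
    temperature = Output.of(0.7),                       // sampling randomness
    topP = Output.of(0.9),                              // nucleus-sampling cutoff
)

Because every parameter defaults to null, only the settings you want to override need to be passed; omitted fields fall back to the model's defaults.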