Flow Prompt Model Inference Configuration Args
data class FlowPromptModelInferenceConfigurationArgs(
    val maxTokens: Output<Double>? = null,
    val stopSequences: Output<List<String>>? = null,
    val temperature: Output<Double>? = null,
    val topP: Output<Double>? = null
) : ConvertibleToJava<FlowPromptModelInferenceConfigurationArgs>
Prompt model inference configuration. All fields are optional and correspond to common model sampling controls: maxTokens caps the number of tokens the model may generate, stopSequences lists strings that end generation when produced, temperature scales sampling randomness, and topP sets the nucleus-sampling probability cutoff.
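A minimal construction sketch follows. It assumes the generated Kotlin SDK exposes this class as shown above and wraps values with com.pulumi.core.Output.of; the import path for the args class and the concrete values are illustrative and should be adjusted to your provider package.

    import com.pulumi.core.Output
    // Adjust this import to the Bedrock flow module of your generated SDK; the
    // exact package path is an assumption here.
    // import com.pulumi.awsnative.bedrock.kotlin.inputs.FlowPromptModelInferenceConfigurationArgs

    // Hypothetical values chosen only to illustrate each field.
    val inferenceConfig = FlowPromptModelInferenceConfigurationArgs(
        maxTokens = Output.of(512.0),                     // cap on generated tokens
        stopSequences = Output.of(listOf("END")),         // stop generation at these strings
        temperature = Output.of(0.7),                     // higher values increase randomness
        topP = Output.of(0.9)                             // nucleus-sampling probability cutoff
    )

Because every parameter defaults to null, you can supply only the controls you need (for example, just temperature) and let the service defaults apply to the rest.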