Azure Databricks Linked Service Args
data class AzureDatabricksLinkedServiceArgs(val accessToken: Output<Either<AzureKeyVaultSecretReferenceArgs, SecureStringArgs>>? = null, val annotations: Output<List<Any>>? = null, val authentication: Output<Any>? = null, val connectVia: Output<IntegrationRuntimeReferenceArgs>? = null, val credential: Output<CredentialReferenceArgs>? = null, val description: Output<String>? = null, val domain: Output<Any>, val encryptedCredential: Output<String>? = null, val existingClusterId: Output<Any>? = null, val instancePoolId: Output<Any>? = null, val newClusterCustomTags: Output<Map<String, Any>>? = null, val newClusterDriverNodeType: Output<Any>? = null, val newClusterEnableElasticDisk: Output<Any>? = null, val newClusterInitScripts: Output<Any>? = null, val newClusterLogDestination: Output<Any>? = null, val newClusterNodeType: Output<Any>? = null, val newClusterNumOfWorker: Output<Any>? = null, val newClusterSparkConf: Output<Map<String, Any>>? = null, val newClusterSparkEnvVars: Output<Map<String, Any>>? = null, val newClusterVersion: Output<Any>? = null, val parameters: Output<Map<String, ParameterSpecificationArgs>>? = null, val policyId: Output<Any>? = null, val type: Output<String>, val workspaceResourceId: Output<Any>? = null) : ConvertibleToJava<AzureDatabricksLinkedServiceArgs>
Azure Databricks linked service.
Constructors
fun AzureDatabricksLinkedServiceArgs(accessToken: Output<Either<AzureKeyVaultSecretReferenceArgs, SecureStringArgs>>? = null, annotations: Output<List<Any>>? = null, authentication: Output<Any>? = null, connectVia: Output<IntegrationRuntimeReferenceArgs>? = null, credential: Output<CredentialReferenceArgs>? = null, description: Output<String>? = null, domain: Output<Any>, encryptedCredential: Output<String>? = null, existingClusterId: Output<Any>? = null, instancePoolId: Output<Any>? = null, newClusterCustomTags: Output<Map<String, Any>>? = null, newClusterDriverNodeType: Output<Any>? = null, newClusterEnableElasticDisk: Output<Any>? = null, newClusterInitScripts: Output<Any>? = null, newClusterLogDestination: Output<Any>? = null, newClusterNodeType: Output<Any>? = null, newClusterNumOfWorker: Output<Any>? = null, newClusterSparkConf: Output<Map<String, Any>>? = null, newClusterSparkEnvVars: Output<Map<String, Any>>? = null, newClusterVersion: Output<Any>? = null, parameters: Output<Map<String, ParameterSpecificationArgs>>? = null, policyId: Output<Any>? = null, type: Output<String>, workspaceResourceId: Output<Any>? = null)
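The constructor can be called directly from a Pulumi Kotlin program. The following is a minimal sketch, not a definitive example: the import paths assume the pulumi-azure-native Kotlin SDK package layout (com.pulumi.azurenative.datafactory.kotlin.inputs), and the workspace URL, access-token placeholder, node type, and Databricks runtime version are hypothetical values chosen for illustration.
import com.pulumi.core.Either
import com.pulumi.core.Output
import com.pulumi.azurenative.datafactory.kotlin.inputs.AzureDatabricksLinkedServiceArgs
import com.pulumi.azurenative.datafactory.kotlin.inputs.SecureStringArgs

// Hypothetical workspace URL, token placeholder, node type, and runtime version.
fun databricksLinkedService(): AzureDatabricksLinkedServiceArgs =
    AzureDatabricksLinkedServiceArgs(
        type = Output.of("AzureDatabricks"),
        domain = Output.of("https://adb-1234567890123456.7.azuredatabricks.net"),
        // The access token is the right-hand side of the Either; an
        // AzureKeyVaultSecretReferenceArgs could be supplied via Either.ofLeft(...) instead.
        accessToken = Output.of(
            Either.ofRight(
                SecureStringArgs(
                    type = Output.of("SecureString"),
                    value = Output.of("<databricks-access-token>")
                )
            )
        ),
        // Settings for a new job cluster; newClusterNumOfWorker is described under Properties.
        newClusterVersion = Output.of("13.3.x-scala2.12"),
        newClusterNodeType = Output.of("Standard_DS3_v2"),
        newClusterNumOfWorker = Output.of("1:10")
    )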
Functions
Properties
newClusterNumOfWorker
If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this is a string-formatted Int32: '1' means numOfWorker is 1, and '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32 and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string).
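To make the accepted formats concrete, here is a small hypothetical sketch; the worker counts are illustrative only, and the values are typed as Output<Any> to match the parameter type shown in the constructor above.
import com.pulumi.core.Output

// Illustrative newClusterNumOfWorker values:
val fixedWorkers: Output<Any> = Output.of("4")        // new job cluster with exactly 4 worker nodes
val autoScaleWorkers: Output<Any> = Output.of("1:10") // new job cluster auto-scaling from 1 (min) to 10 (max)
val poolWorkers: Output<Any> = Output.of("2")         // instance pool: only a fixed worker count is allowed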