Linked Service Azure Databricks Args
data class LinkedServiceAzureDatabricksArgs(val accessToken: Output<String>? = null, val adbDomain: Output<String>? = null, val additionalProperties: Output<Map<String, String>>? = null, val annotations: Output<List<String>>? = null, val dataFactoryId: Output<String>? = null, val description: Output<String>? = null, val existingClusterId: Output<String>? = null, val instancePool: Output<LinkedServiceAzureDatabricksInstancePoolArgs>? = null, val integrationRuntimeName: Output<String>? = null, val keyVaultPassword: Output<LinkedServiceAzureDatabricksKeyVaultPasswordArgs>? = null, val msiWorkSpaceResourceId: Output<String>? = null, val name: Output<String>? = null, val newClusterConfig: Output<LinkedServiceAzureDatabricksNewClusterConfigArgs>? = null, val parameters: Output<Map<String, String>>? = null) : ConvertibleToJava<LinkedServiceAzureDatabricksArgs>
Manages a Linked Service (connection) between Azure Databricks and Azure Data Factory.
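Both usage examples below are written against the Java SDK's builder API. Purely as a hedged orientation, the Kotlin args class documented on this page can also be populated directly through the constructor listed under Constructors; the import path and all values in the following sketch are illustrative assumptions rather than content taken from this page.
// Minimal, hypothetical sketch: constructing the args class via the documented constructor.
import com.pulumi.core.Output
// Assumed package path for the class documented on this page:
import com.pulumi.azure.datafactory.kotlin.LinkedServiceAzureDatabricksArgs

// Placeholder IDs and domain; in a real program these would come from resource outputs.
val msiArgs = LinkedServiceAzureDatabricksArgs(
    dataFactoryId = Output.of("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example"),
    description = Output.of("ADB Linked Service via MSI"),
    adbDomain = Output.of("https://adb-1234567890123456.7.azuredatabricks.net"),
    msiWorkSpaceResourceId = Output.of("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.Databricks/workspaces/example")
)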
Example Usage
With Managed Identity & New Cluster
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.inputs.FactoryIdentityArgs;
import com.pulumi.azure.databricks.Workspace;
import com.pulumi.azure.databricks.WorkspaceArgs;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
import com.pulumi.azure.datafactory.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
            .location("East US")
            .build());

        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
            .location(exampleResourceGroup.location())
            .resourceGroupName(exampleResourceGroup.name())
            .identity(FactoryIdentityArgs.builder()
                .type("SystemAssigned")
                .build())
            .build());

        var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
            .resourceGroupName(exampleResourceGroup.name())
            .location(exampleResourceGroup.location())
            .sku("standard")
            .build());

        var msiLinked = new LinkedServiceAzureDatabricks("msiLinked", LinkedServiceAzureDatabricksArgs.builder()
            .dataFactoryId(exampleFactory.id())
            .description("ADB Linked Service via MSI")
            .adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
            .msiWorkSpaceResourceId(exampleWorkspace.id())
            .newClusterConfig(LinkedServiceAzureDatabricksNewClusterConfigArgs.builder()
                .nodeType("Standard_NC12")
                .clusterVersion("5.5.x-gpu-scala2.11")
                .minNumberOfWorkers(1)
                .maxNumberOfWorkers(5)
                .driverNodeType("Standard_NC12")
                .logDestination("dbfs:/logs")
                .customTags(Map.ofEntries(
                    Map.entry("custom_tag1", "sct_value_1"),
                    Map.entry("custom_tag2", "sct_value_2")
                ))
                .sparkConfig(Map.ofEntries(
                    Map.entry("config1", "value1"),
                    Map.entry("config2", "value2")
                ))
                .sparkEnvironmentVariables(Map.ofEntries(
                    Map.entry("envVar1", "value1"),
                    Map.entry("envVar2", "value2")
                ))
                .initScripts(
                    "init.sh",
                    "init2.sh")
                .build())
            .build());
    }
}
With Access Token & Existing Cluster
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.databricks.Workspace;
import com.pulumi.azure.databricks.WorkspaceArgs;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
            .location("East US")
            .build());

        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
            .location(exampleResourceGroup.location())
            .resourceGroupName(exampleResourceGroup.name())
            .build());

        var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
            .resourceGroupName(exampleResourceGroup.name())
            .location(exampleResourceGroup.location())
            .sku("standard")
            .build());

        var atLinked = new LinkedServiceAzureDatabricks("atLinked", LinkedServiceAzureDatabricksArgs.builder()
            .dataFactoryId(exampleFactory.id())
            .description("ADB Linked Service via Access Token")
            .existingClusterId("0308-201146-sly615")
            .accessToken("SomeDatabricksAccessToken")
            .adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
            .build());
    }
}
Import
Data Factory Linked Services can be imported using the resource id, e.g.
$ pulumi import azure:datafactory/linkedServiceAzureDatabricks:LinkedServiceAzureDatabricks example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/linkedservices/example
Constructors
fun LinkedServiceAzureDatabricksArgs(accessToken: Output<String>? = null, adbDomain: Output<String>? = null, additionalProperties: Output<Map<String, String>>? = null, annotations: Output<List<String>>? = null, dataFactoryId: Output<String>? = null, description: Output<String>? = null, existingClusterId: Output<String>? = null, instancePool: Output<LinkedServiceAzureDatabricksInstancePoolArgs>? = null, integrationRuntimeName: Output<String>? = null, keyVaultPassword: Output<LinkedServiceAzureDatabricksKeyVaultPasswordArgs>? = null, msiWorkSpaceResourceId: Output<String>? = null, name: Output<String>? = null, newClusterConfig: Output<LinkedServiceAzureDatabricksNewClusterConfigArgs>? = null, parameters: Output<Map<String, String>>? = null)
Properties
name
Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.