Spark Cluster Args
/**
 * Argument bundle for provisioning an HDInsight Spark Cluster.
 *
 * Every property is an optional [Output] wrapper, so each value may be given
 * directly or derived from another resource's output at deployment time; any
 * property left as `null` falls back to the provider default.
 */
data class SparkClusterArgs(
    val clusterVersion: Output<String>? = null,
    val componentVersion: Output<SparkClusterComponentVersionArgs>? = null,
    val computeIsolation: Output<SparkClusterComputeIsolationArgs>? = null,
    val diskEncryptions: Output<List<SparkClusterDiskEncryptionArgs>>? = null,
    val encryptionInTransitEnabled: Output<Boolean>? = null,
    val extension: Output<SparkClusterExtensionArgs>? = null,
    val gateway: Output<SparkClusterGatewayArgs>? = null,
    val location: Output<String>? = null,
    val metastores: Output<SparkClusterMetastoresArgs>? = null,
    val monitor: Output<SparkClusterMonitorArgs>? = null,
    val name: Output<String>? = null,
    val network: Output<SparkClusterNetworkArgs>? = null,
    val resourceGroupName: Output<String>? = null,
    val roles: Output<SparkClusterRolesArgs>? = null,
    val securityProfile: Output<SparkClusterSecurityProfileArgs>? = null,
    val storageAccountGen2: Output<SparkClusterStorageAccountGen2Args>? = null,
    val storageAccounts: Output<List<SparkClusterStorageAccountArgs>>? = null,
    val tags: Output<Map<String, String>>? = null,
    val tier: Output<String>? = null,
    val tlsMinVersion: Output<String>? = null,
) : ConvertibleToJava<SparkClusterArgs>
Manages an HDInsight Spark Cluster.
Example Usage
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.storage.Account;
import com.pulumi.azure.storage.AccountArgs;
import com.pulumi.azure.storage.Container;
import com.pulumi.azure.storage.ContainerArgs;
import com.pulumi.azure.hdinsight.SparkCluster;
import com.pulumi.azure.hdinsight.SparkClusterArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterComponentVersionArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterGatewayArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterStorageAccountArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesHeadNodeArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesWorkerNodeArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesZookeeperNodeArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
/**
 * Example program: provisions an HDInsight Spark Cluster together with the
 * resource group, storage account, and blob container it depends on.
 */
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Resource group that will contain every resource in this example.
        var resourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
            .location("West Europe")
            .build());

        // Storage account backing the cluster's default filesystem.
        var storageAccount = new Account("exampleAccount", AccountArgs.builder()
            .resourceGroupName(resourceGroup.name())
            .location(resourceGroup.location())
            .accountTier("Standard")
            .accountReplicationType("LRS")
            .build());

        // Private blob container the cluster stores its data in.
        var storageContainer = new Container("exampleContainer", ContainerArgs.builder()
            .storageAccountName(storageAccount.name())
            .containerAccessType("private")
            .build());

        // The Spark cluster itself: gateway credentials, storage wiring,
        // and the head/worker/zookeeper node roles.
        var sparkCluster = new SparkCluster("exampleSparkCluster", SparkClusterArgs.builder()
            .resourceGroupName(resourceGroup.name())
            .location(resourceGroup.location())
            .clusterVersion("3.6")
            .tier("Standard")
            .componentVersion(SparkClusterComponentVersionArgs.builder()
                .spark("2.3")
                .build())
            .gateway(SparkClusterGatewayArgs.builder()
                .username("acctestusrgw")
                .password("Password123!")
                .build())
            .storageAccounts(SparkClusterStorageAccountArgs.builder()
                .storageContainerId(storageContainer.id())
                .storageAccountKey(storageAccount.primaryAccessKey())
                .isDefault(true)
                .build())
            .roles(SparkClusterRolesArgs.builder()
                .headNode(SparkClusterRolesHeadNodeArgs.builder()
                    .vmSize("Standard_A3")
                    .username("acctestusrvm")
                    .password("AccTestvdSC4daf986!")
                    .build())
                .workerNode(SparkClusterRolesWorkerNodeArgs.builder()
                    .vmSize("Standard_A3")
                    .username("acctestusrvm")
                    .password("AccTestvdSC4daf986!")
                    .targetInstanceCount(3)
                    .build())
                .zookeeperNode(SparkClusterRolesZookeeperNodeArgs.builder()
                    .vmSize("Medium")
                    .username("acctestusrvm")
                    .password("AccTestvdSC4daf986!")
                    .build())
                .build())
            .build());
    }
}
Content copied to clipboard
Import
HDInsight Spark Clusters can be imported using the resource id, e.g.
$ pulumi import azure:hdinsight/sparkCluster:SparkCluster example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.HDInsight/clusters/cluster1
Content copied to clipboard
Constructors
Link copied to clipboard
fun SparkClusterArgs(clusterVersion: Output<String>? = null, componentVersion: Output<SparkClusterComponentVersionArgs>? = null, computeIsolation: Output<SparkClusterComputeIsolationArgs>? = null, diskEncryptions: Output<List<SparkClusterDiskEncryptionArgs>>? = null, encryptionInTransitEnabled: Output<Boolean>? = null, extension: Output<SparkClusterExtensionArgs>? = null, gateway: Output<SparkClusterGatewayArgs>? = null, location: Output<String>? = null, metastores: Output<SparkClusterMetastoresArgs>? = null, monitor: Output<SparkClusterMonitorArgs>? = null, name: Output<String>? = null, network: Output<SparkClusterNetworkArgs>? = null, resourceGroupName: Output<String>? = null, roles: Output<SparkClusterRolesArgs>? = null, securityProfile: Output<SparkClusterSecurityProfileArgs>? = null, storageAccountGen2: Output<SparkClusterStorageAccountGen2Args>? = null, storageAccounts: Output<List<SparkClusterStorageAccountArgs>>? = null, tags: Output<Map<String, String>>? = null, tier: Output<String>? = null, tlsMinVersion: Output<String>? = null)
Functions
Properties
Link copied to clipboard
Link copied to clipboard
Link copied to clipboard
Link copied to clipboard
Link copied to clipboard
Link copied to clipboard