Spark Pool
Manages a Synapse Spark Pool within an Azure Synapse Workspace.
Example Usage
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.storage.Account;
import com.pulumi.azure.storage.AccountArgs;
import com.pulumi.azure.storage.DataLakeGen2Filesystem;
import com.pulumi.azure.storage.DataLakeGen2FilesystemArgs;
import com.pulumi.azure.synapse.Workspace;
import com.pulumi.azure.synapse.WorkspaceArgs;
import com.pulumi.azure.synapse.inputs.WorkspaceIdentityArgs;
import com.pulumi.azure.synapse.SparkPool;
import com.pulumi.azure.synapse.SparkPoolArgs;
import com.pulumi.azure.synapse.inputs.SparkPoolAutoScaleArgs;
import com.pulumi.azure.synapse.inputs.SparkPoolAutoPauseArgs;
import com.pulumi.azure.synapse.inputs.SparkPoolLibraryRequirementArgs;
import com.pulumi.azure.synapse.inputs.SparkPoolSparkConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
/**
 * Example Pulumi program that provisions an Azure Synapse Spark Pool
 * together with its prerequisites: a resource group, a StorageV2 account
 * with hierarchical namespace (Data Lake Gen2) enabled, a Data Lake Gen2
 * filesystem, and a Synapse workspace.
 */
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    /**
     * Defines the stack's resources.
     *
     * @param ctx the Pulumi deployment context supplied by {@link Pulumi#run}
     */
    public static void stack(Context ctx) {
        var example = new ResourceGroup("example", ResourceGroupArgs.builder()
            .name("example-resources")
            .location("West Europe")
            .build());

        // Synapse requires a StorageV2 account with the hierarchical
        // namespace (HNS) enabled so it can host a Data Lake Gen2 filesystem.
        var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
            .name("examplestorageacc")
            .resourceGroupName(example.name())
            .location(example.location())
            .accountTier("Standard")
            .accountReplicationType("LRS")
            .accountKind("StorageV2")
            // Fix: the builder expects a Boolean, not the String "true".
            .isHnsEnabled(true)
            .build());

        var exampleDataLakeGen2Filesystem = new DataLakeGen2Filesystem("exampleDataLakeGen2Filesystem", DataLakeGen2FilesystemArgs.builder()
            .name("example")
            .storageAccountId(exampleAccount.id())
            .build());

        // The workspace uses a system-assigned managed identity to access
        // the Data Lake Gen2 filesystem declared above.
        var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
            .name("example")
            .resourceGroupName(example.name())
            .location(example.location())
            .storageDataLakeGen2FilesystemId(exampleDataLakeGen2Filesystem.id())
            .sqlAdministratorLogin("sqladminuser")
            .sqlAdministratorLoginPassword("H@Sh1CoR3!")
            .identity(WorkspaceIdentityArgs.builder()
                .type("SystemAssigned")
                .build())
            .build());

        // Spark pool with autoscaling (3-50 nodes), auto-pause after 15
        // minutes idle, a pip-style library requirement file, and a custom
        // Spark configuration file.
        var exampleSparkPool = new SparkPool("exampleSparkPool", SparkPoolArgs.builder()
            .name("example")
            .synapseWorkspaceId(exampleWorkspace.id())
            .nodeSizeFamily("MemoryOptimized")
            .nodeSize("Small")
            .cacheSize(100)
            .autoScale(SparkPoolAutoScaleArgs.builder()
                .maxNodeCount(50)
                .minNodeCount(3)
                .build())
            .autoPause(SparkPoolAutoPauseArgs.builder()
                .delayInMinutes(15)
                .build())
            .libraryRequirement(SparkPoolLibraryRequirementArgs.builder()
                .content("""
appnope==0.1.0
beautifulsoup4==4.6.3
""")
                .filename("requirements.txt")
                .build())
            .sparkConfig(SparkPoolSparkConfigArgs.builder()
                .content("""
spark.shuffle.spill true
""")
                .filename("config.txt")
                .build())
            .tags(Map.of("ENV", "Production"))
            .build());
    }
}
Content copied to clipboard
Import
Synapse Spark Pool can be imported using the resource id, e.g.
$ pulumi import azure:synapse/sparkPool:SparkPool example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Synapse/workspaces/workspace1/bigDataPools/sparkPool1
Content copied to clipboard
Properties
Link copied to clipboard
Link copied to clipboard
Link copied to clipboard