WorkflowTemplateArgs

data class WorkflowTemplateArgs(val dagTimeout: Output<String>? = null, val jobs: Output<List<WorkflowTemplateJobArgs>>? = null, val labels: Output<Map<String, String>>? = null, val location: Output<String>? = null, val name: Output<String>? = null, val parameters: Output<List<WorkflowTemplateParameterArgs>>? = null, val placement: Output<WorkflowTemplatePlacementArgs>? = null, val project: Output<String>? = null, val version: Output<Int>? = null) : ConvertibleToJava<WorkflowTemplateArgs>

A Workflow Template is a reusable workflow configuration. It defines a graph of jobs with information on where to run those jobs.

Example Usage

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const template = new gcp.dataproc.WorkflowTemplate("template", {
    name: "template-example",
    location: "us-central1",
    placement: {
        managedCluster: {
            clusterName: "my-cluster",
            config: {
                gceClusterConfig: {
                    zone: "us-central1-a",
                    tags: [
                        "foo",
                        "bar",
                    ],
                },
                masterConfig: {
                    numInstances: 1,
                    machineType: "n1-standard-1",
                    diskConfig: {
                        bootDiskType: "pd-ssd",
                        bootDiskSizeGb: 15,
                    },
                },
                workerConfig: {
                    numInstances: 3,
                    machineType: "n1-standard-2",
                    diskConfig: {
                        bootDiskSizeGb: 10,
                        numLocalSsds: 2,
                    },
                },
                secondaryWorkerConfig: {
                    numInstances: 2,
                },
                softwareConfig: {
                    imageVersion: "2.0.35-debian10",
                },
            },
        },
    },
    jobs: [
        {
            stepId: "someJob",
            sparkJob: {
                mainClass: "SomeClass",
            },
        },
        {
            stepId: "otherJob",
            prerequisiteStepIds: ["someJob"],
            prestoJob: {
                queryFileUri: "someuri",
            },
        },
    ],
});

Python

import pulumi
import pulumi_gcp as gcp

template = gcp.dataproc.WorkflowTemplate("template",
    name="template-example",
    location="us-central1",
    placement={
        "managed_cluster": {
            "cluster_name": "my-cluster",
            "config": {
                "gce_cluster_config": {
                    "zone": "us-central1-a",
                    "tags": [
                        "foo",
                        "bar",
                    ],
                },
                "master_config": {
                    "num_instances": 1,
                    "machine_type": "n1-standard-1",
                    "disk_config": {
                        "boot_disk_type": "pd-ssd",
                        "boot_disk_size_gb": 15,
                    },
                },
                "worker_config": {
                    "num_instances": 3,
                    "machine_type": "n1-standard-2",
                    "disk_config": {
                        "boot_disk_size_gb": 10,
                        "num_local_ssds": 2,
                    },
                },
                "secondary_worker_config": {
                    "num_instances": 2,
                },
                "software_config": {
                    "image_version": "2.0.35-debian10",
                },
            },
        },
    },
    jobs=[
        {
            "step_id": "someJob",
            "spark_job": {
                "main_class": "SomeClass",
            },
        },
        {
            "step_id": "otherJob",
            "prerequisite_step_ids": ["someJob"],
            "presto_job": {
                "query_file_uri": "someuri",
            },
        },
    ])

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() =>
{
    var template = new Gcp.Dataproc.WorkflowTemplate("template", new()
    {
        Name = "template-example",
        Location = "us-central1",
        Placement = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementArgs
        {
            ManagedCluster = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterArgs
            {
                ClusterName = "my-cluster",
                Config = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigArgs
                {
                    GceClusterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs
                    {
                        Zone = "us-central1-a",
                        Tags = new[]
                        {
                            "foo",
                            "bar",
                        },
                    },
                    MasterConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs
                    {
                        NumInstances = 1,
                        MachineType = "n1-standard-1",
                        DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs
                        {
                            BootDiskType = "pd-ssd",
                            BootDiskSizeGb = 15,
                        },
                    },
                    WorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs
                    {
                        NumInstances = 3,
                        MachineType = "n1-standard-2",
                        DiskConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs
                        {
                            BootDiskSizeGb = 10,
                            NumLocalSsds = 2,
                        },
                    },
                    SecondaryWorkerConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs
                    {
                        NumInstances = 2,
                    },
                    SoftwareConfig = new Gcp.Dataproc.Inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs
                    {
                        ImageVersion = "2.0.35-debian10",
                    },
                },
            },
        },
        Jobs = new[]
        {
            new Gcp.Dataproc.Inputs.WorkflowTemplateJobArgs
            {
                StepId = "someJob",
                SparkJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobSparkJobArgs
                {
                    MainClass = "SomeClass",
                },
            },
            new Gcp.Dataproc.Inputs.WorkflowTemplateJobArgs
            {
                StepId = "otherJob",
                PrerequisiteStepIds = new[]
                {
                    "someJob",
                },
                PrestoJob = new Gcp.Dataproc.Inputs.WorkflowTemplateJobPrestoJobArgs
                {
                    QueryFileUri = "someuri",
                },
            },
        },
    });
});

Go

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataproc.NewWorkflowTemplate(ctx, "template", &dataproc.WorkflowTemplateArgs{
			Name:     pulumi.String("template-example"),
			Location: pulumi.String("us-central1"),
			Placement: &dataproc.WorkflowTemplatePlacementArgs{
				ManagedCluster: &dataproc.WorkflowTemplatePlacementManagedClusterArgs{
					ClusterName: pulumi.String("my-cluster"),
					Config: &dataproc.WorkflowTemplatePlacementManagedClusterConfigArgs{
						GceClusterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs{
							Zone: pulumi.String("us-central1-a"),
							Tags: pulumi.StringArray{
								pulumi.String("foo"),
								pulumi.String("bar"),
							},
						},
						MasterConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs{
							NumInstances: pulumi.Int(1),
							MachineType:  pulumi.String("n1-standard-1"),
							DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs{
								BootDiskType:   pulumi.String("pd-ssd"),
								BootDiskSizeGb: pulumi.Int(15),
							},
						},
						WorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs{
							NumInstances: pulumi.Int(3),
							MachineType:  pulumi.String("n1-standard-2"),
							DiskConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs{
								BootDiskSizeGb: pulumi.Int(10),
								NumLocalSsds:   pulumi.Int(2),
							},
						},
						SecondaryWorkerConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs{
							NumInstances: pulumi.Int(2),
						},
						SoftwareConfig: &dataproc.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs{
							ImageVersion: pulumi.String("2.0.35-debian10"),
						},
					},
				},
			},
			Jobs: dataproc.WorkflowTemplateJobArray{
				&dataproc.WorkflowTemplateJobArgs{
					StepId: pulumi.String("someJob"),
					SparkJob: &dataproc.WorkflowTemplateJobSparkJobArgs{
						MainClass: pulumi.String("SomeClass"),
					},
				},
				&dataproc.WorkflowTemplateJobArgs{
					StepId: pulumi.String("otherJob"),
					PrerequisiteStepIds: pulumi.StringArray{
						pulumi.String("someJob"),
					},
					PrestoJob: &dataproc.WorkflowTemplateJobPrestoJobArgs{
						QueryFileUri: pulumi.String("someuri"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.WorkflowTemplate;
import com.pulumi.gcp.dataproc.WorkflowTemplateArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobSparkJobArgs;
import com.pulumi.gcp.dataproc.inputs.WorkflowTemplateJobPrestoJobArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var template = new WorkflowTemplate("template", WorkflowTemplateArgs.builder()
            .name("template-example")
            .location("us-central1")
            .placement(WorkflowTemplatePlacementArgs.builder()
                .managedCluster(WorkflowTemplatePlacementManagedClusterArgs.builder()
                    .clusterName("my-cluster")
                    .config(WorkflowTemplatePlacementManagedClusterConfigArgs.builder()
                        .gceClusterConfig(WorkflowTemplatePlacementManagedClusterConfigGceClusterConfigArgs.builder()
                            .zone("us-central1-a")
                            .tags(
                                "foo",
                                "bar")
                            .build())
                        .masterConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigArgs.builder()
                            .numInstances(1)
                            .machineType("n1-standard-1")
                            .diskConfig(WorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigArgs.builder()
                                .bootDiskType("pd-ssd")
                                .bootDiskSizeGb(15)
                                .build())
                            .build())
                        .workerConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigArgs.builder()
                            .numInstances(3)
                            .machineType("n1-standard-2")
                            .diskConfig(WorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigArgs.builder()
                                .bootDiskSizeGb(10)
                                .numLocalSsds(2)
                                .build())
                            .build())
                        .secondaryWorkerConfig(WorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigArgs.builder()
                            .numInstances(2)
                            .build())
                        .softwareConfig(WorkflowTemplatePlacementManagedClusterConfigSoftwareConfigArgs.builder()
                            .imageVersion("2.0.35-debian10")
                            .build())
                        .build())
                    .build())
                .build())
            .jobs(
                WorkflowTemplateJobArgs.builder()
                    .stepId("someJob")
                    .sparkJob(WorkflowTemplateJobSparkJobArgs.builder()
                        .mainClass("SomeClass")
                        .build())
                    .build(),
                WorkflowTemplateJobArgs.builder()
                    .stepId("otherJob")
                    .prerequisiteStepIds("someJob")
                    .prestoJob(WorkflowTemplateJobPrestoJobArgs.builder()
                        .queryFileUri("someuri")
                        .build())
                    .build())
            .build());
    }
}

YAML

resources:
  template:
    type: gcp:dataproc:WorkflowTemplate
    properties:
      name: template-example
      location: us-central1
      placement:
        managedCluster:
          clusterName: my-cluster
          config:
            gceClusterConfig:
              zone: us-central1-a
              tags:
                - foo
                - bar
            masterConfig:
              numInstances: 1
              machineType: n1-standard-1
              diskConfig:
                bootDiskType: pd-ssd
                bootDiskSizeGb: 15
            workerConfig:
              numInstances: 3
              machineType: n1-standard-2
              diskConfig:
                bootDiskSizeGb: 10
                numLocalSsds: 2
            secondaryWorkerConfig:
              numInstances: 2
            softwareConfig:
              imageVersion: 2.0.35-debian10
      jobs:
        - stepId: someJob
          sparkJob:
            mainClass: SomeClass
        - stepId: otherJob
          prerequisiteStepIds:
            - someJob
          prestoJob:
            queryFileUri: someuri

Import

WorkflowTemplate can be imported using any of these accepted formats:

  • projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}

  • {{project}}/{{location}}/{{name}}

  • {{location}}/{{name}}

When using the pulumi import command, WorkflowTemplate can be imported using one of the formats above. For example:

$ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}
$ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{project}}/{{location}}/{{name}}
$ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default {{location}}/{{name}}
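
For instance, with hypothetical placeholder values (a project named my-project holding the template from the example above), the first form might look like:

$ pulumi import gcp:dataproc/workflowTemplate:WorkflowTemplate default projects/my-project/locations/us-central1/workflowTemplates/template-example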

Constructors

constructor(dagTimeout: Output<String>? = null, jobs: Output<List<WorkflowTemplateJobArgs>>? = null, labels: Output<Map<String, String>>? = null, location: Output<String>? = null, name: Output<String>? = null, parameters: Output<List<WorkflowTemplateParameterArgs>>? = null, placement: Output<WorkflowTemplatePlacementArgs>? = null, project: Output<String>? = null, version: Output<Int>? = null)

Properties

val dagTimeout: Output<String>? = null

Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration, https://developers.google.com/protocol-buffers/docs/proto3#json). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s").
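
As a minimal TypeScript sketch (values illustrative, not defaults; the clusterSelector placement and its goog-dataproc-cluster-name label are assumptions chosen to keep the example short):

import * as gcp from "@pulumi/gcp";

// Illustrative only: fail the workflow if the whole DAG runs longer than 30 minutes.
// The value uses the JSON duration format: whole seconds with an "s" suffix.
const timed = new gcp.dataproc.WorkflowTemplate("timed", {
    location: "us-central1",
    dagTimeout: "1800s",
    placement: {
        clusterSelector: {
            clusterLabels: { "goog-dataproc-cluster-name": "my-cluster" },
        },
    },
    jobs: [{
        stepId: "onlyJob",
        sparkJob: { mainClass: "SomeClass" },
    }],
});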

val jobs: Output<List<WorkflowTemplateJobArgs>>? = null

Required. The Directed Acyclic Graph of Jobs to submit.

val labels: Output<Map<String, String>>? = null

Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt).

val location: Output<String>? = null

The location for the resource

val name: Output<String>? = null

Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names.

  • For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id}

  • For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}

val parameters: Output<List<WorkflowTemplateParameterArgs>>? = null

Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
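
As a hedged TypeScript sketch (names and paths illustrative): a parameter names one or more dot-separated field paths into the template, and the value supplied at instantiation time is substituted at those paths:

import * as gcp from "@pulumi/gcp";

// Sketch only: expose the managed cluster's zone as an instantiation-time parameter.
const parameterized = new gcp.dataproc.WorkflowTemplate("parameterized", {
    location: "us-central1",
    parameters: [{
        name: "ZONE",
        description: "Zone the managed cluster runs in",
        // Dot-separated path to the field this parameter substitutes into.
        fields: ["placement.managedCluster.config.gceClusterConfig.zone"],
    }],
    placement: {
        managedCluster: {
            clusterName: "my-cluster",
            config: {
                gceClusterConfig: { zone: "us-central1-a" },
            },
        },
    },
    jobs: [{
        stepId: "someJob",
        sparkJob: { mainClass: "SomeClass" },
    }],
});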

val placement: Output<WorkflowTemplatePlacementArgs>? = null

Required. WorkflowTemplate scheduling information.

val project: Output<String>? = null

The project for the resource

val version: Output<Int>? = null

Output only. The current version of this workflow template.

Functions

open override fun toJava(): WorkflowTemplateArgs