Gdc Application Environment
An ApplicationEnvironment contains shared configuration that may be referenced by multiple SparkApplications. To get more information about ApplicationEnvironment, see:
How-to Guides
Example Usage
Dataprocgdc Applicationenvironment Basic
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// Minimal ApplicationEnvironment: only the required identifiers are set.
const appEnvArgs: gcp.dataproc.GdcApplicationEnvironmentArgs = {
    applicationEnvironmentId: "dp-tf-e2e-application-environment-basic",
    serviceinstance: "do-not-delete-dataproc-gdc-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
};
const appEnv = new gcp.dataproc.GdcApplicationEnvironment("application-environment", appEnvArgs);
import pulumi
import pulumi_gcp as gcp

# Minimal ApplicationEnvironment: only the required identifiers are set.
app_env_args = {
    "application_environment_id": "dp-tf-e2e-application-environment-basic",
    "serviceinstance": "do-not-delete-dataproc-gdc-instance",
    "project": "my-project",
    "location": "us-west2",
    "namespace": "default",
}
app_env = gcp.dataproc.GdcApplicationEnvironment("application-environment", **app_env_args)
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() =>
{
    // Minimal ApplicationEnvironment: only the required identifiers are set.
    var appEnvArgs = new Gcp.Dataproc.GdcApplicationEnvironmentArgs
    {
        ApplicationEnvironmentId = "dp-tf-e2e-application-environment-basic",
        Serviceinstance = "do-not-delete-dataproc-gdc-instance",
        Project = "my-project",
        Location = "us-west2",
        Namespace = "default",
    };
    var appEnv = new Gcp.Dataproc.GdcApplicationEnvironment("application-environment", appEnvArgs);
});
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dataproc.NewGdcApplicationEnvironment(ctx, "application-environment", &dataproc.GdcApplicationEnvironmentArgs{
ApplicationEnvironmentId: pulumi.String("dp-tf-e2e-application-environment-basic"),
Serviceinstance: pulumi.String("do-not-delete-dataproc-gdc-instance"),
Project: pulumi.String("my-project"),
Location: pulumi.String("us-west2"),
Namespace: pulumi.String("default"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcApplicationEnvironment;
import com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Minimal ApplicationEnvironment: only the required identifiers are set.
        var appEnvArgs = GdcApplicationEnvironmentArgs.builder()
            .applicationEnvironmentId("dp-tf-e2e-application-environment-basic")
            .serviceinstance("do-not-delete-dataproc-gdc-instance")
            .project("my-project")
            .location("us-west2")
            .namespace("default")
            .build();
        var appEnv = new GdcApplicationEnvironment("application-environment", appEnvArgs);
    }
}
resources:
  # Minimal ApplicationEnvironment: only the required identifiers are set.
  application-environment:
    type: gcp:dataproc:GdcApplicationEnvironment
    properties:
      applicationEnvironmentId: dp-tf-e2e-application-environment-basic
      serviceinstance: do-not-delete-dataproc-gdc-instance
      project: my-project
      location: us-west2
      namespace: default
Dataprocgdc Applicationenvironment
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// Full ApplicationEnvironment: identifiers plus display name, labels,
// annotations, and default Spark configuration.
const appEnvArgs: gcp.dataproc.GdcApplicationEnvironmentArgs = {
    applicationEnvironmentId: "dp-tf-e2e-application-environment",
    serviceinstance: "do-not-delete-dataproc-gdc-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    displayName: "An application environment",
    labels: {
        "test-label": "label-value",
    },
    annotations: {
        an_annotation: "annotation_value",
    },
    sparkApplicationEnvironmentConfig: {
        defaultProperties: {
            "spark.executor.memory": "4g",
        },
        defaultVersion: "1.2",
    },
};
const appEnv = new gcp.dataproc.GdcApplicationEnvironment("application-environment", appEnvArgs);
import pulumi
import pulumi_gcp as gcp

# Full ApplicationEnvironment: identifiers plus display name, labels,
# annotations, and default Spark configuration.
app_env_args = {
    "application_environment_id": "dp-tf-e2e-application-environment",
    "serviceinstance": "do-not-delete-dataproc-gdc-instance",
    "project": "my-project",
    "location": "us-west2",
    "namespace": "default",
    "display_name": "An application environment",
    "labels": {"test-label": "label-value"},
    "annotations": {"an_annotation": "annotation_value"},
    "spark_application_environment_config": {
        "default_properties": {"spark.executor.memory": "4g"},
        "default_version": "1.2",
    },
}
app_env = gcp.dataproc.GdcApplicationEnvironment("application-environment", **app_env_args)
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() =>
{
    // Default Spark settings shared by applications in this environment.
    var sparkConfig = new Gcp.Dataproc.Inputs.GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs
    {
        DefaultProperties =
        {
            { "spark.executor.memory", "4g" },
        },
        DefaultVersion = "1.2",
    };
    // Full ApplicationEnvironment: identifiers plus display name, labels,
    // annotations, and default Spark configuration.
    var appEnvArgs = new Gcp.Dataproc.GdcApplicationEnvironmentArgs
    {
        ApplicationEnvironmentId = "dp-tf-e2e-application-environment",
        Serviceinstance = "do-not-delete-dataproc-gdc-instance",
        Project = "my-project",
        Location = "us-west2",
        Namespace = "default",
        DisplayName = "An application environment",
        Labels =
        {
            { "test-label", "label-value" },
        },
        Annotations =
        {
            { "an_annotation", "annotation_value" },
        },
        SparkApplicationEnvironmentConfig = sparkConfig,
    };
    var appEnv = new Gcp.Dataproc.GdcApplicationEnvironment("application-environment", appEnvArgs);
});
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dataproc.NewGdcApplicationEnvironment(ctx, "application-environment", &dataproc.GdcApplicationEnvironmentArgs{
ApplicationEnvironmentId: pulumi.String("dp-tf-e2e-application-environment"),
Serviceinstance: pulumi.String("do-not-delete-dataproc-gdc-instance"),
Project: pulumi.String("my-project"),
Location: pulumi.String("us-west2"),
Namespace: pulumi.String("default"),
DisplayName: pulumi.String("An application environment"),
Labels: pulumi.StringMap{
"test-label": pulumi.String("label-value"),
},
Annotations: pulumi.StringMap{
"an_annotation": pulumi.String("annotation_value"),
},
SparkApplicationEnvironmentConfig: &dataproc.GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs{
DefaultProperties: pulumi.StringMap{
"spark.executor.memory": pulumi.String("4g"),
},
DefaultVersion: pulumi.String("1.2"),
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcApplicationEnvironment;
import com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs;
import com.pulumi.gcp.dataproc.inputs.GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Default Spark settings shared by applications in this environment.
        var sparkConfig = GdcApplicationEnvironmentSparkApplicationEnvironmentConfigArgs.builder()
            .defaultProperties(Map.of("spark.executor.memory", "4g"))
            .defaultVersion("1.2")
            .build();
        // Full ApplicationEnvironment: identifiers plus display name, labels,
        // annotations, and default Spark configuration.
        var appEnvArgs = GdcApplicationEnvironmentArgs.builder()
            .applicationEnvironmentId("dp-tf-e2e-application-environment")
            .serviceinstance("do-not-delete-dataproc-gdc-instance")
            .project("my-project")
            .location("us-west2")
            .namespace("default")
            .displayName("An application environment")
            .labels(Map.of("test-label", "label-value"))
            .annotations(Map.of("an_annotation", "annotation_value"))
            .sparkApplicationEnvironmentConfig(sparkConfig)
            .build();
        var appEnv = new GdcApplicationEnvironment("application-environment", appEnvArgs);
    }
}
resources:
  # Full ApplicationEnvironment: identifiers plus display name, labels,
  # annotations, and default Spark configuration.
  application-environment:
    type: gcp:dataproc:GdcApplicationEnvironment
    properties:
      applicationEnvironmentId: dp-tf-e2e-application-environment
      serviceinstance: do-not-delete-dataproc-gdc-instance
      project: my-project
      location: us-west2
      namespace: default
      displayName: An application environment
      labels:
        test-label: label-value
      annotations:
        an_annotation: annotation_value
      sparkApplicationEnvironmentConfig:
        defaultProperties:
          spark.executor.memory: 4g
        defaultVersion: '1.2'
Import
ApplicationEnvironment can be imported using any of these accepted formats:
projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/applicationEnvironments/{{application_environment_id}}
{{project}}/{{location}}/{{serviceinstance}}/{{application_environment_id}}
{{location}}/{{serviceinstance}}/{{application_environment_id}}
When using the `pulumi import` command, ApplicationEnvironment can be imported using one of the formats above. For example:
$ pulumi import gcp:dataproc/gdcApplicationEnvironment:GdcApplicationEnvironment default projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/applicationEnvironments/{{application_environment_id}}
$ pulumi import gcp:dataproc/gdcApplicationEnvironment:GdcApplicationEnvironment default {{project}}/{{location}}/{{serviceinstance}}/{{application_environment_id}}
$ pulumi import gcp:dataproc/gdcApplicationEnvironment:GdcApplicationEnvironment default {{location}}/{{serviceinstance}}/{{application_environment_id}}
Properties
annotations: The annotations to associate with this application environment. Annotations may be used to store client information, but are not used by the server. Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations
for all of the annotations present on the resource.
applicationEnvironmentId: The id of the application environment.
createTime: The timestamp when the resource was created.
displayName: User-provided human-readable name to be used in user interfaces.
effectiveLabels: All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
labels: The labels to associate with this application environment. Labels may be used for filtering and billing tracking. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels
for all of the labels present on the resource.
pulumiLabels: The combination of labels configured directly on the resource and default labels configured on the provider.
serviceinstance: The id of the service instance to which this application environment belongs.
sparkApplicationEnvironmentConfig: Represents the SparkApplicationEnvironmentConfig. Structure is documented below.
updateTime: The timestamp when the resource was most recently updated.