Environment Args
Creates a MWAA Environment resource.
Example Usage
A MWAA Environment requires an IAM role (`aws.iam.Role`), two subnets in the private zone (`aws.ec2.Subnet`), and a versioned S3 bucket (`aws.s3.BucketV2`).
Basic Usage
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.mwaa.Environment;
import com.pulumi.aws.mwaa.EnvironmentArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentNetworkConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Environment("example", EnvironmentArgs.builder()
.dagS3Path("dags/")
.executionRoleArn(aws_iam_role.example().arn())
.networkConfiguration(EnvironmentNetworkConfigurationArgs.builder()
.securityGroupIds(aws_security_group.example().id())
.subnetIds(aws_subnet.private().stream().map(element -> element.id()).collect(toList()))
.build())
.sourceBucketArn(aws_s3_bucket.example().arn())
.build());
}
}
Example with Airflow configuration options
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.mwaa.Environment;
import com.pulumi.aws.mwaa.EnvironmentArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentNetworkConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Environment("example", EnvironmentArgs.builder()
.airflowConfigurationOptions(Map.ofEntries(
Map.entry("core.default_task_retries", 16),
Map.entry("core.parallelism", 1)
))
.dagS3Path("dags/")
.executionRoleArn(aws_iam_role.example().arn())
.networkConfiguration(EnvironmentNetworkConfigurationArgs.builder()
.securityGroupIds(aws_security_group.example().id())
.subnetIds(aws_subnet.private().stream().map(element -> element.id()).collect(toList()))
.build())
.sourceBucketArn(aws_s3_bucket.example().arn())
.build());
}
}
Example with logging configurations
Note that Airflow task logs are enabled by default with the `INFO` log level.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.mwaa.Environment;
import com.pulumi.aws.mwaa.EnvironmentArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationDagProcessingLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationSchedulerLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationTaskLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationWebserverLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationWorkerLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentNetworkConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Environment("example", EnvironmentArgs.builder()
.dagS3Path("dags/")
.executionRoleArn(aws_iam_role.example().arn())
.loggingConfiguration(EnvironmentLoggingConfigurationArgs.builder()
.dagProcessingLogs(EnvironmentLoggingConfigurationDagProcessingLogsArgs.builder()
.enabled(true)
.logLevel("DEBUG")
.build())
.schedulerLogs(EnvironmentLoggingConfigurationSchedulerLogsArgs.builder()
.enabled(true)
.logLevel("INFO")
.build())
.taskLogs(EnvironmentLoggingConfigurationTaskLogsArgs.builder()
.enabled(true)
.logLevel("WARNING")
.build())
.webserverLogs(EnvironmentLoggingConfigurationWebserverLogsArgs.builder()
.enabled(true)
.logLevel("ERROR")
.build())
.workerLogs(EnvironmentLoggingConfigurationWorkerLogsArgs.builder()
.enabled(true)
.logLevel("CRITICAL")
.build())
.build())
.networkConfiguration(EnvironmentNetworkConfigurationArgs.builder()
.securityGroupIds(aws_security_group.example().id())
.subnetIds(aws_subnet.private().stream().map(element -> element.id()).collect(toList()))
.build())
.sourceBucketArn(aws_s3_bucket.example().arn())
.build());
}
}
Example with tags
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.mwaa.Environment;
import com.pulumi.aws.mwaa.EnvironmentArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentNetworkConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Environment("example", EnvironmentArgs.builder()
.dagS3Path("dags/")
.executionRoleArn(aws_iam_role.example().arn())
.networkConfiguration(EnvironmentNetworkConfigurationArgs.builder()
.securityGroupIds(aws_security_group.example().id())
.subnetIds(aws_subnet.private().stream().map(element -> element.id()).collect(toList()))
.build())
.sourceBucketArn(aws_s3_bucket.example().arn())
.tags(Map.ofEntries(
Map.entry("Name", "example"),
Map.entry("Environment", "production")
))
.build());
}
}
Import
Using `pulumi import`, import MWAA Environment using its `Name`. For example:

$ pulumi import aws:mwaa/environment:Environment example MyAirflowEnvironment
Constructors
Functions
Properties
The `airflow_configuration_options` parameter specifies Airflow override options. Check the official documentation for all possible configuration options.
The relative path to the DAG folder on your Amazon S3 storage bucket. For example, dags. For more information, see Importing DAGs on Amazon MWAA.
Environment class for the cluster. Possible options are `mw1.small`, `mw1.medium`, and `mw1.large`. Defaults to `mw1.small`. Please check the AWS Pricing page for more information about the environment classes.
The Amazon Resource Name (ARN) of the task execution role that the Amazon MWAA and its environment can assume. Check the official AWS documentation for the detailed role specification.
The Amazon Resource Name (ARN) of the KMS key that you want to use for encryption. Defaults to the ARN of the managed KMS key `aws/airflow`. Please check the official documentation for more information.
The relative path to the plugins.zip file on your Amazon S3 storage bucket. For example, plugins.zip. If a relative path is provided in the request, then plugins_s3_object_version is required. For more information, see Importing DAGs on Amazon MWAA.
The relative path to the requirements.txt file on your Amazon S3 storage bucket. For example, requirements.txt. If a relative path is provided in the request, then requirements_s3_object_version is required. For more information, see Importing DAGs on Amazon MWAA.
The relative path to the script hosted in your bucket. The script runs as your environment starts before starting the Apache Airflow process. Use this script to install dependencies, modify configuration options, and set environment variables. See Using a startup script. Supported for environment versions 2.x and later.