Environment Args
Creates an MWAA Environment resource.
Example Usage
An MWAA Environment requires an IAM role (aws.iam.Role), two subnets in the private zone (aws.ec2.Subnet), and a versioned S3 bucket (aws.s3.BucketV2).
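The examples on this page reference these prerequisites as exampleRole, exampleSecurityGroup, privateSubnet1, privateSubnet2, and exampleBucket. The following is a minimal, illustrative sketch of how such resources might be created in the same stack; the names, CIDR blocks, and availability zones are placeholders, and the IAM role is shown with its trust policy only (the execution permissions that MWAA requires still need to be attached).
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.aws.ec2.SecurityGroup;
import com.pulumi.aws.ec2.SecurityGroupArgs;
import com.pulumi.aws.ec2.Subnet;
import com.pulumi.aws.ec2.SubnetArgs;
import com.pulumi.aws.ec2.Vpc;
import com.pulumi.aws.ec2.VpcArgs;
import com.pulumi.aws.iam.Role;
import com.pulumi.aws.iam.RoleArgs;
import com.pulumi.aws.s3.BucketV2;
import com.pulumi.aws.s3.BucketVersioningV2;
import com.pulumi.aws.s3.BucketVersioningV2Args;
import com.pulumi.aws.s3.inputs.BucketVersioningV2VersioningConfigurationArgs;

public class Prerequisites {
    public static void main(String[] args) {
        Pulumi.run(Prerequisites::stack);
    }

    public static void stack(Context ctx) {
        // VPC with two private subnets in different availability zones
        // (CIDR blocks and AZ names are placeholders).
        var vpc = new Vpc("example", VpcArgs.builder()
            .cidrBlock("10.0.0.0/16")
            .build());
        var privateSubnet1 = new Subnet("private1", SubnetArgs.builder()
            .vpcId(vpc.id())
            .cidrBlock("10.0.1.0/24")
            .availabilityZone("us-east-1a")
            .build());
        var privateSubnet2 = new Subnet("private2", SubnetArgs.builder()
            .vpcId(vpc.id())
            .cidrBlock("10.0.2.0/24")
            .availabilityZone("us-east-1b")
            .build());
        var exampleSecurityGroup = new SecurityGroup("example", SecurityGroupArgs.builder()
            .vpcId(vpc.id())
            .build());

        // Execution role with the MWAA trust policy only; attach the required
        // execution permissions separately.
        var exampleRole = new Role("example", RoleArgs.builder()
            .assumeRolePolicy("{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Action\":\"sts:AssumeRole\",\"Principal\":{\"Service\":[\"airflow.amazonaws.com\",\"airflow-env.amazonaws.com\"]}}]}")
            .build());

        // Versioned S3 bucket for DAGs, plugins, and requirements.
        var exampleBucket = new BucketV2("example");
        new BucketVersioningV2("example", BucketVersioningV2Args.builder()
            .bucket(exampleBucket.id())
            .versioningConfiguration(BucketVersioningV2VersioningConfigurationArgs.builder()
                .status("Enabled")
                .build())
            .build());
    }
}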
Basic Usage
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.mwaa.Environment;
import com.pulumi.aws.mwaa.EnvironmentArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentNetworkConfigurationArgs;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // exampleRole, exampleSecurityGroup, privateSubnet1, privateSubnet2, and exampleBucket
        // are the prerequisite resources sketched above.
        var example = new Environment("example", EnvironmentArgs.builder()
            .dagS3Path("dags/")
            .executionRoleArn(exampleRole.arn())
            .networkConfiguration(EnvironmentNetworkConfigurationArgs.builder()
                .securityGroupIds(Output.all(exampleSecurityGroup.id()))
                .subnetIds(Output.all(privateSubnet1.id(), privateSubnet2.id()))
                .build())
            .sourceBucketArn(exampleBucket.arn())
            .build());
    }
}
Example with Airflow configuration options
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.mwaa.Environment;
import com.pulumi.aws.mwaa.EnvironmentArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentNetworkConfigurationArgs;
import java.util.Map;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Airflow configuration option values must be provided as strings.
        // exampleRole, exampleSecurityGroup, privateSubnet1, privateSubnet2, and exampleBucket
        // are the prerequisite resources sketched above.
        var example = new Environment("example", EnvironmentArgs.builder()
            .airflowConfigurationOptions(Map.ofEntries(
                Map.entry("core.default_task_retries", "16"),
                Map.entry("core.parallelism", "1")
            ))
            .dagS3Path("dags/")
            .executionRoleArn(exampleRole.arn())
            .networkConfiguration(EnvironmentNetworkConfigurationArgs.builder()
                .securityGroupIds(Output.all(exampleSecurityGroup.id()))
                .subnetIds(Output.all(privateSubnet1.id(), privateSubnet2.id()))
                .build())
            .sourceBucketArn(exampleBucket.arn())
            .build());
    }
}
Example with logging configurations
Note that Airflow task logs are enabled by default with the INFO log level.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.mwaa.Environment;
import com.pulumi.aws.mwaa.EnvironmentArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationDagProcessingLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationSchedulerLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationTaskLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationWebserverLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentLoggingConfigurationWorkerLogsArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentNetworkConfigurationArgs;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // exampleRole, exampleSecurityGroup, privateSubnet1, privateSubnet2, and exampleBucket
        // are the prerequisite resources sketched above.
        var example = new Environment("example", EnvironmentArgs.builder()
            .dagS3Path("dags/")
            .executionRoleArn(exampleRole.arn())
            .loggingConfiguration(EnvironmentLoggingConfigurationArgs.builder()
                .dagProcessingLogs(EnvironmentLoggingConfigurationDagProcessingLogsArgs.builder()
                    .enabled(true)
                    .logLevel("DEBUG")
                    .build())
                .schedulerLogs(EnvironmentLoggingConfigurationSchedulerLogsArgs.builder()
                    .enabled(true)
                    .logLevel("INFO")
                    .build())
                .taskLogs(EnvironmentLoggingConfigurationTaskLogsArgs.builder()
                    .enabled(true)
                    .logLevel("WARNING")
                    .build())
                .webserverLogs(EnvironmentLoggingConfigurationWebserverLogsArgs.builder()
                    .enabled(true)
                    .logLevel("ERROR")
                    .build())
                .workerLogs(EnvironmentLoggingConfigurationWorkerLogsArgs.builder()
                    .enabled(true)
                    .logLevel("CRITICAL")
                    .build())
                .build())
            .networkConfiguration(EnvironmentNetworkConfigurationArgs.builder()
                .securityGroupIds(Output.all(exampleSecurityGroup.id()))
                .subnetIds(Output.all(privateSubnet1.id(), privateSubnet2.id()))
                .build())
            .sourceBucketArn(exampleBucket.arn())
            .build());
    }
}
Example with tags
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.mwaa.Environment;
import com.pulumi.aws.mwaa.EnvironmentArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentNetworkConfigurationArgs;
import java.util.Map;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // exampleRole, exampleSecurityGroup, privateSubnet1, privateSubnet2, and exampleBucket
        // are the prerequisite resources sketched above.
        var example = new Environment("example", EnvironmentArgs.builder()
            .dagS3Path("dags/")
            .executionRoleArn(exampleRole.arn())
            .networkConfiguration(EnvironmentNetworkConfigurationArgs.builder()
                .securityGroupIds(Output.all(exampleSecurityGroup.id()))
                .subnetIds(Output.all(privateSubnet1.id(), privateSubnet2.id()))
                .build())
            .sourceBucketArn(exampleBucket.arn())
            .tags(Map.ofEntries(
                Map.entry("Name", "example"),
                Map.entry("Environment", "production")
            ))
            .build());
    }
}
Import
MWAA Environment can be imported using its Name, e.g.,
$ pulumi import aws:mwaa/environment:Environment example MyAirflowEnvironment
Constructors
Properties
airflowConfigurationOptions: The airflow_configuration_options parameter specifies Airflow override options. Check the official documentation for all possible configuration options.
airflowVersion: Airflow version of your environment. Defaults to the latest version that MWAA supports.
dagS3Path: The relative path to the DAG folder on your Amazon S3 storage bucket. For example, dags. For more information, see Importing DAGs on Amazon MWAA.
environmentClass: Environment class for the cluster. Possible options are mw1.small, mw1.medium, and mw1.large. Defaults to mw1.small. Please check the AWS Pricing page for more information about the environment classes. A combined example that sets this and the other optional properties appears after this list.
executionRoleArn: The Amazon Resource Name (ARN) of the task execution role that Amazon MWAA and its environment can assume. Check the official AWS documentation for the detailed role specification.
kmsKey: The Amazon Resource Name (ARN) of the KMS key that you want to use for encryption. Defaults to the ARN of the managed KMS key aws/airflow. Please check the official documentation for more information.
loggingConfiguration: The Apache Airflow logs you want to send to Amazon CloudWatch Logs.
maxWorkers: The maximum number of workers that can be automatically scaled up. The value must be between 1 and 25. Defaults to 10.
minWorkers: The minimum number of workers that you want to run in your environment. Defaults to 1.
networkConfiguration: Specifies the network configuration for your Apache Airflow environment. This includes two private subnets as well as security groups for the Airflow environment. Each subnet requires an internet connection, otherwise the deployment will fail. See Network configuration below for details.
pluginsS3ObjectVersion: The plugins.zip file version you want to use.
pluginsS3Path: The relative path to the plugins.zip file on your Amazon S3 storage bucket. For example, plugins.zip. If a relative path is provided in the request, then plugins_s3_object_version is required. For more information, see Importing DAGs on Amazon MWAA.
requirementsS3ObjectVersion: The requirements.txt file version you want to use.
requirementsS3Path: The relative path to the requirements.txt file on your Amazon S3 storage bucket. For example, requirements.txt. If a relative path is provided in the request, then requirements_s3_object_version is required. For more information, see Importing DAGs on Amazon MWAA.
schedulers: The number of schedulers that you want to run in your environment. Airflow v2.0.2 and above accepts 2 to 5, defaulting to 2; v1.10.12 accepts 1.
sourceBucketArn: The Amazon Resource Name (ARN) of your Amazon S3 storage bucket. For example, arn:aws:s3:::airflow-mybucketname.
startupScriptS3ObjectVersion: The version of the startup shell script you want to use. You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
startupScriptS3Path: The relative path to the script hosted in your bucket. The script runs as your environment starts, before the Apache Airflow process starts. Use this script to install dependencies, modify configuration options, and set environment variables. See Using a startup script. Supported for environment versions 2.x and later.
webserverAccessMode: Specifies whether the webserver should be accessible over the internet or only via your specified VPC. Possible options: PRIVATE_ONLY (default) and PUBLIC_ONLY.
weeklyMaintenanceWindowStart: Specifies the start date for the weekly maintenance window.
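To tie the optional properties above together, here is a sketch that sets several of them on a single environment. The builder method names are assumed to follow the provider's usual camelCase naming for the properties described above, and the values (Airflow version, maintenance window, file names) as well as the referenced resources (exampleKey, an aws.kms.Key, plus the prerequisites sketched near the top of this page) are illustrative.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.mwaa.Environment;
import com.pulumi.aws.mwaa.EnvironmentArgs;
import com.pulumi.aws.mwaa.inputs.EnvironmentNetworkConfigurationArgs;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // exampleRole, exampleSecurityGroup, privateSubnet1, privateSubnet2, exampleBucket, and
        // exampleKey (an aws.kms.Key) are assumed to be defined elsewhere in the stack.
        var example = new Environment("example", EnvironmentArgs.builder()
            .airflowVersion("2.4.3")                   // optional; illustrative version
            .environmentClass("mw1.medium")            // mw1.small (default), mw1.medium, or mw1.large
            .minWorkers(1)
            .maxWorkers(10)
            .schedulers(2)
            .kmsKey(exampleKey.arn())                  // customer-managed key instead of aws/airflow
            .webserverAccessMode("PUBLIC_ONLY")        // default is PRIVATE_ONLY
            .weeklyMaintenanceWindowStart("SUN:19:00") // illustrative maintenance window start
            .dagS3Path("dags/")
            .requirementsS3Path("requirements.txt")
            .pluginsS3Path("plugins.zip")
            .startupScriptS3Path("startup.sh")         // supported on 2.x environments
            .executionRoleArn(exampleRole.arn())
            .networkConfiguration(EnvironmentNetworkConfigurationArgs.builder()
                .securityGroupIds(Output.all(exampleSecurityGroup.id()))
                .subnetIds(Output.all(privateSubnet1.id(), privateSubnet2.id()))
                .build())
            .sourceBucketArn(exampleBucket.arn())
            .build());
    }
}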