PreventionJobTrigger

class PreventionJobTrigger : KotlinCustomResource

A job trigger configuration. To get more information about JobTrigger, see:

API documentation: https://cloud.google.com/dlp/docs/reference/rest/v2/projects.jobTriggers
How-to Guides: Official Documentation: https://cloud.google.com/dlp/docs/creating-job-triggers

Example Usage

Dlp Job Trigger Basic

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataloss.PreventionJobTrigger;
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // A scheduled trigger that scans a Cloud Storage directory every 24 hours
        // ("86400s") and saves findings to a BigQuery table.
        var basic = new PreventionJobTrigger("basic", PreventionJobTriggerArgs.builder()
            .description("Description")
            .displayName("Displayname")
            .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                    .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                        .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                            .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                .datasetId("dataset")
                                .projectId("project")
                                .build())
                            .build())
                        .build())
                    .build())
                .inspectTemplateName("fake")
                .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                    .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                        .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                            .url("gs://mybucket/directory/")
                            .build())
                        .build())
                    .build())
                .build())
            .parent("projects/my-project-name")
            .triggers(PreventionJobTriggerTriggerArgs.builder()
                .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                    .recurrencePeriodDuration("86400s")
                    .build())
                .build())
            .build());
    }
}
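The examples on this page are rendered as Java programs by the upstream docs generator. Since this page documents the Kotlin resource, here is a minimal sketch of the same basic trigger using the Pulumi Kotlin type-safe DSL. It assumes the standard pulumi-kotlin conventions (a Pulumi.run entry point, a lowercase preventionJobTrigger builder function in com.pulumi.gcp.dataloss.kotlin, and nested args { } blocks); verify the exact builder names against your SDK version:

import com.pulumi.gcp.dataloss.kotlin.preventionJobTrigger
import com.pulumi.kotlin.Pulumi

fun main() {
    Pulumi.run {
        // Daily scan of a Cloud Storage directory, saving findings to BigQuery.
        // Values mirror the Java example above; builder names are assumed from
        // the pulumi-kotlin DSL conventions.
        preventionJobTrigger("basic") {
            args {
                parent("projects/my-project-name")
                description("Description")
                displayName("Displayname")
                inspectJob {
                    inspectTemplateName("fake")
                    actions {
                        saveFindings {
                            outputConfig {
                                table {
                                    datasetId("dataset")
                                    projectId("project")
                                }
                            }
                        }
                    }
                    storageConfig {
                        cloudStorageOptions {
                            fileSet { url("gs://mybucket/directory/") }
                        }
                    }
                }
                triggers {
                    schedule { recurrencePeriodDuration("86400s") }
                }
            }
        }
    }
}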

Dlp Job Trigger Bigquery Row Limit

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataloss.PreventionJobTrigger;
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Limits the BigQuery scan to 1000 rows, sampled from a random start row.
        var bigqueryRowLimit = new PreventionJobTrigger("bigqueryRowLimit", PreventionJobTriggerArgs.builder()
            .description("Description")
            .displayName("Displayname")
            .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                    .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                        .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                            .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                .datasetId("dataset")
                                .projectId("project")
                                .build())
                            .build())
                        .build())
                    .build())
                .inspectTemplateName("fake")
                .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                    .bigQueryOptions(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs.builder()
                        .rowsLimit(1000)
                        .sampleMethod("RANDOM_START")
                        .tableReference(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs.builder()
                            .datasetId("dataset")
                            .projectId("project")
                            .tableId("table_to_scan")
                            .build())
                        .build())
                    .build())
                .build())
            .parent("projects/my-project-name")
            .triggers(PreventionJobTriggerTriggerArgs.builder()
                .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                    .recurrencePeriodDuration("86400s")
                    .build())
                .build())
            .build());
    }
}

Dlp Job Trigger Bigquery Row Limit Percentage

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataloss.PreventionJobTrigger;
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Limits the BigQuery scan to 50% of the table's rows, sampled from a random start row.
        var bigqueryRowLimitPercentage = new PreventionJobTrigger("bigqueryRowLimitPercentage", PreventionJobTriggerArgs.builder()
            .description("Description")
            .displayName("Displayname")
            .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                    .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                        .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                            .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                .datasetId("dataset")
                                .projectId("project")
                                .build())
                            .build())
                        .build())
                    .build())
                .inspectTemplateName("fake")
                .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                    .bigQueryOptions(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs.builder()
                        .rowsLimitPercent(50)
                        .sampleMethod("RANDOM_START")
                        .tableReference(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs.builder()
                            .datasetId("dataset")
                            .projectId("project")
                            .tableId("table_to_scan")
                            .build())
                        .build())
                    .build())
                .build())
            .parent("projects/my-project-name")
            .triggers(PreventionJobTriggerTriggerArgs.builder()
                .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                    .recurrencePeriodDuration("86400s")
                    .build())
                .build())
            .build());
    }
}

Dlp Job Trigger Job Notification Emails

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataloss.PreventionJobTrigger;
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionJobNotificationEmailsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var jobNotificationEmails = new PreventionJobTrigger("jobNotificationEmails", PreventionJobTriggerArgs.builder()
            .description("Description for the job_trigger created by terraform")
            .displayName("TerraformDisplayName")
            .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                    // The jobNotificationEmails action has no configurable fields;
                    // an empty args block enables it.
                    .jobNotificationEmails(PreventionJobTriggerInspectJobActionJobNotificationEmailsArgs.builder().build())
                    .build())
                .inspectTemplateName("sample-inspect-template")
                .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                    .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                        .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                            .url("gs://mybucket/directory/")
                            .build())
                        .build())
                    .build())
                .build())
            .parent("projects/my-project-name")
            .triggers(PreventionJobTriggerTriggerArgs.builder()
                .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                    .recurrencePeriodDuration("86400s")
                    .build())
                .build())
            .build());
    }
}

Dlp Job Trigger Deidentify

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Table;
import com.pulumi.gcp.bigquery.TableArgs;
import com.pulumi.gcp.bigquery.inputs.TableTimePartitioningArgs;
import com.pulumi.gcp.dataloss.PreventionJobTrigger;
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // BigQuery dataset and table that will record details of each de-identification transformation.
        var defaultDataset = new Dataset("defaultDataset", DatasetArgs.builder()
            .datasetId("tf_test")
            .friendlyName("terraform-test")
            .description("Description for the dataset created by terraform")
            .location("US")
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .build());

        var defaultTable = new Table("defaultTable", TableArgs.builder()
            .datasetId(defaultDataset.datasetId())
            .tableId("tf_test")
            .deletionProtection(false)
            .timePartitioning(TableTimePartitioningArgs.builder()
                .type("DAY")
                .build())
            .labels(Map.of("env", "default"))
            .schema("""
                [
                  {
                    "name": "quantity",
                    "type": "NUMERIC",
                    "mode": "NULLABLE",
                    "description": "The quantity"
                  },
                  {
                    "name": "name",
                    "type": "STRING",
                    "mode": "NULLABLE",
                    "description": "Name of the object"
                  }
                ]
                """)
            .build());

        // Writes de-identified copies of matching CSV/TSV files to Cloud Storage and
        // logs transformation details to the BigQuery table above.
        var deidentify = new PreventionJobTrigger("deidentify", PreventionJobTriggerArgs.builder()
            .parent("projects/my-project-name")
            .description("Description for the job_trigger created by terraform")
            .displayName("TerraformDisplayName")
            .triggers(PreventionJobTriggerTriggerArgs.builder()
                .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                    .recurrencePeriodDuration("86400s")
                    .build())
                .build())
            .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                .inspectTemplateName("sample-inspect-template")
                .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                    .deidentify(PreventionJobTriggerInspectJobActionDeidentifyArgs.builder()
                        .cloudStorageOutput("gs://samplebucket/dir/")
                        .fileTypesToTransforms("CSV", "TSV")
                        .transformationDetailsStorageConfig(PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs.builder()
                            .table(PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs.builder()
                                .projectId("my-project-name")
                                .datasetId(defaultDataset.datasetId())
                                .tableId(defaultTable.tableId())
                                .build())
                            .build())
                        .transformationConfig(PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs.builder()
                            .deidentifyTemplate("sample-deidentify-template")
                            .imageRedactTemplate("sample-image-redact-template")
                            .structuredDeidentifyTemplate("sample-structured-deidentify-template")
                            .build())
                        .build())
                    .build())
                .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                    .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                        .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                            .url("gs://mybucket/directory/")
                            .build())
                        .build())
                    .build())
                .build())
            .build());
    }
}

Dlp Job Trigger Hybrid

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataloss.PreventionJobTrigger;
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerManualArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var hybridTrigger = new PreventionJobTrigger("hybridTrigger", PreventionJobTriggerArgs.builder()
            .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                    .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                        .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                            .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                .datasetId("dataset")
                                .projectId("project")
                                .build())
                            .build())
                        .build())
                    .build())
                .inspectTemplateName("fake")
                .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                    .hybridOptions(PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs.builder()
                        .description("Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings")
                        .labels(Map.of("env", "prod"))
                        .requiredFindingLabelKeys("appointment-bookings-comments")
                        .tableOptions(PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs.builder()
                            .identifyingFields(PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs.builder()
                                .name("booking_id")
                                .build())
                            .build())
                        .build())
                    .build())
                .build())
            .parent("projects/my-project-name")
            .triggers(PreventionJobTriggerTriggerArgs.builder()
                // A manual trigger: the job is started on demand rather than on a schedule.
                .manual(PreventionJobTriggerTriggerManualArgs.builder().build())
                .build())
            .build());
    }
}

Dlp Job Trigger Inspect

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataloss.PreventionJobTrigger;
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigLimitsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Defines an inline inspectConfig with a custom info type, built-in EMAIL_ADDRESS
        // detection, finding limits, and per-info-type rule sets.
        var inspect = new PreventionJobTrigger("inspect", PreventionJobTriggerArgs.builder()
            .description("Description")
            .displayName("Displayname")
            .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                    .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                        .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                            .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                .datasetId("dataset")
                                .projectId("project")
                                .build())
                            .build())
                        .build())
                    .build())
                .inspectConfig(PreventionJobTriggerInspectJobInspectConfigArgs.builder()
                    .customInfoTypes(PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs.builder()
                        .infoType(PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs.builder()
                            .name("MY_CUSTOM_TYPE")
                            .build())
                        .likelihood("UNLIKELY")
                        .regex(PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs.builder()
                            .pattern("test*")
                            .build())
                        .build())
                    .infoTypes(PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs.builder()
                        .name("EMAIL_ADDRESS")
                        .build())
                    .limits(PreventionJobTriggerInspectJobInspectConfigLimitsArgs.builder()
                        .maxFindingsPerItem(10)
                        .maxFindingsPerRequest(50)
                        .build())
                    .minLikelihood("UNLIKELY")
                    // NOTE: the upstream docs generator rendered these two rule sets as a
                    // formatting panic; they are reconstructed here from the equivalent
                    // Terraform example (an exclusion rule for EMAIL_ADDRESS and a hotword
                    // rule for MY_CUSTOM_TYPE) and should be treated as illustrative.
                    .ruleSet(
                        PreventionJobTriggerInspectJobInspectConfigRuleSetArgs.builder()
                            .infoTypes(PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs.builder()
                                .name("EMAIL_ADDRESS")
                                .build())
                            .rules(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs.builder()
                                .exclusionRule(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs.builder()
                                    .matchingType("MATCHING_TYPE_FULL_MATCH")
                                    .regex(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs.builder()
                                        .pattern(".+@example.com")
                                        .build())
                                    .build())
                                .build())
                            .build(),
                        PreventionJobTriggerInspectJobInspectConfigRuleSetArgs.builder()
                            .infoTypes(PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs.builder()
                                .name("MY_CUSTOM_TYPE")
                                .build())
                            .rules(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs.builder()
                                .hotwordRule(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs.builder()
                                    .hotwordRegex(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs.builder()
                                        .pattern("example*")
                                        .build())
                                    .likelihoodAdjustment(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs.builder()
                                        .fixedLikelihood("VERY_LIKELY")
                                        .build())
                                    .proximity(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs.builder()
                                        .windowBefore(50)
                                        .build())
                                    .build())
                                .build())
                            .build())
                    .build())
                .inspectTemplateName("fake")
                .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                    .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                        .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                            .url("gs://mybucket/directory/")
                            .build())
                        .build())
                    .build())
                .build())
            .parent("projects/my-project-name")
            .triggers(PreventionJobTriggerTriggerArgs.builder()
                .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                    .recurrencePeriodDuration("86400s")
                    .build())
                .build())
            .build());
    }
}

Dlp Job Trigger Publish To Stackdriver

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataloss.PreventionJobTrigger;
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionPublishToStackdriverArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var publishToStackdriver = new PreventionJobTrigger("publishToStackdriver", PreventionJobTriggerArgs.builder()
            .description("Description for the job_trigger created by terraform")
            .displayName("TerraformDisplayName")
            .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                    // The publishToStackdriver action has no configurable fields;
                    // an empty args block enables it.
                    .publishToStackdriver(PreventionJobTriggerInspectJobActionPublishToStackdriverArgs.builder().build())
                    .build())
                .inspectTemplateName("sample-inspect-template")
                .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                    .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                        .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                            .url("gs://mybucket/directory/")
                            .build())
                        .build())
                    .build())
                .build())
            .parent("projects/my-project-name")
            .triggers(PreventionJobTriggerTriggerArgs.builder()
                .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                    .recurrencePeriodDuration("86400s")
                    .build())
                .build())
            .build());
    }
}

Dlp Job Trigger With Id

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataloss.PreventionJobTrigger;
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Supplies an explicit triggerId instead of letting the service generate one.
        var withTriggerId = new PreventionJobTrigger("withTriggerId", PreventionJobTriggerArgs.builder()
            .description("Starting description")
            .displayName("display")
            .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                    .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                        .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                            .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                .datasetId("dataset123")
                                .projectId("project")
                                .build())
                            .build())
                        .build())
                    .build())
                .inspectTemplateName("fake")
                .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                    .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                        .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                            .url("gs://mybucket/directory/")
                            .build())
                        .build())
                    .build())
                .build())
            .parent("projects/my-project-name")
            .triggerId("id-")
            .triggers(PreventionJobTriggerTriggerArgs.builder()
                .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                    .recurrencePeriodDuration("86400s")
                    .build())
                .build())
            .build());
    }
}

Import

JobTrigger can be imported using any of these accepted formats:

$ pulumi import gcp:dataloss/preventionJobTrigger:PreventionJobTrigger default {{parent}}/jobTriggers/{{name}}
$ pulumi import gcp:dataloss/preventionJobTrigger:PreventionJobTrigger default {{parent}}/{{name}}
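For example, to adopt an existing trigger under the Pulumi resource name default (the trigger name here is hypothetical):

$ pulumi import gcp:dataloss/preventionJobTrigger:PreventionJobTrigger default projects/my-project-name/jobTriggers/my-trigger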

Properties

val createTime: Output<String>

(Output) The creation timestamp of the job trigger. Set by the server.

val description: Output<String>?

A description of the job trigger.

val displayName: Output<String>?

User set display name of the job trigger.

val id: Output<String>
val inspectJob: Output<PreventionJobTriggerInspectJob>?

Controls what and how to inspect for findings. Structure is documented below.

val lastRunTime: Output<String>

The timestamp of the last time this trigger executed.

val name: Output<String>

The resource name of the job trigger. Set by the server.

val parent: Output<String>

The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}.

val pulumiChildResources: Set<KotlinResource>
val status: Output<String>?

Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.

val triggerId: Output<String>

The trigger id can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.

val triggers: Output<List<PreventionJobTriggerTrigger>>

What event needs to occur for a new job to be started. Structure is documented below.

val updateTime: Output<String>

The last update timestamp of the job trigger. Set by the server.

val urn: Output<String>