BatchOperationsJobArgs

data class BatchOperationsJobArgs(val bucketList: Output<BatchOperationsJobBucketListArgs>? = null, val deleteObject: Output<BatchOperationsJobDeleteObjectArgs>? = null, val deleteProtection: Output<Boolean>? = null, val jobId: Output<String>? = null, val project: Output<String>? = null, val putMetadata: Output<BatchOperationsJobPutMetadataArgs>? = null, val putObjectHold: Output<BatchOperationsJobPutObjectHoldArgs>? = null, val rewriteObject: Output<BatchOperationsJobRewriteObjectArgs>? = null) : ConvertibleToJava<BatchOperationsJobArgs>

Storage Batch Operations (SBO) is a Cloud Storage management feature that runs a single batch operation across millions of GCS objects in a serverless manner.

Example Usage

Storage Batch Operations

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const bucket = new gcp.storage.Bucket("bucket", {
    name: "tf-sample-bucket",
    location: "us-central1",
    forceDestroy: true,
});
const tf_job = new gcp.storage.BatchOperationsJob("tf-job", {
    jobId: "tf-job",
    bucketList: {
        buckets: {
            bucket: bucket.name,
            prefixList: {
                includedObjectPrefixes: ["bkt"],
            },
        },
    },
    putMetadata: {
        customMetadata: {
            key: "value",
        },
    },
    deleteProtection: false,
});

import pulumi
import pulumi_gcp as gcp

bucket = gcp.storage.Bucket("bucket",
    name="tf-sample-bucket",
    location="us-central1",
    force_destroy=True)
tf_job = gcp.storage.BatchOperationsJob("tf-job",
    job_id="tf-job",
    bucket_list={
        "buckets": {
            "bucket": bucket.name,
            "prefix_list": {
                "included_object_prefixes": ["bkt"],
            },
        },
    },
    put_metadata={
        "custom_metadata": {
            "key": "value",
        },
    },
    delete_protection=False)

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() =>
{
    var bucket = new Gcp.Storage.Bucket("bucket", new()
    {
        Name = "tf-sample-bucket",
        Location = "us-central1",
        ForceDestroy = true,
    });

    var tf_job = new Gcp.Storage.BatchOperationsJob("tf-job", new()
    {
        JobId = "tf-job",
        BucketList = new Gcp.Storage.Inputs.BatchOperationsJobBucketListArgs
        {
            Buckets = new Gcp.Storage.Inputs.BatchOperationsJobBucketListBucketsArgs
            {
                Bucket = bucket.Name,
                PrefixList = new Gcp.Storage.Inputs.BatchOperationsJobBucketListBucketsPrefixListArgs
                {
                    IncludedObjectPrefixes = new[]
                    {
                        "bkt",
                    },
                },
            },
        },
        PutMetadata = new Gcp.Storage.Inputs.BatchOperationsJobPutMetadataArgs
        {
            CustomMetadata =
            {
                { "key", "value" },
            },
        },
        DeleteProtection = false,
    });
});

package main

import (
    "github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
    "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
    pulumi.Run(func(ctx *pulumi.Context) error {
        bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
            Name:         pulumi.String("tf-sample-bucket"),
            Location:     pulumi.String("us-central1"),
            ForceDestroy: pulumi.Bool(true),
        })
        if err != nil {
            return err
        }
        _, err = storage.NewBatchOperationsJob(ctx, "tf-job", &storage.BatchOperationsJobArgs{
            JobId: pulumi.String("tf-job"),
            BucketList: &storage.BatchOperationsJobBucketListArgs{
                Buckets: &storage.BatchOperationsJobBucketListBucketsArgs{
                    Bucket: bucket.Name,
                    PrefixList: &storage.BatchOperationsJobBucketListBucketsPrefixListArgs{
                        IncludedObjectPrefixes: pulumi.StringArray{
                            pulumi.String("bkt"),
                        },
                    },
                },
            },
            PutMetadata: &storage.BatchOperationsJobPutMetadataArgs{
                CustomMetadata: pulumi.StringMap{
                    "key": pulumi.String("value"),
                },
            },
            DeleteProtection: pulumi.Bool(false),
        })
        if err != nil {
            return err
        }
        return nil
    })
}

package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BatchOperationsJob;
import com.pulumi.gcp.storage.BatchOperationsJobArgs;
import com.pulumi.gcp.storage.inputs.BatchOperationsJobBucketListArgs;
import com.pulumi.gcp.storage.inputs.BatchOperationsJobBucketListBucketsArgs;
import com.pulumi.gcp.storage.inputs.BatchOperationsJobBucketListBucketsPrefixListArgs;
import com.pulumi.gcp.storage.inputs.BatchOperationsJobPutMetadataArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var bucket = new Bucket("bucket", BucketArgs.builder()
            .name("tf-sample-bucket")
            .location("us-central1")
            .forceDestroy(true)
            .build());

        var tf_job = new BatchOperationsJob("tf-job", BatchOperationsJobArgs.builder()
            .jobId("tf-job")
            .bucketList(BatchOperationsJobBucketListArgs.builder()
                .buckets(BatchOperationsJobBucketListBucketsArgs.builder()
                    .bucket(bucket.name())
                    .prefixList(BatchOperationsJobBucketListBucketsPrefixListArgs.builder()
                        .includedObjectPrefixes("bkt")
                        .build())
                    .build())
                .build())
            .putMetadata(BatchOperationsJobPutMetadataArgs.builder()
                .customMetadata(Map.of("key", "value"))
                .build())
            .deleteProtection(false)
            .build());
    }
}

resources:
  bucket:
    type: gcp:storage:Bucket
    properties:
      name: tf-sample-bucket
      location: us-central1
      forceDestroy: true
  tf-job:
    type: gcp:storage:BatchOperationsJob
    properties:
      jobId: tf-job
      bucketList:
        buckets:
          bucket: ${bucket.name}
          prefixList:
            includedObjectPrefixes:
              - bkt
      putMetadata:
        customMetadata:
          key: value
      deleteProtection: false
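
Since this page documents the Kotlin SDK, a Kotlin version of the same program is sketched below. It assumes the type-safe resource builders in com.pulumi.gcp.storage.kotlin and the usual lowercase builder-function and args-DSL conventions of the Pulumi Kotlin SDK; treat the builder names (bucket, batchOperationsJob) and the map overload for customMetadata as illustrative rather than authoritative.

import com.pulumi.gcp.storage.kotlin.batchOperationsJob
import com.pulumi.gcp.storage.kotlin.bucket
import com.pulumi.kotlin.Pulumi

fun main() {
    Pulumi.run {
        // Bucket whose objects the batch job will transform.
        val bucket = bucket("bucket") {
            args {
                name("tf-sample-bucket")
                location("us-central1")
                forceDestroy(true)
            }
        }
        // Batch job that sets custom metadata on objects with the "bkt" prefix.
        batchOperationsJob("tf-job") {
            args {
                jobId("tf-job")
                bucketList {
                    buckets {
                        bucket(bucket.name)
                        prefixList {
                            includedObjectPrefixes("bkt")
                        }
                    }
                }
                putMetadata {
                    // A Map-taking overload is assumed here.
                    customMetadata(mapOf("key" to "value"))
                }
                deleteProtection(false)
            }
        }
    }
}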

Import

Job can be imported using any of these accepted formats:

  • projects/{{project}}/locations/global/jobs/{{job_id}}

  • {{project}}/{{job_id}}

  • {{job_id}}

When using the pulumi import command, Job can be imported using one of the formats above. For example:

$ pulumi import gcp:storage/batchOperationsJob:BatchOperationsJob default projects/{{project}}/locations/global/jobs/{{job_id}}
$ pulumi import gcp:storage/batchOperationsJob:BatchOperationsJob default {{project}}/{{job_id}}
$ pulumi import gcp:storage/batchOperationsJob:BatchOperationsJob default {{job_id}}

Constructors

constructor(bucketList: Output<BatchOperationsJobBucketListArgs>? = null, deleteObject: Output<BatchOperationsJobDeleteObjectArgs>? = null, deleteProtection: Output<Boolean>? = null, jobId: Output<String>? = null, project: Output<String>? = null, putMetadata: Output<BatchOperationsJobPutMetadataArgs>? = null, putObjectHold: Output<BatchOperationsJobPutObjectHoldArgs>? = null, rewriteObject: Output<BatchOperationsJobRewriteObjectArgs>? = null)
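
Every constructor parameter is optional, so an args instance can carry only the fields a job needs. A minimal sketch using the constructor above with com.pulumi.core.Output; the field values are illustrative, not from the source:

import com.pulumi.core.Output

// Only jobId and deleteProtection are set; all other fields default to null.
val args = BatchOperationsJobArgs(
    jobId = Output.of("tf-job"),
    deleteProtection = Output.of(false),
)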

Properties

val bucketList: Output<BatchOperationsJobBucketListArgs>? = null

List of buckets and their objects to be transformed. Currently, only one bucket configuration is supported; if multiple buckets are specified, an error is returned. Structure is documented below.

val deleteObject: Output<BatchOperationsJobDeleteObjectArgs>? = null

Allows the batch operation to delete objects in the bucket. Structure is documented below.

val deleteProtection: Output<Boolean>? = null

If set to true, the storage batch operation job will not be deleted and a new job will be created.

val jobId: Output<String>? = null

The ID of the job.

val project: Output<String>? = null

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

val putMetadata: Output<BatchOperationsJobPutMetadataArgs>? = null

Allows the batch operation to update metadata for objects in the bucket. Structure is documented below.

val putObjectHold: Output<BatchOperationsJobPutObjectHoldArgs>? = null

Allows the batch operation to update the temporary hold or event-based hold for objects in the bucket. Structure is documented below; see the sketch after this property list.

val rewriteObject: Output<BatchOperationsJobRewriteObjectArgs>? = null

Allows the batch operation to update the encryption key for objects in the bucket. Structure is documented below.
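
As one hedged example of the nested hold configuration, a putObjectHold value might be built as below. The temporaryHold and eventBasedHold field names (with "SET"/"UNSET" values) are taken from the underlying google_storage_batch_operations_job provider schema, and the inputs package path mirrors this page's type naming; both are assumptions of this sketch.

import com.pulumi.core.Output
import com.pulumi.gcp.storage.kotlin.inputs.BatchOperationsJobPutObjectHoldArgs

// Assumed fields: temporaryHold / eventBasedHold, each "SET" or "UNSET".
val holdArgs = BatchOperationsJobArgs(
    jobId = Output.of("hold-job"),
    putObjectHold = Output.of(
        BatchOperationsJobPutObjectHoldArgs(
            temporaryHold = Output.of("SET"),
            eventBasedHold = Output.of("UNSET"),
        )
    ),
)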

Functions

open override fun toJava(): BatchOperationsJobArgs