Oss Export Args
Manages a Log Service (SLS) data delivery task. This resource delivers data from a Logstore to Alibaba Cloud OSS storage. Refer to the product documentation for details.
NOTE: Available since v1.187.0.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";
import * as random from "@pulumi/random";

// Random 5-digit suffix so the SLS project name is unique per stack.
const _default = new random.index.Integer("default", {
    max: 99999,
    min: 10000,
});

// SLS project that owns the logstore and the export task.
const example = new alicloud.log.Project("example", {
    // FIX: `_default.result` is a Pulumi Output; a plain template literal
    // would stringify it as "[object Object]". Use pulumi.interpolate so the
    // value is resolved before the string is built.
    projectName: pulumi.interpolate`terraform-example-${_default.result}`,
    description: "terraform-example",
    tags: {
        Created: "TF",
        For: "example",
    },
});

// Source logstore whose data will be shipped to OSS.
const exampleStore = new alicloud.log.Store("example", {
    projectName: example.projectName,
    logstoreName: "example-store",
    retentionPeriod: 3650,
    shardCount: 3,
    autoSplit: true,
    maxSplitShardCount: 60,
    appendMeta: true,
});

// Export task: ship the logstore data to the OSS bucket as uncompressed JSON.
const exampleOssExport = new alicloud.log.OssExport("example", {
    projectName: example.projectName,
    logstoreName: exampleStore.logstoreName,
    exportName: "terraform-example",
    displayName: "terraform-example",
    bucket: "example-bucket",
    prefix: "root",
    suffix: "",
    bufferInterval: 300,
    bufferSize: 250,
    compressType: "none",
    pathFormat: "%Y/%m/%d/%H/%M",
    contentType: "json",
    jsonEnableTag: true,
    roleArn: "role_arn_for_oss_write",
    logReadRoleArn: "role_arn_for_sls_read",
    timeZone: "+0800",
});
import pulumi
import pulumi_alicloud as alicloud
import pulumi_random as random

# Random 5-digit suffix so the SLS project name is unique per stack.
rand_suffix = random.index.Integer(
    "default",
    max=99999,
    min=10000,
)

# SLS project that owns the logstore and the export task.
log_project = alicloud.log.Project(
    "example",
    project_name=f"terraform-example-{rand_suffix['result']}",
    description="terraform-example",
    tags={
        "Created": "TF",
        "For": "example",
    },
)

# Source logstore whose data will be shipped to OSS.
log_store = alicloud.log.Store(
    "example",
    project_name=log_project.project_name,
    logstore_name="example-store",
    retention_period=3650,
    shard_count=3,
    auto_split=True,
    max_split_shard_count=60,
    append_meta=True,
)

# Export task: ship the logstore data to the OSS bucket as uncompressed JSON.
oss_export = alicloud.log.OssExport(
    "example",
    project_name=log_project.project_name,
    logstore_name=log_store.logstore_name,
    export_name="terraform-example",
    display_name="terraform-example",
    bucket="example-bucket",
    prefix="root",
    suffix="",
    buffer_interval=300,
    buffer_size=250,
    compress_type="none",
    path_format="%Y/%m/%d/%H/%M",
    content_type="json",
    json_enable_tag=True,
    role_arn="role_arn_for_oss_write",
    log_read_role_arn="role_arn_for_sls_read",
    time_zone="+0800",
)
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AliCloud = Pulumi.AliCloud;
using Random = Pulumi.Random;

return await Deployment.RunAsync(() =>
{
    // Random 5-digit suffix so the SLS project name is unique per stack.
    var randInt = new Random.Index.Integer("default", new()
    {
        Max = 99999,
        Min = 10000,
    });

    // SLS project that owns the logstore and the export task.
    var slsProject = new AliCloud.Log.Project("example", new()
    {
        ProjectName = $"terraform-example-{randInt.Result}",
        Description = "terraform-example",
        Tags =
        {
            { "Created", "TF" },
            { "For", "example" },
        },
    });

    // Source logstore whose data will be shipped to OSS.
    var slsStore = new AliCloud.Log.Store("example", new()
    {
        ProjectName = slsProject.ProjectName,
        LogstoreName = "example-store",
        RetentionPeriod = 3650,
        ShardCount = 3,
        AutoSplit = true,
        MaxSplitShardCount = 60,
        AppendMeta = true,
    });

    // Export task: ship the logstore data to the OSS bucket as uncompressed JSON.
    var ossExport = new AliCloud.Log.OssExport("example", new()
    {
        ProjectName = slsProject.ProjectName,
        LogstoreName = slsStore.LogstoreName,
        ExportName = "terraform-example",
        DisplayName = "terraform-example",
        Bucket = "example-bucket",
        Prefix = "root",
        Suffix = "",
        BufferInterval = 300,
        BufferSize = 250,
        CompressType = "none",
        PathFormat = "%Y/%m/%d/%H/%M",
        ContentType = "json",
        JsonEnableTag = true,
        RoleArn = "role_arn_for_oss_write",
        LogReadRoleArn = "role_arn_for_sls_read",
        TimeZone = "+0800",
    });
});
package main
import (
"fmt"
"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/log"
"github.com/pulumi/pulumi-random/sdk/v4/go/random"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_default, err := random.NewInteger(ctx, "default", &random.IntegerArgs{
Max: 99999,
Min: 10000,
})
if err != nil {
return err
}
example, err := log.NewProject(ctx, "example", &log.ProjectArgs{
ProjectName: pulumi.Sprintf("terraform-example-%v", _default.Result),
Description: pulumi.String("terraform-example"),
Tags: pulumi.StringMap{
"Created": pulumi.String("TF"),
"For": pulumi.String("example"),
},
})
if err != nil {
return err
}
exampleStore, err := log.NewStore(ctx, "example", &log.StoreArgs{
ProjectName: example.ProjectName,
LogstoreName: pulumi.String("example-store"),
RetentionPeriod: pulumi.Int(3650),
ShardCount: pulumi.Int(3),
AutoSplit: pulumi.Bool(true),
MaxSplitShardCount: pulumi.Int(60),
AppendMeta: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = log.NewOssExport(ctx, "example", &log.OssExportArgs{
ProjectName: example.ProjectName,
LogstoreName: exampleStore.LogstoreName,
ExportName: pulumi.String("terraform-example"),
DisplayName: pulumi.String("terraform-example"),
Bucket: pulumi.String("example-bucket"),
Prefix: pulumi.String("root"),
Suffix: pulumi.String(""),
BufferInterval: pulumi.Int(300),
BufferSize: pulumi.Int(250),
CompressType: pulumi.String("none"),
PathFormat: pulumi.String("%Y/%m/%d/%H/%M"),
ContentType: pulumi.String("json"),
JsonEnableTag: pulumi.Bool(true),
RoleArn: pulumi.String("role_arn_for_oss_write"),
LogReadRoleArn: pulumi.String("role_arn_for_sls_read"),
TimeZone: pulumi.String("+0800"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
// FIX: Java imports are case-sensitive; the classes used below are
// Integer / IntegerArgs, not integer / integerArgs.
import com.pulumi.random.Integer;
import com.pulumi.random.IntegerArgs;
import com.pulumi.alicloud.log.Project;
import com.pulumi.alicloud.log.ProjectArgs;
import com.pulumi.alicloud.log.Store;
import com.pulumi.alicloud.log.StoreArgs;
import com.pulumi.alicloud.log.OssExport;
import com.pulumi.alicloud.log.OssExportArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Random 5-digit suffix so the SLS project name is unique per stack.
        var default_ = new Integer("default", IntegerArgs.builder()
            .max(99999)
            .min(10000)
            .build());

        // SLS project that owns the logstore and the export task.
        // FIX: result() is an Output; calling String.format on the Output
        // itself would format its toString(), not the value. Format inside
        // applyValue so the resolved number is interpolated.
        var example = new Project("example", ProjectArgs.builder()
            .projectName(default_.result()
                .applyValue(result -> String.format("terraform-example-%s", result)))
            .description("terraform-example")
            .tags(Map.ofEntries(
                Map.entry("Created", "TF"),
                Map.entry("For", "example")
            ))
            .build());

        // Source logstore whose data will be shipped to OSS.
        var exampleStore = new Store("exampleStore", StoreArgs.builder()
            .projectName(example.projectName())
            .logstoreName("example-store")
            .retentionPeriod(3650)
            .shardCount(3)
            .autoSplit(true)
            .maxSplitShardCount(60)
            .appendMeta(true)
            .build());

        // Export task: ship the logstore data to the OSS bucket as uncompressed JSON.
        var exampleOssExport = new OssExport("exampleOssExport", OssExportArgs.builder()
            .projectName(example.projectName())
            .logstoreName(exampleStore.logstoreName())
            .exportName("terraform-example")
            .displayName("terraform-example")
            .bucket("example-bucket")
            .prefix("root")
            .suffix("")
            .bufferInterval(300)
            .bufferSize(250)
            .compressType("none")
            .pathFormat("%Y/%m/%d/%H/%M")
            .contentType("json")
            .jsonEnableTag(true)
            .roleArn("role_arn_for_oss_write")
            .logReadRoleArn("role_arn_for_sls_read")
            .timeZone("+0800")
            .build());
    }
}
resources:
  # Random 5-digit suffix so the SLS project name is unique per stack.
  default:
    type: random:integer
    properties:
      max: 99999
      min: 10000
  # SLS project that owns the logstore and the export task.
  example:
    type: alicloud:log:Project
    properties:
      projectName: terraform-example-${default.result}
      description: terraform-example
      tags:
        Created: TF
        For: example
  # Source logstore whose data will be shipped to OSS.
  exampleStore:
    type: alicloud:log:Store
    name: example
    properties:
      projectName: ${example.projectName}
      logstoreName: example-store
      retentionPeriod: 3650
      shardCount: 3
      autoSplit: true
      maxSplitShardCount: 60
      appendMeta: true
  # Export task: ship the logstore data to the OSS bucket as uncompressed JSON.
  exampleOssExport:
    type: alicloud:log:OssExport
    name: example
    properties:
      projectName: ${example.projectName}
      logstoreName: ${exampleStore.logstoreName}
      exportName: terraform-example
      displayName: terraform-example
      bucket: example-bucket
      prefix: root
      suffix: ""
      bufferInterval: 300
      bufferSize: 250
      compressType: none
      pathFormat: '%Y/%m/%d/%H/%M'
      contentType: json
      jsonEnableTag: true
      roleArn: role_arn_for_oss_write
      logReadRoleArn: role_arn_for_sls_read
      timeZone: '+0800'
Import
Log oss export can be imported using the id or name, e.g.
$ pulumi import alicloud:log/ossExport:OssExport example tf-log-project:tf-log-logstore:tf-log-export
Constructors
Properties
How often data is delivered, i.e. the time interval between delivery tasks.
Automatically controls the creation interval of delivery tasks and sets the upper limit of an OSS object's size (calculated on the uncompressed data). Unit: MB.
OSS data storage compression method. Supported values: `none`, `snappy`, `zstd`, `gzip`. `none` means the original data is not compressed; `snappy` means the data is compressed with the snappy algorithm, which reduces the storage space used in the OSS Bucket.
Configure columns when content_type
is parquet
or orc
.
Storage format; only four types are supported: `json`, `parquet`, `orc`, `csv`. Select the following parameters according to the chosen format.
Field configuration in csv content_type.
Separator configuration in csv content_type.
escape in csv content_type.
Indicates whether to write the field names into the CSV file; the default value is `false`.
lineFeed in csv content_type.
Invalid field content in csv content_type.
Escape character in csv content_type.
The display name for oss export.
Delivery configuration name, it can only contain lowercase letters, numbers, dashes -
and underscores _
. It must start and end with lowercase letters or numbers, and the name must be 2 to 128 characters long.
Whether to deliver the label when content_type
= json
.
Used for logstore reading, the role should have log read policy, such as acs:ram::13234:role/logrole
, if log_read_role_arn
is not set, role_arn
is used to read logstore.
The name of the log logstore.
The OSS Bucket directory, generated dynamically from the creation time of the export task; it cannot start with a forward slash `/`. The default value is `%Y/%m/%d/%H/%M`.
The name of the log project. It is unique within one Alicloud account.