OutputArgs

data class OutputArgs(val datasource: Output<Any>? = null, val jobName: Output<String>? = null, val name: Output<String>? = null, val outputName: Output<String>? = null, val resourceGroupName: Output<String>? = null, val serialization: Output<Any>? = null, val sizeWindow: Output<Int>? = null, val timeWindow: Output<String>? = null) : ConvertibleToJava<OutputArgs>

An output object containing all information associated with the named output. All outputs are contained under a streaming job. Azure REST API version: 2020-03-01. Prior API version in Azure Native 1.x: 2016-03-01. Other available API versions: 2021-10-01-preview.

Example Usage

Create a DocumentDB output

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.DocumentDbOutputDataSourceArgs
{
AccountId = "someAccountId",
AccountKey = "accountKey==",
CollectionNamePattern = "collection",
Database = "db01",
DocumentId = "documentId",
PartitionKey = "key",
Type = "Microsoft.Storage/DocumentDB",
},
JobName = "sj2331",
OutputName = "output3022",
ResourceGroupName = "sjrg7983",
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.DocumentDbOutputDataSource{
AccountId: "someAccountId",
AccountKey: "accountKey==",
CollectionNamePattern: "collection",
Database: "db01",
DocumentId: "documentId",
PartitionKey: "key",
Type: "Microsoft.Storage/DocumentDB",
},
JobName: pulumi.String("sj2331"),
OutputName: pulumi.String("output3022"),
ResourceGroupName: pulumi.String("sjrg7983"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.DocumentDbOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(DocumentDbOutputDataSourceArgs.builder()
.accountId("someAccountId")
.accountKey("accountKey==")
.collectionNamePattern("collection")
.database("db01")
.documentId("documentId")
.partitionKey("key")
.type("Microsoft.Storage/DocumentDB")
.build())
.jobName("sj2331")
.outputName("output3022")
.resourceGroupName("sjrg7983")
.build());
}
}
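
The generated examples on this page cover C#, Go, and Java. Because this page documents the Kotlin OutputArgs type, a minimal Kotlin sketch of the same DocumentDB output follows, built by calling the data class constructor shown above. The Output.of(...) wrapping, the .kotlin package path, and passing the Java-SDK args value through the untyped datasource parameter are assumptions about SDK conventions, not details taken from this page.

import com.pulumi.core.Output
import com.pulumi.azurenative.streamanalytics.inputs.DocumentDbOutputDataSourceArgs
import com.pulumi.azurenative.streamanalytics.kotlin.OutputArgs

// Hedged sketch: mirrors the "Create a DocumentDB output" example above via the
// Kotlin OutputArgs constructor documented on this page.
fun documentDbOutputArgs(): OutputArgs =
    OutputArgs(
        // datasource is typed Output<Any>, so the typed Java args value is passed as Any.
        datasource = Output.of<Any>(
            DocumentDbOutputDataSourceArgs.builder()
                .accountId("someAccountId")
                .accountKey("accountKey==")
                .collectionNamePattern("collection")
                .database("db01")
                .documentId("documentId")
                .partitionKey("key")
                .type("Microsoft.Storage/DocumentDB")
                .build()
        ),
        jobName = Output.of("sj2331"),
        outputName = Output.of("output3022"),
        resourceGroupName = Output.of("sjrg7983")
    )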

Create a Gateway Message Bus output

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.GatewayMessageBusOutputDataSourceArgs
{
Topic = "EdgeTopic1",
Type = "GatewayMessageBus",
},
JobName = "sj2331",
OutputName = "output3022",
ResourceGroupName = "sjrg7983",
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.GatewayMessageBusOutputDataSource{
Topic: "EdgeTopic1",
Type: "GatewayMessageBus",
},
JobName: pulumi.String("sj2331"),
OutputName: pulumi.String("output3022"),
ResourceGroupName: pulumi.String("sjrg7983"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.GatewayMessageBusOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(GatewayMessageBusOutputDataSourceArgs.builder()
.topic("EdgeTopic1")
.type("GatewayMessageBus")
.build())
.jobName("sj2331")
.outputName("output3022")
.resourceGroupName("sjrg7983")
.build());
}
}

Create a Power BI output

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.PowerBIOutputDataSourceArgs
{
Dataset = "someDataset",
GroupId = "ac40305e-3e8d-43ac-8161-c33799f43e95",
GroupName = "MyPowerBIGroup",
RefreshToken = "someRefreshToken==",
Table = "someTable",
TokenUserDisplayName = "Bob Smith",
TokenUserPrincipalName = "bobsmith@contoso.com",
Type = "PowerBI",
},
JobName = "sj2331",
OutputName = "output3022",
ResourceGroupName = "sjrg7983",
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.PowerBIOutputDataSource{
Dataset: "someDataset",
GroupId: "ac40305e-3e8d-43ac-8161-c33799f43e95",
GroupName: "MyPowerBIGroup",
RefreshToken: "someRefreshToken==",
Table: "someTable",
TokenUserDisplayName: "Bob Smith",
TokenUserPrincipalName: "bobsmith@contoso.com",
Type: "PowerBI",
},
JobName: pulumi.String("sj2331"),
OutputName: pulumi.String("output3022"),
ResourceGroupName: pulumi.String("sjrg7983"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.PowerBIOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(PowerBIOutputDataSourceArgs.builder()
.dataset("someDataset")
.groupId("ac40305e-3e8d-43ac-8161-c33799f43e95")
.groupName("MyPowerBIGroup")
.refreshToken("someRefreshToken==")
.table("someTable")
.tokenUserDisplayName("Bob Smith")
.tokenUserPrincipalName("bobsmith@contoso.com")
.type("PowerBI")
.build())
.jobName("sj2331")
.outputName("output3022")
.resourceGroupName("sjrg7983")
.build());
}
}

Create a Service Bus Queue output with Avro serialization

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.ServiceBusQueueOutputDataSourceArgs
{
PropertyColumns = new[]
{
"column1",
"column2",
},
QueueName = "sdkqueue",
ServiceBusNamespace = "sdktest",
SharedAccessPolicyKey = "sharedAccessPolicyKey=",
SharedAccessPolicyName = "RootManageSharedAccessKey",
SystemPropertyColumns =
{
{ "MessageId", "col3" },
{ "PartitionKey", "col4" },
},
Type = "Microsoft.ServiceBus/Queue",
},
JobName = "sj5095",
OutputName = "output3456",
ResourceGroupName = "sjrg3410",
Serialization = new AzureNative.StreamAnalytics.Inputs.AvroSerializationArgs
{
Type = "Avro",
},
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.ServiceBusQueueOutputDataSource{
PropertyColumns: []string{
"column1",
"column2",
},
QueueName: "sdkqueue",
ServiceBusNamespace: "sdktest",
SharedAccessPolicyKey: "sharedAccessPolicyKey=",
SharedAccessPolicyName: "RootManageSharedAccessKey",
SystemPropertyColumns: map[string]interface{}{
"MessageId": "col3",
"PartitionKey": "col4",
},
Type: "Microsoft.ServiceBus/Queue",
},
JobName: pulumi.String("sj5095"),
OutputName: pulumi.String("output3456"),
ResourceGroupName: pulumi.String("sjrg3410"),
Serialization: streamanalytics.AvroSerialization{
Type: "Avro",
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.ServiceBusQueueOutputDataSourceArgs;
import com.pulumi.azurenative.streamanalytics.inputs.AvroSerializationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(ServiceBusQueueOutputDataSourceArgs.builder()
.propertyColumns(
"column1",
"column2")
.queueName("sdkqueue")
.serviceBusNamespace("sdktest")
.sharedAccessPolicyKey("sharedAccessPolicyKey=")
.sharedAccessPolicyName("RootManageSharedAccessKey")
.systemPropertyColumns(Map.of("MessageId", "col3", "PartitionKey", "col4"))
.type("Microsoft.ServiceBus/Queue")
.build())
.jobName("sj5095")
.outputName("output3456")
.resourceGroupName("sjrg3410")
.serialization(AvroSerializationArgs.builder()
.type("Avro")
.build())
.build());
}
}

Create a Service Bus Topic output with CSV serialization

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.ServiceBusTopicOutputDataSourceArgs
{
PropertyColumns = new[]
{
"column1",
"column2",
},
ServiceBusNamespace = "sdktest",
SharedAccessPolicyKey = "sharedAccessPolicyKey=",
SharedAccessPolicyName = "RootManageSharedAccessKey",
TopicName = "sdktopic",
Type = "Microsoft.ServiceBus/Topic",
},
JobName = "sj7094",
OutputName = "output7886",
ResourceGroupName = "sjrg6450",
Serialization = new AzureNative.StreamAnalytics.Inputs.CsvSerializationArgs
{
Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
FieldDelimiter = ",",
Type = "Csv",
},
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.ServiceBusTopicOutputDataSource{
PropertyColumns: []string{
"column1",
"column2",
},
ServiceBusNamespace: "sdktest",
SharedAccessPolicyKey: "sharedAccessPolicyKey=",
SharedAccessPolicyName: "RootManageSharedAccessKey",
TopicName: "sdktopic",
Type: "Microsoft.ServiceBus/Topic",
},
JobName: pulumi.String("sj7094"),
OutputName: pulumi.String("output7886"),
ResourceGroupName: pulumi.String("sjrg6450"),
Serialization: streamanalytics.CsvSerialization{
Encoding: streamanalytics.EncodingUTF8,
FieldDelimiter: ",",
Type: "Csv",
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.ServiceBusTopicOutputDataSourceArgs;
import com.pulumi.azurenative.streamanalytics.inputs.CsvSerializationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(ServiceBusTopicOutputDataSourceArgs.builder()
.propertyColumns(
"column1",
"column2")
.serviceBusNamespace("sdktest")
.sharedAccessPolicyKey("sharedAccessPolicyKey=")
.sharedAccessPolicyName("RootManageSharedAccessKey")
.topicName("sdktopic")
.type("Microsoft.ServiceBus/Topic")
.build())
.jobName("sj7094")
.outputName("output7886")
.resourceGroupName("sjrg6450")
.serialization(CsvSerializationArgs.builder()
.encoding("UTF8")
.fieldDelimiter(",")
.type("Csv")
.build())
.build());
}
}

Create a blob output with CSV serialization

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.BlobOutputDataSourceArgs
{
Container = "state",
DateFormat = "yyyy/MM/dd",
PathPattern = "{date}/{time}",
StorageAccounts = new[]
{
new AzureNative.StreamAnalytics.Inputs.StorageAccountArgs
{
AccountKey = "accountKey==",
AccountName = "someAccountName",
},
},
TimeFormat = "HH",
Type = "Microsoft.Storage/Blob",
},
JobName = "sj900",
OutputName = "output1623",
ResourceGroupName = "sjrg5023",
Serialization = new AzureNative.StreamAnalytics.Inputs.CsvSerializationArgs
{
Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
FieldDelimiter = ",",
Type = "Csv",
},
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.BlobOutputDataSource{
Container: "state",
DateFormat: "yyyy/MM/dd",
PathPattern: "{date}/{time}",
StorageAccounts: []streamanalytics.StorageAccount{
{
AccountKey: "accountKey==",
AccountName: "someAccountName",
},
},
TimeFormat: "HH",
Type: "Microsoft.Storage/Blob",
},
JobName: pulumi.String("sj900"),
OutputName: pulumi.String("output1623"),
ResourceGroupName: pulumi.String("sjrg5023"),
Serialization: streamanalytics.CsvSerialization{
Encoding: streamanalytics.EncodingUTF8,
FieldDelimiter: ",",
Type: "Csv",
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.BlobOutputDataSourceArgs;
import com.pulumi.azurenative.streamanalytics.inputs.StorageAccountArgs;
import com.pulumi.azurenative.streamanalytics.inputs.CsvSerializationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(BlobOutputDataSourceArgs.builder()
.container("state")
.dateFormat("yyyy/MM/dd")
.pathPattern("{date}/{time}")
.storageAccounts(StorageAccountArgs.builder()
.accountKey("accountKey==")
.accountName("someAccountName")
.build())
.timeFormat("HH")
.type("Microsoft.Storage/Blob")
.build())
.jobName("sj900")
.outputName("output1623")
.resourceGroupName("sjrg5023")
.serialization(CsvSerializationArgs.builder()
.encoding("UTF8")
.fieldDelimiter(",")
.type("Csv")
.build())
.build());
}
}

Create an Azure Data Lake Store output with JSON serialization

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.AzureDataLakeStoreOutputDataSourceArgs
{
AccountName = "someaccount",
DateFormat = "yyyy/MM/dd",
FilePathPrefix = "{date}/{time}",
RefreshToken = "someRefreshToken==",
TenantId = "cea4e98b-c798-49e7-8c40-4a2b3beb47dd",
TimeFormat = "HH",
TokenUserDisplayName = "Bob Smith",
TokenUserPrincipalName = "bobsmith@contoso.com",
Type = "Microsoft.DataLake/Accounts",
},
JobName = "sj3310",
OutputName = "output5195",
ResourceGroupName = "sjrg6912",
Serialization = new AzureNative.StreamAnalytics.Inputs.JsonSerializationArgs
{
Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
Format = AzureNative.StreamAnalytics.JsonOutputSerializationFormat.Array,
Type = "Json",
},
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.AzureDataLakeStoreOutputDataSource{
AccountName: "someaccount",
DateFormat: "yyyy/MM/dd",
FilePathPrefix: "{date}/{time}",
RefreshToken: "someRefreshToken==",
TenantId: "cea4e98b-c798-49e7-8c40-4a2b3beb47dd",
TimeFormat: "HH",
TokenUserDisplayName: "Bob Smith",
TokenUserPrincipalName: "bobsmith@contoso.com",
Type: "Microsoft.DataLake/Accounts",
},
JobName: pulumi.String("sj3310"),
OutputName: pulumi.String("output5195"),
ResourceGroupName: pulumi.String("sjrg6912"),
Serialization: streamanalytics.JsonSerialization{
Encoding: streamanalytics.EncodingUTF8,
Format: streamanalytics.JsonOutputSerializationFormatArray,
Type: "Json",
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.AzureDataLakeStoreOutputDataSourceArgs;
import com.pulumi.azurenative.streamanalytics.inputs.JsonSerializationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(AzureDataLakeStoreOutputDataSourceArgs.builder()
.accountName("someaccount")
.dateFormat("yyyy/MM/dd")
.filePathPrefix("{date}/{time}")
.refreshToken("someRefreshToken==")
.tenantId("cea4e98b-c798-49e7-8c40-4a2b3beb47dd")
.timeFormat("HH")
.tokenUserDisplayName("Bob Smith")
.tokenUserPrincipalName("bobsmith@contoso.com")
.type("Microsoft.DataLake/Accounts")
.build())
.jobName("sj3310")
.outputName("output5195")
.resourceGroupName("sjrg6912")
.serialization(JsonSerializationArgs.builder()
.encoding("UTF8")
.format("Array")
.type("Json")
.build())
.build());
}
}

Create an Azure Data Warehouse output

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.AzureSynapseOutputDataSourceArgs
{
Database = "zhayaSQLpool",
Password = "password123",
Server = "asatestserver",
Table = "test2",
Type = "Microsoft.Sql/Server/DataWarehouse",
User = "tolladmin",
},
JobName = "sjName",
OutputName = "dwOutput",
ResourceGroupName = "sjrg",
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.AzureSynapseOutputDataSource{
Database: "zhayaSQLpool",
Password: "password123",
Server: "asatestserver",
Table: "test2",
Type: "Microsoft.Sql/Server/DataWarehouse",
User: "tolladmin",
},
JobName: pulumi.String("sjName"),
OutputName: pulumi.String("dwOutput"),
ResourceGroupName: pulumi.String("sjrg"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.AzureSynapseOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(AzureSynapseOutputDataSourceArgs.builder()
.database("zhayaSQLpool")
.password("password123")
.server("asatestserver")
.table("test2")
.type("Microsoft.Sql/Server/DataWarehouse")
.user("tolladmin")
.build())
.jobName("sjName")
.outputName("dwOutput")
.resourceGroupName("sjrg")
.build());
}
}

Create an Azure Function output

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.AzureFunctionOutputDataSourceArgs
{
FunctionAppName = "functionappforasaautomation",
FunctionName = "HttpTrigger2",
MaxBatchCount = 100,
MaxBatchSize = 256,
Type = "Microsoft.AzureFunction",
},
JobName = "sjName",
OutputName = "azureFunction1",
ResourceGroupName = "sjrg",
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.AzureFunctionOutputDataSource{
FunctionAppName: "functionappforasaautomation",
FunctionName: "HttpTrigger2",
MaxBatchCount: 100,
MaxBatchSize: 256,
Type: "Microsoft.AzureFunction",
},
JobName: pulumi.String("sjName"),
OutputName: pulumi.String("azureFunction1"),
ResourceGroupName: pulumi.String("sjrg"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.AzureFunctionOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(AzureFunctionOutputDataSourceArgs.builder()
.functionAppName("functionappforasaautomation")
.functionName("HttpTrigger2")
.maxBatchCount(100)
.maxBatchSize(256)
.type("Microsoft.AzureFunction")
.build())
.jobName("sjName")
.outputName("azureFunction1")
.resourceGroupName("sjrg")
.build());
}
}

Create an Azure SQL database output

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.AzureSqlDatabaseOutputDataSourceArgs
{
Database = "someDatabase",
Password = "somePassword",
Server = "someServer",
Table = "someTable",
Type = "Microsoft.Sql/Server/Database",
User = "<user>",
},
JobName = "sj6458",
OutputName = "output1755",
ResourceGroupName = "sjrg2157",
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.AzureSqlDatabaseOutputDataSource{
Database: "someDatabase",
Password: "somePassword",
Server: "someServer",
Table: "someTable",
Type: "Microsoft.Sql/Server/Database",
User: "<user>",
},
JobName: pulumi.String("sj6458"),
OutputName: pulumi.String("output1755"),
ResourceGroupName: pulumi.String("sjrg2157"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.AzureSqlDatabaseOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(AzureSqlDatabaseOutputDataSourceArgs.builder()
.database("someDatabase")
.password("somePassword")
.server("someServer")
.table("someTable")
.type("Microsoft.Sql/Server/Database")
.user("<user>")
.build())
.jobName("sj6458")
.outputName("output1755")
.resourceGroupName("sjrg2157")
.build());
}
}

Create an Azure Table output

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.AzureTableOutputDataSourceArgs
{
AccountKey = "accountKey==",
AccountName = "someAccountName",
BatchSize = 25,
ColumnsToRemove = new[]
{
"column1",
"column2",
},
PartitionKey = "partitionKey",
RowKey = "rowKey",
Table = "samples",
Type = "Microsoft.Storage/Table",
},
JobName = "sj2790",
OutputName = "output958",
ResourceGroupName = "sjrg5176",
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.AzureTableOutputDataSource{
AccountKey: "accountKey==",
AccountName: "someAccountName",
BatchSize: 25,
ColumnsToRemove: []string{
"column1",
"column2",
},
PartitionKey: "partitionKey",
RowKey: "rowKey",
Table: "samples",
Type: "Microsoft.Storage/Table",
},
JobName: pulumi.String("sj2790"),
OutputName: pulumi.String("output958"),
ResourceGroupName: pulumi.String("sjrg5176"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.AzureTableOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(AzureTableOutputDataSourceArgs.builder()
.accountKey("accountKey==")
.accountName("someAccountName")
.batchSize(25)
.columnsToRemove(
"column1",
"column2")
.partitionKey("partitionKey")
.rowKey("rowKey")
.table("samples")
.type("Microsoft.Storage/Table")
.build())
.jobName("sj2790")
.outputName("output958")
.resourceGroupName("sjrg5176")
.build());
}
}

Create an Event Hub output with JSON serialization

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var output = new AzureNative.StreamAnalytics.Output("output", new()
{
Datasource = new AzureNative.StreamAnalytics.Inputs.EventHubOutputDataSourceArgs
{
EventHubName = "sdkeventhub",
PartitionKey = "partitionKey",
ServiceBusNamespace = "sdktest",
SharedAccessPolicyKey = "sharedAccessPolicyKey=",
SharedAccessPolicyName = "RootManageSharedAccessKey",
Type = "Microsoft.ServiceBus/EventHub",
},
JobName = "sj3310",
OutputName = "output5195",
ResourceGroupName = "sjrg6912",
Serialization = new AzureNative.StreamAnalytics.Inputs.JsonSerializationArgs
{
Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
Format = AzureNative.StreamAnalytics.JsonOutputSerializationFormat.Array,
Type = "Json",
},
});
});
package main
import (
"github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
Datasource: streamanalytics.EventHubOutputDataSource{
EventHubName: "sdkeventhub",
PartitionKey: "partitionKey",
ServiceBusNamespace: "sdktest",
SharedAccessPolicyKey: "sharedAccessPolicyKey=",
SharedAccessPolicyName: "RootManageSharedAccessKey",
Type: "Microsoft.ServiceBus/EventHub",
},
JobName: pulumi.String("sj3310"),
OutputName: pulumi.String("output5195"),
ResourceGroupName: pulumi.String("sjrg6912"),
Serialization: streamanalytics.JsonSerialization{
Encoding: streamanalytics.EncodingUTF8,
Format: streamanalytics.JsonOutputSerializationFormatArray,
Type: "Json",
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.EventHubOutputDataSourceArgs;
import com.pulumi.azurenative.streamanalytics.inputs.JsonSerializationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var output = new Output("output", OutputArgs.builder()
.datasource(EventHubOutputDataSourceArgs.builder()
.eventHubName("sdkeventhub")
.partitionKey("partitionKey")
.serviceBusNamespace("sdktest")
.sharedAccessPolicyKey("sharedAccessPolicyKey=")
.sharedAccessPolicyName("RootManageSharedAccessKey")
.type("Microsoft.ServiceBus/EventHub")
.build())
.jobName("sj3310")
.outputName("output5195")
.resourceGroupName("sjrg6912")
.serialization(JsonSerializationArgs.builder()
.encoding("UTF8")
.format("Array")
.type("Json")
.build())
.build());
}
}

Import

An existing resource can be imported using its type token, name, and identifier, e.g.

$ pulumi import azure-native:streamanalytics:Output output5195 /subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}

Constructors

constructor(datasource: Output<Any>? = null, jobName: Output<String>? = null, name: Output<String>? = null, outputName: Output<String>? = null, resourceGroupName: Output<String>? = null, serialization: Output<Any>? = null, sizeWindow: Output<Int>? = null, timeWindow: Output<String>? = null)
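
As a hedged illustration of this constructor, the sketch below rebuilds the Event Hub output with JSON serialization from the example section, passing the typed Java-SDK values through the Output<Any> datasource and serialization parameters. The package paths and Output.of(...) wrapping are assumptions about SDK conventions rather than details from this page.

import com.pulumi.core.Output
import com.pulumi.azurenative.streamanalytics.inputs.EventHubOutputDataSourceArgs
import com.pulumi.azurenative.streamanalytics.inputs.JsonSerializationArgs
import com.pulumi.azurenative.streamanalytics.kotlin.OutputArgs

// Hedged sketch: both untyped fields (datasource, serialization) receive Java-SDK
// args values, wrapped as Output<Any> to match the constructor signature above.
val eventHubOutputArgs = OutputArgs(
    datasource = Output.of<Any>(
        EventHubOutputDataSourceArgs.builder()
            .eventHubName("sdkeventhub")
            .partitionKey("partitionKey")
            .serviceBusNamespace("sdktest")
            .sharedAccessPolicyKey("sharedAccessPolicyKey=")
            .sharedAccessPolicyName("RootManageSharedAccessKey")
            .type("Microsoft.ServiceBus/EventHub")
            .build()
    ),
    serialization = Output.of<Any>(
        JsonSerializationArgs.builder()
            .encoding("UTF8")
            .format("Array")
            .type("Json")
            .build()
    ),
    jobName = Output.of("sj3310"),
    outputName = Output.of("output5195"),
    resourceGroupName = Output.of("sjrg6912")
)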

Properties

val datasource: Output<Any>? = null

Describes the data source that the output will be written to. Required on PUT (CreateOrReplace) requests.

val jobName: Output<String>? = null

The name of the streaming job.

val name: Output<String>? = null

Resource name

val outputName: Output<String>? = null

The name of the output.

val resourceGroupName: Output<String>? = null

The name of the resource group. The name is case insensitive.

val serialization: Output<Any>? = null

Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests.

val sizeWindow: Output<Int>? = null

The size window to constrain a Stream Analytics output to.

val timeWindow: Output<String>? = null

The time frame for filtering Stream Analytics job outputs.

Functions

open override fun toJava(): OutputArgs