Streaming Job Args
A streaming job object, containing all information associated with the named streaming job. Uses Azure REST API version 2020-03-01. In version 2.x of the Azure Native provider, it used API version 2020-03-01. Other available API versions: 2021-10-01-preview. These can be accessed by generating a local SDK package using the CLI command pulumi package add azure-native streamanalytics [ApiVersion]
See the version guide section "Accessing any API version via local packages" for details.
Example Usage
Create a complete streaming job (a streaming job with a transformation, at least 1 input and at least 1 output)
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
// Creates a complete streaming job: one blob stream input, one Azure SQL
// Database output, and a transformation query connecting them.
return await Deployment.RunAsync(() =>
{
var streamingJob = new AzureNative.StreamAnalytics.StreamingJob("streamingJob", new()
{
CompatibilityLevel = AzureNative.StreamAnalytics.CompatibilityLevel.CompatibilityLevel_1_0,
DataLocale = "en-US",
// Event-ordering policy: tolerate events up to 5 seconds late, drop out-of-order events.
EventsLateArrivalMaxDelayInSeconds = 5,
EventsOutOfOrderMaxDelayInSeconds = 0,
EventsOutOfOrderPolicy = AzureNative.StreamAnalytics.EventsOutOfOrderPolicy.Drop,
Functions = new[] {},
// Stream input reading UTF-8 JSON blobs from a storage container.
Inputs = new[]
{
new AzureNative.StreamAnalytics.Inputs.InputArgs
{
Name = "inputtest",
Properties = new AzureNative.StreamAnalytics.Inputs.StreamInputPropertiesArgs
{
Datasource = new AzureNative.StreamAnalytics.Inputs.BlobStreamInputDataSourceArgs
{
Container = "containerName",
PathPattern = "",
StorageAccounts = new[]
{
new AzureNative.StreamAnalytics.Inputs.StorageAccountArgs
{
AccountKey = "yourAccountKey==",
AccountName = "yourAccountName",
},
},
Type = "Microsoft.Storage/Blob",
},
Serialization = new AzureNative.StreamAnalytics.Inputs.JsonSerializationArgs
{
Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
Type = "Json",
},
Type = "Stream",
},
},
},
JobName = "sj7804",
Location = "West US",
OutputErrorPolicy = AzureNative.StreamAnalytics.OutputErrorPolicy.Drop,
// Output written to an Azure SQL Database table.
Outputs = new[]
{
new AzureNative.StreamAnalytics.Inputs.OutputArgs
{
Datasource = new AzureNative.StreamAnalytics.Inputs.AzureSqlDatabaseOutputDataSourceArgs
{
Database = "databaseName",
Password = "userPassword",
Server = "serverName",
Table = "tableName",
Type = "Microsoft.Sql/Server/Database",
User = "<user>",
},
Name = "outputtest",
},
},
ResourceGroupName = "sjrg3276",
Sku = new AzureNative.StreamAnalytics.Inputs.SkuArgs
{
Name = AzureNative.StreamAnalytics.SkuName.Standard,
},
Tags =
{
{ "key1", "value1" },
{ "key3", "value3" },
{ "randomKey", "randomValue" },
},
// The query that maps the input stream to the output, with 1 streaming unit.
Transformation = new AzureNative.StreamAnalytics.Inputs.TransformationArgs
{
Name = "transformationtest",
Query = "Select Id, Name from inputtest",
StreamingUnits = 1,
},
});
});
package main
import (
streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Creates a complete streaming job: one blob stream input, one Azure SQL
// Database output, and a transformation query connecting them.
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewStreamingJob(ctx, "streamingJob", &streamanalytics.StreamingJobArgs{
CompatibilityLevel: pulumi.String(streamanalytics.CompatibilityLevel_1_0),
DataLocale: pulumi.String("en-US"),
// Event-ordering policy: tolerate events up to 5 seconds late, drop out-of-order events.
EventsLateArrivalMaxDelayInSeconds: pulumi.Int(5),
EventsOutOfOrderMaxDelayInSeconds: pulumi.Int(0),
EventsOutOfOrderPolicy: pulumi.String(streamanalytics.EventsOutOfOrderPolicyDrop),
Functions: streamanalytics.FunctionTypeArray{},
// Stream input reading UTF-8 JSON blobs from a storage container.
Inputs: streamanalytics.InputTypeArray{
&streamanalytics.InputTypeArgs{
Name: pulumi.String("inputtest"),
Properties: streamanalytics.StreamInputProperties{
Datasource: streamanalytics.BlobStreamInputDataSource{
Container: "containerName",
PathPattern: "",
StorageAccounts: []streamanalytics.StorageAccount{
{
AccountKey: "yourAccountKey==",
AccountName: "yourAccountName",
},
},
Type: "Microsoft.Storage/Blob",
},
Serialization: streamanalytics.JsonSerialization{
Encoding: streamanalytics.EncodingUTF8,
Type: "Json",
},
Type: "Stream",
},
},
},
JobName: pulumi.String("sj7804"),
Location: pulumi.String("West US"),
OutputErrorPolicy: pulumi.String(streamanalytics.OutputErrorPolicyDrop),
// Output written to an Azure SQL Database table.
Outputs: streamanalytics.OutputTypeArray{
&streamanalytics.OutputTypeArgs{
Datasource: streamanalytics.AzureSqlDatabaseOutputDataSource{
Database: "databaseName",
Password: "userPassword",
Server: "serverName",
Table: "tableName",
Type: "Microsoft.Sql/Server/Database",
User: "<user>",
},
Name: pulumi.String("outputtest"),
},
},
ResourceGroupName: pulumi.String("sjrg3276"),
Sku: &streamanalytics.SkuArgs{
Name: pulumi.String(streamanalytics.SkuNameStandard),
},
Tags: pulumi.StringMap{
"key1": pulumi.String("value1"),
"key3": pulumi.String("value3"),
"randomKey": pulumi.String("randomValue"),
},
// The query that maps the input stream to the output, with 1 streaming unit.
Transformation: &streamanalytics.TransformationArgs{
Name: pulumi.String("transformationtest"),
Query: pulumi.String("Select Id, Name from inputtest"),
StreamingUnits: pulumi.Int(1),
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azurenative.streamanalytics.StreamingJob;
import com.pulumi.azurenative.streamanalytics.StreamingJobArgs;
import com.pulumi.azurenative.streamanalytics.inputs.AzureSqlDatabaseOutputDataSourceArgs;
import com.pulumi.azurenative.streamanalytics.inputs.BlobStreamInputDataSourceArgs;
import com.pulumi.azurenative.streamanalytics.inputs.InputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.JsonSerializationArgs;
import com.pulumi.azurenative.streamanalytics.inputs.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.SkuArgs;
import com.pulumi.azurenative.streamanalytics.inputs.StorageAccountArgs;
import com.pulumi.azurenative.streamanalytics.inputs.StreamInputPropertiesArgs;
import com.pulumi.azurenative.streamanalytics.inputs.TransformationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    /**
     * Creates a complete streaming job: one blob stream input, one Azure SQL
     * Database output, and a transformation query connecting them.
     */
    public static void stack(Context ctx) {
        var streamingJob = new StreamingJob("streamingJob", StreamingJobArgs.builder()
            .compatibilityLevel("1.0")
            .dataLocale("en-US")
            // Event-ordering policy: tolerate events up to 5 seconds late,
            // drop out-of-order events.
            .eventsLateArrivalMaxDelayInSeconds(5)
            .eventsOutOfOrderMaxDelayInSeconds(0)
            .eventsOutOfOrderPolicy("Drop")
            .functions()
            // Stream input reading UTF-8 JSON blobs from a storage container.
            .inputs(InputArgs.builder()
                .name("inputtest")
                .properties(StreamInputPropertiesArgs.builder()
                    .datasource(BlobStreamInputDataSourceArgs.builder()
                        .container("containerName")
                        .pathPattern("")
                        .storageAccounts(StorageAccountArgs.builder()
                            .accountKey("yourAccountKey==")
                            .accountName("yourAccountName")
                            .build())
                        .type("Microsoft.Storage/Blob")
                        .build())
                    .serialization(JsonSerializationArgs.builder()
                        .encoding("UTF8")
                        .type("Json")
                        .build())
                    .type("Stream")
                    .build())
                .build())
            .jobName("sj7804")
            .location("West US")
            .outputErrorPolicy("Drop")
            // FIX: the original example used AzureDataLakeStoreOutputDataSourceArgs,
            // which has no database/server/table/user/password builder methods.
            // The properties and the "Microsoft.Sql/Server/Database" type describe
            // an Azure SQL Database sink, matching the C# and Go examples.
            .outputs(OutputArgs.builder()
                .datasource(AzureSqlDatabaseOutputDataSourceArgs.builder()
                    .database("databaseName")
                    .password("userPassword")
                    .server("serverName")
                    .table("tableName")
                    .type("Microsoft.Sql/Server/Database")
                    .user("<user>")
                    .build())
                .name("outputtest")
                .build())
            .resourceGroupName("sjrg3276")
            .sku(SkuArgs.builder()
                .name("Standard")
                .build())
            .tags(Map.ofEntries(
                Map.entry("key1", "value1"),
                Map.entry("key3", "value3"),
                Map.entry("randomKey", "randomValue")
            ))
            // The query that maps the input stream to the output, with 1 streaming unit.
            .transformation(TransformationArgs.builder()
                .name("transformationtest")
                .query("Select Id, Name from inputtest")
                .streamingUnits(1)
                .build())
            .build());
    }
}
Create a streaming job shell (a streaming job with no inputs, outputs, transformation, or functions)
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
// Creates a streaming job "shell": the job resource alone, with explicitly
// empty inputs, outputs, and functions and no transformation.
return await Deployment.RunAsync(() =>
{
var streamingJob = new AzureNative.StreamAnalytics.StreamingJob("streamingJob", new()
{
CompatibilityLevel = AzureNative.StreamAnalytics.CompatibilityLevel.CompatibilityLevel_1_0,
DataLocale = "en-US",
// Event-ordering policy: tolerate events up to 16 seconds late, adjust
// out-of-order events within a 5-second window, drop the rest.
EventsLateArrivalMaxDelayInSeconds = 16,
EventsOutOfOrderMaxDelayInSeconds = 5,
EventsOutOfOrderPolicy = AzureNative.StreamAnalytics.EventsOutOfOrderPolicy.Drop,
Functions = new[] {},
Inputs = new[] {},
JobName = "sj59",
Location = "West US",
OutputErrorPolicy = AzureNative.StreamAnalytics.OutputErrorPolicy.Drop,
Outputs = new[] {},
ResourceGroupName = "sjrg6936",
Sku = new AzureNative.StreamAnalytics.Inputs.SkuArgs
{
Name = AzureNative.StreamAnalytics.SkuName.Standard,
},
Tags =
{
{ "key1", "value1" },
{ "key3", "value3" },
{ "randomKey", "randomValue" },
},
});
});
package main
import (
streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := streamanalytics.NewStreamingJob(ctx, "streamingJob", &streamanalytics.StreamingJobArgs{
CompatibilityLevel: pulumi.String(streamanalytics.CompatibilityLevel_1_0),
DataLocale: pulumi.String("en-US"),
EventsLateArrivalMaxDelayInSeconds: pulumi.Int(16),
EventsOutOfOrderMaxDelayInSeconds: pulumi.Int(5),
EventsOutOfOrderPolicy: pulumi.String(streamanalytics.EventsOutOfOrderPolicyDrop),
Functions: streamanalytics.FunctionTypeArray{},
Inputs: streamanalytics.InputTypeArray{},
JobName: pulumi.String("sj59"),
Location: pulumi.String("West US"),
OutputErrorPolicy: pulumi.String(streamanalytics.OutputErrorPolicyDrop),
Outputs: streamanalytics.OutputTypeArray{},
ResourceGroupName: pulumi.String("sjrg6936"),
Sku: &streamanalytics.SkuArgs{
Name: pulumi.String(streamanalytics.SkuNameStandard),
},
Tags: pulumi.StringMap{
"key1": pulumi.String("value1"),
"key3": pulumi.String("value3"),
"randomKey": pulumi.String("randomValue"),
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azurenative.streamanalytics.StreamingJob;
import com.pulumi.azurenative.streamanalytics.StreamingJobArgs;
import com.pulumi.azurenative.streamanalytics.inputs.SkuArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
// Creates a streaming job "shell": the job resource alone, with explicitly
// empty inputs, outputs, and functions and no transformation.
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var streamingJob = new StreamingJob("streamingJob", StreamingJobArgs.builder()
.compatibilityLevel("1.0")
.dataLocale("en-US")
// Event-ordering policy: tolerate events up to 16 seconds late, adjust
// out-of-order events within a 5-second window, drop the rest.
.eventsLateArrivalMaxDelayInSeconds(16)
.eventsOutOfOrderMaxDelayInSeconds(5)
.eventsOutOfOrderPolicy("Drop")
// No-argument builder calls set empty collections, making the shell explicit.
.functions()
.inputs()
.jobName("sj59")
.location("West US")
.outputErrorPolicy("Drop")
.outputs()
.resourceGroupName("sjrg6936")
.sku(SkuArgs.builder()
.name("Standard")
.build())
.tags(Map.ofEntries(
Map.entry("key1", "value1"),
Map.entry("key3", "value3"),
Map.entry("randomKey", "randomValue")
))
.build());
}
}
Import
An existing resource can be imported using its type token, name, and identifier, e.g.
$ pulumi import azure-native:streamanalytics:StreamingJob sj59 /subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}
Constructors
Properties
The cluster which streaming jobs will run on.
Controls certain runtime behaviors of the streaming job.
Valid values are JobStorageAccount and SystemAccount. If set to JobStorageAccount, this requires the user to also specify the jobStorageAccount property.
The data locale of the stream analytics job. Value should be the name of a supported .NET Culture from the set https://msdn.microsoft.com/en-us/library/system.globalization.culturetypes(v=vs.110).aspx. Defaults to 'en-US' if none specified.
The maximum tolerable delay in seconds where events arriving late could be included. Supported range is -1 to 1814399 (20.23:59:59 days) and -1 is used to specify wait indefinitely. If the property is absent, it is interpreted to have a value of -1.
The maximum tolerable delay in seconds where out-of-order events can be adjusted to be back in order.
Indicates the policy to apply to events that arrive out of order in the input event stream.
A list of one or more functions for the streaming job. The name property for each function is required when specifying this property in a PUT request. This property cannot be modified via a PATCH operation. You must use the PATCH API available for the individual function.
Describes the system-assigned managed identity assigned to this job that can be used to authenticate with inputs and outputs.
A list of one or more inputs to the streaming job. The name property for each input is required when specifying this property in a PUT request. This property cannot be modified via a PATCH operation. You must use the PATCH API available for the individual input.
The properties that are associated with an Azure Storage account with MSI.
Indicates the policy to apply to events that arrive at the output and cannot be written to the external storage due to being malformed (missing column values, column values of wrong type or size).
A list of one or more outputs for the streaming job. The name property for each output is required when specifying this property in a PUT request. This property cannot be modified via a PATCH operation. You must use the PATCH API available for the individual output.
This property should only be utilized when it is desired that the job be started immediately upon creation. Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is started, start at a custom user time stamp specified via the outputStartTime property, or start from the last event output time.
Value is either an ISO-8601 formatted time stamp that indicates the starting point of the output event stream, or null to indicate that the output event stream will start whenever the streaming job is started. This property must have a value if outputStartMode is set to CustomTime.
The name of the resource group. The name is case insensitive.
Indicates the query and the number of streaming units to use for the streaming job. The name property of the transformation is required when specifying this property in a PUT request. This property cannot be modified via a PATCH operation. You must use the PATCH API available for the individual transformation.