DatasetBinaryArgs
data class DatasetBinaryArgs(val additionalProperties: Output<Map<String, String>>? = null, val annotations: Output<List<String>>? = null, val azureBlobStorageLocation: Output<DatasetBinaryAzureBlobStorageLocationArgs>? = null, val compression: Output<DatasetBinaryCompressionArgs>? = null, val dataFactoryId: Output<String>? = null, val description: Output<String>? = null, val folder: Output<String>? = null, val httpServerLocation: Output<DatasetBinaryHttpServerLocationArgs>? = null, val linkedServiceName: Output<String>? = null, val name: Output<String>? = null, val parameters: Output<Map<String, String>>? = null, val sftpServerLocation: Output<DatasetBinarySftpServerLocationArgs>? = null) : ConvertibleToJava<DatasetBinaryArgs>
Manages a Data Factory Binary Dataset inside an Azure Data Factory.
Example Usage
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.LinkedServiceSftp;
import com.pulumi.azure.datafactory.LinkedServiceSftpArgs;
import com.pulumi.azure.datafactory.DatasetBinary;
import com.pulumi.azure.datafactory.DatasetBinaryArgs;
import com.pulumi.azure.datafactory.inputs.DatasetBinarySftpServerLocationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
            .location("West Europe")
            .build());

        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
            .location(exampleResourceGroup.location())
            .resourceGroupName(exampleResourceGroup.name())
            .build());

        var exampleLinkedServiceSftp = new LinkedServiceSftp("exampleLinkedServiceSftp", LinkedServiceSftpArgs.builder()
            .dataFactoryId(exampleFactory.id())
            .authenticationType("Basic")
            .host("http://www.bing.com")
            .port(22)
            .username("foo")
            .password("bar")
            .build());

        var exampleDatasetBinary = new DatasetBinary("exampleDatasetBinary", DatasetBinaryArgs.builder()
            .dataFactoryId(exampleFactory.id())
            .linkedServiceName(exampleLinkedServiceSftp.name())
            .sftpServerLocation(DatasetBinarySftpServerLocationArgs.builder()
                .path("/test/")
                .filename("**")
                .build())
            .build());
    }
}
Import
Data Factory Binary Datasets can be imported using the resource id, e.g.
$ pulumi import azure:datafactory/datasetBinary:DatasetBinary example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
Constructors
fun DatasetBinaryArgs(additionalProperties: Output<Map<String, String>>? = null, annotations: Output<List<String>>? = null, azureBlobStorageLocation: Output<DatasetBinaryAzureBlobStorageLocationArgs>? = null, compression: Output<DatasetBinaryCompressionArgs>? = null, dataFactoryId: Output<String>? = null, description: Output<String>? = null, folder: Output<String>? = null, httpServerLocation: Output<DatasetBinaryHttpServerLocationArgs>? = null, linkedServiceName: Output<String>? = null, name: Output<String>? = null, parameters: Output<Map<String, String>>? = null, sftpServerLocation: Output<DatasetBinarySftpServerLocationArgs>? = null)
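Besides sftpServerLocation, the constructor also accepts an azureBlobStorageLocation and a compression block. The snippet below is a minimal sketch of that variant, meant to slot into the stack method of the example program above; the exampleLinkedServiceAzureBlobStorage linked service, the "content" container, and the file names are illustrative assumptions, and it additionally needs DatasetBinaryAzureBlobStorageLocationArgs and DatasetBinaryCompressionArgs imported from com.pulumi.azure.datafactory.inputs.
// Sketch only: a Binary Dataset reading a compressed blob from Azure Blob Storage.
// "exampleLinkedServiceAzureBlobStorage", the "content" container and the file names
// are placeholder assumptions, not part of the original example.
var exampleBlobDataset = new DatasetBinary("exampleBlobDataset", DatasetBinaryArgs.builder()
    .dataFactoryId(exampleFactory.id())
    .linkedServiceName(exampleLinkedServiceAzureBlobStorage.name())
    .azureBlobStorageLocation(DatasetBinaryAzureBlobStorageLocationArgs.builder()
        .container("content")
        .path("foo/bar")
        .filename("example.zip")
        .build())
    .compression(DatasetBinaryCompressionArgs.builder()
        .type("ZipDeflate")
        .level("Optimal")
        .build())
    .build());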
Functions
Properties
val name: Output<String>? = null
Specifies the name of the Data Factory Binary Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
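When name is omitted, Pulumi normally derives one from the resource name; it can also be set explicitly on the builder. A minimal sketch, reusing the resources from the example above with an illustrative name value:
// Sketch: giving the dataset an explicit name ("example_dataset" is illustrative, not from the
// original example). Changing this value later forces the dataset to be recreated.
var namedDatasetBinary = new DatasetBinary("namedDatasetBinary", DatasetBinaryArgs.builder()
    .name("example_dataset")
    .dataFactoryId(exampleFactory.id())
    .linkedServiceName(exampleLinkedServiceSftp.name())
    .sftpServerLocation(DatasetBinarySftpServerLocationArgs.builder()
        .path("/test/")
        .filename("**")
        .build())
    .build());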