databricks.Pipeline
Use databricks.Pipeline to deploy Delta Live Tables.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const dltDemo = new databricks.Notebook("dlt_demo", {});
const dltDemoRepo = new databricks.Repo("dlt_demo", {});
const _this = new databricks.Pipeline("this", {
    name: "Pipeline Name",
    storage: "/test/first-pipeline",
    configuration: {
        key1: "value1",
        key2: "value2",
    },
    clusters: [
        {
            label: "default",
            numWorkers: 2,
            customTags: {
                cluster_type: "default",
            },
        },
        {
            label: "maintenance",
            numWorkers: 1,
            customTags: {
                cluster_type: "maintenance",
            },
        },
    ],
    libraries: [
        {
            notebook: {
                path: dltDemo.id,
            },
        },
        {
            file: {
                path: pulumi.interpolate`${dltDemoRepo.path}/pipeline.sql`,
            },
        },
    ],
    continuous: false,
    notifications: [{
        emailRecipients: [
            "user@domain.com",
            "user1@domain.com",
        ],
        alerts: [
            "on-update-failure",
            "on-update-fatal-failure",
            "on-update-success",
            "on-flow-failure",
        ],
    }],
});
import pulumi
import pulumi_databricks as databricks
dlt_demo = databricks.Notebook("dlt_demo")
dlt_demo_repo = databricks.Repo("dlt_demo")
this = databricks.Pipeline("this",
    name="Pipeline Name",
    storage="/test/first-pipeline",
    configuration={
        "key1": "value1",
        "key2": "value2",
    },
    clusters=[
        {
            "label": "default",
            "num_workers": 2,
            "custom_tags": {
                "cluster_type": "default",
            },
        },
        {
            "label": "maintenance",
            "num_workers": 1,
            "custom_tags": {
                "cluster_type": "maintenance",
            },
        },
    ],
    libraries=[
        {
            "notebook": {
                "path": dlt_demo.id,
            },
        },
        {
            "file": {
                "path": dlt_demo_repo.path.apply(lambda path: f"{path}/pipeline.sql"),
            },
        },
    ],
    continuous=False,
    notifications=[{
        "email_recipients": [
            "user@domain.com",
            "user1@domain.com",
        ],
        "alerts": [
            "on-update-failure",
            "on-update-fatal-failure",
            "on-update-success",
            "on-flow-failure",
        ],
    }])
package main
import (
	"fmt"
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		dltDemo, err := databricks.NewNotebook(ctx, "dlt_demo", nil)
		if err != nil {
			return err
		}
		dltDemoRepo, err := databricks.NewRepo(ctx, "dlt_demo", nil)
		if err != nil {
			return err
		}
		_, err = databricks.NewPipeline(ctx, "this", &databricks.PipelineArgs{
			Name:    pulumi.String("Pipeline Name"),
			Storage: pulumi.String("/test/first-pipeline"),
			Configuration: pulumi.StringMap{
				"key1": pulumi.String("value1"),
				"key2": pulumi.String("value2"),
			},
			Clusters: databricks.PipelineClusterArray{
				&databricks.PipelineClusterArgs{
					Label:      pulumi.String("default"),
					NumWorkers: pulumi.Int(2),
					CustomTags: pulumi.StringMap{
						"cluster_type": pulumi.String("default"),
					},
				},
				&databricks.PipelineClusterArgs{
					Label:      pulumi.String("maintenance"),
					NumWorkers: pulumi.Int(1),
					CustomTags: pulumi.StringMap{
						"cluster_type": pulumi.String("maintenance"),
					},
				},
			},
			Libraries: databricks.PipelineLibraryArray{
				&databricks.PipelineLibraryArgs{
					Notebook: &databricks.PipelineLibraryNotebookArgs{
						Path: dltDemo.ID(),
					},
				},
				&databricks.PipelineLibraryArgs{
					File: &databricks.PipelineLibraryFileArgs{
						Path: dltDemoRepo.Path.ApplyT(func(path string) (string, error) {
							return fmt.Sprintf("%v/pipeline.sql", path), nil
						}).(pulumi.StringOutput),
					},
				},
			},
			Continuous: pulumi.Bool(false),
			Notifications: databricks.PipelineNotificationArray{
				&databricks.PipelineNotificationArgs{
					EmailRecipients: pulumi.StringArray{
						pulumi.String("user@domain.com"),
						pulumi.String("user1@domain.com"),
					},
					Alerts: pulumi.StringArray{
						pulumi.String("on-update-failure"),
						pulumi.String("on-update-fatal-failure"),
						pulumi.String("on-update-success"),
						pulumi.String("on-flow-failure"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var dltDemo = new Databricks.Notebook("dlt_demo");
    var dltDemoRepo = new Databricks.Repo("dlt_demo");
    var @this = new Databricks.Pipeline("this", new()
    {
        Name = "Pipeline Name",
        Storage = "/test/first-pipeline",
        Configuration = 
        {
            { "key1", "value1" },
            { "key2", "value2" },
        },
        Clusters = new[]
        {
            new Databricks.Inputs.PipelineClusterArgs
            {
                Label = "default",
                NumWorkers = 2,
                CustomTags = 
                {
                    { "cluster_type", "default" },
                },
            },
            new Databricks.Inputs.PipelineClusterArgs
            {
                Label = "maintenance",
                NumWorkers = 1,
                CustomTags = 
                {
                    { "cluster_type", "maintenance" },
                },
            },
        },
        Libraries = new[]
        {
            new Databricks.Inputs.PipelineLibraryArgs
            {
                Notebook = new Databricks.Inputs.PipelineLibraryNotebookArgs
                {
                    Path = dltDemo.Id,
                },
            },
            new Databricks.Inputs.PipelineLibraryArgs
            {
                File = new Databricks.Inputs.PipelineLibraryFileArgs
                {
                    Path = dltDemoRepo.Path.Apply(path => $"{path}/pipeline.sql"),
                },
            },
        },
        Continuous = false,
        Notifications = new[]
        {
            new Databricks.Inputs.PipelineNotificationArgs
            {
                EmailRecipients = new[]
                {
                    "user@domain.com",
                    "user1@domain.com",
                },
                Alerts = new[]
                {
                    "on-update-failure",
                    "on-update-fatal-failure",
                    "on-update-success",
                    "on-flow-failure",
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Notebook;
import com.pulumi.databricks.Repo;
import com.pulumi.databricks.Pipeline;
import com.pulumi.databricks.PipelineArgs;
import com.pulumi.databricks.inputs.PipelineClusterArgs;
import com.pulumi.databricks.inputs.PipelineLibraryArgs;
import com.pulumi.databricks.inputs.PipelineLibraryNotebookArgs;
import com.pulumi.databricks.inputs.PipelineLibraryFileArgs;
import com.pulumi.databricks.inputs.PipelineNotificationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var dltDemo = new Notebook("dltDemo");
        var dltDemoRepo = new Repo("dltDemoRepo");
        var this_ = new Pipeline("this", PipelineArgs.builder()
            .name("Pipeline Name")
            .storage("/test/first-pipeline")
            .configuration(Map.ofEntries(
                Map.entry("key1", "value1"),
                Map.entry("key2", "value2")
            ))
            .clusters(            
                PipelineClusterArgs.builder()
                    .label("default")
                    .numWorkers(2)
                    .customTags(Map.of("cluster_type", "default"))
                    .build(),
                PipelineClusterArgs.builder()
                    .label("maintenance")
                    .numWorkers(1)
                    .customTags(Map.of("cluster_type", "maintenance"))
                    .build())
            .libraries(            
                PipelineLibraryArgs.builder()
                    .notebook(PipelineLibraryNotebookArgs.builder()
                        .path(dltDemo.id())
                        .build())
                    .build(),
                PipelineLibraryArgs.builder()
                    .file(PipelineLibraryFileArgs.builder()
                        .path(dltDemoRepo.path().applyValue(path -> String.format("%s/pipeline.sql", path)))
                        .build())
                    .build())
            .continuous(false)
            .notifications(PipelineNotificationArgs.builder()
                .emailRecipients(                
                    "user@domain.com",
                    "user1@domain.com")
                .alerts(                
                    "on-update-failure",
                    "on-update-fatal-failure",
                    "on-update-success",
                    "on-flow-failure")
                .build())
            .build());
    }
}
resources:
  dltDemo:
    type: databricks:Notebook
    name: dlt_demo
  dltDemoRepo:
    type: databricks:Repo
    name: dlt_demo
  this:
    type: databricks:Pipeline
    properties:
      name: Pipeline Name
      storage: /test/first-pipeline
      configuration:
        key1: value1
        key2: value2
      clusters:
        - label: default
          numWorkers: 2
          customTags:
            cluster_type: default
        - label: maintenance
          numWorkers: 1
          customTags:
            cluster_type: maintenance
      libraries:
        - notebook:
            path: ${dltDemo.id}
        - file:
            path: ${dltDemoRepo.path}/pipeline.sql
      continuous: false
      notifications:
        - emailRecipients:
            - user@domain.com
            - user1@domain.com
          alerts:
            - on-update-failure
            - on-update-fatal-failure
            - on-update-success
            - on-flow-failure
Related Resources
The following resources are often used in the same context:
- End-to-end workspace management guide.
- databricks.getPipelines to retrieve Delta Live Tables pipeline data (see the lookup sketch after this list).
- databricks.Cluster to create Databricks Clusters.
- databricks.Job to manage Databricks Jobs that run non-interactive code on a databricks.Cluster.
- databricks.Notebook to manage Databricks Notebooks.
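As a minimal sketch of the lookup mentioned above, databricks.getPipelines can resolve the IDs of existing pipelines in TypeScript. The name filter and export name below are illustrative placeholders, not values taken from this page:

import * as databricks from "@pulumi/databricks";

// Look up existing Delta Live Tables pipelines whose display names match a pattern.
// "Pipeline Name%" is only an example filter.
const found = databricks.getPipelines({
    pipelineName: "Pipeline Name%",
});

// Export the IDs of the matching pipelines.
export const foundPipelineIds = found.then(result => result.ids);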
Create Pipeline Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
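For instance, a minimal TypeScript invocation might look like the sketch below; all inputs are optional, and the pipeline name shown is a placeholder:

import * as databricks from "@pulumi/databricks";

// Declare a Delta Live Tables pipeline with only a display name set;
// every other input falls back to its provider default.
const minimal = new databricks.Pipeline("minimal", {
    name: "Minimal Pipeline",
});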
Constructor syntax
new Pipeline(name: string, args?: PipelineArgs, opts?: CustomResourceOptions);
@overload
def Pipeline(resource_name: str,
             args: Optional[PipelineArgs] = None,
             opts: Optional[ResourceOptions] = None)
@overload
def Pipeline(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             allow_duplicate_names: Optional[bool] = None,
             budget_policy_id: Optional[str] = None,
             catalog: Optional[str] = None,
             cause: Optional[str] = None,
             channel: Optional[str] = None,
             cluster_id: Optional[str] = None,
             clusters: Optional[Sequence[PipelineClusterArgs]] = None,
             configuration: Optional[Mapping[str, str]] = None,
             continuous: Optional[bool] = None,
             creator_user_name: Optional[str] = None,
             deployment: Optional[PipelineDeploymentArgs] = None,
             development: Optional[bool] = None,
             edition: Optional[str] = None,
             expected_last_modified: Optional[int] = None,
             filters: Optional[PipelineFiltersArgs] = None,
             gateway_definition: Optional[PipelineGatewayDefinitionArgs] = None,
             health: Optional[str] = None,
             ingestion_definition: Optional[PipelineIngestionDefinitionArgs] = None,
             last_modified: Optional[int] = None,
             latest_updates: Optional[Sequence[PipelineLatestUpdateArgs]] = None,
             libraries: Optional[Sequence[PipelineLibraryArgs]] = None,
             name: Optional[str] = None,
             notifications: Optional[Sequence[PipelineNotificationArgs]] = None,
             photon: Optional[bool] = None,
             restart_window: Optional[PipelineRestartWindowArgs] = None,
             run_as: Optional[PipelineRunAsArgs] = None,
             schema: Optional[str] = None,
             serverless: Optional[bool] = None,
             state: Optional[str] = None,
             storage: Optional[str] = None,
             target: Optional[str] = None,
             trigger: Optional[PipelineTriggerArgs] = None,
             url: Optional[str] = None)
func NewPipeline(ctx *Context, name string, args *PipelineArgs, opts ...ResourceOption) (*Pipeline, error)
public Pipeline(string name, PipelineArgs? args = null, CustomResourceOptions? opts = null)
public Pipeline(String name, PipelineArgs args)
public Pipeline(String name, PipelineArgs args, CustomResourceOptions options)
type: databricks:Pipeline
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var pipelineResource = new Databricks.Pipeline("pipelineResource", new()
{
    AllowDuplicateNames = false,
    BudgetPolicyId = "string",
    Catalog = "string",
    Cause = "string",
    Channel = "string",
    ClusterId = "string",
    Clusters = new[]
    {
        new Databricks.Inputs.PipelineClusterArgs
        {
            ApplyPolicyDefaultValues = false,
            Autoscale = new Databricks.Inputs.PipelineClusterAutoscaleArgs
            {
                MaxWorkers = 0,
                MinWorkers = 0,
                Mode = "string",
            },
            AwsAttributes = new Databricks.Inputs.PipelineClusterAwsAttributesArgs
            {
                Availability = "string",
                EbsVolumeCount = 0,
                EbsVolumeIops = 0,
                EbsVolumeSize = 0,
                EbsVolumeThroughput = 0,
                EbsVolumeType = "string",
                FirstOnDemand = 0,
                InstanceProfileArn = "string",
                SpotBidPricePercent = 0,
                ZoneId = "string",
            },
            AzureAttributes = new Databricks.Inputs.PipelineClusterAzureAttributesArgs
            {
                Availability = "string",
                FirstOnDemand = 0,
                LogAnalyticsInfo = new Databricks.Inputs.PipelineClusterAzureAttributesLogAnalyticsInfoArgs
                {
                    LogAnalyticsPrimaryKey = "string",
                    LogAnalyticsWorkspaceId = "string",
                },
                SpotBidMaxPrice = 0,
            },
            ClusterLogConf = new Databricks.Inputs.PipelineClusterClusterLogConfArgs
            {
                Dbfs = new Databricks.Inputs.PipelineClusterClusterLogConfDbfsArgs
                {
                    Destination = "string",
                },
                S3 = new Databricks.Inputs.PipelineClusterClusterLogConfS3Args
                {
                    Destination = "string",
                    CannedAcl = "string",
                    EnableEncryption = false,
                    EncryptionType = "string",
                    Endpoint = "string",
                    KmsKey = "string",
                    Region = "string",
                },
                Volumes = new Databricks.Inputs.PipelineClusterClusterLogConfVolumesArgs
                {
                    Destination = "string",
                },
            },
            CustomTags = 
            {
                { "string", "string" },
            },
            DriverInstancePoolId = "string",
            DriverNodeTypeId = "string",
            EnableLocalDiskEncryption = false,
            GcpAttributes = new Databricks.Inputs.PipelineClusterGcpAttributesArgs
            {
                Availability = "string",
                GoogleServiceAccount = "string",
                LocalSsdCount = 0,
                ZoneId = "string",
            },
            InitScripts = new[]
            {
                new Databricks.Inputs.PipelineClusterInitScriptArgs
                {
                    Abfss = new Databricks.Inputs.PipelineClusterInitScriptAbfssArgs
                    {
                        Destination = "string",
                    },
                    File = new Databricks.Inputs.PipelineClusterInitScriptFileArgs
                    {
                        Destination = "string",
                    },
                    Gcs = new Databricks.Inputs.PipelineClusterInitScriptGcsArgs
                    {
                        Destination = "string",
                    },
                    S3 = new Databricks.Inputs.PipelineClusterInitScriptS3Args
                    {
                        Destination = "string",
                        CannedAcl = "string",
                        EnableEncryption = false,
                        EncryptionType = "string",
                        Endpoint = "string",
                        KmsKey = "string",
                        Region = "string",
                    },
                    Volumes = new Databricks.Inputs.PipelineClusterInitScriptVolumesArgs
                    {
                        Destination = "string",
                    },
                    Workspace = new Databricks.Inputs.PipelineClusterInitScriptWorkspaceArgs
                    {
                        Destination = "string",
                    },
                },
            },
            InstancePoolId = "string",
            Label = "string",
            NodeTypeId = "string",
            NumWorkers = 0,
            PolicyId = "string",
            SparkConf = 
            {
                { "string", "string" },
            },
            SparkEnvVars = 
            {
                { "string", "string" },
            },
            SshPublicKeys = new[]
            {
                "string",
            },
        },
    },
    Configuration = 
    {
        { "string", "string" },
    },
    Continuous = false,
    CreatorUserName = "string",
    Deployment = new Databricks.Inputs.PipelineDeploymentArgs
    {
        Kind = "string",
        MetadataFilePath = "string",
    },
    Development = false,
    Edition = "string",
    ExpectedLastModified = 0,
    Filters = new Databricks.Inputs.PipelineFiltersArgs
    {
        Excludes = new[]
        {
            "string",
        },
        Includes = new[]
        {
            "string",
        },
    },
    GatewayDefinition = new Databricks.Inputs.PipelineGatewayDefinitionArgs
    {
        ConnectionId = "string",
        ConnectionName = "string",
        GatewayStorageCatalog = "string",
        GatewayStorageName = "string",
        GatewayStorageSchema = "string",
    },
    Health = "string",
    IngestionDefinition = new Databricks.Inputs.PipelineIngestionDefinitionArgs
    {
        ConnectionName = "string",
        IngestionGatewayId = "string",
        Objects = new[]
        {
            new Databricks.Inputs.PipelineIngestionDefinitionObjectArgs
            {
                Report = new Databricks.Inputs.PipelineIngestionDefinitionObjectReportArgs
                {
                    DestinationCatalog = "string",
                    DestinationSchema = "string",
                    DestinationTable = "string",
                    SourceUrl = "string",
                    TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionObjectReportTableConfigurationArgs
                    {
                        PrimaryKeys = new[]
                        {
                            "string",
                        },
                        SalesforceIncludeFormulaFields = false,
                        ScdType = "string",
                        SequenceBies = new[]
                        {
                            "string",
                        },
                    },
                },
                Schema = new Databricks.Inputs.PipelineIngestionDefinitionObjectSchemaArgs
                {
                    DestinationCatalog = "string",
                    DestinationSchema = "string",
                    SourceCatalog = "string",
                    SourceSchema = "string",
                    TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs
                    {
                        PrimaryKeys = new[]
                        {
                            "string",
                        },
                        SalesforceIncludeFormulaFields = false,
                        ScdType = "string",
                        SequenceBies = new[]
                        {
                            "string",
                        },
                    },
                },
                Table = new Databricks.Inputs.PipelineIngestionDefinitionObjectTableArgs
                {
                    DestinationCatalog = "string",
                    DestinationSchema = "string",
                    DestinationTable = "string",
                    SourceCatalog = "string",
                    SourceSchema = "string",
                    SourceTable = "string",
                    TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionObjectTableTableConfigurationArgs
                    {
                        PrimaryKeys = new[]
                        {
                            "string",
                        },
                        SalesforceIncludeFormulaFields = false,
                        ScdType = "string",
                        SequenceBies = new[]
                        {
                            "string",
                        },
                    },
                },
            },
        },
        TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionTableConfigurationArgs
        {
            PrimaryKeys = new[]
            {
                "string",
            },
            SalesforceIncludeFormulaFields = false,
            ScdType = "string",
            SequenceBies = new[]
            {
                "string",
            },
        },
    },
    LastModified = 0,
    LatestUpdates = new[]
    {
        new Databricks.Inputs.PipelineLatestUpdateArgs
        {
            CreationTime = "string",
            State = "string",
            UpdateId = "string",
        },
    },
    Libraries = new[]
    {
        new Databricks.Inputs.PipelineLibraryArgs
        {
            File = new Databricks.Inputs.PipelineLibraryFileArgs
            {
                Path = "string",
            },
            Jar = "string",
            Maven = new Databricks.Inputs.PipelineLibraryMavenArgs
            {
                Coordinates = "string",
                Exclusions = new[]
                {
                    "string",
                },
                Repo = "string",
            },
            Notebook = new Databricks.Inputs.PipelineLibraryNotebookArgs
            {
                Path = "string",
            },
        },
    },
    Name = "string",
    Notifications = new[]
    {
        new Databricks.Inputs.PipelineNotificationArgs
        {
            Alerts = new[]
            {
                "string",
            },
            EmailRecipients = new[]
            {
                "string",
            },
        },
    },
    Photon = false,
    RestartWindow = new Databricks.Inputs.PipelineRestartWindowArgs
    {
        StartHour = 0,
        DaysOfWeeks = new[]
        {
            "string",
        },
        TimeZoneId = "string",
    },
    RunAs = new Databricks.Inputs.PipelineRunAsArgs
    {
        ServicePrincipalName = "string",
        UserName = "string",
    },
    Schema = "string",
    Serverless = false,
    State = "string",
    Storage = "string",
    Target = "string",
    Trigger = new Databricks.Inputs.PipelineTriggerArgs
    {
        Cron = new Databricks.Inputs.PipelineTriggerCronArgs
        {
            QuartzCronSchedule = "string",
            TimezoneId = "string",
        },
        Manual = null,
    },
    Url = "string",
});
example, err := databricks.NewPipeline(ctx, "pipelineResource", &databricks.PipelineArgs{
	AllowDuplicateNames: pulumi.Bool(false),
	BudgetPolicyId:      pulumi.String("string"),
	Catalog:             pulumi.String("string"),
	Cause:               pulumi.String("string"),
	Channel:             pulumi.String("string"),
	ClusterId:           pulumi.String("string"),
	Clusters: databricks.PipelineClusterArray{
		&databricks.PipelineClusterArgs{
			ApplyPolicyDefaultValues: pulumi.Bool(false),
			Autoscale: &databricks.PipelineClusterAutoscaleArgs{
				MaxWorkers: pulumi.Int(0),
				MinWorkers: pulumi.Int(0),
				Mode:       pulumi.String("string"),
			},
			AwsAttributes: &databricks.PipelineClusterAwsAttributesArgs{
				Availability:        pulumi.String("string"),
				EbsVolumeCount:      pulumi.Int(0),
				EbsVolumeIops:       pulumi.Int(0),
				EbsVolumeSize:       pulumi.Int(0),
				EbsVolumeThroughput: pulumi.Int(0),
				EbsVolumeType:       pulumi.String("string"),
				FirstOnDemand:       pulumi.Int(0),
				InstanceProfileArn:  pulumi.String("string"),
				SpotBidPricePercent: pulumi.Int(0),
				ZoneId:              pulumi.String("string"),
			},
			AzureAttributes: &databricks.PipelineClusterAzureAttributesArgs{
				Availability:  pulumi.String("string"),
				FirstOnDemand: pulumi.Int(0),
				LogAnalyticsInfo: &databricks.PipelineClusterAzureAttributesLogAnalyticsInfoArgs{
					LogAnalyticsPrimaryKey:  pulumi.String("string"),
					LogAnalyticsWorkspaceId: pulumi.String("string"),
				},
				SpotBidMaxPrice: pulumi.Float64(0),
			},
			ClusterLogConf: &databricks.PipelineClusterClusterLogConfArgs{
				Dbfs: &databricks.PipelineClusterClusterLogConfDbfsArgs{
					Destination: pulumi.String("string"),
				},
				S3: &databricks.PipelineClusterClusterLogConfS3Args{
					Destination:      pulumi.String("string"),
					CannedAcl:        pulumi.String("string"),
					EnableEncryption: pulumi.Bool(false),
					EncryptionType:   pulumi.String("string"),
					Endpoint:         pulumi.String("string"),
					KmsKey:           pulumi.String("string"),
					Region:           pulumi.String("string"),
				},
				Volumes: &databricks.PipelineClusterClusterLogConfVolumesArgs{
					Destination: pulumi.String("string"),
				},
			},
			CustomTags: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			DriverInstancePoolId:      pulumi.String("string"),
			DriverNodeTypeId:          pulumi.String("string"),
			EnableLocalDiskEncryption: pulumi.Bool(false),
			GcpAttributes: &databricks.PipelineClusterGcpAttributesArgs{
				Availability:         pulumi.String("string"),
				GoogleServiceAccount: pulumi.String("string"),
				LocalSsdCount:        pulumi.Int(0),
				ZoneId:               pulumi.String("string"),
			},
			InitScripts: databricks.PipelineClusterInitScriptArray{
				&databricks.PipelineClusterInitScriptArgs{
					Abfss: &databricks.PipelineClusterInitScriptAbfssArgs{
						Destination: pulumi.String("string"),
					},
					File: &databricks.PipelineClusterInitScriptFileArgs{
						Destination: pulumi.String("string"),
					},
					Gcs: &databricks.PipelineClusterInitScriptGcsArgs{
						Destination: pulumi.String("string"),
					},
					S3: &databricks.PipelineClusterInitScriptS3Args{
						Destination:      pulumi.String("string"),
						CannedAcl:        pulumi.String("string"),
						EnableEncryption: pulumi.Bool(false),
						EncryptionType:   pulumi.String("string"),
						Endpoint:         pulumi.String("string"),
						KmsKey:           pulumi.String("string"),
						Region:           pulumi.String("string"),
					},
					Volumes: &databricks.PipelineClusterInitScriptVolumesArgs{
						Destination: pulumi.String("string"),
					},
					Workspace: &databricks.PipelineClusterInitScriptWorkspaceArgs{
						Destination: pulumi.String("string"),
					},
				},
			},
			InstancePoolId: pulumi.String("string"),
			Label:          pulumi.String("string"),
			NodeTypeId:     pulumi.String("string"),
			NumWorkers:     pulumi.Int(0),
			PolicyId:       pulumi.String("string"),
			SparkConf: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			SparkEnvVars: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			SshPublicKeys: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
	},
	Configuration: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Continuous:      pulumi.Bool(false),
	CreatorUserName: pulumi.String("string"),
	Deployment: &databricks.PipelineDeploymentArgs{
		Kind:             pulumi.String("string"),
		MetadataFilePath: pulumi.String("string"),
	},
	Development:          pulumi.Bool(false),
	Edition:              pulumi.String("string"),
	ExpectedLastModified: pulumi.Int(0),
	Filters: &databricks.PipelineFiltersArgs{
		Excludes: pulumi.StringArray{
			pulumi.String("string"),
		},
		Includes: pulumi.StringArray{
			pulumi.String("string"),
		},
	},
	GatewayDefinition: &databricks.PipelineGatewayDefinitionArgs{
		ConnectionId:          pulumi.String("string"),
		ConnectionName:        pulumi.String("string"),
		GatewayStorageCatalog: pulumi.String("string"),
		GatewayStorageName:    pulumi.String("string"),
		GatewayStorageSchema:  pulumi.String("string"),
	},
	Health: pulumi.String("string"),
	IngestionDefinition: &databricks.PipelineIngestionDefinitionArgs{
		ConnectionName:     pulumi.String("string"),
		IngestionGatewayId: pulumi.String("string"),
		Objects: databricks.PipelineIngestionDefinitionObjectArray{
			&databricks.PipelineIngestionDefinitionObjectArgs{
				Report: &databricks.PipelineIngestionDefinitionObjectReportArgs{
					DestinationCatalog: pulumi.String("string"),
					DestinationSchema:  pulumi.String("string"),
					DestinationTable:   pulumi.String("string"),
					SourceUrl:          pulumi.String("string"),
					TableConfiguration: &databricks.PipelineIngestionDefinitionObjectReportTableConfigurationArgs{
						PrimaryKeys: pulumi.StringArray{
							pulumi.String("string"),
						},
						SalesforceIncludeFormulaFields: pulumi.Bool(false),
						ScdType:                        pulumi.String("string"),
						SequenceBies: pulumi.StringArray{
							pulumi.String("string"),
						},
					},
				},
				Schema: &databricks.PipelineIngestionDefinitionObjectSchemaArgs{
					DestinationCatalog: pulumi.String("string"),
					DestinationSchema:  pulumi.String("string"),
					SourceCatalog:      pulumi.String("string"),
					SourceSchema:       pulumi.String("string"),
					TableConfiguration: &databricks.PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs{
						PrimaryKeys: pulumi.StringArray{
							pulumi.String("string"),
						},
						SalesforceIncludeFormulaFields: pulumi.Bool(false),
						ScdType:                        pulumi.String("string"),
						SequenceBies: pulumi.StringArray{
							pulumi.String("string"),
						},
					},
				},
				Table: &databricks.PipelineIngestionDefinitionObjectTableArgs{
					DestinationCatalog: pulumi.String("string"),
					DestinationSchema:  pulumi.String("string"),
					DestinationTable:   pulumi.String("string"),
					SourceCatalog:      pulumi.String("string"),
					SourceSchema:       pulumi.String("string"),
					SourceTable:        pulumi.String("string"),
					TableConfiguration: &databricks.PipelineIngestionDefinitionObjectTableTableConfigurationArgs{
						PrimaryKeys: pulumi.StringArray{
							pulumi.String("string"),
						},
						SalesforceIncludeFormulaFields: pulumi.Bool(false),
						ScdType:                        pulumi.String("string"),
						SequenceBies: pulumi.StringArray{
							pulumi.String("string"),
						},
					},
				},
			},
		},
		TableConfiguration: &databricks.PipelineIngestionDefinitionTableConfigurationArgs{
			PrimaryKeys: pulumi.StringArray{
				pulumi.String("string"),
			},
			SalesforceIncludeFormulaFields: pulumi.Bool(false),
			ScdType:                        pulumi.String("string"),
			SequenceBies: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
	},
	LastModified: pulumi.Int(0),
	LatestUpdates: databricks.PipelineLatestUpdateArray{
		&databricks.PipelineLatestUpdateArgs{
			CreationTime: pulumi.String("string"),
			State:        pulumi.String("string"),
			UpdateId:     pulumi.String("string"),
		},
	},
	Libraries: databricks.PipelineLibraryArray{
		&databricks.PipelineLibraryArgs{
			File: &databricks.PipelineLibraryFileArgs{
				Path: pulumi.String("string"),
			},
			Jar: pulumi.String("string"),
			Maven: &databricks.PipelineLibraryMavenArgs{
				Coordinates: pulumi.String("string"),
				Exclusions: pulumi.StringArray{
					pulumi.String("string"),
				},
				Repo: pulumi.String("string"),
			},
			Notebook: &databricks.PipelineLibraryNotebookArgs{
				Path: pulumi.String("string"),
			},
		},
	},
	Name: pulumi.String("string"),
	Notifications: databricks.PipelineNotificationArray{
		&databricks.PipelineNotificationArgs{
			Alerts: pulumi.StringArray{
				pulumi.String("string"),
			},
			EmailRecipients: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
	},
	Photon: pulumi.Bool(false),
	RestartWindow: &databricks.PipelineRestartWindowArgs{
		StartHour: pulumi.Int(0),
		DaysOfWeeks: pulumi.StringArray{
			pulumi.String("string"),
		},
		TimeZoneId: pulumi.String("string"),
	},
	RunAs: &databricks.PipelineRunAsArgs{
		ServicePrincipalName: pulumi.String("string"),
		UserName:             pulumi.String("string"),
	},
	Schema:     pulumi.String("string"),
	Serverless: pulumi.Bool(false),
	State:      pulumi.String("string"),
	Storage:    pulumi.String("string"),
	Target:     pulumi.String("string"),
	Trigger: &databricks.PipelineTriggerArgs{
		Cron: &databricks.PipelineTriggerCronArgs{
			QuartzCronSchedule: pulumi.String("string"),
			TimezoneId:         pulumi.String("string"),
		},
		Manual: &databricks.PipelineTriggerManualArgs{},
	},
	Url: pulumi.String("string"),
})
var pipelineResource = new Pipeline("pipelineResource", PipelineArgs.builder()
    .allowDuplicateNames(false)
    .budgetPolicyId("string")
    .catalog("string")
    .cause("string")
    .channel("string")
    .clusterId("string")
    .clusters(PipelineClusterArgs.builder()
        .applyPolicyDefaultValues(false)
        .autoscale(PipelineClusterAutoscaleArgs.builder()
            .maxWorkers(0)
            .minWorkers(0)
            .mode("string")
            .build())
        .awsAttributes(PipelineClusterAwsAttributesArgs.builder()
            .availability("string")
            .ebsVolumeCount(0)
            .ebsVolumeIops(0)
            .ebsVolumeSize(0)
            .ebsVolumeThroughput(0)
            .ebsVolumeType("string")
            .firstOnDemand(0)
            .instanceProfileArn("string")
            .spotBidPricePercent(0)
            .zoneId("string")
            .build())
        .azureAttributes(PipelineClusterAzureAttributesArgs.builder()
            .availability("string")
            .firstOnDemand(0)
            .logAnalyticsInfo(PipelineClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                .logAnalyticsPrimaryKey("string")
                .logAnalyticsWorkspaceId("string")
                .build())
            .spotBidMaxPrice(0)
            .build())
        .clusterLogConf(PipelineClusterClusterLogConfArgs.builder()
            .dbfs(PipelineClusterClusterLogConfDbfsArgs.builder()
                .destination("string")
                .build())
            .s3(PipelineClusterClusterLogConfS3Args.builder()
                .destination("string")
                .cannedAcl("string")
                .enableEncryption(false)
                .encryptionType("string")
                .endpoint("string")
                .kmsKey("string")
                .region("string")
                .build())
            .volumes(PipelineClusterClusterLogConfVolumesArgs.builder()
                .destination("string")
                .build())
            .build())
        .customTags(Map.of("string", "string"))
        .driverInstancePoolId("string")
        .driverNodeTypeId("string")
        .enableLocalDiskEncryption(false)
        .gcpAttributes(PipelineClusterGcpAttributesArgs.builder()
            .availability("string")
            .googleServiceAccount("string")
            .localSsdCount(0)
            .zoneId("string")
            .build())
        .initScripts(PipelineClusterInitScriptArgs.builder()
            .abfss(PipelineClusterInitScriptAbfssArgs.builder()
                .destination("string")
                .build())
            .file(PipelineClusterInitScriptFileArgs.builder()
                .destination("string")
                .build())
            .gcs(PipelineClusterInitScriptGcsArgs.builder()
                .destination("string")
                .build())
            .s3(PipelineClusterInitScriptS3Args.builder()
                .destination("string")
                .cannedAcl("string")
                .enableEncryption(false)
                .encryptionType("string")
                .endpoint("string")
                .kmsKey("string")
                .region("string")
                .build())
            .volumes(PipelineClusterInitScriptVolumesArgs.builder()
                .destination("string")
                .build())
            .workspace(PipelineClusterInitScriptWorkspaceArgs.builder()
                .destination("string")
                .build())
            .build())
        .instancePoolId("string")
        .label("string")
        .nodeTypeId("string")
        .numWorkers(0)
        .policyId("string")
        .sparkConf(Map.of("string", "string"))
        .sparkEnvVars(Map.of("string", "string"))
        .sshPublicKeys("string")
        .build())
    .configuration(Map.of("string", "string"))
    .continuous(false)
    .creatorUserName("string")
    .deployment(PipelineDeploymentArgs.builder()
        .kind("string")
        .metadataFilePath("string")
        .build())
    .development(false)
    .edition("string")
    .expectedLastModified(0)
    .filters(PipelineFiltersArgs.builder()
        .excludes("string")
        .includes("string")
        .build())
    .gatewayDefinition(PipelineGatewayDefinitionArgs.builder()
        .connectionId("string")
        .connectionName("string")
        .gatewayStorageCatalog("string")
        .gatewayStorageName("string")
        .gatewayStorageSchema("string")
        .build())
    .health("string")
    .ingestionDefinition(PipelineIngestionDefinitionArgs.builder()
        .connectionName("string")
        .ingestionGatewayId("string")
        .objects(PipelineIngestionDefinitionObjectArgs.builder()
            .report(PipelineIngestionDefinitionObjectReportArgs.builder()
                .destinationCatalog("string")
                .destinationSchema("string")
                .destinationTable("string")
                .sourceUrl("string")
                .tableConfiguration(PipelineIngestionDefinitionObjectReportTableConfigurationArgs.builder()
                    .primaryKeys("string")
                    .salesforceIncludeFormulaFields(false)
                    .scdType("string")
                    .sequenceBies("string")
                    .build())
                .build())
            .schema(PipelineIngestionDefinitionObjectSchemaArgs.builder()
                .destinationCatalog("string")
                .destinationSchema("string")
                .sourceCatalog("string")
                .sourceSchema("string")
                .tableConfiguration(PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs.builder()
                    .primaryKeys("string")
                    .salesforceIncludeFormulaFields(false)
                    .scdType("string")
                    .sequenceBies("string")
                    .build())
                .build())
            .table(PipelineIngestionDefinitionObjectTableArgs.builder()
                .destinationCatalog("string")
                .destinationSchema("string")
                .destinationTable("string")
                .sourceCatalog("string")
                .sourceSchema("string")
                .sourceTable("string")
                .tableConfiguration(PipelineIngestionDefinitionObjectTableTableConfigurationArgs.builder()
                    .primaryKeys("string")
                    .salesforceIncludeFormulaFields(false)
                    .scdType("string")
                    .sequenceBies("string")
                    .build())
                .build())
            .build())
        .tableConfiguration(PipelineIngestionDefinitionTableConfigurationArgs.builder()
            .primaryKeys("string")
            .salesforceIncludeFormulaFields(false)
            .scdType("string")
            .sequenceBies("string")
            .build())
        .build())
    .lastModified(0)
    .latestUpdates(PipelineLatestUpdateArgs.builder()
        .creationTime("string")
        .state("string")
        .updateId("string")
        .build())
    .libraries(PipelineLibraryArgs.builder()
        .file(PipelineLibraryFileArgs.builder()
            .path("string")
            .build())
        .jar("string")
        .maven(PipelineLibraryMavenArgs.builder()
            .coordinates("string")
            .exclusions("string")
            .repo("string")
            .build())
        .notebook(PipelineLibraryNotebookArgs.builder()
            .path("string")
            .build())
        .build())
    .name("string")
    .notifications(PipelineNotificationArgs.builder()
        .alerts("string")
        .emailRecipients("string")
        .build())
    .photon(false)
    .restartWindow(PipelineRestartWindowArgs.builder()
        .startHour(0)
        .daysOfWeeks("string")
        .timeZoneId("string")
        .build())
    .runAs(PipelineRunAsArgs.builder()
        .servicePrincipalName("string")
        .userName("string")
        .build())
    .schema("string")
    .serverless(false)
    .state("string")
    .storage("string")
    .target("string")
    .trigger(PipelineTriggerArgs.builder()
        .cron(PipelineTriggerCronArgs.builder()
            .quartzCronSchedule("string")
            .timezoneId("string")
            .build())
        .manual()
        .build())
    .url("string")
    .build());
pipeline_resource = databricks.Pipeline("pipelineResource",
    allow_duplicate_names=False,
    budget_policy_id="string",
    catalog="string",
    cause="string",
    channel="string",
    cluster_id="string",
    clusters=[{
        "apply_policy_default_values": False,
        "autoscale": {
            "max_workers": 0,
            "min_workers": 0,
            "mode": "string",
        },
        "aws_attributes": {
            "availability": "string",
            "ebs_volume_count": 0,
            "ebs_volume_iops": 0,
            "ebs_volume_size": 0,
            "ebs_volume_throughput": 0,
            "ebs_volume_type": "string",
            "first_on_demand": 0,
            "instance_profile_arn": "string",
            "spot_bid_price_percent": 0,
            "zone_id": "string",
        },
        "azure_attributes": {
            "availability": "string",
            "first_on_demand": 0,
            "log_analytics_info": {
                "log_analytics_primary_key": "string",
                "log_analytics_workspace_id": "string",
            },
            "spot_bid_max_price": 0,
        },
        "cluster_log_conf": {
            "dbfs": {
                "destination": "string",
            },
            "s3": {
                "destination": "string",
                "canned_acl": "string",
                "enable_encryption": False,
                "encryption_type": "string",
                "endpoint": "string",
                "kms_key": "string",
                "region": "string",
            },
            "volumes": {
                "destination": "string",
            },
        },
        "custom_tags": {
            "string": "string",
        },
        "driver_instance_pool_id": "string",
        "driver_node_type_id": "string",
        "enable_local_disk_encryption": False,
        "gcp_attributes": {
            "availability": "string",
            "google_service_account": "string",
            "local_ssd_count": 0,
            "zone_id": "string",
        },
        "init_scripts": [{
            "abfss": {
                "destination": "string",
            },
            "file": {
                "destination": "string",
            },
            "gcs": {
                "destination": "string",
            },
            "s3": {
                "destination": "string",
                "canned_acl": "string",
                "enable_encryption": False,
                "encryption_type": "string",
                "endpoint": "string",
                "kms_key": "string",
                "region": "string",
            },
            "volumes": {
                "destination": "string",
            },
            "workspace": {
                "destination": "string",
            },
        }],
        "instance_pool_id": "string",
        "label": "string",
        "node_type_id": "string",
        "num_workers": 0,
        "policy_id": "string",
        "spark_conf": {
            "string": "string",
        },
        "spark_env_vars": {
            "string": "string",
        },
        "ssh_public_keys": ["string"],
    }],
    configuration={
        "string": "string",
    },
    continuous=False,
    creator_user_name="string",
    deployment={
        "kind": "string",
        "metadata_file_path": "string",
    },
    development=False,
    edition="string",
    expected_last_modified=0,
    filters={
        "excludes": ["string"],
        "includes": ["string"],
    },
    gateway_definition={
        "connection_id": "string",
        "connection_name": "string",
        "gateway_storage_catalog": "string",
        "gateway_storage_name": "string",
        "gateway_storage_schema": "string",
    },
    health="string",
    ingestion_definition={
        "connection_name": "string",
        "ingestion_gateway_id": "string",
        "objects": [{
            "report": {
                "destination_catalog": "string",
                "destination_schema": "string",
                "destination_table": "string",
                "source_url": "string",
                "table_configuration": {
                    "primary_keys": ["string"],
                    "salesforce_include_formula_fields": False,
                    "scd_type": "string",
                    "sequence_bies": ["string"],
                },
            },
            "schema": {
                "destination_catalog": "string",
                "destination_schema": "string",
                "source_catalog": "string",
                "source_schema": "string",
                "table_configuration": {
                    "primary_keys": ["string"],
                    "salesforce_include_formula_fields": False,
                    "scd_type": "string",
                    "sequence_bies": ["string"],
                },
            },
            "table": {
                "destination_catalog": "string",
                "destination_schema": "string",
                "destination_table": "string",
                "source_catalog": "string",
                "source_schema": "string",
                "source_table": "string",
                "table_configuration": {
                    "primary_keys": ["string"],
                    "salesforce_include_formula_fields": False,
                    "scd_type": "string",
                    "sequence_bies": ["string"],
                },
            },
        }],
        "table_configuration": {
            "primary_keys": ["string"],
            "salesforce_include_formula_fields": False,
            "scd_type": "string",
            "sequence_bies": ["string"],
        },
    },
    last_modified=0,
    latest_updates=[{
        "creation_time": "string",
        "state": "string",
        "update_id": "string",
    }],
    libraries=[{
        "file": {
            "path": "string",
        },
        "jar": "string",
        "maven": {
            "coordinates": "string",
            "exclusions": ["string"],
            "repo": "string",
        },
        "notebook": {
            "path": "string",
        },
    }],
    name="string",
    notifications=[{
        "alerts": ["string"],
        "email_recipients": ["string"],
    }],
    photon=False,
    restart_window={
        "start_hour": 0,
        "days_of_weeks": ["string"],
        "time_zone_id": "string",
    },
    run_as={
        "service_principal_name": "string",
        "user_name": "string",
    },
    schema="string",
    serverless=False,
    state="string",
    storage="string",
    target="string",
    trigger={
        "cron": {
            "quartz_cron_schedule": "string",
            "timezone_id": "string",
        },
        "manual": {},
    },
    url="string")
const pipelineResource = new databricks.Pipeline("pipelineResource", {
    allowDuplicateNames: false,
    budgetPolicyId: "string",
    catalog: "string",
    cause: "string",
    channel: "string",
    clusterId: "string",
    clusters: [{
        applyPolicyDefaultValues: false,
        autoscale: {
            maxWorkers: 0,
            minWorkers: 0,
            mode: "string",
        },
        awsAttributes: {
            availability: "string",
            ebsVolumeCount: 0,
            ebsVolumeIops: 0,
            ebsVolumeSize: 0,
            ebsVolumeThroughput: 0,
            ebsVolumeType: "string",
            firstOnDemand: 0,
            instanceProfileArn: "string",
            spotBidPricePercent: 0,
            zoneId: "string",
        },
        azureAttributes: {
            availability: "string",
            firstOnDemand: 0,
            logAnalyticsInfo: {
                logAnalyticsPrimaryKey: "string",
                logAnalyticsWorkspaceId: "string",
            },
            spotBidMaxPrice: 0,
        },
        clusterLogConf: {
            dbfs: {
                destination: "string",
            },
            s3: {
                destination: "string",
                cannedAcl: "string",
                enableEncryption: false,
                encryptionType: "string",
                endpoint: "string",
                kmsKey: "string",
                region: "string",
            },
            volumes: {
                destination: "string",
            },
        },
        customTags: {
            string: "string",
        },
        driverInstancePoolId: "string",
        driverNodeTypeId: "string",
        enableLocalDiskEncryption: false,
        gcpAttributes: {
            availability: "string",
            googleServiceAccount: "string",
            localSsdCount: 0,
            zoneId: "string",
        },
        initScripts: [{
            abfss: {
                destination: "string",
            },
            file: {
                destination: "string",
            },
            gcs: {
                destination: "string",
            },
            s3: {
                destination: "string",
                cannedAcl: "string",
                enableEncryption: false,
                encryptionType: "string",
                endpoint: "string",
                kmsKey: "string",
                region: "string",
            },
            volumes: {
                destination: "string",
            },
            workspace: {
                destination: "string",
            },
        }],
        instancePoolId: "string",
        label: "string",
        nodeTypeId: "string",
        numWorkers: 0,
        policyId: "string",
        sparkConf: {
            string: "string",
        },
        sparkEnvVars: {
            string: "string",
        },
        sshPublicKeys: ["string"],
    }],
    configuration: {
        string: "string",
    },
    continuous: false,
    creatorUserName: "string",
    deployment: {
        kind: "string",
        metadataFilePath: "string",
    },
    development: false,
    edition: "string",
    expectedLastModified: 0,
    filters: {
        excludes: ["string"],
        includes: ["string"],
    },
    gatewayDefinition: {
        connectionId: "string",
        connectionName: "string",
        gatewayStorageCatalog: "string",
        gatewayStorageName: "string",
        gatewayStorageSchema: "string",
    },
    health: "string",
    ingestionDefinition: {
        connectionName: "string",
        ingestionGatewayId: "string",
        objects: [{
            report: {
                destinationCatalog: "string",
                destinationSchema: "string",
                destinationTable: "string",
                sourceUrl: "string",
                tableConfiguration: {
                    primaryKeys: ["string"],
                    salesforceIncludeFormulaFields: false,
                    scdType: "string",
                    sequenceBies: ["string"],
                },
            },
            schema: {
                destinationCatalog: "string",
                destinationSchema: "string",
                sourceCatalog: "string",
                sourceSchema: "string",
                tableConfiguration: {
                    primaryKeys: ["string"],
                    salesforceIncludeFormulaFields: false,
                    scdType: "string",
                    sequenceBies: ["string"],
                },
            },
            table: {
                destinationCatalog: "string",
                destinationSchema: "string",
                destinationTable: "string",
                sourceCatalog: "string",
                sourceSchema: "string",
                sourceTable: "string",
                tableConfiguration: {
                    primaryKeys: ["string"],
                    salesforceIncludeFormulaFields: false,
                    scdType: "string",
                    sequenceBies: ["string"],
                },
            },
        }],
        tableConfiguration: {
            primaryKeys: ["string"],
            salesforceIncludeFormulaFields: false,
            scdType: "string",
            sequenceBies: ["string"],
        },
    },
    lastModified: 0,
    latestUpdates: [{
        creationTime: "string",
        state: "string",
        updateId: "string",
    }],
    libraries: [{
        file: {
            path: "string",
        },
        jar: "string",
        maven: {
            coordinates: "string",
            exclusions: ["string"],
            repo: "string",
        },
        notebook: {
            path: "string",
        },
    }],
    name: "string",
    notifications: [{
        alerts: ["string"],
        emailRecipients: ["string"],
    }],
    photon: false,
    restartWindow: {
        startHour: 0,
        daysOfWeeks: ["string"],
        timeZoneId: "string",
    },
    runAs: {
        servicePrincipalName: "string",
        userName: "string",
    },
    schema: "string",
    serverless: false,
    state: "string",
    storage: "string",
    target: "string",
    trigger: {
        cron: {
            quartzCronSchedule: "string",
            timezoneId: "string",
        },
        manual: {},
    },
    url: "string",
});
type: databricks:Pipeline
properties:
    allowDuplicateNames: false
    budgetPolicyId: string
    catalog: string
    cause: string
    channel: string
    clusterId: string
    clusters:
        - applyPolicyDefaultValues: false
          autoscale:
            maxWorkers: 0
            minWorkers: 0
            mode: string
          awsAttributes:
            availability: string
            ebsVolumeCount: 0
            ebsVolumeIops: 0
            ebsVolumeSize: 0
            ebsVolumeThroughput: 0
            ebsVolumeType: string
            firstOnDemand: 0
            instanceProfileArn: string
            spotBidPricePercent: 0
            zoneId: string
          azureAttributes:
            availability: string
            firstOnDemand: 0
            logAnalyticsInfo:
                logAnalyticsPrimaryKey: string
                logAnalyticsWorkspaceId: string
            spotBidMaxPrice: 0
          clusterLogConf:
            dbfs:
                destination: string
            s3:
                cannedAcl: string
                destination: string
                enableEncryption: false
                encryptionType: string
                endpoint: string
                kmsKey: string
                region: string
            volumes:
                destination: string
          customTags:
            string: string
          driverInstancePoolId: string
          driverNodeTypeId: string
          enableLocalDiskEncryption: false
          gcpAttributes:
            availability: string
            googleServiceAccount: string
            localSsdCount: 0
            zoneId: string
          initScripts:
            - abfss:
                destination: string
              file:
                destination: string
              gcs:
                destination: string
              s3:
                cannedAcl: string
                destination: string
                enableEncryption: false
                encryptionType: string
                endpoint: string
                kmsKey: string
                region: string
              volumes:
                destination: string
              workspace:
                destination: string
          instancePoolId: string
          label: string
          nodeTypeId: string
          numWorkers: 0
          policyId: string
          sparkConf:
            string: string
          sparkEnvVars:
            string: string
          sshPublicKeys:
            - string
    configuration:
        string: string
    continuous: false
    creatorUserName: string
    deployment:
        kind: string
        metadataFilePath: string
    development: false
    edition: string
    expectedLastModified: 0
    filters:
        excludes:
            - string
        includes:
            - string
    gatewayDefinition:
        connectionId: string
        connectionName: string
        gatewayStorageCatalog: string
        gatewayStorageName: string
        gatewayStorageSchema: string
    health: string
    ingestionDefinition:
        connectionName: string
        ingestionGatewayId: string
        objects:
            - report:
                destinationCatalog: string
                destinationSchema: string
                destinationTable: string
                sourceUrl: string
                tableConfiguration:
                    primaryKeys:
                        - string
                    salesforceIncludeFormulaFields: false
                    scdType: string
                    sequenceBies:
                        - string
              schema:
                destinationCatalog: string
                destinationSchema: string
                sourceCatalog: string
                sourceSchema: string
                tableConfiguration:
                    primaryKeys:
                        - string
                    salesforceIncludeFormulaFields: false
                    scdType: string
                    sequenceBies:
                        - string
              table:
                destinationCatalog: string
                destinationSchema: string
                destinationTable: string
                sourceCatalog: string
                sourceSchema: string
                sourceTable: string
                tableConfiguration:
                    primaryKeys:
                        - string
                    salesforceIncludeFormulaFields: false
                    scdType: string
                    sequenceBies:
                        - string
        tableConfiguration:
            primaryKeys:
                - string
            salesforceIncludeFormulaFields: false
            scdType: string
            sequenceBies:
                - string
    lastModified: 0
    latestUpdates:
        - creationTime: string
          state: string
          updateId: string
    libraries:
        - file:
            path: string
          jar: string
          maven:
            coordinates: string
            exclusions:
                - string
            repo: string
          notebook:
            path: string
    name: string
    notifications:
        - alerts:
            - string
          emailRecipients:
            - string
    photon: false
    restartWindow:
        daysOfWeeks:
            - string
        startHour: 0
        timeZoneId: string
    runAs:
        servicePrincipalName: string
        userName: string
    schema: string
    serverless: false
    state: string
    storage: string
    target: string
    trigger:
        cron:
            quartzCronSchedule: string
            timezoneId: string
        manual: {}
    url: string
Pipeline Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The Pipeline resource accepts the following input properties:
- AllowDuplicateNames bool
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- BudgetPolicyId string
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- Catalog string
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- Cause string
- Channel string
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- ClusterId string
- Clusters
List<PipelineCluster> 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- Configuration Dictionary<string, string>
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- Continuous bool
- A flag indicating whether to run the pipeline continuously. The default value is false.
- CreatorUserName string
- Deployment
PipelineDeployment
- Deployment type of this pipeline. Supports the following attributes:
- Development bool
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- Edition string
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- ExpectedLastModified int
- Filters
PipelineFilters
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- GatewayDefinition PipelineGatewayDefinition
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- Health string
- IngestionDefinition PipelineIngestionDefinition
- LastModified int
- LatestUpdates List<PipelineLatestUpdate>
- Libraries
List<PipelineLibrary> 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- Name string
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- Notifications
List<PipelineNotification> 
- Photon bool
- A flag indicating whether to use Photon engine. The default value is false.
- RestartWindow PipelineRestartWindow
- RunAs PipelineRunAs
- Schema string
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- Serverless bool
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- State string
- Storage string
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- Target string
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- Trigger
PipelineTrigger 
- Url string
- URL of the DLT pipeline on the given workspace.
- AllowDuplicateNames bool
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- BudgetPolicyId string
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- Catalog string
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- Cause string
- Channel string
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- ClusterId string
- Clusters
[]PipelineCluster Args 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- Configuration map[string]string
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- Continuous bool
- A flag indicating whether to run the pipeline continuously. The default value is false.
- CreatorUserName string
- Deployment
PipelineDeploymentArgs
- Deployment type of this pipeline. Supports the following attributes:
- Development bool
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- Edition string
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- ExpectedLastModified int
- Filters
PipelineFiltersArgs
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- GatewayDefinition PipelineGatewayDefinitionArgs
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- Health string
- IngestionDefinition PipelineIngestionDefinitionArgs
- LastModified int
- LatestUpdates []PipelineLatestUpdateArgs
- Libraries
[]PipelineLibrary Args 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- Name string
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- Notifications
[]PipelineNotification Args 
- Photon bool
- A flag indicating whether to use Photon engine. The default value is false.
- RestartWindow PipelineRestartWindowArgs
- RunAs PipelineRunAsArgs
- Schema string
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- Serverless bool
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- State string
- Storage string
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- Target string
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- Trigger
PipelineTrigger Args 
- Url string
- URL of the DLT pipeline on the given workspace.
- allowDuplicateNames Boolean
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- budgetPolicyId String
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- catalog String
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- cause String
- channel String
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- clusterId String
- clusters
List<PipelineCluster> 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- configuration Map<String,String>
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- continuous Boolean
- A flag indicating whether to run the pipeline continuously. The default value is false.
- creatorUserName String
- deployment
PipelineDeployment
- Deployment type of this pipeline. Supports the following attributes:
- development Boolean
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- edition String
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- expectedLastModified Integer
- filters
PipelineFilters
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- gatewayDefinition PipelineGatewayDefinition
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- health String
- ingestionDefinition PipelineIngestionDefinition
- lastModified Integer
- latestUpdates List<PipelineLatestUpdate>
- libraries
List<PipelineLibrary> 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- name String
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- notifications
List<PipelineNotification> 
- photon Boolean
- A flag indicating whether to use Photon engine. The default value is false.
- restartWindow PipelineRestartWindow
- runAs PipelineRunAs
- schema String
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- serverless Boolean
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- state String
- storage String
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- target String
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- trigger
PipelineTrigger 
- url String
- URL of the DLT pipeline on the given workspace.
- allowDuplicateNames boolean
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- budgetPolicyId string
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- catalog string
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- cause string
- channel string
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- clusterId string
- clusters
PipelineCluster[] 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- configuration {[key: string]: string}
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- continuous boolean
- A flag indicating whether to run the pipeline continuously. The default value is false.
- creatorUserName string
- deployment
PipelineDeployment
- Deployment type of this pipeline. Supports the following attributes:
- development boolean
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- edition string
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- expectedLastModified number
- filters
PipelineFilters
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- gatewayDefinition PipelineGatewayDefinition
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- health string
- ingestionDefinition PipelineIngestionDefinition
- lastModified number
- latestUpdates PipelineLatestUpdate[]
- libraries
PipelineLibrary[] 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- name string
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- notifications
PipelineNotification[] 
- photon boolean
- A flag indicating whether to use Photon engine. The default value is false.
- restartWindow PipelineRestartWindow
- runAs PipelineRunAs
- schema string
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- serverless boolean
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- state string
- storage string
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- target string
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- trigger
PipelineTrigger 
- url string
- URL of the DLT pipeline on the given workspace.
- allow_duplicate_names bool
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- budget_policy_id str
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- catalog str
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- cause str
- channel str
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- cluster_id str
- clusters
Sequence[PipelineCluster Args] 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- configuration Mapping[str, str]
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- continuous bool
- A flag indicating whether to run the pipeline continuously. The default value is false.
- creator_user_name str
- deployment
PipelineDeploymentArgs
- Deployment type of this pipeline. Supports the following attributes:
- development bool
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- edition str
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- expected_last_modified int
- filters
PipelineFiltersArgs
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- gateway_definition PipelineGatewayDefinitionArgs
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- health str
- ingestion_definition PipelineIngestionDefinitionArgs
- last_modified int
- latest_updates Sequence[PipelineLatestUpdateArgs]
- libraries
Sequence[PipelineLibrary Args] 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- name str
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- notifications
Sequence[PipelineNotification Args] 
- photon bool
- A flag indicating whether to use Photon engine. The default value is false.
- restart_window PipelineRestartWindowArgs
- run_as PipelineRunAsArgs
- schema str
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- serverless bool
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- state str
- storage str
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- target str
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- trigger
PipelineTrigger Args 
- url str
- URL of the DLT pipeline on the given workspace.
- allowDuplicateNames Boolean
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- budgetPolicyId String
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- catalog String
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- cause String
- channel String
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- clusterId String
- clusters List<Property Map>
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm). A short TypeScript sketch illustrating this follows the property list.
- configuration Map<String>
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- continuous Boolean
- A flag indicating whether to run the pipeline continuously. The default value is false.
- creatorUserName String
- deployment Property Map
- Deployment type of this pipeline. Supports the following attributes:
- development Boolean
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- edition String
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- expectedLastModified Number
- filters Property Map
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- gatewayDefinition Property Map
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- health String
- ingestionDefinition Property Map
- lastModified Number
- latestUpdates List<Property Map>
- libraries List<Property Map>
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- name String
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- notifications List<Property Map>
- photon Boolean
- A flag indicating whether to use Photon engine. The default value is false.
- restartWindow Property Map
- runAs Property Map
- schema String
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- serverless Boolean
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- state String
- storage String
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- target String
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- trigger Property Map
- url String
- URL of the DLT pipeline on the given workspace.
Outputs
All input properties are implicitly available as output properties. Additionally, the Pipeline resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- RunAsUserName string
- Id string
- The provider-assigned unique ID for this managed resource.
- RunAsUserName string
- id String
- The provider-assigned unique ID for this managed resource.
- runAsUserName String
- id string
- The provider-assigned unique ID for this managed resource.
- runAsUserName string
- id str
- The provider-assigned unique ID for this managed resource.
- run_as_user_name str
- id String
- The provider-assigned unique ID for this managed resource.
- runAsUserName String
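For example, once a pipeline such as the one sketched above has been declared, its output properties can be exported like any other Pulumi outputs; myPipeline is an assumed variable name, not something defined on this page.
// Assuming `myPipeline` is a databricks.Pipeline declared elsewhere in the program.
export const pipelineId = myPipeline.id;            // provider-assigned unique ID
export const pipelineUrl = myPipeline.url;          // URL of the DLT pipeline in the workspace
export const pipelineRunAs = myPipeline.runAsUserName;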
Look up Existing Pipeline Resource
Get an existing Pipeline resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: PipelineState, opts?: CustomResourceOptions): Pipeline
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        allow_duplicate_names: Optional[bool] = None,
        budget_policy_id: Optional[str] = None,
        catalog: Optional[str] = None,
        cause: Optional[str] = None,
        channel: Optional[str] = None,
        cluster_id: Optional[str] = None,
        clusters: Optional[Sequence[PipelineClusterArgs]] = None,
        configuration: Optional[Mapping[str, str]] = None,
        continuous: Optional[bool] = None,
        creator_user_name: Optional[str] = None,
        deployment: Optional[PipelineDeploymentArgs] = None,
        development: Optional[bool] = None,
        edition: Optional[str] = None,
        expected_last_modified: Optional[int] = None,
        filters: Optional[PipelineFiltersArgs] = None,
        gateway_definition: Optional[PipelineGatewayDefinitionArgs] = None,
        health: Optional[str] = None,
        ingestion_definition: Optional[PipelineIngestionDefinitionArgs] = None,
        last_modified: Optional[int] = None,
        latest_updates: Optional[Sequence[PipelineLatestUpdateArgs]] = None,
        libraries: Optional[Sequence[PipelineLibraryArgs]] = None,
        name: Optional[str] = None,
        notifications: Optional[Sequence[PipelineNotificationArgs]] = None,
        photon: Optional[bool] = None,
        restart_window: Optional[PipelineRestartWindowArgs] = None,
        run_as: Optional[PipelineRunAsArgs] = None,
        run_as_user_name: Optional[str] = None,
        schema: Optional[str] = None,
        serverless: Optional[bool] = None,
        state: Optional[str] = None,
        storage: Optional[str] = None,
        target: Optional[str] = None,
        trigger: Optional[PipelineTriggerArgs] = None,
        url: Optional[str] = None) -> Pipeline
func GetPipeline(ctx *Context, name string, id IDInput, state *PipelineState, opts ...ResourceOption) (*Pipeline, error)
public static Pipeline Get(string name, Input<string> id, PipelineState? state, CustomResourceOptions? opts = null)
public static Pipeline get(String name, Output<String> id, PipelineState state, CustomResourceOptions options)
resources:
  _:
    type: databricks:Pipeline
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
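The lookup described above can be sketched in TypeScript as follows; the pipeline ID is a placeholder and would normally come from stack configuration or another stack's outputs.
import * as databricks from "@pulumi/databricks";
// Look up an existing pipeline by its provider ID (placeholder value shown).
// Only the logical name and the ID are required; extra state arguments are optional.
const existing = databricks.Pipeline.get("existing_pipeline", "1234-567890-abcdefgh");
// The looked-up resource exposes the same outputs as a newly created one.
export const existingPipelineUrl = existing.url;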
- AllowDuplicateNames bool
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- BudgetPolicyId string
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- Catalog string
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- Cause string
- Channel string
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- ClusterId string
- Clusters
List<PipelineCluster> 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- Configuration Dictionary<string, string>
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- Continuous bool
- A flag indicating whether to run the pipeline continuously. The default value is false.
- CreatorUserName string
- Deployment
PipelineDeployment
- Deployment type of this pipeline. Supports the following attributes:
- Development bool
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- Edition string
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- ExpectedLastModified int
- Filters
PipelineFilters
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- GatewayDefinition PipelineGatewayDefinition
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- Health string
- IngestionDefinition PipelineIngestionDefinition
- LastModified int
- LatestUpdates List<PipelineLatestUpdate>
- Libraries
List<PipelineLibrary> 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- Name string
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- Notifications
List<PipelineNotification> 
- Photon bool
- A flag indicating whether to use Photon engine. The default value is false.
- RestartWindow PipelineRestartWindow
- RunAs PipelineRunAs
- RunAsUserName string
- Schema string
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- Serverless bool
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- State string
- Storage string
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- Target string
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- Trigger
PipelineTrigger 
- Url string
- URL of the DLT pipeline on the given workspace.
- AllowDuplicateNames bool
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- BudgetPolicyId string
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- Catalog string
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- Cause string
- Channel string
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- ClusterId string
- Clusters
[]PipelineCluster Args 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- Configuration map[string]string
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- Continuous bool
- A flag indicating whether to run the pipeline continuously. The default value is false.
- CreatorUserName string
- Deployment
PipelineDeploymentArgs
- Deployment type of this pipeline. Supports the following attributes:
- Development bool
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- Edition string
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- ExpectedLastModified int
- Filters
PipelineFiltersArgs
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- GatewayDefinition PipelineGatewayDefinitionArgs
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- Health string
- IngestionDefinition PipelineIngestionDefinitionArgs
- LastModified int
- LatestUpdates []PipelineLatestUpdateArgs
- Libraries
[]PipelineLibrary Args 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- Name string
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- Notifications
[]PipelineNotification Args 
- Photon bool
- A flag indicating whether to use Photon engine. The default value is false.
- RestartWindow PipelineRestartWindowArgs
- RunAs PipelineRunAsArgs
- RunAsUserName string
- Schema string
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- Serverless bool
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- State string
- Storage string
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- Target string
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- Trigger
PipelineTrigger Args 
- Url string
- URL of the DLT pipeline on the given workspace.
- allowDuplicateNames Boolean
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- budgetPolicyId String
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- catalog String
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- cause String
- channel String
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- clusterId String
- clusters
List<PipelineCluster> 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- configuration Map<String,String>
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- continuous Boolean
- A flag indicating whether to run the pipeline continuously. The default value is false.
- creatorUserName String
- deployment
PipelineDeployment
- Deployment type of this pipeline. Supports the following attributes:
- development Boolean
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- edition String
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- expectedLastModified Integer
- filters
PipelineFilters
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- gatewayDefinition PipelineGatewayDefinition
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- health String
- ingestionDefinition PipelineIngestionDefinition
- lastModified Integer
- latestUpdates List<PipelineLatestUpdate>
- libraries
List<PipelineLibrary> 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- name String
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- notifications
List<PipelineNotification> 
- photon Boolean
- A flag indicating whether to use Photon engine. The default value is false.
- restartWindow PipelineRestartWindow
- runAs PipelineRunAs
- runAsUserName String
- schema String
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- serverless Boolean
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- state String
- storage String
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- target String
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- trigger
PipelineTrigger 
- url String
- URL of the DLT pipeline on the given workspace.
- allowDuplicateNames boolean
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- budgetPolicyId string
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- catalog string
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- cause string
- channel string
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- clusterId string
- clusters
PipelineCluster[] 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- configuration {[key: string]: string}
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- continuous boolean
- A flag indicating whether to run the pipeline continuously. The default value is false.
- creatorUserName string
- deployment
PipelineDeployment
- Deployment type of this pipeline. Supports the following attributes:
- development boolean
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- edition string
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- expectedLastModified number
- filters
PipelineFilters
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- gatewayDefinition PipelineGatewayDefinition
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- health string
- ingestionDefinition PipelineIngestionDefinition
- lastModified number
- latestUpdates PipelineLatestUpdate[]
- libraries
PipelineLibrary[] 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- name string
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- notifications
PipelineNotification[] 
- photon boolean
- A flag indicating whether to use Photon engine. The default value is false.
- restartWindow PipelineRestartWindow
- runAs PipelineRunAs
- runAsUserName string
- schema string
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- serverless boolean
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- state string
- storage string
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- target string
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- trigger
PipelineTrigger 
- url string
- URL of the DLT pipeline on the given workspace.
- allow_duplicate_names bool
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- budget_policy_id str
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- catalog str
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- cause str
- channel str
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- cluster_id str
- clusters
Sequence[PipelineCluster Args] 
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- configuration Mapping[str, str]
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- continuous bool
- A flag indicating whether to run the pipeline continuously. The default value is false.
- creator_user_name str
- deployment
PipelineDeploymentArgs
- Deployment type of this pipeline. Supports the following attributes:
- development bool
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- edition str
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- expected_last_modified int
- filters
PipelineFiltersArgs
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- gateway_definition PipelineGatewayDefinitionArgs
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- health str
- ingestion_definition PipelineIngestionDefinitionArgs
- last_modified int
- latest_updates Sequence[PipelineLatestUpdateArgs]
- libraries
Sequence[PipelineLibrary Args] 
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- name str
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- notifications
Sequence[PipelineNotification Args] 
- photon bool
- A flag indicating whether to use Photon engine. The default value is false.
- restart_window PipelineRestartWindowArgs
- run_as PipelineRunAsArgs
- run_as_user_name str
- schema str
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- serverless bool
- An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, as it can only be used with Unity Catalog.
- state str
- storage str
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- target str
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- trigger
PipelineTrigger Args 
- url str
- URL of the DLT pipeline on the given workspace.
- allowDuplicateNames Boolean
- Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. Default is false.
- budgetPolicyId String
- Optional string specifying the ID of the budget policy for this DLT pipeline.
- catalog String
- The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
- cause String
- channel String
- Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
- clusterId String
- clusters List<Property Map>
- Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
- configuration Map<String>
- An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
- continuous Boolean
- A flag indicating whether to run the pipeline continuously. The default value is false.
- creatorUserName String
- deployment Property Map
- Deployment type of this pipeline. Supports the following attributes:
- development Boolean
- A flag indicating whether to run the pipeline in development mode. The default value is false.
- edition String
- Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default). Not required when serverless is set to true.
- expectedLastModified Number
- filters Property Map
- Filters on which Pipeline packages to include in the deployed graph. This block consists of the following attributes:
- gatewayDefinition Property Map
- The definition of a gateway pipeline to support CDC. Consists of the following attributes:
- health String
- ingestionDefinition Property Map
- lastModified Number
- latestUpdates List<Property Map>
- libraries List<Property Map>
- Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that should have the path attribute. Right now only the notebook and file types are supported.
- name String
- A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
- notifications List<Property Map>
- photon Boolean
- A flag indicating whether to use Photon engine. The default value is false.
- restartWindow Property Map
- runAs Property Map
- runAsUserName String
- schema String
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- serverless Boolean
- An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires catalogto be set, as it could be used only with Unity Catalog.
- state String
- storage String
- A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
- target String
- The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
- trigger Property Map
- url String
- URL of the DLT pipeline on the given workspace.
Supporting Types
PipelineCluster, PipelineClusterArgs    
- ApplyPolicyDefaultValues bool
- Autoscale
PipelineClusterAutoscale
- AwsAttributes PipelineClusterAwsAttributes
- AzureAttributes PipelineClusterAzureAttributes
- ClusterLogConf PipelineClusterClusterLogConf
- CustomTags Dictionary<string, string>
- DriverInstancePoolId string
- DriverNodeTypeId string
- EnableLocalDiskEncryption bool
- GcpAttributes PipelineClusterGcpAttributes
- InitScripts List<PipelineClusterInitScript>
- InstancePoolId string
- Label string
- NodeTypeId string
- NumWorkers int
- PolicyId string
- SparkConf Dictionary<string, string>
- SparkEnvVars Dictionary<string, string>
- SshPublicKeys List<string>
- ApplyPolicyDefaultValues bool
- Autoscale
PipelineClusterAutoscale
- AwsAttributes PipelineClusterAwsAttributes
- AzureAttributes PipelineClusterAzureAttributes
- ClusterLogConf PipelineClusterClusterLogConf
- CustomTags map[string]string
- DriverInstancePoolId string
- DriverNodeTypeId string
- EnableLocalDiskEncryption bool
- GcpAttributes PipelineClusterGcpAttributes
- InitScripts []PipelineClusterInitScript
- InstancePoolId string
- Label string
- NodeTypeId string
- NumWorkers int
- PolicyId string
- SparkConf map[string]string
- SparkEnvVars map[string]string
- SshPublicKeys []string
- applyPolicy BooleanDefault Values 
- autoscale
PipelineCluster Autoscale 
- awsAttributes PipelineCluster Aws Attributes 
- azureAttributes PipelineCluster Azure Attributes 
- clusterLog PipelineConf Cluster Cluster Log Conf 
- Map<String,String>
- driverInstance StringPool Id 
- driverNode StringType Id 
- enableLocal BooleanDisk Encryption 
- gcpAttributes PipelineCluster Gcp Attributes 
- initScripts List<PipelineCluster Init Script> 
- instancePool StringId 
- label String
- nodeType StringId 
- numWorkers Integer
- policyId String
- sparkConf Map<String,String>
- sparkEnv Map<String,String>Vars 
- sshPublic List<String>Keys 
- applyPolicy booleanDefault Values 
- autoscale
PipelineCluster Autoscale 
- awsAttributes PipelineCluster Aws Attributes 
- azureAttributes PipelineCluster Azure Attributes 
- clusterLog PipelineConf Cluster Cluster Log Conf 
- {[key: string]: string}
- driverInstance stringPool Id 
- driverNode stringType Id 
- enableLocal booleanDisk Encryption 
- gcpAttributes PipelineCluster Gcp Attributes 
- initScripts PipelineCluster Init Script[] 
- instancePool stringId 
- label string
- nodeType stringId 
- numWorkers number
- policyId string
- sparkConf {[key: string]: string}
- sparkEnv {[key: string]: string}Vars 
- sshPublic string[]Keys 
- apply_policy_ booldefault_ values 
- autoscale
PipelineCluster Autoscale 
- aws_attributes PipelineCluster Aws Attributes 
- azure_attributes PipelineCluster Azure Attributes 
- cluster_log_ Pipelineconf Cluster Cluster Log Conf 
- Mapping[str, str]
- driver_instance_ strpool_ id 
- driver_node_ strtype_ id 
- enable_local_ booldisk_ encryption 
- gcp_attributes PipelineCluster Gcp Attributes 
- init_scripts Sequence[PipelineCluster Init Script] 
- instance_pool_ strid 
- label str
- node_type_ strid 
- num_workers int
- policy_id str
- spark_conf Mapping[str, str]
- spark_env_ Mapping[str, str]vars 
- ssh_public_ Sequence[str]keys 
- applyPolicy BooleanDefault Values 
- autoscale Property Map
- awsAttributes Property Map
- azureAttributes Property Map
- clusterLog Property MapConf 
- Map<String>
- driverInstance StringPool Id 
- driverNode StringType Id 
- enableLocal BooleanDisk Encryption 
- gcpAttributes Property Map
- initScripts List<Property Map>
- instancePool StringId 
- label String
- nodeType StringId 
- numWorkers Number
- policyId String
- sparkConf Map<String>
- sparkEnv Map<String>Vars 
- sshPublic List<String>Keys 
PipelineClusterAutoscale, PipelineClusterAutoscaleArgs      
- MaxWorkers int
- MinWorkers int
- Mode string
- MaxWorkers int
- MinWorkers int
- Mode string
- maxWorkers Integer
- minWorkers Integer
- mode String
- maxWorkers number
- minWorkers number
- mode string
- max_workers int
- min_workers int
- mode str
- maxWorkers Number
- minWorkers Number
- mode String
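Instead of a fixed numWorkers, a pipeline cluster can declare an autoscale block with minWorkers, maxWorkers, and an optional mode. A minimal sketch, with illustrative worker counts and an assumed ENHANCED mode value:
import * as databricks from "@pulumi/databricks";
// A sketch of a pipeline cluster that autoscales instead of using a fixed
// worker count. Label, tag values, and worker counts are illustrative only.
const autoscaled = new databricks.Pipeline("autoscaled", {
    name: "Autoscaling Pipeline",
    storage: "/test/autoscaling-pipeline",
    clusters: [{
        label: "default",
        autoscale: {
            minWorkers: 1,
            maxWorkers: 5,
            mode: "ENHANCED",   // assumed value; check the provider docs for legal modes
        },
        customTags: {
            cluster_type: "default",
        },
    }],
});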
PipelineClusterAwsAttributes, PipelineClusterAwsAttributesArgs        
- Availability string
- EbsVolumeCount int
- EbsVolumeIops int
- EbsVolumeSize int
- EbsVolumeThroughput int
- EbsVolumeType string
- FirstOnDemand int
- InstanceProfileArn string
- SpotBidPricePercent int
- ZoneId string
- Availability string
- EbsVolumeCount int
- EbsVolumeIops int
- EbsVolumeSize int
- EbsVolumeThroughput int
- EbsVolumeType string
- FirstOnDemand int
- InstanceProfileArn string
- SpotBidPricePercent int
- ZoneId string
- availability String
- ebsVolumeCount Integer
- ebsVolumeIops Integer
- ebsVolumeSize Integer
- ebsVolumeThroughput Integer
- ebsVolumeType String
- firstOnDemand Integer
- instanceProfileArn String
- spotBidPricePercent Integer
- zoneId String
- availability string
- ebsVolumeCount number
- ebsVolumeIops number
- ebsVolumeSize number
- ebsVolumeThroughput number
- ebsVolumeType string
- firstOnDemand number
- instanceProfileArn string
- spotBidPricePercent number
- zoneId string
- availability str
- ebs_volume_count int
- ebs_volume_iops int
- ebs_volume_size int
- ebs_volume_throughput int
- ebs_volume_type str
- first_on_demand int
- instance_profile_arn str
- spot_bid_price_percent int
- zone_id str
- availability String
- ebsVolumeCount Number
- ebsVolumeIops Number
- ebsVolumeSize Number
- ebsVolumeThroughput Number
- ebsVolumeType String
- firstOnDemand Number
- instanceProfileArn String
- spotBidPricePercent Number
- zoneId String
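The AWS attributes above tune spot usage and EBS storage for a pipeline cluster. A hedged sketch with placeholder zone, instance profile ARN, and volume sizing:
import * as databricks from "@pulumi/databricks";
// A sketch of AWS-specific settings on a pipeline cluster; all values are placeholders.
const awsTuned = new databricks.Pipeline("aws_tuned", {
    name: "AWS-tuned Pipeline",
    storage: "/test/aws-pipeline",
    clusters: [{
        label: "default",
        numWorkers: 2,
        awsAttributes: {
            availability: "SPOT_WITH_FALLBACK",
            firstOnDemand: 1,
            spotBidPricePercent: 100,
            zoneId: "auto",
            instanceProfileArn: "arn:aws:iam::123456789012:instance-profile/dlt", // hypothetical
            ebsVolumeType: "GENERAL_PURPOSE_SSD",
            ebsVolumeCount: 1,
            ebsVolumeSize: 100, // GB
        },
    }],
});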
PipelineClusterAzureAttributes, PipelineClusterAzureAttributesArgs        
- availability String
- firstOnDemand Number
- logAnalyticsInfo Property Map
- spotBidMaxPrice Number
PipelineClusterAzureAttributesLogAnalyticsInfo, PipelineClusterAzureAttributesLogAnalyticsInfoArgs              
- LogAnalyticsPrimaryKey string
- LogAnalyticsWorkspaceId string
- LogAnalyticsPrimaryKey string
- LogAnalyticsWorkspaceId string
- logAnalyticsPrimaryKey String
- logAnalyticsWorkspaceId String
- logAnalyticsPrimaryKey string
- logAnalyticsWorkspaceId string
- logAnalyticsPrimaryKey String
- logAnalyticsWorkspaceId String
PipelineClusterClusterLogConf, PipelineClusterClusterLogConfArgs          
PipelineClusterClusterLogConfDbfs, PipelineClusterClusterLogConfDbfsArgs            
- Destination string
- Destination string
- destination String
- destination string
- destination str
- destination String
PipelineClusterClusterLogConfS3, PipelineClusterClusterLogConfS3Args            
- Destination string
- CannedAcl string
- EnableEncryption bool
- EncryptionType string
- Endpoint string
- KmsKey string
- Region string
- Destination string
- CannedAcl string
- EnableEncryption bool
- EncryptionType string
- Endpoint string
- KmsKey string
- Region string
- destination String
- cannedAcl String
- enableEncryption Boolean
- encryptionType String
- endpoint String
- kmsKey String
- region String
- destination string
- cannedAcl string
- enableEncryption boolean
- encryptionType string
- endpoint string
- kmsKey string
- region string
- destination str
- canned_acl str
- enable_encryption bool
- encryption_type str
- endpoint str
- kms_key str
- region str
- destination String
- cannedAcl String
- enableEncryption Boolean
- encryptionType String
- endpoint String
- kmsKey String
- region String
PipelineClusterClusterLogConfVolumes, PipelineClusterClusterLogConfVolumesArgs            
- Destination string
- Destination string
- destination String
- destination string
- destination str
- destination String
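Any of the cluster log destinations above can be attached to a pipeline cluster via clusterLogConf. A sketch using an S3 destination with a placeholder bucket:
import * as databricks from "@pulumi/databricks";
// A sketch of shipping pipeline cluster logs to S3; dbfs or volumes
// destinations work the same way. The bucket path is a placeholder.
const withClusterLogs = new databricks.Pipeline("with_cluster_logs", {
    name: "Pipeline With Cluster Logs",
    storage: "/test/logged-pipeline",
    clusters: [{
        label: "default",
        numWorkers: 1,
        clusterLogConf: {
            s3: {
                destination: "s3://my-bucket/dlt-cluster-logs", // hypothetical bucket
                region: "us-east-1",
                enableEncryption: true,
            },
        },
    }],
});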
PipelineClusterGcpAttributes, PipelineClusterGcpAttributesArgs        
- Availability string
- GoogleServiceAccount string
- LocalSsdCount int
- ZoneId string
- Availability string
- GoogleServiceAccount string
- LocalSsdCount int
- ZoneId string
- availability String
- googleServiceAccount String
- localSsdCount Integer
- zoneId String
- availability string
- googleServiceAccount string
- localSsdCount number
- zoneId string
- availability str
- google_service_account str
- local_ssd_count int
- zone_id str
- availability String
- googleServiceAccount String
- localSsdCount Number
- zoneId String
PipelineClusterInitScript, PipelineClusterInitScriptArgs        
PipelineClusterInitScriptAbfss, PipelineClusterInitScriptAbfssArgs          
- Destination string
- Destination string
- destination String
- destination string
- destination str
- destination String
PipelineClusterInitScriptDbfs, PipelineClusterInitScriptDbfsArgs          
- Destination string
- Destination string
- destination String
- destination string
- destination str
- destination String
PipelineClusterInitScriptFile, PipelineClusterInitScriptFileArgs          
- Destination string
- Destination string
- destination String
- destination string
- destination str
- destination String
PipelineClusterInitScriptGcs, PipelineClusterInitScriptGcsArgs          
- Destination string
- Destination string
- destination String
- destination string
- destination str
- destination String
PipelineClusterInitScriptS3, PipelineClusterInitScriptS3Args          
- Destination string
- CannedAcl string
- EnableEncryption bool
- EncryptionType string
- Endpoint string
- KmsKey string
- Region string
- Destination string
- CannedAcl string
- EnableEncryption bool
- EncryptionType string
- Endpoint string
- KmsKey string
- Region string
- destination String
- cannedAcl String
- enableEncryption Boolean
- encryptionType String
- endpoint String
- kmsKey String
- region String
- destination string
- cannedAcl string
- enableEncryption boolean
- encryptionType string
- endpoint string
- kmsKey string
- region string
- destination str
- canned_acl str
- enable_encryption bool
- encryption_type str
- endpoint str
- kms_key str
- region str
- destination String
- cannedAcl String
- enableEncryption Boolean
- encryptionType String
- endpoint String
- kmsKey String
- region String
PipelineClusterInitScriptVolumes, PipelineClusterInitScriptVolumesArgs          
- Destination string
- Destination string
- destination String
- destination string
- destination str
- destination String
PipelineClusterInitScriptWorkspace, PipelineClusterInitScriptWorkspaceArgs          
- Destination string
- Destination string
- destination String
- destination string
- destination str
- destination String
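Init scripts accept the same destination shapes (abfss, dbfs, file, gcs, s3, volumes, workspace). A sketch attaching one script from a Unity Catalog volume and one from a workspace file, both placeholder paths:
import * as databricks from "@pulumi/databricks";
// A sketch of attaching init scripts to a pipeline cluster.
const withInitScripts = new databricks.Pipeline("with_init_scripts", {
    name: "Pipeline With Init Scripts",
    storage: "/test/init-script-pipeline",
    clusters: [{
        label: "default",
        numWorkers: 1,
        initScripts: [
            { volumes: { destination: "/Volumes/main/default/scripts/setup.sh" } },  // hypothetical
            { workspace: { destination: "/Users/user@domain.com/init/extra.sh" } },  // hypothetical
        ],
    }],
});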
PipelineDeployment, PipelineDeploymentArgs    
- Kind string
- The deployment method that manages the pipeline.
- MetadataFilePath string
- The path to the file containing metadata about the deployment.
- Kind string
- The deployment method that manages the pipeline.
- MetadataFilePath string
- The path to the file containing metadata about the deployment.
- kind String
- The deployment method that manages the pipeline.
- metadataFilePath String
- The path to the file containing metadata about the deployment.
- kind string
- The deployment method that manages the pipeline.
- metadataFilePath string
- The path to the file containing metadata about the deployment.
- kind str
- The deployment method that manages the pipeline.
- metadata_file_path str
- The path to the file containing metadata about the deployment.
- kind String
- The deployment method that manages the pipeline.
- metadataFilePath String
- The path to the file containing metadata about the deployment.
PipelineFilters, PipelineFiltersArgs    
PipelineGatewayDefinition, PipelineGatewayDefinitionArgs      
- ConnectionId string
- Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
- ConnectionName string
- GatewayStorageCatalog string
- Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
- GatewayStorageName string
- Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. The Delta Live Tables system will automatically create the storage location under the catalog and schema.
- GatewayStorageSchema string
- Required, Immutable. The name of the schema for the gateway pipeline's storage location.
- ConnectionId string
- Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
- ConnectionName string
- GatewayStorageCatalog string
- Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
- GatewayStorageName string
- Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. The Delta Live Tables system will automatically create the storage location under the catalog and schema.
- GatewayStorageSchema string
- Required, Immutable. The name of the schema for the gateway pipeline's storage location.
- connectionId String
- Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
- connectionName String
- gatewayStorageCatalog String
- Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
- gatewayStorageName String
- Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. The Delta Live Tables system will automatically create the storage location under the catalog and schema.
- gatewayStorageSchema String
- Required, Immutable. The name of the schema for the gateway pipeline's storage location.
- connectionId string
- Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
- connectionName string
- gatewayStorageCatalog string
- Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
- gatewayStorageName string
- Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. The Delta Live Tables system will automatically create the storage location under the catalog and schema.
- gatewayStorageSchema string
- Required, Immutable. The name of the schema for the gateway pipeline's storage location.
- connection_id str
- Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
- connection_name str
- gateway_storage_catalog str
- Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
- gateway_storage_name str
- Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. The Delta Live Tables system will automatically create the storage location under the catalog and schema.
- gateway_storage_schema str
- Required, Immutable. The name of the schema for the gateway pipeline's storage location.
- connectionId String
- Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
- connectionName String
- gatewayStorageCatalog String
- Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
- gatewayStorageName String
- Required. The Unity Catalog-compatible naming for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. The Delta Live Tables system will automatically create the storage location under the catalog and schema.
- gatewayStorageSchema String
- Required, Immutable. The name of the schema for the gateway pipeline's storage location.
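A sketch of a gateway pipeline for CDC, assuming a pre-existing Unity Catalog connection; the connection and storage names are placeholders:
import * as databricks from "@pulumi/databricks";
// A minimal sketch of the gatewayDefinition block; values are illustrative only.
const gateway = new databricks.Pipeline("gateway", {
    name: "CDC Gateway",
    gatewayDefinition: {
        connectionName: "sqlserver-connection",   // hypothetical UC connection
        gatewayStorageCatalog: "main",
        gatewayStorageSchema: "cdc_staging",
        gatewayStorageName: "sqlserver_gateway",
    },
});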
PipelineIngestionDefinition, PipelineIngestionDefinitionArgs      
PipelineIngestionDefinitionObject, PipelineIngestionDefinitionObjectArgs        
- Report PipelineIngestionDefinitionObjectReport
- Schema PipelineIngestionDefinitionObjectSchema
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- Table PipelineIngestionDefinitionObjectTable
- Report PipelineIngestionDefinitionObjectReport
- Schema PipelineIngestionDefinitionObjectSchema
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- Table PipelineIngestionDefinitionObjectTable
- report PipelineIngestionDefinitionObjectReport
- schema PipelineIngestionDefinitionObjectSchema
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- table PipelineIngestionDefinitionObjectTable
- report PipelineIngestionDefinitionObjectReport
- schema PipelineIngestionDefinitionObjectSchema
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- table PipelineIngestionDefinitionObjectTable
- report PipelineIngestionDefinitionObjectReport
- schema PipelineIngestionDefinitionObjectSchema
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- table PipelineIngestionDefinitionObjectTable
- report Property Map
- schema Property Map
- The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
- table Property Map
PipelineIngestionDefinitionObjectReport, PipelineIngestionDefinitionObjectReportArgs          
- destinationCatalog String
- destinationSchema String
- destinationTable String
- sourceUrl String
- tableConfiguration Property Map
PipelineIngestionDefinitionObjectReportTableConfiguration, PipelineIngestionDefinitionObjectReportTableConfigurationArgs              
- PrimaryKeys List<string>
- SalesforceIncludeFormulaFields bool
- ScdType string
- SequenceBies List<string>
- PrimaryKeys []string
- SalesforceIncludeFormulaFields bool
- ScdType string
- SequenceBies []string
- primaryKeys List<String>
- salesforceIncludeFormulaFields Boolean
- scdType String
- sequenceBies List<String>
- primaryKeys string[]
- salesforceIncludeFormulaFields boolean
- scdType string
- sequenceBies string[]
- primary_keys Sequence[str]
- salesforce_include_formula_fields bool
- scd_type str
- sequence_bies Sequence[str]
- primaryKeys List<String>
- salesforceIncludeFormulaFields Boolean
- scdType String
- sequenceBies List<String>
PipelineIngestionDefinitionObjectSchema, PipelineIngestionDefinitionObjectSchemaArgs          
- destinationCatalog String
- destinationSchema String
- sourceCatalog String
- sourceSchema String
- tableConfiguration Property Map
PipelineIngestionDefinitionObjectSchemaTableConfiguration, PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs              
- PrimaryKeys List<string>
- SalesforceIncludeFormulaFields bool
- ScdType string
- SequenceBies List<string>
- PrimaryKeys []string
- SalesforceIncludeFormulaFields bool
- ScdType string
- SequenceBies []string
- primaryKeys List<String>
- salesforceIncludeFormulaFields Boolean
- scdType String
- sequenceBies List<String>
- primaryKeys string[]
- salesforceIncludeFormulaFields boolean
- scdType string
- sequenceBies string[]
- primary_keys Sequence[str]
- salesforce_include_formula_fields bool
- scd_type str
- sequence_bies Sequence[str]
- primaryKeys List<String>
- salesforceIncludeFormulaFields Boolean
- scdType String
- sequenceBies List<String>
PipelineIngestionDefinitionObjectTable, PipelineIngestionDefinitionObjectTableArgs          
- DestinationCatalog string
- DestinationSchema string
- DestinationTable string
- SourceCatalog string
- SourceSchema string
- SourceTable string
- TableConfiguration PipelineIngestionDefinitionObjectTableTableConfiguration
- DestinationCatalog string
- DestinationSchema string
- DestinationTable string
- SourceCatalog string
- SourceSchema string
- SourceTable string
- TableConfiguration PipelineIngestionDefinitionObjectTableTableConfiguration
- destinationCatalog String
- destinationSchema String
- destinationTable String
- sourceCatalog String
- sourceSchema String
- sourceTable String
- tableConfiguration PipelineIngestionDefinitionObjectTableTableConfiguration
- destinationCatalog string
- destinationSchema string
- destinationTable string
- sourceCatalog string
- sourceSchema string
- sourceTable string
- tableConfiguration PipelineIngestionDefinitionObjectTableTableConfiguration
- destinationCatalog String
- destinationSchema String
- destinationTable String
- sourceCatalog String
- sourceSchema String
- sourceTable String
- tableConfiguration Property Map
PipelineIngestionDefinitionObjectTableTableConfiguration, PipelineIngestionDefinitionObjectTableTableConfigurationArgs              
- PrimaryKeys List<string>
- SalesforceIncludeFormulaFields bool
- ScdType string
- SequenceBies List<string>
- PrimaryKeys []string
- SalesforceIncludeFormulaFields bool
- ScdType string
- SequenceBies []string
- primaryKeys List<String>
- salesforceIncludeFormulaFields Boolean
- scdType String
- sequenceBies List<String>
- primaryKeys string[]
- salesforceIncludeFormulaFields boolean
- scdType string
- sequenceBies string[]
- primary_keys Sequence[str]
- salesforce_include_formula_fields bool
- scd_type str
- sequence_bies Sequence[str]
- primaryKeys List<String>
- salesforceIncludeFormulaFields Boolean
- scdType String
- sequenceBies List<String>
PipelineIngestionDefinitionTableConfiguration, PipelineIngestionDefinitionTableConfigurationArgs          
- PrimaryKeys List<string>
- SalesforceIncludeFormulaFields bool
- ScdType string
- SequenceBies List<string>
- PrimaryKeys []string
- SalesforceIncludeFormulaFields bool
- ScdType string
- SequenceBies []string
- primaryKeys List<String>
- salesforceIncludeFormulaFields Boolean
- scdType String
- sequenceBies List<String>
- primaryKeys string[]
- salesforceIncludeFormulaFields boolean
- scdType string
- sequenceBies string[]
- primary_keys Sequence[str]
- salesforce_include_formula_fields bool
- scd_type str
- sequence_bies Sequence[str]
- primaryKeys List<String>
- salesforceIncludeFormulaFields Boolean
- scdType String
- sequenceBies List<String>
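A sketch of a managed ingestion pipeline that lands one source table into Unity Catalog. The connectionName and objects field names on the ingestion definition are assumptions (only the nested object and table-configuration types are documented above), and every catalog, schema, and table name is a placeholder:
import * as databricks from "@pulumi/databricks";
// A hedged sketch of ingestionDefinition; field names outside the documented
// object/table-configuration types are assumed, and all values are illustrative.
const ingestion = new databricks.Pipeline("ingestion", {
    name: "Salesforce Ingestion",
    ingestionDefinition: {
        connectionName: "salesforce-connection",   // hypothetical UC connection
        objects: [{
            table: {
                sourceSchema: "objects",
                sourceTable: "Account",
                destinationCatalog: "main",
                destinationSchema: "salesforce",
            },
        }],
        tableConfiguration: {
            primaryKeys: ["Id"],
            scdType: "SCD_TYPE_2",                 // assumed legal value
            salesforceIncludeFormulaFields: false,
        },
    },
});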
PipelineLatestUpdate, PipelineLatestUpdateArgs      
- CreationTime string
- State string
- UpdateId string
- CreationTime string
- State string
- UpdateId string
- creationTime String
- state String
- updateId String
- creationTime string
- state string
- updateId string
- creation_time str
- state str
- update_id str
- creationTime String
- state String
- updateId String
PipelineLibrary, PipelineLibraryArgs    
- file Property Map
- jar String
- maven Property Map
- notebook Property Map
- whl String
PipelineLibraryFile, PipelineLibraryFileArgs      
- Path string
- Path string
- path String
- path string
- path str
- path String
PipelineLibraryMaven, PipelineLibraryMavenArgs      
- Coordinates string
- Exclusions List<string>
- Repo string
- Coordinates string
- Exclusions []string
- Repo string
- coordinates String
- exclusions List<String>
- repo String
- coordinates string
- exclusions string[]
- repo string
- coordinates str
- exclusions Sequence[str]
- repo str
- coordinates String
- exclusions List<String>
- repo String
PipelineLibraryNotebook, PipelineLibraryNotebookArgs      
- Path string
- Path string
- path String
- path string
- path str
- path String
PipelineNotification, PipelineNotificationArgs    
- Alerts List<string>
- non-empty list of alert types. Right now the following alert types are supported (consult the documentation for the actual list):
- on-update-success - a pipeline update completes successfully.
- on-update-failure - a pipeline update fails with a retryable error.
- on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
- on-flow-failure - a single data flow fails.
- EmailRecipients List<string>
- non-empty list of emails to notify.
- Alerts []string
- non-empty list of alert types. Right now the following alert types are supported (consult the documentation for the actual list):
- on-update-success - a pipeline update completes successfully.
- on-update-failure - a pipeline update fails with a retryable error.
- on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
- on-flow-failure - a single data flow fails.
- EmailRecipients []string
- non-empty list of emails to notify.
- alerts List<String>
- non-empty list of alert types. Right now the following alert types are supported (consult the documentation for the actual list):
- on-update-success - a pipeline update completes successfully.
- on-update-failure - a pipeline update fails with a retryable error.
- on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
- on-flow-failure - a single data flow fails.
- emailRecipients List<String>
- non-empty list of emails to notify.
- alerts string[]
- non-empty list of alert types. Right now the following alert types are supported (consult the documentation for the actual list):
- on-update-success - a pipeline update completes successfully.
- on-update-failure - a pipeline update fails with a retryable error.
- on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
- on-flow-failure - a single data flow fails.
- emailRecipients string[]
- non-empty list of emails to notify.
- alerts Sequence[str]
- non-empty list of alert types. Right now the following alert types are supported (consult the documentation for the actual list):
- on-update-success - a pipeline update completes successfully.
- on-update-failure - a pipeline update fails with a retryable error.
- on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
- on-flow-failure - a single data flow fails.
- email_recipients Sequence[str]
- non-empty list of emails to notify.
- alerts List<String>
- non-empty list of alert types. Right now the following alert types are supported (consult the documentation for the actual list):
- on-update-success - a pipeline update completes successfully.
- on-update-failure - a pipeline update fails with a retryable error.
- on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
- on-flow-failure - a single data flow fails.
- emailRecipients List<String>
- non-empty list of emails to notify.
PipelineRestartWindow, PipelineRestartWindowArgs      
- StartHour int
- DaysOfWeeks List<string>
- TimeZoneId string
- StartHour int
- DaysOfWeeks []string
- TimeZoneId string
- startHour Integer
- daysOfWeeks List<String>
- timeZoneId String
- startHour number
- daysOfWeeks string[]
- timeZoneId string
- start_hour int
- days_of_weeks Sequence[str]
- time_zone_id str
- startHour Number
- daysOfWeeks List<String>
- timeZoneId String
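A sketch of constraining when a continuous pipeline may be restarted, with illustrative hour, day, and time-zone values:
import * as databricks from "@pulumi/databricks";
// A minimal sketch of restartWindow; the day names are assumed legal values.
const withRestartWindow = new databricks.Pipeline("with_restart_window", {
    name: "Continuous Pipeline",
    storage: "/test/continuous-pipeline",
    continuous: true,
    restartWindow: {
        startHour: 2,                          // 2 AM in the given time zone
        daysOfWeeks: ["SATURDAY", "SUNDAY"],   // assumed legal day names
        timeZoneId: "America/Los_Angeles",
    },
});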
PipelineRunAs, PipelineRunAsArgs      
- ServicePrincipalName string
- UserName string
- ServicePrincipalName string
- UserName string
- servicePrincipalName String
- userName String
- servicePrincipalName string
- userName string
- service_principal_name str
- user_name str
- servicePrincipalName String
- userName String
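A sketch of running the pipeline as a service principal instead of the deploying user; the application ID is a placeholder:
import * as databricks from "@pulumi/databricks";
// A minimal sketch of runAs; use userName instead to run as a workspace user.
const runAsSp = new databricks.Pipeline("run_as_sp", {
    name: "Pipeline Run As Service Principal",
    storage: "/test/run-as-pipeline",
    runAs: {
        servicePrincipalName: "00000000-0000-0000-0000-000000000000", // hypothetical application ID
    },
});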
PipelineTrigger, PipelineTriggerArgs    
PipelineTriggerCron, PipelineTriggerCronArgs      
- QuartzCronSchedule string
- TimezoneId string
- QuartzCronSchedule string
- TimezoneId string
- quartzCronSchedule String
- timezoneId String
- quartzCronSchedule string
- timezoneId string
- quartz_cron_schedule str
- timezone_id str
- quartzCronSchedule String
- timezoneId String
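A sketch of a triggered (non-continuous) pipeline refreshed on a Quartz cron schedule; the schedule and time zone are illustrative:
import * as databricks from "@pulumi/databricks";
// A minimal sketch of trigger.cron on a non-continuous pipeline.
const scheduled = new databricks.Pipeline("scheduled", {
    name: "Scheduled Pipeline",
    storage: "/test/scheduled-pipeline",
    continuous: false,
    trigger: {
        cron: {
            quartzCronSchedule: "0 0 6 * * ?",   // every day at 06:00
            timezoneId: "UTC",
        },
    },
});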
Import
The resource pipeline can be imported using the id of the pipeline:
bash
$ pulumi import databricks:index/pipeline:Pipeline this <pipeline-id>
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the databricks Terraform Provider.