databricks.Credential
Explore with Pulumi AI
This resource can only be used with a workspace-level provider.
A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant. Each credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential.
The type of credential to be created is determined by the purpose field, which should be either SERVICE or STORAGE.
The caller must be a metastore admin or have the metastore privilege CREATE_STORAGE_CREDENTIAL for storage credentials, or CREATE_SERVICE_CREDENTIAL for service credentials. The user who creates the credential can delegate ownership to another user or group to manage permissions on it.
On AWS, the IAM role for a credential requires a trust policy. See documentation for more details. The data source databricks.getAwsUnityCatalogAssumeRolePolicy can be used to create the necessary AWS Unity Catalog assume role policy.
Example Usage
For AWS
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const external = new databricks.Credential("external", {
    name: externalDataAccess.name,
    awsIamRole: {
        roleArn: externalDataAccess.arn,
    },
    purpose: "SERVICE",
    comment: "Managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
    credential: external.id,
    grants: [{
        principal: "Data Engineers",
        privileges: ["ACCESS"],
    }],
});
import pulumi
import pulumi_databricks as databricks
external = databricks.Credential("external",
    name=external_data_access["name"],
    aws_iam_role={
        "role_arn": external_data_access["arn"],
    },
    purpose="SERVICE",
    comment="Managed by TF")
external_creds = databricks.Grants("external_creds",
    credential=external.id,
    grants=[{
        "principal": "Data Engineers",
        "privileges": ["ACCESS"],
    }])
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		external, err := databricks.NewCredential(ctx, "external", &databricks.CredentialArgs{
			Name: pulumi.Any(externalDataAccess.Name),
			AwsIamRole: &databricks.CredentialAwsIamRoleArgs{
				RoleArn: pulumi.Any(externalDataAccess.Arn),
			},
			Purpose: pulumi.String("SERVICE"),
			Comment: pulumi.String("Managed by TF"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
			Credential: external.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("ACCESS"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var external = new Databricks.Credential("external", new()
    {
        Name = externalDataAccess.Name,
        AwsIamRole = new Databricks.Inputs.CredentialAwsIamRoleArgs
        {
            RoleArn = externalDataAccess.Arn,
        },
        Purpose = "SERVICE",
        Comment = "Managed by TF",
    });
    var externalCreds = new Databricks.Grants("external_creds", new()
    {
        Credential = external.Id,
        GrantDetails = new[]
        {
            new Databricks.Inputs.GrantsGrantArgs
            {
                Principal = "Data Engineers",
                Privileges = new[]
                {
                    "ACCESS",
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Credential;
import com.pulumi.databricks.CredentialArgs;
import com.pulumi.databricks.inputs.CredentialAwsIamRoleArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var external = new Credential("external", CredentialArgs.builder()
            .name(externalDataAccess.name())
            .awsIamRole(CredentialAwsIamRoleArgs.builder()
                .roleArn(externalDataAccess.arn())
                .build())
            .purpose("SERVICE")
            .comment("Managed by TF")
            .build());
        var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
            .credential(external.id())
            .grants(GrantsGrantArgs.builder()
                .principal("Data Engineers")
                .privileges("ACCESS")
                .build())
            .build());
    }
}
resources:
  external:
    type: databricks:Credential
    properties:
      name: ${externalDataAccess.name}
      awsIamRole:
        roleArn: ${externalDataAccess.arn}
      purpose: SERVICE
      comment: Managed by TF
  externalCreds:
    type: databricks:Grants
    name: external_creds
    properties:
      credential: ${external.id}
      grants:
        - principal: Data Engineers
          privileges:
            - ACCESS
For Azure
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const externalMi = new databricks.Credential("external_mi", {
    name: "mi_credential",
    azureManagedIdentity: {
        accessConnectorId: example.id,
    },
    purpose: "SERVICE",
    comment: "Managed identity credential managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
    credential: externalMi.id,
    grants: [{
        principal: "Data Engineers",
        privileges: ["ACCESS"],
    }],
});
import pulumi
import pulumi_databricks as databricks
external_mi = databricks.Credential("external_mi",
    name="mi_credential",
    azure_managed_identity={
        "access_connector_id": example["id"],
    },
    purpose="SERVICE",
    comment="Managed identity credential managed by TF")
external_creds = databricks.Grants("external_creds",
    credential=external_mi.id,
    grants=[{
        "principal": "Data Engineers",
        "privileges": ["ACCESS"],
    }])
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		externalMi, err := databricks.NewCredential(ctx, "external_mi", &databricks.CredentialArgs{
			Name: pulumi.String("mi_credential"),
			AzureManagedIdentity: &databricks.CredentialAzureManagedIdentityArgs{
				AccessConnectorId: pulumi.Any(example.Id),
			},
			Purpose: pulumi.String("SERVICE"),
			Comment: pulumi.String("Managed identity credential managed by TF"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
			Credential: externalMi.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("ACCESS"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var externalMi = new Databricks.Credential("external_mi", new()
    {
        Name = "mi_credential",
        AzureManagedIdentity = new Databricks.Inputs.CredentialAzureManagedIdentityArgs
        {
            AccessConnectorId = example.Id,
        },
        Purpose = "SERVICE",
        Comment = "Managed identity credential managed by TF",
    });
    var externalCreds = new Databricks.Grants("external_creds", new()
    {
        Credential = externalMi.Id,
        GrantDetails = new[]
        {
            new Databricks.Inputs.GrantsGrantArgs
            {
                Principal = "Data Engineers",
                Privileges = new[]
                {
                    "ACCESS",
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Credential;
import com.pulumi.databricks.CredentialArgs;
import com.pulumi.databricks.inputs.CredentialAzureManagedIdentityArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var externalMi = new Credential("externalMi", CredentialArgs.builder()
            .name("mi_credential")
            .azureManagedIdentity(CredentialAzureManagedIdentityArgs.builder()
                .accessConnectorId(example.id())
                .build())
            .purpose("SERVICE")
            .comment("Managed identity credential managed by TF")
            .build());
        var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
            .credential(externalMi.id())
            .grants(GrantsGrantArgs.builder()
                .principal("Data Engineers")
                .privileges("ACCESS")
                .build())
            .build());
    }
}
resources:
  externalMi:
    type: databricks:Credential
    name: external_mi
    properties:
      name: mi_credential
      azureManagedIdentity:
        accessConnectorId: ${example.id}
      purpose: SERVICE
      comment: Managed identity credential managed by TF
  externalCreds:
    type: databricks:Grants
    name: external_creds
    properties:
      credential: ${externalMi.id}
      grants:
        - principal: Data Engineers
          privileges:
            - ACCESS
For GCP
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const externalGcpSa = new databricks.Credential("external_gcp_sa", {
    name: "gcp_sa_credential",
    databricksGcpServiceAccount: {},
    purpose: "SERVICE",
    comment: "GCP SA credential managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
    credential: externalGcpSa.id,
    grants: [{
        principal: "Data Engineers",
        privileges: ["ACCESS"],
    }],
});
import pulumi
import pulumi_databricks as databricks
external_gcp_sa = databricks.Credential("external_gcp_sa",
    name="gcp_sa_credential",
    databricks_gcp_service_account={},
    purpose="SERVICE",
    comment="GCP SA credential managed by TF")
external_creds = databricks.Grants("external_creds",
    credential=external_gcp_sa.id,
    grants=[{
        "principal": "Data Engineers",
        "privileges": ["ACCESS"],
    }])
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		externalGcpSa, err := databricks.NewCredential(ctx, "external_gcp_sa", &databricks.CredentialArgs{
			Name:                        pulumi.String("gcp_sa_credential"),
			DatabricksGcpServiceAccount: &databricks.CredentialDatabricksGcpServiceAccountArgs{},
			Purpose:                     pulumi.String("SERVICE"),
			Comment:                     pulumi.String("GCP SA credential managed by TF"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
			Credential: externalGcpSa.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("ACCESS"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var externalGcpSa = new Databricks.Credential("external_gcp_sa", new()
    {
        Name = "gcp_sa_credential",
        DatabricksGcpServiceAccount = null,
        Purpose = "SERVICE",
        Comment = "GCP SA credential managed by TF",
    });
    var externalCreds = new Databricks.Grants("external_creds", new()
    {
        Credential = externalGcpSa.Id,
        GrantDetails = new[]
        {
            new Databricks.Inputs.GrantsGrantArgs
            {
                Principal = "Data Engineers",
                Privileges = new[]
                {
                    "ACCESS",
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Credential;
import com.pulumi.databricks.CredentialArgs;
import com.pulumi.databricks.inputs.CredentialDatabricksGcpServiceAccountArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var externalGcpSa = new Credential("externalGcpSa", CredentialArgs.builder()
            .name("gcp_sa_credential")
            .databricksGcpServiceAccount()
            .purpose("SERVICE")
            .comment("GCP SA credential managed by TF")
            .build());
        var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
            .credential(externalGcpSa.id())
            .grants(GrantsGrantArgs.builder()
                .principal("Data Engineers")
                .privileges("ACCESS")
                .build())
            .build());
    }
}
resources:
  externalGcpSa:
    type: databricks:Credential
    name: external_gcp_sa
    properties:
      name: gcp_sa_credential
      databricksGcpServiceAccount: {}
      purpose: SERVICE
      comment: GCP SA credential managed by TF
  externalCreds:
    type: databricks:Grants
    name: external_creds
    properties:
      credential: ${externalGcpSa.id}
      grants:
        - principal: Data Engineers
          privileges:
            - ACCESS
Create Credential Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Credential(name: string, args: CredentialArgs, opts?: CustomResourceOptions);
@overload
def Credential(resource_name: str,
               args: CredentialArgs,
               opts: Optional[ResourceOptions] = None)
@overload
def Credential(resource_name: str,
               opts: Optional[ResourceOptions] = None,
               purpose: Optional[str] = None,
               isolation_mode: Optional[str] = None,
               used_for_managed_storage: Optional[bool] = None,
               full_name: Optional[str] = None,
               created_at: Optional[int] = None,
               created_by: Optional[str] = None,
               databricks_gcp_service_account: Optional[CredentialDatabricksGcpServiceAccountArgs] = None,
               force_destroy: Optional[bool] = None,
               force_update: Optional[bool] = None,
               comment: Optional[str] = None,
               azure_service_principal: Optional[CredentialAzureServicePrincipalArgs] = None,
               name: Optional[str] = None,
               metastore_id: Optional[str] = None,
               owner: Optional[str] = None,
               azure_managed_identity: Optional[CredentialAzureManagedIdentityArgs] = None,
               read_only: Optional[bool] = None,
               skip_validation: Optional[bool] = None,
               updated_at: Optional[int] = None,
               updated_by: Optional[str] = None,
               aws_iam_role: Optional[CredentialAwsIamRoleArgs] = None)
func NewCredential(ctx *Context, name string, args CredentialArgs, opts ...ResourceOption) (*Credential, error)
public Credential(string name, CredentialArgs args, CustomResourceOptions? opts = null)
public Credential(String name, CredentialArgs args)
public Credential(String name, CredentialArgs args, CustomResourceOptions options)
type: databricks:Credential
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var credentialResource = new Databricks.Credential("credentialResource", new()
{
    Purpose = "string",
    IsolationMode = "string",
    UsedForManagedStorage = false,
    FullName = "string",
    CreatedAt = 0,
    CreatedBy = "string",
    DatabricksGcpServiceAccount = new Databricks.Inputs.CredentialDatabricksGcpServiceAccountArgs
    {
        CredentialId = "string",
        Email = "string",
        PrivateKeyId = "string",
    },
    ForceDestroy = false,
    ForceUpdate = false,
    Comment = "string",
    AzureServicePrincipal = new Databricks.Inputs.CredentialAzureServicePrincipalArgs
    {
        ApplicationId = "string",
        ClientSecret = "string",
        DirectoryId = "string",
    },
    Name = "string",
    MetastoreId = "string",
    Owner = "string",
    AzureManagedIdentity = new Databricks.Inputs.CredentialAzureManagedIdentityArgs
    {
        AccessConnectorId = "string",
        CredentialId = "string",
        ManagedIdentityId = "string",
    },
    ReadOnly = false,
    SkipValidation = false,
    UpdatedAt = 0,
    UpdatedBy = "string",
    AwsIamRole = new Databricks.Inputs.CredentialAwsIamRoleArgs
    {
        ExternalId = "string",
        RoleArn = "string",
        UnityCatalogIamArn = "string",
    },
});
example, err := databricks.NewCredential(ctx, "credentialResource", &databricks.CredentialArgs{
	Purpose:               pulumi.String("string"),
	IsolationMode:         pulumi.String("string"),
	UsedForManagedStorage: pulumi.Bool(false),
	FullName:              pulumi.String("string"),
	CreatedAt:             pulumi.Int(0),
	CreatedBy:             pulumi.String("string"),
	DatabricksGcpServiceAccount: &databricks.CredentialDatabricksGcpServiceAccountArgs{
		CredentialId: pulumi.String("string"),
		Email:        pulumi.String("string"),
		PrivateKeyId: pulumi.String("string"),
	},
	ForceDestroy: pulumi.Bool(false),
	ForceUpdate:  pulumi.Bool(false),
	Comment:      pulumi.String("string"),
	AzureServicePrincipal: &databricks.CredentialAzureServicePrincipalArgs{
		ApplicationId: pulumi.String("string"),
		ClientSecret:  pulumi.String("string"),
		DirectoryId:   pulumi.String("string"),
	},
	Name:        pulumi.String("string"),
	MetastoreId: pulumi.String("string"),
	Owner:       pulumi.String("string"),
	AzureManagedIdentity: &databricks.CredentialAzureManagedIdentityArgs{
		AccessConnectorId: pulumi.String("string"),
		CredentialId:      pulumi.String("string"),
		ManagedIdentityId: pulumi.String("string"),
	},
	ReadOnly:       pulumi.Bool(false),
	SkipValidation: pulumi.Bool(false),
	UpdatedAt:      pulumi.Int(0),
	UpdatedBy:      pulumi.String("string"),
	AwsIamRole: &databricks.CredentialAwsIamRoleArgs{
		ExternalId:         pulumi.String("string"),
		RoleArn:            pulumi.String("string"),
		UnityCatalogIamArn: pulumi.String("string"),
	},
})
var credentialResource = new Credential("credentialResource", CredentialArgs.builder()
    .purpose("string")
    .isolationMode("string")
    .usedForManagedStorage(false)
    .fullName("string")
    .createdAt(0)
    .createdBy("string")
    .databricksGcpServiceAccount(CredentialDatabricksGcpServiceAccountArgs.builder()
        .credentialId("string")
        .email("string")
        .privateKeyId("string")
        .build())
    .forceDestroy(false)
    .forceUpdate(false)
    .comment("string")
    .azureServicePrincipal(CredentialAzureServicePrincipalArgs.builder()
        .applicationId("string")
        .clientSecret("string")
        .directoryId("string")
        .build())
    .name("string")
    .metastoreId("string")
    .owner("string")
    .azureManagedIdentity(CredentialAzureManagedIdentityArgs.builder()
        .accessConnectorId("string")
        .credentialId("string")
        .managedIdentityId("string")
        .build())
    .readOnly(false)
    .skipValidation(false)
    .updatedAt(0)
    .updatedBy("string")
    .awsIamRole(CredentialAwsIamRoleArgs.builder()
        .externalId("string")
        .roleArn("string")
        .unityCatalogIamArn("string")
        .build())
    .build());
credential_resource = databricks.Credential("credentialResource",
    purpose="string",
    isolation_mode="string",
    used_for_managed_storage=False,
    full_name="string",
    created_at=0,
    created_by="string",
    databricks_gcp_service_account={
        "credential_id": "string",
        "email": "string",
        "private_key_id": "string",
    },
    force_destroy=False,
    force_update=False,
    comment="string",
    azure_service_principal={
        "application_id": "string",
        "client_secret": "string",
        "directory_id": "string",
    },
    name="string",
    metastore_id="string",
    owner="string",
    azure_managed_identity={
        "access_connector_id": "string",
        "credential_id": "string",
        "managed_identity_id": "string",
    },
    read_only=False,
    skip_validation=False,
    updated_at=0,
    updated_by="string",
    aws_iam_role={
        "external_id": "string",
        "role_arn": "string",
        "unity_catalog_iam_arn": "string",
    })
const credentialResource = new databricks.Credential("credentialResource", {
    purpose: "string",
    isolationMode: "string",
    usedForManagedStorage: false,
    fullName: "string",
    createdAt: 0,
    createdBy: "string",
    databricksGcpServiceAccount: {
        credentialId: "string",
        email: "string",
        privateKeyId: "string",
    },
    forceDestroy: false,
    forceUpdate: false,
    comment: "string",
    azureServicePrincipal: {
        applicationId: "string",
        clientSecret: "string",
        directoryId: "string",
    },
    name: "string",
    metastoreId: "string",
    owner: "string",
    azureManagedIdentity: {
        accessConnectorId: "string",
        credentialId: "string",
        managedIdentityId: "string",
    },
    readOnly: false,
    skipValidation: false,
    updatedAt: 0,
    updatedBy: "string",
    awsIamRole: {
        externalId: "string",
        roleArn: "string",
        unityCatalogIamArn: "string",
    },
});
type: databricks:Credential
properties:
    awsIamRole:
        externalId: string
        roleArn: string
        unityCatalogIamArn: string
    azureManagedIdentity:
        accessConnectorId: string
        credentialId: string
        managedIdentityId: string
    azureServicePrincipal:
        applicationId: string
        clientSecret: string
        directoryId: string
    comment: string
    createdAt: 0
    createdBy: string
    databricksGcpServiceAccount:
        credentialId: string
        email: string
        privateKeyId: string
    forceDestroy: false
    forceUpdate: false
    fullName: string
    isolationMode: string
    metastoreId: string
    name: string
    owner: string
    purpose: string
    readOnly: false
    skipValidation: false
    updatedAt: 0
    updatedBy: string
    usedForManagedStorage: false
Credential Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The Credential resource accepts the following input properties:
- Purpose string
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- AwsIamRole CredentialAwsIamRole
- AzureManagedIdentity CredentialAzureManagedIdentity
- AzureServicePrincipal CredentialAzureServicePrincipal
- Comment string
- CreatedAt int
- CreatedBy string
- DatabricksGcpServiceAccount CredentialDatabricksGcpServiceAccount
- ForceDestroy bool
- Delete credential regardless of its dependencies.
- ForceUpdate bool
- Update credential regardless of its dependents.
- FullName string
- IsolationMode string
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only the current workspace. `aws_iam_role` is an optional configuration block for credential details for AWS:
- MetastoreId string
- Name string
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- Owner string
- Username/groupname/sp application_id of the credential owner.
- ReadOnly bool
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- SkipValidation bool
- Suppress validation errors if any & force save the credential.
- UpdatedAt int
- UpdatedBy string
- UsedForManagedStorage bool
- Purpose string
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- AwsIamRole CredentialAwsIamRoleArgs
- AzureManagedIdentity CredentialAzureManagedIdentityArgs
- AzureServicePrincipal CredentialAzureServicePrincipalArgs
- Comment string
- CreatedAt int
- CreatedBy string
- DatabricksGcpServiceAccount CredentialDatabricksGcpServiceAccountArgs
- ForceDestroy bool
- Delete credential regardless of its dependencies.
- ForceUpdate bool
- Update credential regardless of its dependents.
- FullName string
- IsolationMode string
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only the current workspace. `aws_iam_role` is an optional configuration block for credential details for AWS:
- MetastoreId string
- Name string
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- Owner string
- Username/groupname/sp application_id of the credential owner.
- ReadOnly bool
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- SkipValidation bool
- Suppress validation errors if any & force save the credential.
- UpdatedAt int
- UpdatedBy string
- UsedForManagedStorage bool
- purpose String
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- awsIamRole CredentialAwsIamRole
- azureManagedIdentity CredentialAzureManagedIdentity
- azureServicePrincipal CredentialAzureServicePrincipal
- comment String
- createdAt Integer
- createdBy String
- databricksGcpServiceAccount CredentialDatabricksGcpServiceAccount
- forceDestroy Boolean
- Delete credential regardless of its dependencies.
- forceUpdate Boolean
- Update credential regardless of its dependents.
- fullName String
- isolationMode String
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only the current workspace. `aws_iam_role` is an optional configuration block for credential details for AWS:
- metastoreId String
- name String
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- owner String
- Username/groupname/sp application_id of the credential owner.
- readOnly Boolean
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skipValidation Boolean
- Suppress validation errors if any & force save the credential.
- updatedAt Integer
- updatedBy String
- usedForManagedStorage Boolean
- purpose string
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- awsIamRole CredentialAwsIamRole
- azureManagedIdentity CredentialAzureManagedIdentity
- azureServicePrincipal CredentialAzureServicePrincipal
- comment string
- createdAt number
- createdBy string
- databricksGcpServiceAccount CredentialDatabricksGcpServiceAccount
- forceDestroy boolean
- Delete credential regardless of its dependencies.
- forceUpdate boolean
- Update credential regardless of its dependents.
- fullName string
- isolationMode string
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only the current workspace. `aws_iam_role` is an optional configuration block for credential details for AWS:
- metastoreId string
- name string
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- owner string
- Username/groupname/sp application_id of the credential owner.
- readOnly boolean
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skipValidation boolean
- Suppress validation errors if any & force save the credential.
- updatedAt number
- updatedBy string
- usedForManagedStorage boolean
- purpose str
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- aws_iam_role CredentialAwsIamRoleArgs
- azure_managed_identity CredentialAzureManagedIdentityArgs
- azure_service_principal CredentialAzureServicePrincipalArgs
- comment str
- created_at int
- created_by str
- databricks_gcp_service_account CredentialDatabricksGcpServiceAccountArgs
- force_destroy bool
- Delete credential regardless of its dependencies.
- force_update bool
- Update credential regardless of its dependents.
- full_name str
- isolation_mode str
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only the current workspace. `aws_iam_role` is an optional configuration block for credential details for AWS:
- metastore_id str
- name str
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- owner str
- Username/groupname/sp application_id of the credential owner.
- read_only bool
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skip_validation bool
- Suppress validation errors if any & force save the credential.
- updated_at int
- updated_by str
- used_for_managed_storage bool
- purpose String
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- awsIamRole Property Map
- azureManagedIdentity Property Map
- azureServicePrincipal Property Map
- comment String
- createdAt Number
- createdBy String
- databricksGcpServiceAccount Property Map
- forceDestroy Boolean
- Delete credential regardless of its dependencies.
- forceUpdate Boolean
- Update credential regardless of its dependents.
- fullName String
- isolationMode String
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only the current workspace. `aws_iam_role` is an optional configuration block for credential details for AWS:
- metastoreId String
- name String
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- owner String
- Username/groupname/sp application_id of the credential owner.
- readOnly Boolean
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skipValidation Boolean
- Suppress validation errors if any & force save the credential.
- updatedAt Number
- updatedBy String
- usedForManagedStorage Boolean
Outputs
All input properties are implicitly available as output properties. Additionally, the Credential resource produces the following output properties:
- CredentialId string
- Unique ID of the credential.
- Id string
- The provider-assigned unique ID for this managed resource.
- CredentialId string
- Unique ID of the credential.
- Id string
- The provider-assigned unique ID for this managed resource.
- credentialId String
- Unique ID of the credential.
- id String
- The provider-assigned unique ID for this managed resource.
- credentialId string
- Unique ID of the credential.
- id string
- The provider-assigned unique ID for this managed resource.
- credential_id str
- Unique ID of the credential.
- id str
- The provider-assigned unique ID for this managed resource.
- credentialId String
- Unique ID of the credential.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing Credential Resource
Get an existing Credential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: CredentialState, opts?: CustomResourceOptions): Credential
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        aws_iam_role: Optional[CredentialAwsIamRoleArgs] = None,
        azure_managed_identity: Optional[CredentialAzureManagedIdentityArgs] = None,
        azure_service_principal: Optional[CredentialAzureServicePrincipalArgs] = None,
        comment: Optional[str] = None,
        created_at: Optional[int] = None,
        created_by: Optional[str] = None,
        credential_id: Optional[str] = None,
        databricks_gcp_service_account: Optional[CredentialDatabricksGcpServiceAccountArgs] = None,
        force_destroy: Optional[bool] = None,
        force_update: Optional[bool] = None,
        full_name: Optional[str] = None,
        isolation_mode: Optional[str] = None,
        metastore_id: Optional[str] = None,
        name: Optional[str] = None,
        owner: Optional[str] = None,
        purpose: Optional[str] = None,
        read_only: Optional[bool] = None,
        skip_validation: Optional[bool] = None,
        updated_at: Optional[int] = None,
        updated_by: Optional[str] = None,
        used_for_managed_storage: Optional[bool] = None) -> Credential
func GetCredential(ctx *Context, name string, id IDInput, state *CredentialState, opts ...ResourceOption) (*Credential, error)
public static Credential Get(string name, Input<string> id, CredentialState? state, CustomResourceOptions? opts = null)
public static Credential get(String name, Output<String> id, CredentialState state, CustomResourceOptions options)
resources:
  _:
    type: databricks:Credential
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AwsIamRole CredentialAwsIamRole
- AzureManagedIdentity CredentialAzureManagedIdentity
- AzureServicePrincipal CredentialAzureServicePrincipal
- Comment string
- CreatedAt int
- CreatedBy string
- CredentialId string
- Unique ID of the credential.
- DatabricksGcpServiceAccount CredentialDatabricksGcpServiceAccount
- ForceDestroy bool
- Delete credential regardless of its dependencies.
- ForceUpdate bool
- Update credential regardless of its dependents.
- FullName string
- IsolationMode string
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only from the current workspace. `aws_iam_role` optional configuration block for credential details for AWS:
- MetastoreId string
- Name string
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- Owner string
- Username/groupname/sp application_id of the credential owner.
- Purpose string
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- ReadOnly bool
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- SkipValidation bool
- Suppress validation errors if any & force save the credential.
- UpdatedAt int
- UpdatedBy string
- UsedForManagedStorage bool
- AwsIamRole CredentialAwsIamRoleArgs
- AzureManagedIdentity CredentialAzureManagedIdentityArgs
- AzureServicePrincipal CredentialAzureServicePrincipalArgs
- Comment string
- CreatedAt int
- CreatedBy string
- CredentialId string
- Unique ID of the credential.
- DatabricksGcpServiceAccount CredentialDatabricksGcpServiceAccountArgs
- ForceDestroy bool
- Delete credential regardless of its dependencies.
- ForceUpdate bool
- Update credential regardless of its dependents.
- FullName string
- IsolationMode string
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only from the current workspace. `aws_iam_role` optional configuration block for credential details for AWS:
- MetastoreId string
- Name string
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- Owner string
- Username/groupname/sp application_id of the credential owner.
- Purpose string
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- ReadOnly bool
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- SkipValidation bool
- Suppress validation errors if any & force save the credential.
- UpdatedAt int
- UpdatedBy string
- UsedForManagedStorage bool
- awsIamRole CredentialAwsIamRole
- azureManagedIdentity CredentialAzureManagedIdentity
- azureServicePrincipal CredentialAzureServicePrincipal
- comment String
- createdAt Integer
- createdBy String
- credentialId String
- Unique ID of the credential.
- databricksGcpServiceAccount CredentialDatabricksGcpServiceAccount
- forceDestroy Boolean
- Delete credential regardless of its dependencies.
- forceUpdate Boolean
- Update credential regardless of its dependents.
- fullName String
- isolationMode String
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only from the current workspace. `aws_iam_role` optional configuration block for credential details for AWS:
- metastoreId String
- name String
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- owner String
- Username/groupname/sp application_id of the credential owner.
- purpose String
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- readOnly Boolean
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skipValidation Boolean
- Suppress validation errors if any & force save the credential.
- updatedAt Integer
- updatedBy String
- usedForManagedStorage Boolean
- awsIamRole CredentialAwsIamRole
- azureManagedIdentity CredentialAzureManagedIdentity
- azureServicePrincipal CredentialAzureServicePrincipal
- comment string
- createdAt number
- createdBy string
- credentialId string
- Unique ID of the credential.
- databricksGcpServiceAccount CredentialDatabricksGcpServiceAccount
- forceDestroy boolean
- Delete credential regardless of its dependencies.
- forceUpdate boolean
- Update credential regardless of its dependents.
- fullName string
- isolationMode string
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only from the current workspace. `aws_iam_role` optional configuration block for credential details for AWS:
- metastoreId string
- name string
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- owner string
- Username/groupname/sp application_id of the credential owner.
- purpose string
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- readOnly boolean
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skipValidation boolean
- Suppress validation errors if any & force save the credential.
- updatedAt number
- updatedBy string
- usedForManagedStorage boolean
- aws_iam_role CredentialAwsIamRoleArgs
- azure_managed_identity CredentialAzureManagedIdentityArgs
- azure_service_principal CredentialAzureServicePrincipalArgs
- comment str
- created_at int
- created_by str
- credential_id str
- Unique ID of the credential.
- databricks_gcp_service_account CredentialDatabricksGcpServiceAccountArgs
- force_destroy bool
- Delete credential regardless of its dependencies.
- force_update bool
- Update credential regardless of its dependents.
- full_name str
- isolation_mode str
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only from the current workspace. `aws_iam_role` optional configuration block for credential details for AWS:
- metastore_id str
- name str
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- owner str
- Username/groupname/sp application_id of the credential owner.
- purpose str
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- read_only bool
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skip_validation bool
- Suppress validation errors if any & force save the credential.
- updated_at int
- updated_by str
- used_for_managed_storage bool
- awsIamRole Property Map
- azureManagedIdentity Property Map
- azureServicePrincipal Property Map
- comment String
- createdAt Number
- createdBy String
- credentialId String
- Unique ID of the credential.
- databricksGcpServiceAccount Property Map
- forceDestroy Boolean
- Delete credential regardless of its dependencies.
- forceUpdate Boolean
- Update credential regardless of its dependents.
- fullName String
- isolationMode String
- Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to only from the current workspace. `aws_iam_role` optional configuration block for credential details for AWS:
- metastoreId String
- name String
- Name of Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- owner String
- Username/groupname/sp application_id of the credential owner.
- purpose String
- Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`.
- readOnly Boolean
- Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skipValidation Boolean
- Suppress validation errors if any & force save the credential.
- updatedAt Number
- updatedBy String
- usedForManagedStorage Boolean
Supporting Types
CredentialAwsIamRole, CredentialAwsIamRoleArgs        
- ExternalId string
- RoleArn string
- The Amazon Resource Name (ARN) of the AWS IAM role you want to use to setup the trust policy, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF`. `azure_managed_identity` optional configuration block for using managed identity as credential details for Azure (recommended over `azure_service_principal`):
- UnityCatalogIamArn string
- ExternalId string
- RoleArn string
- The Amazon Resource Name (ARN) of the AWS IAM role you want to use to setup the trust policy, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF`. `azure_managed_identity` optional configuration block for using managed identity as credential details for Azure (recommended over `azure_service_principal`):
- UnityCatalogIamArn string
- externalId String
- roleArn String
- The Amazon Resource Name (ARN) of the AWS IAM role you want to use to setup the trust policy, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF`. `azure_managed_identity` optional configuration block for using managed identity as credential details for Azure (recommended over `azure_service_principal`):
- unityCatalogIamArn String
- externalId string
- roleArn string
- The Amazon Resource Name (ARN) of the AWS IAM role you want to use to setup the trust policy, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF`. `azure_managed_identity` optional configuration block for using managed identity as credential details for Azure (recommended over `azure_service_principal`):
- unityCatalogIamArn string
- external_id str
- role_arn str
- The Amazon Resource Name (ARN) of the AWS IAM role you want to use to setup the trust policy, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF`. `azure_managed_identity` optional configuration block for using managed identity as credential details for Azure (recommended over `azure_service_principal`):
- unity_catalog_iam_arn str
- externalId String
- roleArn String
- The Amazon Resource Name (ARN) of the AWS IAM role you want to use to setup the trust policy, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF`. `azure_managed_identity` optional configuration block for using managed identity as credential details for Azure (recommended over `azure_service_principal`):
- unityCatalogIamArn String
CredentialAzureManagedIdentity, CredentialAzureManagedIdentityArgs        
- AccessConnectorId string
- The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
- CredentialId string
- Unique ID of the credential.
- ManagedIdentityId string
- The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. `azure_service_principal` optional configuration block to use service principal as credential details for Azure. Only applicable when purpose is `STORAGE` (Legacy):
- AccessConnectorId string
- The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
- CredentialId string
- Unique ID of the credential.
- ManagedIdentityId string
- The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. `azure_service_principal` optional configuration block to use service principal as credential details for Azure. Only applicable when purpose is `STORAGE` (Legacy):
- accessConnectorId String
- The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
- credentialId String
- Unique ID of the credential.
- managedIdentityId String
- The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. `azure_service_principal` optional configuration block to use service principal as credential details for Azure. Only applicable when purpose is `STORAGE` (Legacy):
- accessConnectorId string
- The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
- credentialId string
- Unique ID of the credential.
- managedIdentityId string
- The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. `azure_service_principal` optional configuration block to use service principal as credential details for Azure. Only applicable when purpose is `STORAGE` (Legacy):
- access_connector_id str
- The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
- credential_id str
- Unique ID of the credential.
- managed_identity_id str
- The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. `azure_service_principal` optional configuration block to use service principal as credential details for Azure. Only applicable when purpose is `STORAGE` (Legacy):
- accessConnectorId String
- The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
- credentialId String
- Unique ID of the credential.
- managedIdentityId String
- The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. `azure_service_principal` optional configuration block to use service principal as credential details for Azure. Only applicable when purpose is `STORAGE` (Legacy):
CredentialAzureServicePrincipal, CredentialAzureServicePrincipalArgs        
- ApplicationId string
- The application ID of the application registration within the referenced AAD tenant
- ClientSecret string
- The client secret generated for the above app ID in AAD. This field is redacted on output. `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account:
- DirectoryId string
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- ApplicationId string
- The application ID of the application registration within the referenced AAD tenant
- ClientSecret string
- The client secret generated for the above app ID in AAD. This field is redacted on output. `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account:
- DirectoryId string
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- applicationId String
- The application ID of the application registration within the referenced AAD tenant
- clientSecret String
- The client secret generated for the above app ID in AAD. This field is redacted on output. `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account:
- directoryId String
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- applicationId string
- The application ID of the application registration within the referenced AAD tenant
- clientSecret string
- The client secret generated for the above app ID in AAD. This field is redacted on output. `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account:
- directoryId string
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- application_id str
- The application ID of the application registration within the referenced AAD tenant
- client_secret str
- The client secret generated for the above app ID in AAD. This field is redacted on output. `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account:
- directory_id str
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- applicationId String
- The application ID of the application registration within the referenced AAD tenant
- clientSecret String
- The client secret generated for the above app ID in AAD. This field is redacted on output. `databricks_gcp_service_account` optional configuration block for creating a Databricks-managed GCP Service Account:
- directoryId String
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
CredentialDatabricksGcpServiceAccount, CredentialDatabricksGcpServiceAccountArgs          
- CredentialId string
- Unique ID of the credential.
- Email string
- The email of the GCP service account created, to be granted access to relevant buckets.
- PrivateKeyId string
- CredentialId string
- Unique ID of the credential.
- Email string
- The email of the GCP service account created, to be granted access to relevant buckets.
- PrivateKeyId string
- credentialId String
- Unique ID of the credential.
- email String
- The email of the GCP service account created, to be granted access to relevant buckets.
- privateKeyId String
- credentialId string
- Unique ID of the credential.
- email string
- The email of the GCP service account created, to be granted access to relevant buckets.
- privateKeyId string
- credential_id str
- Unique ID of the credential.
- email str
- The email of the GCP service account created, to be granted access to relevant buckets.
- private_key_id str
- credentialId String
- Unique ID of the credential.
- email String
- The email of the GCP service account created, to be granted access to relevant buckets.
- privateKeyId String
Import
This resource can be imported by name:
bash
$ pulumi import databricks:index/credential:Credential this <name>
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the `databricks` Terraform Provider.