databricks.User
Explore with Pulumi AI
This resource allows you to manage users in Databricks Workspace, Databricks Account Console or Azure Databricks Account Console. You can also associate Databricks users to databricks_group. Upon user creation the user will receive a welcome email. You can also get information about caller identity using databricks.getCurrentUser data source.
To assign account level users to workspace use databricks_mws_permission_assignment.
Entitlements, like allow_cluster_create, allow_instance_pool_create, databricks_sql_access, and workspace_access, are applicable only for workspace-level users. Use databricks.Entitlements resource to assign entitlements inside a workspace to account-level users.
To create users in the Databricks account, the provider must be configured with host = "https://accounts.cloud.databricks.com" on AWS deployments or host = "https://accounts.azuredatabricks.net" and authenticate using AAD tokens on Azure deployments.
The default behavior when deleting a databricks.User resource depends on whether the provider is configured at the workspace-level or account-level. When the provider is configured at the workspace-level, the user will be deleted from the workspace. When the provider is configured at the account-level, the user will be deactivated but not deleted. When the provider is configured at the account level, to delete the user from the account when the resource is deleted, set disable_as_user_deletion = false. Conversely, when the provider is configured at the account-level, to deactivate the user when the resource is deleted, set disable_as_user_deletion = true.
Example Usage
Creating regular user:
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const me = new databricks.User("me", {userName: "me@example.com"});
import pulumi
import pulumi_databricks as databricks
me = databricks.User("me", user_name="me@example.com")
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
			UserName: pulumi.String("me@example.com"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var me = new Databricks.User("me", new()
    {
        UserName = "me@example.com",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var me = new User("me", UserArgs.builder()
            .userName("me@example.com")
            .build());
    }
}
resources:
  me:
    type: databricks:User
    properties:
      userName: me@example.com
Creating user with administrative permissions - referencing special admins databricks.Group in databricks.GroupMember resource:
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const admins = databricks.getGroup({
    displayName: "admins",
});
const me = new databricks.User("me", {userName: "me@example.com"});
const i_am_admin = new databricks.GroupMember("i-am-admin", {
    groupId: admins.then(admins => admins.id),
    memberId: me.id,
});
import pulumi
import pulumi_databricks as databricks
admins = databricks.get_group(display_name="admins")
me = databricks.User("me", user_name="me@example.com")
i_am_admin = databricks.GroupMember("i-am-admin",
    group_id=admins.id,
    member_id=me.id)
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		admins, err := databricks.LookupGroup(ctx, &databricks.LookupGroupArgs{
			DisplayName: "admins",
		}, nil)
		if err != nil {
			return err
		}
		me, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
			UserName: pulumi.String("me@example.com"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGroupMember(ctx, "i-am-admin", &databricks.GroupMemberArgs{
			GroupId:  pulumi.String(admins.Id),
			MemberId: me.ID(),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var admins = Databricks.GetGroup.Invoke(new()
    {
        DisplayName = "admins",
    });
    var me = new Databricks.User("me", new()
    {
        UserName = "me@example.com",
    });
    var i_am_admin = new Databricks.GroupMember("i-am-admin", new()
    {
        GroupId = admins.Apply(getGroupResult => getGroupResult.Id),
        MemberId = me.Id,
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.inputs.GetGroupArgs;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import com.pulumi.databricks.GroupMember;
import com.pulumi.databricks.GroupMemberArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        final var admins = DatabricksFunctions.getGroup(GetGroupArgs.builder()
            .displayName("admins")
            .build());
        var me = new User("me", UserArgs.builder()
            .userName("me@example.com")
            .build());
        var i_am_admin = new GroupMember("i-am-admin", GroupMemberArgs.builder()
            .groupId(admins.applyValue(getGroupResult -> getGroupResult.id()))
            .memberId(me.id())
            .build());
    }
}
resources:
  me:
    type: databricks:User
    properties:
      userName: me@example.com
  i-am-admin:
    type: databricks:GroupMember
    properties:
      groupId: ${admins.id}
      memberId: ${me.id}
variables:
  admins:
    fn::invoke:
      function: databricks:getGroup
      arguments:
        displayName: admins
Creating user with cluster create permissions:
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const me = new databricks.User("me", {
    userName: "me@example.com",
    displayName: "Example user",
    allowClusterCreate: true,
});
import pulumi
import pulumi_databricks as databricks
me = databricks.User("me",
    user_name="me@example.com",
    display_name="Example user",
    allow_cluster_create=True)
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
			UserName:           pulumi.String("me@example.com"),
			DisplayName:        pulumi.String("Example user"),
			AllowClusterCreate: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var me = new Databricks.User("me", new()
    {
        UserName = "me@example.com",
        DisplayName = "Example user",
        AllowClusterCreate = true,
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var me = new User("me", UserArgs.builder()
            .userName("me@example.com")
            .displayName("Example user")
            .allowClusterCreate(true)
            .build());
    }
}
resources:
  me:
    type: databricks:User
    properties:
      userName: me@example.com
      displayName: Example user
      allowClusterCreate: true
Creating user in AWS Databricks account:
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const accountUser = new databricks.User("account_user", {
    userName: "me@example.com",
    displayName: "Example user",
});
import pulumi
import pulumi_databricks as databricks
account_user = databricks.User("account_user",
    user_name="me@example.com",
    display_name="Example user")
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewUser(ctx, "account_user", &databricks.UserArgs{
			UserName:    pulumi.String("me@example.com"),
			DisplayName: pulumi.String("Example user"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var accountUser = new Databricks.User("account_user", new()
    {
        UserName = "me@example.com",
        DisplayName = "Example user",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var accountUser = new User("accountUser", UserArgs.builder()
            .userName("me@example.com")
            .displayName("Example user")
            .build());
    }
}
resources:
  accountUser:
    type: databricks:User
    name: account_user
    properties:
      userName: me@example.com
      displayName: Example user
Creating user in Azure Databricks account:
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const accountUser = new databricks.User("account_user", {
    userName: "me@example.com",
    displayName: "Example user",
});
import pulumi
import pulumi_databricks as databricks
account_user = databricks.User("account_user",
    user_name="me@example.com",
    display_name="Example user")
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewUser(ctx, "account_user", &databricks.UserArgs{
			UserName:    pulumi.String("me@example.com"),
			DisplayName: pulumi.String("Example user"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() => 
{
    var accountUser = new Databricks.User("account_user", new()
    {
        UserName = "me@example.com",
        DisplayName = "Example user",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var accountUser = new User("accountUser", UserArgs.builder()
            .userName("me@example.com")
            .displayName("Example user")
            .build());
    }
}
resources:
  accountUser:
    type: databricks:User
    name: account_user
    properties:
      userName: me@example.com
      displayName: Example user
Related Resources
The following resources are often used in the same context:
- End to end workspace management guide.
- databricks.Group to manage groups in Databricks Workspace or Account Console (for AWS deployments).
- databricks.Group data to retrieve information about databricks.Group members, entitlements and instance profiles.
- databricks.GroupInstanceProfile to attach databricks.InstanceProfile (AWS) to databricks_group.
- databricks.GroupMember to attach users and groups as group members.
- databricks.InstanceProfile to manage AWS EC2 instance profiles that users can launch databricks.Cluster and access data, like databricks_mount.
- databricks.User data to retrieve information about databricks_user.
Create User Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new User(name: string, args: UserArgs, opts?: CustomResourceOptions);
@overload
def User(resource_name: str,
         args: UserArgs,
         opts: Optional[ResourceOptions] = None)
@overload
def User(resource_name: str,
         opts: Optional[ResourceOptions] = None,
         user_name: Optional[str] = None,
         display_name: Optional[str] = None,
         force: Optional[bool] = None,
         allow_instance_pool_create: Optional[bool] = None,
         databricks_sql_access: Optional[bool] = None,
         disable_as_user_deletion: Optional[bool] = None,
         acl_principal_id: Optional[str] = None,
         external_id: Optional[str] = None,
         allow_cluster_create: Optional[bool] = None,
         force_delete_home_dir: Optional[bool] = None,
         force_delete_repos: Optional[bool] = None,
         home: Optional[str] = None,
         repos: Optional[str] = None,
         active: Optional[bool] = None,
         workspace_access: Optional[bool] = None)
func NewUser(ctx *Context, name string, args UserArgs, opts ...ResourceOption) (*User, error)
public User(string name, UserArgs args, CustomResourceOptions? opts = null)
type: databricks:User
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var userResource = new Databricks.User("userResource", new()
{
    UserName = "string",
    DisplayName = "string",
    Force = false,
    AllowInstancePoolCreate = false,
    DatabricksSqlAccess = false,
    DisableAsUserDeletion = false,
    AclPrincipalId = "string",
    ExternalId = "string",
    AllowClusterCreate = false,
    ForceDeleteHomeDir = false,
    ForceDeleteRepos = false,
    Home = "string",
    Repos = "string",
    Active = false,
    WorkspaceAccess = false,
});
example, err := databricks.NewUser(ctx, "userResource", &databricks.UserArgs{
	UserName:                pulumi.String("string"),
	DisplayName:             pulumi.String("string"),
	Force:                   pulumi.Bool(false),
	AllowInstancePoolCreate: pulumi.Bool(false),
	DatabricksSqlAccess:     pulumi.Bool(false),
	DisableAsUserDeletion:   pulumi.Bool(false),
	AclPrincipalId:          pulumi.String("string"),
	ExternalId:              pulumi.String("string"),
	AllowClusterCreate:      pulumi.Bool(false),
	ForceDeleteHomeDir:      pulumi.Bool(false),
	ForceDeleteRepos:        pulumi.Bool(false),
	Home:                    pulumi.String("string"),
	Repos:                   pulumi.String("string"),
	Active:                  pulumi.Bool(false),
	WorkspaceAccess:         pulumi.Bool(false),
})
var userResource = new User("userResource", UserArgs.builder()
    .userName("string")
    .displayName("string")
    .force(false)
    .allowInstancePoolCreate(false)
    .databricksSqlAccess(false)
    .disableAsUserDeletion(false)
    .aclPrincipalId("string")
    .externalId("string")
    .allowClusterCreate(false)
    .forceDeleteHomeDir(false)
    .forceDeleteRepos(false)
    .home("string")
    .repos("string")
    .active(false)
    .workspaceAccess(false)
    .build());
user_resource = databricks.User("userResource",
    user_name="string",
    display_name="string",
    force=False,
    allow_instance_pool_create=False,
    databricks_sql_access=False,
    disable_as_user_deletion=False,
    acl_principal_id="string",
    external_id="string",
    allow_cluster_create=False,
    force_delete_home_dir=False,
    force_delete_repos=False,
    home="string",
    repos="string",
    active=False,
    workspace_access=False)
const userResource = new databricks.User("userResource", {
    userName: "string",
    displayName: "string",
    force: false,
    allowInstancePoolCreate: false,
    databricksSqlAccess: false,
    disableAsUserDeletion: false,
    aclPrincipalId: "string",
    externalId: "string",
    allowClusterCreate: false,
    forceDeleteHomeDir: false,
    forceDeleteRepos: false,
    home: "string",
    repos: "string",
    active: false,
    workspaceAccess: false,
});
type: databricks:User
properties:
    aclPrincipalId: string
    active: false
    allowClusterCreate: false
    allowInstancePoolCreate: false
    databricksSqlAccess: false
    disableAsUserDeletion: false
    displayName: string
    externalId: string
    force: false
    forceDeleteHomeDir: false
    forceDeleteRepos: false
    home: string
    repos: string
    userName: string
    workspaceAccess: false
User Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The User resource accepts the following input properties:
- UserName string
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- AclPrincipalId string
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- Active bool
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- AllowClusterCreate bool
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- AllowInstancePoolCreate bool
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- DatabricksSqlAccess bool
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- DisableAsUserDeletion bool
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- DisplayName string
- This is an alias for the username that can be the full name of the user.
- ExternalId string
- ID of the user in an external identity provider.
- Force bool
- Ignore "cannot create user: User with username X already exists" errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- ForceDeleteHomeDir bool
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- ForceDeleteRepos bool
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Home string
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- Repos string
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- WorkspaceAccess bool
- UserName string
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- AclPrincipalId string
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- Active bool
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- AllowClusterCreate bool
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- AllowInstancePoolCreate bool
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- DatabricksSqlAccess bool
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- DisableAsUserDeletion bool
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- DisplayName string
- This is an alias for the username that can be the full name of the user.
- ExternalId string
- ID of the user in an external identity provider.
- Force bool
- Ignore "cannot create user: User with username X already exists" errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- ForceDeleteHomeDir bool
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- ForceDeleteRepos bool
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Home string
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- Repos string
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- WorkspaceAccess bool
- userName String
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- aclPrincipalId String
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- active Boolean
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allowClusterCreate Boolean
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- allowInstancePoolCreate Boolean
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricksSqlAccess Boolean
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- disableAsUserDeletion Boolean
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- displayName String
- This is an alias for the username that can be the full name of the user.
- externalId String
- ID of the user in an external identity provider.
- force Boolean
- Ignore "cannot create user: User with username X already exists" errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- forceDeleteHomeDir Boolean
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- forceDeleteRepos Boolean
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home String
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- repos String
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- workspaceAccess Boolean
- userName string
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- aclPrincipalId string
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- active boolean
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allowClusterCreate boolean
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- allowInstancePoolCreate boolean
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricksSqlAccess boolean
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- disableAsUserDeletion boolean
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- displayName string
- This is an alias for the username that can be the full name of the user.
- externalId string
- ID of the user in an external identity provider.
- force boolean
- Ignore "cannot create user: User with username X already exists" errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- forceDeleteHomeDir boolean
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- forceDeleteRepos boolean
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home string
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- repos string
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- workspaceAccess boolean
- user_name str
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- acl_principal_id str
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- active bool
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow_cluster_create bool
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- allow_instance_pool_create bool
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks_sql_access bool
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- disable_as_user_deletion bool
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- display_name str
- This is an alias for the username that can be the full name of the user.
- external_id str
- ID of the user in an external identity provider.
- force bool
- Ignore "cannot create user: User with username X already exists" errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- force_delete_home_dir bool
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force_delete_repos bool
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home str
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- repos str
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- workspace_access bool
- userName String
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- aclPrincipalId String
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- active Boolean
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allowClusterCreate Boolean
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- allowInstancePoolCreate Boolean
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricksSqlAccess Boolean
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- disableAsUserDeletion Boolean
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- displayName String
- This is an alias for the username that can be the full name of the user.
- externalId String
- ID of the user in an external identity provider.
- force Boolean
- Ignore "cannot create user: User with username X already exists" errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- forceDeleteHomeDir Boolean
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- forceDeleteRepos Boolean
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home String
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- repos String
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- workspaceAccess Boolean
Outputs
All input properties are implicitly available as output properties. Additionally, the User resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing User Resource
Get an existing User resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: UserState, opts?: CustomResourceOptions): User
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        acl_principal_id: Optional[str] = None,
        active: Optional[bool] = None,
        allow_cluster_create: Optional[bool] = None,
        allow_instance_pool_create: Optional[bool] = None,
        databricks_sql_access: Optional[bool] = None,
        disable_as_user_deletion: Optional[bool] = None,
        display_name: Optional[str] = None,
        external_id: Optional[str] = None,
        force: Optional[bool] = None,
        force_delete_home_dir: Optional[bool] = None,
        force_delete_repos: Optional[bool] = None,
        home: Optional[str] = None,
        repos: Optional[str] = None,
        user_name: Optional[str] = None,
        workspace_access: Optional[bool] = None) -> User
func GetUser(ctx *Context, name string, id IDInput, state *UserState, opts ...ResourceOption) (*User, error)
public static User Get(string name, Input<string> id, UserState? state, CustomResourceOptions? opts = null)
public static User get(String name, Output<String> id, UserState state, CustomResourceOptions options)
resources:
  _:
    type: databricks:User
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AclPrincipalId string
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- Active bool
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- AllowClusterCreate bool
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and the cluster_id argument. Everyone without the allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- AllowInstancePoolCreate bool
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- DatabricksSqlAccess bool
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- DisableAsUserDeletion bool
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to `true` when the provider is configured at the account-level and `false` when configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- DisplayName string
- This is an alias for the username that can be the full name of the user.
- ExternalId string
- ID of the user in an external identity provider.
- Force bool
- Ignore `cannot create user: User with username X already exists` errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- ForceDeleteHomeDir bool
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- ForceDeleteRepos bool
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Home string
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- Repos string
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- UserName string
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- WorkspaceAccess bool
- AclPrincipalId string
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- Active bool
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- AllowClusterCreate bool
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and the cluster_id argument. Everyone without the allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- AllowInstancePoolCreate bool
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- DatabricksSqlAccess bool
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- DisableAsUserDeletion bool
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to `true` when the provider is configured at the account-level and `false` when configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- DisplayName string
- This is an alias for the username that can be the full name of the user.
- ExternalId string
- ID of the user in an external identity provider.
- Force bool
- Ignore cannot create user: User with username X already existserrors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- ForceDeleteHomeDir bool
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- ForceDeleteRepos bool
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Home string
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- Repos string
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- UserName string
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- WorkspaceAccess bool
- aclPrincipalId String
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- active Boolean
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allowClusterCreate Boolean
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and the cluster_id argument. Everyone without the allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- allowInstancePoolCreate Boolean
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricksSqlAccess Boolean
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- disableAsUserDeletion Boolean
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to truewhen the provider is configured at the account-level andfalsewhen configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- displayName String
- This is an alias for the username that can be the full name of the user.
- externalId String
- ID of the user in an external identity provider.
- force Boolean
- Ignore cannot create user: User with username X already existserrors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- forceDeleteHomeDir Boolean
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- forceDeleteRepos Boolean
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home String
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- repos String
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- userName String
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- workspaceAccess Boolean
- aclPrincipalId string
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- active boolean
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allowClusterCreate boolean
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and the cluster_id argument. Everyone without the allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- allowInstancePoolCreate boolean
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricksSqlAccess boolean
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- disableAsUserDeletion boolean
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to truewhen the provider is configured at the account-level andfalsewhen configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- displayName string
- This is an alias for the username that can be the full name of the user.
- externalId string
- ID of the user in an external identity provider.
- force boolean
- Ignore cannot create user: User with username X already existserrors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- forceDeleteHomeDir boolean
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- forceDeleteRepos boolean
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home string
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- repos string
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- userName string
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- workspaceAccess boolean
- acl_principal_id str
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- active bool
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow_cluster_create bool
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and the cluster_id argument. Everyone without the allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- allow_instance_pool_create bool
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks_sql_access bool
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- disable_as_user_deletion bool
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to truewhen the provider is configured at the account-level andfalsewhen configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- display_name str
- This is an alias for the username that can be the full name of the user.
- external_id str
- ID of the user in an external identity provider.
- force bool
- Ignore cannot create user: User with username X already existserrors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- force_delete_home_dir bool
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force_delete_repos bool
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home str
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- repos str
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- user_name str
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- workspace_access bool
- aclPrincipalId String
- identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
- active Boolean
- Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allowClusterCreate Boolean
- Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and the cluster_id argument. Everyone without the allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
- allowInstancePoolCreate Boolean
- Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricksSqlAccess Boolean
- This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- disableAsUserDeletion Boolean
- Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to truewhen the provider is configured at the account-level andfalsewhen configured at the workspace-level. This flag is exclusive to force_delete_repos and force_delete_home_dir flags.
- displayName String
- This is an alias for the username that can be the full name of the user.
- externalId String
- ID of the user in an external identity provider.
- force Boolean
- Ignore cannot create user: User with username X already existserrors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
- forceDeleteHomeDir Boolean
- This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- forceDeleteRepos Boolean
- This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home String
- Home folder of the user, e.g. /Users/mr.foo@example.com.
- repos String
- Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
- userName String
- This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
- workspaceAccess Boolean
Import
The resource scim user can be imported using id:
bash
$ pulumi import databricks:index/user:User me <user-id>
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the databricks Terraform Provider.