databricks.Library
Installs a library on a databricks.Cluster. Each type of library has a slightly different syntax. Only one type of library can be set within one resource; otherwise, the plan will fail with an error.
This resource can only be used with a workspace-level provider!
The databricks.Library resource will always start the associated cluster if it's not running, so make sure to have auto-termination configured. It's not possible to atomically change the version of the same library without a cluster restart, and libraries are fully removed from the cluster only after a restart.
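For example, a minimal sketch that pairs a library with an auto-terminating cluster, so a cluster started by a library installation shuts itself down again when idle. The node type and Spark version values are illustrative placeholders only:

import * as databricks from "@pulumi/databricks";

// Cluster with auto-termination, so it shuts down after 20 idle minutes
// even if a library installation started it. Values are placeholders.
const shared = new databricks.Cluster("shared", {
    clusterName: "shared",
    sparkVersion: "15.4.x-scala2.12", // illustrative DBR version
    nodeTypeId: "i3.xlarge",          // illustrative node type
    numWorkers: 1,
    autoterminationMinutes: 20,
});

const pandas = new databricks.Library("pandas", {
    clusterId: shared.id,
    pypi: {
        "package": "pandas",
    },
});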
Plugin Framework Migration
The library resource has been migrated from sdkv2 to plugin framework. If you encounter any problem with this resource and suspect it is due to the migration, you can fall back to sdkv2 by setting the environment variable in the following way: export USE_SDK_V2_RESOURCES="databricks.Library".
Installing library on all clusters
You can install libraries on all clusters with the help of the databricks.getClusters data source:
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

export = async () => {
    const all = await databricks.getClusters({});
    const cli: databricks.Library[] = [];
    for (const range of all.ids.map((v, k) => ({key: k, value: v}))) {
        cli.push(new databricks.Library(`cli-${range.key}`, {
            clusterId: range.value,
            pypi: {
                "package": "databricks-cli",
            },
        }));
    }
}
import pulumi
import pulumi_databricks as databricks

all = databricks.get_clusters()
cli = []
for i, cluster_id in enumerate(all.ids):
    cli.append(databricks.Library(f"cli-{i}",
        cluster_id=cluster_id,
        pypi={
            "package": "databricks-cli",
        }))
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		all, err := databricks.GetClusters(ctx, &databricks.GetClustersArgs{}, nil)
		if err != nil {
			return err
		}
		var cli []*databricks.Library
		for key0, val0 := range all.Ids {
			__res, err := databricks.NewLibrary(ctx, fmt.Sprintf("cli-%v", key0), &databricks.LibraryArgs{
				ClusterId: pulumi.String(val0),
				Pypi: &databricks.LibraryPypiArgs{
					Package: pulumi.String("databricks-cli"),
				},
			})
			if err != nil {
				return err
			}
			cli = append(cli, __res)
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(async() =>
{
    var all = await Databricks.GetClusters.InvokeAsync();
    var cli = new List<Databricks.Library>();
    foreach (var range in all.Ids.Select((v, k) => new { Key = k, Value = v }))
    {
        cli.Add(new Databricks.Library($"cli-{range.Key}", new()
        {
            ClusterId = range.Value,
            Pypi = new Databricks.Inputs.LibraryPypiArgs
            {
                Package = "databricks-cli",
            },
        }));
    }
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.inputs.GetClustersArgs;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import com.pulumi.databricks.inputs.LibraryPypiArgs;
import java.util.ArrayList;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var all = DatabricksFunctions.getClusters(GetClustersArgs.builder()
            .build());

        final var cli = all.applyValue(getClustersResult -> {
            final var resources = new ArrayList<Library>();
            final var ids = getClustersResult.ids();
            for (var i = 0; i < ids.size(); i++) {
                var resource = new Library("cli-" + i, LibraryArgs.builder()
                    .clusterId(ids.get(i))
                    .pypi(LibraryPypiArgs.builder()
                        .package_("databricks-cli")
                        .build())
                    .build());
                resources.add(resource);
            }
            return resources;
        });
    }
}
resources:
  cli:
    type: databricks:Library
    properties:
      clusterId: ${range.value}
      pypi:
        package: databricks-cli
    options: {}
variables:
  all:
    fn::invoke:
      function: databricks:getClusters
      arguments: {}
Java/Scala Maven
Installing artifacts from a Maven repository. You can also optionally specify a repo parameter for a custom Maven-style repository, which should be accessible without any authentication. Maven libraries are resolved in the Databricks control plane, so the repo should be accessible from it. It can even be a properly configured Maven S3 wagon, AWS CodeArtifact, or Azure Artifacts; a sketch using a custom repo follows the examples below.
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const deequ = new databricks.Library("deequ", {
    clusterId: _this.id,
    maven: {
        coordinates: "com.amazon.deequ:deequ:1.0.4",
        exclusions: ["org.apache.avro:avro"],
    },
});
import pulumi
import pulumi_databricks as databricks

deequ = databricks.Library("deequ",
    cluster_id=this["id"],
    maven={
        "coordinates": "com.amazon.deequ:deequ:1.0.4",
        "exclusions": ["org.apache.avro:avro"],
    })
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewLibrary(ctx, "deequ", &databricks.LibraryArgs{
			ClusterId: pulumi.Any(this.Id),
			Maven: &databricks.LibraryMavenArgs{
				Coordinates: pulumi.String("com.amazon.deequ:deequ:1.0.4"),
				Exclusions: pulumi.StringArray{
					pulumi.String("org.apache.avro:avro"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    var deequ = new Databricks.Library("deequ", new()
    {
        ClusterId = @this.Id,
        Maven = new Databricks.Inputs.LibraryMavenArgs
        {
            Coordinates = "com.amazon.deequ:deequ:1.0.4",
            Exclusions = new[]
            {
                "org.apache.avro:avro",
            },
        },
    });
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import com.pulumi.databricks.inputs.LibraryMavenArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var deequ = new Library("deequ", LibraryArgs.builder()
            .clusterId(this_.id())
            .maven(LibraryMavenArgs.builder()
                .coordinates("com.amazon.deequ:deequ:1.0.4")
                .exclusions("org.apache.avro:avro")
                .build())
            .build());
    }
}
resources:
  deequ:
    type: databricks:Library
    properties:
      clusterId: ${this.id}
      maven:
        coordinates: com.amazon.deequ:deequ:1.0.4
        exclusions:
          - org.apache.avro:avro
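A minimal sketch of the optional repo field mentioned above. The repository URL and cluster ID are hypothetical placeholders; the repository must be reachable from the Databricks control plane without authentication:

import * as databricks from "@pulumi/databricks";

// Hypothetical internal Maven repository; URL and cluster ID are placeholders.
const internalUtils = new databricks.Library("internal-utils", {
    clusterId: "0000-000000-cluster1",
    maven: {
        coordinates: "com.example:utils:1.2.3",
        repo: "https://artifacts.example.com/maven",
    },
});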
Python PyPI
Installing Python PyPI artifacts. You can optionally also specify the repo parameter for a custom PyPI mirror, which should be accessible without any authentication for the network that the cluster runs in.

The repo host should be accessible from the Internet by the Databricks control plane. If connectivity to custom PyPI repositories is required, please modify the cluster-node /etc/pip.conf through databricks_global_init_script. A sketch using a custom repo follows the examples below.
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const fbprophet = new databricks.Library("fbprophet", {
    clusterId: _this.id,
    pypi: {
        "package": "fbprophet==0.6",
    },
});
import pulumi
import pulumi_databricks as databricks

fbprophet = databricks.Library("fbprophet",
    cluster_id=this["id"],
    pypi={
        "package": "fbprophet==0.6",
    })
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewLibrary(ctx, "fbprophet", &databricks.LibraryArgs{
			ClusterId: pulumi.Any(this.Id),
			Pypi: &databricks.LibraryPypiArgs{
				Package: pulumi.String("fbprophet==0.6"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    var fbprophet = new Databricks.Library("fbprophet", new()
    {
        ClusterId = @this.Id,
        Pypi = new Databricks.Inputs.LibraryPypiArgs
        {
            Package = "fbprophet==0.6",
        },
    });
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import com.pulumi.databricks.inputs.LibraryPypiArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var fbprophet = new Library("fbprophet", LibraryArgs.builder()
            .clusterId(this_.id())
            .pypi(LibraryPypiArgs.builder()
                .package_("fbprophet==0.6")
                .build())
            .build());
    }
}
resources:
  fbprophet:
    type: databricks:Library
    properties:
      clusterId: ${this.id}
      pypi:
        package: fbprophet==0.6
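A minimal sketch of the optional repo field for a custom PyPI mirror. The index URL, package name, and cluster ID are hypothetical placeholders:

import * as databricks from "@pulumi/databricks";

// Hypothetical PyPI mirror; URL, package name, and cluster ID are placeholders.
const internalPkg = new databricks.Library("internal-pkg", {
    clusterId: "0000-000000-cluster1",
    pypi: {
        "package": "my-internal-package==1.0.0",
        repo: "https://pypi.example.com/simple",
    },
});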
Python requirements files
Installing Python libraries listed in the requirements.txt file. Only Workspace paths and Unity Catalog Volumes paths are supported. Requires a cluster with DBR 15.0+.
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const libraries = new databricks.Library("libraries", {
    clusterId: _this.id,
    requirements: "/Workspace/path/to/requirements.txt",
});
import pulumi
import pulumi_databricks as databricks

libraries = databricks.Library("libraries",
    cluster_id=this["id"],
    requirements="/Workspace/path/to/requirements.txt")
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewLibrary(ctx, "libraries", &databricks.LibraryArgs{
			ClusterId:    pulumi.Any(this.Id),
			Requirements: pulumi.String("/Workspace/path/to/requirements.txt"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    var libraries = new Databricks.Library("libraries", new()
    {
        ClusterId = @this.Id,
        Requirements = "/Workspace/path/to/requirements.txt",
    });
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var libraries = new Library("libraries", LibraryArgs.builder()
            .clusterId(this_.id())
            .requirements("/Workspace/path/to/requirements.txt")
            .build());
    }
}
resources:
  libraries:
    type: databricks:Library
    properties:
      clusterId: ${this.id}
      requirements: /Workspace/path/to/requirements.txt
R CRAN
Installing artifacts from CRAN. You can also optionally specify a repo parameter for a custom CRAN mirror; a sketch using repo follows the examples below.
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const rkeops = new databricks.Library("rkeops", {
    clusterId: _this.id,
    cran: {
        "package": "rkeops",
    },
});
import pulumi
import pulumi_databricks as databricks

rkeops = databricks.Library("rkeops",
    cluster_id=this["id"],
    cran={
        "package": "rkeops",
    })
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewLibrary(ctx, "rkeops", &databricks.LibraryArgs{
			ClusterId: pulumi.Any(this.Id),
			Cran: &databricks.LibraryCranArgs{
				Package: pulumi.String("rkeops"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    var rkeops = new Databricks.Library("rkeops", new()
    {
        ClusterId = @this.Id,
        Cran = new Databricks.Inputs.LibraryCranArgs
        {
            Package = "rkeops",
        },
    });
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import com.pulumi.databricks.inputs.LibraryCranArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var rkeops = new Library("rkeops", LibraryArgs.builder()
            .clusterId(this_.id())
            .cran(LibraryCranArgs.builder()
                .package_("rkeops")
                .build())
            .build());
    }
}
resources:
  rkeops:
    type: databricks:Library
    properties:
      clusterId: ${this.id}
      cran:
        package: rkeops
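A minimal sketch of the optional repo field for a custom CRAN mirror. The mirror URL and cluster ID are hypothetical placeholders:

import * as databricks from "@pulumi/databricks";

// Hypothetical CRAN mirror; URL and cluster ID are placeholders.
const rkeopsFromMirror = new databricks.Library("rkeops-from-mirror", {
    clusterId: "0000-000000-cluster1",
    cran: {
        "package": "rkeops",
        repo: "https://cran.example.com",
    },
});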
Related Resources
The following resources are often used in the same context:
- End to end workspace management guide.
- databricks.getClusters data to retrieve a list of databricks.Cluster ids.
- databricks.Cluster to create Databricks Clusters.
- databricks.ClusterPolicy to create a databricks.Cluster policy, which limits the ability to create clusters based on a set of rules.
- databricks.GlobalInitScript to manage global init scripts, which are run on all databricks.Cluster and databricks_job.
- databricks.Job to manage Databricks Jobs to run non-interactive code in a databricks_cluster.
- databricks.Pipeline to deploy Lakeflow Declarative Pipelines.
- databricks.Repo to manage Databricks Repos.
Create Library Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Library(name: string, args: LibraryArgs, opts?: CustomResourceOptions);

@overload
def Library(resource_name: str,
            args: LibraryArgs,
            opts: Optional[ResourceOptions] = None)
@overload
def Library(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            cluster_id: Optional[str] = None,
            cran: Optional[LibraryCranArgs] = None,
            egg: Optional[str] = None,
            jar: Optional[str] = None,
            library_id: Optional[str] = None,
            maven: Optional[LibraryMavenArgs] = None,
            provider_config: Optional[LibraryProviderConfigArgs] = None,
            pypi: Optional[LibraryPypiArgs] = None,
            requirements: Optional[str] = None,
            whl: Optional[str] = None)

func NewLibrary(ctx *Context, name string, args LibraryArgs, opts ...ResourceOption) (*Library, error)

public Library(string name, LibraryArgs args, CustomResourceOptions? opts = null)

public Library(String name, LibraryArgs args)
public Library(String name, LibraryArgs args, CustomResourceOptions options)

type: databricks:Library
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args LibraryArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args LibraryArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args LibraryArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args LibraryArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var libraryResource = new Databricks.Library("libraryResource", new()
{
    ClusterId = "string",
    Cran = new Databricks.Inputs.LibraryCranArgs
    {
        Package = "string",
        Repo = "string",
    },
    Jar = "string",
    LibraryId = "string",
    Maven = new Databricks.Inputs.LibraryMavenArgs
    {
        Coordinates = "string",
        Exclusions = new[]
        {
            "string",
        },
        Repo = "string",
    },
    ProviderConfig = new Databricks.Inputs.LibraryProviderConfigArgs
    {
        WorkspaceId = "string",
    },
    Pypi = new Databricks.Inputs.LibraryPypiArgs
    {
        Package = "string",
        Repo = "string",
    },
    Requirements = "string",
    Whl = "string",
});
example, err := databricks.NewLibrary(ctx, "libraryResource", &databricks.LibraryArgs{
	ClusterId: pulumi.String("string"),
	Cran: &databricks.LibraryCranArgs{
		Package: pulumi.String("string"),
		Repo:    pulumi.String("string"),
	},
	Jar:       pulumi.String("string"),
	LibraryId: pulumi.String("string"),
	Maven: &databricks.LibraryMavenArgs{
		Coordinates: pulumi.String("string"),
		Exclusions: pulumi.StringArray{
			pulumi.String("string"),
		},
		Repo: pulumi.String("string"),
	},
	ProviderConfig: &databricks.LibraryProviderConfigArgs{
		WorkspaceId: pulumi.String("string"),
	},
	Pypi: &databricks.LibraryPypiArgs{
		Package: pulumi.String("string"),
		Repo:    pulumi.String("string"),
	},
	Requirements: pulumi.String("string"),
	Whl:          pulumi.String("string"),
})
var libraryResource = new Library("libraryResource", LibraryArgs.builder()
    .clusterId("string")
    .cran(LibraryCranArgs.builder()
        .package_("string")
        .repo("string")
        .build())
    .jar("string")
    .libraryId("string")
    .maven(LibraryMavenArgs.builder()
        .coordinates("string")
        .exclusions("string")
        .repo("string")
        .build())
    .providerConfig(LibraryProviderConfigArgs.builder()
        .workspaceId("string")
        .build())
    .pypi(LibraryPypiArgs.builder()
        .package_("string")
        .repo("string")
        .build())
    .requirements("string")
    .whl("string")
    .build());
library_resource = databricks.Library("libraryResource",
    cluster_id="string",
    cran={
        "package": "string",
        "repo": "string",
    },
    jar="string",
    library_id="string",
    maven={
        "coordinates": "string",
        "exclusions": ["string"],
        "repo": "string",
    },
    provider_config={
        "workspace_id": "string",
    },
    pypi={
        "package": "string",
        "repo": "string",
    },
    requirements="string",
    whl="string")
const libraryResource = new databricks.Library("libraryResource", {
    clusterId: "string",
    cran: {
        "package": "string",
        repo: "string",
    },
    jar: "string",
    libraryId: "string",
    maven: {
        coordinates: "string",
        exclusions: ["string"],
        repo: "string",
    },
    providerConfig: {
        workspaceId: "string",
    },
    pypi: {
        "package": "string",
        repo: "string",
    },
    requirements: "string",
    whl: "string",
});
type: databricks:Library
properties:
  clusterId: string
  cran:
    package: string
    repo: string
  jar: string
  libraryId: string
  maven:
    coordinates: string
    exclusions:
      - string
    repo: string
  providerConfig:
    workspaceId: string
  pypi:
    package: string
    repo: string
  requirements: string
  whl: string
Library Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The Library resource accepts the following input properties:
- ClusterId string
- ID of the databricks.Cluster to install the library on. You must specify exactly one of the following library types:
- Cran LibraryCran
- Configuration block for a CRAN library. The block consists of the following fields:
- Egg string
- Path to the EGG library. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above. Use whl or pypi instead.
- Jar string
- Path to the JAR library. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. For example: /Workspace/path/to/library.jar, /Volumes/path/to/library.jar, or s3://my-bucket/library.jar. If S3 is used, make sure the cluster has read access to the library. You may need to launch the cluster with an IAM role to access the S3 URI.
- LibraryId string
- Maven LibraryMaven
- Configuration block for a Maven library. The block consists of the following fields:
- ProviderConfig LibraryProviderConfig
- Configuration block for management through the account provider. This block consists of the following fields:
- Pypi LibraryPypi
- Configuration block for a PyPI library. The block consists of the following fields:
- Requirements string
- Path to the requirements.txt file. Only Workspace paths and Unity Catalog Volumes paths are supported. For example: /Workspace/path/to/requirements.txt or /Volumes/path/to/requirements.txt. Requires a cluster with DBR 15.0+.
- Whl string
- Path to the wheel library. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. For example: /Workspace/path/to/library.whl, /Volumes/path/to/library.whl, or s3://my-bucket/library.whl. If S3 is used, make sure the cluster has read access to the library. You may need to launch the cluster with an IAM role to access the S3 URI.
Outputs
All input properties are implicitly available as output properties. Additionally, the Library resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
Look up Existing Library Resource
Get an existing Library resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: LibraryState, opts?: CustomResourceOptions): Library

@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        cluster_id: Optional[str] = None,
        cran: Optional[LibraryCranArgs] = None,
        egg: Optional[str] = None,
        jar: Optional[str] = None,
        library_id: Optional[str] = None,
        maven: Optional[LibraryMavenArgs] = None,
        provider_config: Optional[LibraryProviderConfigArgs] = None,
        pypi: Optional[LibraryPypiArgs] = None,
        requirements: Optional[str] = None,
        whl: Optional[str] = None) -> Library

func GetLibrary(ctx *Context, name string, id IDInput, state *LibraryState, opts ...ResourceOption) (*Library, error)

public static Library Get(string name, Input<string> id, LibraryState? state, CustomResourceOptions? opts = null)

public static Library get(String name, Output<String> id, LibraryState state, CustomResourceOptions options)

resources:
  _:
    type: databricks:Library
    get:
      id: ${id}

- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
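A minimal usage sketch in TypeScript: Library.get adopts the state of an existing library rather than creating a new one. The ID value below is a placeholder; pass the provider-assigned ID of the resource you want to look up:

import * as databricks from "@pulumi/databricks";

// Placeholder ID; use the provider-assigned ID reported for the resource.
const adopted = databricks.Library.get("adopted", "existing-library-id");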
- ClusterId string
- ID of the databricks.Cluster to install the library on. You must specify exactly one of the following library types:
- Cran LibraryCran
- Configuration block for a CRAN library. The block consists of the following fields:
- Egg string
- Path to the EGG library. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above. Use whl or pypi instead.
- Jar string
- Path to the JAR library. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. For example: /Workspace/path/to/library.jar, /Volumes/path/to/library.jar, or s3://my-bucket/library.jar. If S3 is used, make sure the cluster has read access to the library. You may need to launch the cluster with an IAM role to access the S3 URI.
- LibraryId string
- Maven LibraryMaven
- Configuration block for a Maven library. The block consists of the following fields:
- ProviderConfig LibraryProviderConfig
- Configuration block for management through the account provider. This block consists of the following fields:
- Pypi LibraryPypi
- Configuration block for a PyPI library. The block consists of the following fields:
- Requirements string
- Path to the requirements.txt file. Only Workspace paths and Unity Catalog Volumes paths are supported. For example: /Workspace/path/to/requirements.txt or /Volumes/path/to/requirements.txt. Requires a cluster with DBR 15.0+.
- Whl string
- Path to the wheel library. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. For example: /Workspace/path/to/library.whl, /Volumes/path/to/library.whl, or s3://my-bucket/library.whl. If S3 is used, make sure the cluster has read access to the library. You may need to launch the cluster with an IAM role to access the S3 URI.
Supporting Types
LibraryCran, LibraryCranArgs
- Package string
- The name of the CRAN package to install.
- Repo string
- The repository where the package can be found. If not specified, the default CRAN repo is used.
LibraryMaven, LibraryMavenArgs
- Coordinates string
- Gradle-style Maven coordinates. For example: org.jsoup:jsoup:1.7.2.
- Exclusions List<string>
- List of dependencies to exclude. For example: ["slf4j:slf4j", "*:hadoop-client"]. See Maven dependency exclusions for more information.
- Repo string
- Maven repository to install the Maven package from. If omitted, both Maven Central Repository and Spark Packages are searched.
LibraryProviderConfig, LibraryProviderConfigArgs
- WorkspaceId string
- Workspace ID that the resource belongs to. This workspace must be part of the account that the provider is configured with.
LibraryPypi, LibraryPypiArgs
- Package string
- The name of the PyPI package to install. An optional exact version specification is also supported. For example: fbprophet or fbprophet==0.6.
- Repo string
- The repository where the package can be found. If not specified, the default pip index is used.
Import
!> Importing this resource is not currently supported.
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the databricks Terraform Provider.