Databricks API client interface.
[This documentation is auto-generated]
This package provides a simplified interface to the Databricks REST API. The interface is auto-generated at instantiation from the underlying client library used in the official databricks-cli Python package.
The docs here describe the interface for version 0.8.2 of the databricks-cli package for API version 2.0. Assuming there are no major changes to the databricks-cli package structure, this package should continue to work without requiring an update.
The databricks-api package contains a DatabricksAPI class which provides instance attributes for the databricks-cli ApiClient, as well as each of the available service instances. The attributes of a DatabricksAPI instance are:
DatabricksAPI.client <databricks_cli.sdk.api_client.ApiClient>
DatabricksAPI.jobs <databricks_cli.sdk.service.JobsService>
DatabricksAPI.cluster <databricks_cli.sdk.service.ClusterService>
DatabricksAPI.managed_library <databricks_cli.sdk.service.ManagedLibraryService>
DatabricksAPI.dbfs <databricks_cli.sdk.service.DbfsService>
DatabricksAPI.workspace <databricks_cli.sdk.service.WorkspaceService>
DatabricksAPI.secret <databricks_cli.sdk.service.SecretService>
DatabricksAPI.groups <databricks_cli.sdk.service.GroupsService>
To instantiate the client, provide the Databricks host and either a token or a user and password. The full signature of the underlying ApiClient.__init__ is also shown below.
from databricks_api import DatabricksAPI

# Provide a host and token
db = DatabricksAPI(
    host="example.cloud.databricks.com",
    token="dpapi123..."
)

# OR a host and user and password
db = DatabricksAPI(
    host="example.cloud.databricks.com",
    user="me@example.com",
    password="password"
)

# Full __init__ signature
db = DatabricksAPI(
    user=None,
    password=None,
    host=None,
    token=None,
    apiVersion=2.0,
    default_headers={},
    verify=True,
    command_name=''
)
Refer to the official Databricks REST API documentation for the functionality and required arguments of each method below.
Each of the service instance attributes provides the following public methods:
DatabricksAPI.jobs

DatabricksAPI.jobs.cancel_run(run_id)

DatabricksAPI.jobs.create_job(
    name=None,
    existing_cluster_id=None,
    new_cluster=None,
    libraries=None,
    email_notifications=None,
    timeout_seconds=None,
    max_retries=None,
    min_retry_interval_millis=None,
    retry_on_timeout=None,
    schedule=None,
    notebook_task=None,
    spark_jar_task=None,
    spark_python_task=None,
    spark_submit_task=None,
    max_concurrent_runs=None
)

DatabricksAPI.jobs.delete_job(job_id)

DatabricksAPI.jobs.delete_run(run_id=None)

DatabricksAPI.jobs.export_run(
    run_id,
    views_to_export=None
)

DatabricksAPI.jobs.get_job(job_id)

DatabricksAPI.jobs.get_run(run_id=None)

DatabricksAPI.jobs.get_run_output(run_id)

DatabricksAPI.jobs.list_jobs()

DatabricksAPI.jobs.list_runs(
    job_id=None,
    active_only=None,
    completed_only=None,
    offset=None,
    limit=None
)

DatabricksAPI.jobs.reset_job(
    job_id,
    new_settings
)

DatabricksAPI.jobs.run_now(
    job_id=None,
    jar_params=None,
    notebook_params=None,
    python_params=None,
    spark_submit_params=None
)

DatabricksAPI.jobs.submit_run(
    run_name=None,
    existing_cluster_id=None,
    new_cluster=None,
    libraries=None,
    notebook_task=None,
    spark_jar_task=None,
    spark_python_task=None,
    spark_submit_task=None,
    timeout_seconds=None
)
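For example, here is a minimal sketch that creates a notebook job and triggers a run. The cluster ID, notebook path, and job settings are hypothetical; the returned dictionaries follow the API 2.0 response shapes ({"job_id": ...} from create_job and {"run_id": ...} from run_now).

# Sketch: create a notebook job on an existing cluster, then run it.
# All IDs and paths are hypothetical.
job = db.jobs.create_job(
    name="nightly-report",
    existing_cluster_id="1234-567890-abcde123",
    notebook_task={"notebook_path": "/Users/me@example.com/nightly_report"},
    max_retries=1,
    timeout_seconds=3600
)
run = db.jobs.run_now(job_id=job["job_id"])
print(db.jobs.get_run(run_id=run["run_id"]))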
DatabricksAPI.cluster

DatabricksAPI.cluster.create_cluster(
    num_workers=None,
    autoscale=None,
    cluster_name=None,
    spark_version=None,
    spark_conf=None,
    aws_attributes=None,
    node_type_id=None,
    driver_node_type_id=None,
    ssh_public_keys=None,
    custom_tags=None,
    cluster_log_conf=None,
    spark_env_vars=None,
    autotermination_minutes=None,
    enable_elastic_disk=None,
    cluster_source=None
)

DatabricksAPI.cluster.delete_cluster(cluster_id)

DatabricksAPI.cluster.edit_cluster(
    cluster_id,
    num_workers=None,
    autoscale=None,
    cluster_name=None,
    spark_version=None,
    spark_conf=None,
    aws_attributes=None,
    node_type_id=None,
    driver_node_type_id=None,
    ssh_public_keys=None,
    custom_tags=None,
    cluster_log_conf=None,
    spark_env_vars=None,
    autotermination_minutes=None,
    enable_elastic_disk=None,
    cluster_source=None
)

DatabricksAPI.cluster.get_cluster(cluster_id)

DatabricksAPI.cluster.list_available_zones()

DatabricksAPI.cluster.list_clusters()

DatabricksAPI.cluster.list_node_types()

DatabricksAPI.cluster.list_spark_versions()

DatabricksAPI.cluster.resize_cluster(
    cluster_id,
    num_workers=None,
    autoscale=None
)

DatabricksAPI.cluster.restart_cluster(cluster_id)

DatabricksAPI.cluster.start_cluster(cluster_id)
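A minimal sketch of cluster creation follows; the cluster name, node type, and Spark version are hypothetical placeholders (valid values can be discovered with list_node_types and list_spark_versions), and create_cluster returns a dictionary containing the new cluster_id.

# Sketch: create a small auto-terminating cluster. All values are hypothetical.
cluster = db.cluster.create_cluster(
    num_workers=2,
    cluster_name="example-cluster",
    spark_version="4.0.x-scala2.11",
    node_type_id="i3.xlarge",
    autotermination_minutes=30
)
print(db.cluster.get_cluster(cluster["cluster_id"]))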
DatabricksAPI.managed_library

DatabricksAPI.managed_library.all_cluster_statuses()

DatabricksAPI.managed_library.cluster_status(cluster_id)

DatabricksAPI.managed_library.install_libraries(
    cluster_id,
    libraries=None
)

DatabricksAPI.managed_library.uninstall_libraries(
    cluster_id,
    libraries=None
)
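For example, a sketch that installs a PyPI package on a running cluster; the cluster ID and package name are hypothetical, and the library spec follows the REST API's library object format.

# Sketch: install a PyPI library, then check the cluster's library status.
# The cluster ID is hypothetical.
db.managed_library.install_libraries(
    cluster_id="1234-567890-abcde123",
    libraries=[{"pypi": {"package": "simplejson"}}]
)
print(db.managed_library.cluster_status(cluster_id="1234-567890-abcde123"))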
DatabricksAPI.dbfs

DatabricksAPI.dbfs.add_block(
    handle,
    data
)

DatabricksAPI.dbfs.close(handle)

DatabricksAPI.dbfs.create(
    path,
    overwrite=None
)

DatabricksAPI.dbfs.delete(
    path,
    recursive=None
)

DatabricksAPI.dbfs.get_status(path)

DatabricksAPI.dbfs.list(path)

DatabricksAPI.dbfs.mkdirs(path)

DatabricksAPI.dbfs.move(
    source_path,
    destination_path
)

DatabricksAPI.dbfs.put(
    path,
    contents=None,
    overwrite=None
)

DatabricksAPI.dbfs.read(
    path,
    offset=None,
    length=None
)
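As a sketch, a small file can be uploaded with the streaming create/add_block/close sequence. The DBFS path is hypothetical; per the DBFS API, create returns a dictionary containing a handle and add_block expects base64-encoded data.

import base64

# Sketch: stream a small file to DBFS. The path is hypothetical.
handle = db.dbfs.create("/tmp/example.txt", overwrite=True)["handle"]
db.dbfs.add_block(handle, base64.b64encode(b"hello dbfs").decode("utf-8"))
db.dbfs.close(handle)
print(db.dbfs.get_status("/tmp/example.txt"))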
DatabricksAPI.workspace

DatabricksAPI.workspace.delete(
    path,
    recursive=None
)

DatabricksAPI.workspace.export_workspace(
    path,
    format=None,
    direct_download=None
)

DatabricksAPI.workspace.get_status(path)

DatabricksAPI.workspace.import_workspace(
    path,
    format=None,
    language=None,
    content=None,
    overwrite=None
)

DatabricksAPI.workspace.list(path)

DatabricksAPI.workspace.mkdirs(path)
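A sketch of importing a source-format notebook; the workspace path and notebook content are hypothetical, and content must be base64-encoded per the Workspace API.

import base64

# Sketch: import a Python notebook in SOURCE format, then list its directory.
# The path and content are hypothetical.
db.workspace.import_workspace(
    "/Users/me@example.com/example",
    format="SOURCE",
    language="PYTHON",
    content=base64.b64encode(b"print('hello')").decode("utf-8"),
    overwrite=True
)
print(db.workspace.list("/Users/me@example.com"))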
DatabricksAPI.secret

DatabricksAPI.secret.create_scope(
    scope,
    initial_manage_principal=None
)

DatabricksAPI.secret.delete_acl(
    scope,
    principal
)

DatabricksAPI.secret.delete_scope(scope)

DatabricksAPI.secret.delete_secret(
    scope,
    key
)

DatabricksAPI.secret.get_acl(
    scope,
    principal
)

DatabricksAPI.secret.list_acls(scope)

DatabricksAPI.secret.list_scopes()

DatabricksAPI.secret.list_secrets(scope)

DatabricksAPI.secret.put_acl(
    scope,
    principal,
    permission
)

DatabricksAPI.secret.put_secret(
    scope,
    key,
    string_value=None,
    bytes_value=None
)
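A sketch of scope and secret management; the scope name, key, and value are hypothetical, and "users" is the built-in principal that grants all users MANAGE permission on the scope.

# Sketch: create a scope, store a secret, and list the scope's secrets.
# Scope name, key, and value are hypothetical.
db.secret.create_scope("example-scope", initial_manage_principal="users")
db.secret.put_secret("example-scope", "db-password", string_value="s3cret")
print(db.secret.list_secrets("example-scope"))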
DatabricksAPI.groups

DatabricksAPI.groups.add_to_group(
    parent_name,
    user_name=None,
    group_name=None
)

DatabricksAPI.groups.create_group(group_name)

DatabricksAPI.groups.get_group_members(group_name)

DatabricksAPI.groups.get_groups()

DatabricksAPI.groups.get_groups_for_principal(
    user_name=None,
    group_name=None
)

DatabricksAPI.groups.remove_from_group(
    parent_name,
    user_name=None,
    group_name=None
)

DatabricksAPI.groups.remove_group(group_name)
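Finally, a sketch of group management; the group and user names are hypothetical.

# Sketch: create a group, add a user, and list its members.
# Group and user names are hypothetical.
db.groups.create_group("data-engineers")
db.groups.add_to_group("data-engineers", user_name="me@example.com")
print(db.groups.get_group_members("data-engineers"))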
Hashes for databricks_api-0.1.0-py2.py3-none-any.whl

Algorithm   | Hash digest
------------|------------
SHA256      | 276ab4a26b8aa60d9a86c5fb305431c133acac4cc8e4188fbaecbc9e46636a68
MD5         | db7ce28762263b40eca51e1d9eb42a55
BLAKE2b-256 | 3052f2f6abd00d59059d0f0733a2a1b5088b1ce3bc4a2eafd8ff55d032c245de