从官方databricks cli包自动生成的databricks api客户端

databricks-api的Python项目详细描述


pypipyversions

[此文档是自动生成的]

这个包为databricks rest api提供了一个简化的接口。该接口在实例化时自动生成,底层使用官方databricks-cli Python包中的客户端库。

使用pip安装

pip install databricks-api

这里的文档描述了databricks-cli版本0.9.0(api版本2.0)的接口。假设databricks-cli包的新主版本或次版本不改变结构,此包应无需更新即可继续工作。

databricks-api包包含一个DatabricksAPI类,该类提供databricks-cli ApiClient的实例属性,以及所有可用的服务实例。DatabricksAPI实例的属性是:

  • DatabricksAPI.client <databricks_cli.sdk.api_client.ApiClient>
  • DatabricksAPI.jobs <databricks_cli.sdk.service.JobsService>
  • DatabricksAPI.cluster <databricks_cli.sdk.service.ClusterService>
  • DatabricksAPI.managed_library <databricks_cli.sdk.service.ManagedLibraryService>
  • DatabricksAPI.dbfs <databricks_cli.sdk.service.DbfsService>
  • DatabricksAPI.workspace <databricks_cli.sdk.service.WorkspaceService>
  • DatabricksAPI.secret <databricks_cli.sdk.service.SecretService>
  • DatabricksAPI.groups <databricks_cli.sdk.service.GroupsService>
  • DatabricksAPI.instance_pool <databricks_cli.sdk.service.InstancePoolService>

要实例化客户端,请提供databricks主机和令牌,或者主机、用户和密码。下面还展示了底层ApiClient.__init__的完整签名。

from databricks_api import DatabricksAPI

# Provide a host and token
db = DatabricksAPI(host="example.cloud.databricks.com", token="dpapi123...")

# OR a host and user and password
db = DatabricksAPI(host="example.cloud.databricks.com", user="me@example.com", password="password")

# Full __init__ signature
db = DatabricksAPI(
    user=None,
    password=None,
    host=None,
    token=None,
    apiVersion=2.0,
    default_headers={},
    verify=True,
    command_name=''
)

关于下面每个方法的功能和所需参数,请参考official documentation。

每个服务实例属性都提供以下公共方法:

databricksapi.jobs

DatabricksAPI.jobs.cancel_run(run_id,headers=None,)
DatabricksAPI.jobs.create_job(name=None,existing_cluster_id=None,new_cluster=None,libraries=None,email_notifications=None,timeout_seconds=None,max_retries=None,min_retry_interval_millis=None,retry_on_timeout=None,schedule=None,notebook_task=None,spark_jar_task=None,spark_python_task=None,spark_submit_task=None,max_concurrent_runs=None,headers=None,)
DatabricksAPI.jobs.delete_job(job_id,headers=None,)
DatabricksAPI.jobs.delete_run(run_id=None,headers=None,)
DatabricksAPI.jobs.export_run(run_id,views_to_export=None,headers=None,)
DatabricksAPI.jobs.get_job(job_id,headers=None,)
DatabricksAPI.jobs.get_run(run_id=None,headers=None,)
DatabricksAPI.jobs.get_run_output(run_id,headers=None,)
DatabricksAPI.jobs.list_jobs(headers=None)
DatabricksAPI.jobs.list_runs(job_id=None,active_only=None,completed_only=None,offset=None,limit=None,headers=None,)
DatabricksAPI.jobs.reset_job(job_id,new_settings,headers=None,)
DatabricksAPI.jobs.run_now(job_id=None,jar_params=None,notebook_params=None,python_params=None,spark_submit_params=None,headers=None,)
DatabricksAPI.jobs.submit_run(run_name=None,existing_cluster_id=None,new_cluster=None,libraries=None,notebook_task=None,spark_jar_task=None,spark_python_task=None,spark_submit_task=None,timeout_seconds=None,headers=None,)

databricksapi.cluster

DatabricksAPI.cluster.create_cluster(num_workers=None,autoscale=None,cluster_name=None,spark_version=None,spark_conf=None,aws_attributes=None,node_type_id=None,driver_node_type_id=None,ssh_public_keys=None,custom_tags=None,cluster_log_conf=None,spark_env_vars=None,autotermination_minutes=None,enable_elastic_disk=None,cluster_source=None,instance_pool_id=None,headers=None,)
DatabricksAPI.cluster.delete_cluster(cluster_id,headers=None,)
DatabricksAPI.cluster.edit_cluster(cluster_id,num_workers=None,autoscale=None,cluster_name=None,spark_version=None,spark_conf=None,aws_attributes=None,node_type_id=None,driver_node_type_id=None,ssh_public_keys=None,custom_tags=None,cluster_log_conf=None,spark_env_vars=None,autotermination_minutes=None,enable_elastic_disk=None,cluster_source=None,instance_pool_id=None,headers=None,)
DatabricksAPI.cluster.get_cluster(cluster_id,headers=None,)
DatabricksAPI.cluster.get_events(cluster_id,start_time=None,end_time=None,order=None,event_types=None,offset=None,limit=None,headers=None,)
DatabricksAPI.cluster.list_available_zones(headers=None)
DatabricksAPI.cluster.list_clusters(headers=None)
DatabricksAPI.cluster.list_node_types(headers=None)
DatabricksAPI.cluster.list_spark_versions(headers=None)
DatabricksAPI.cluster.permanent_delete_cluster(cluster_id,headers=None,)
DatabricksAPI.cluster.pin_cluster(cluster_id,headers=None,)
DatabricksAPI.cluster.resize_cluster(cluster_id,num_workers=None,autoscale=None,headers=None,)
DatabricksAPI.cluster.restart_cluster(cluster_id,headers=None,)
DatabricksAPI.cluster.start_cluster(cluster_id,headers=None,)
DatabricksAPI.cluster.unpin_cluster(cluster_id,headers=None,)

databricksapi.managed_library

DatabricksAPI.managed_library.all_cluster_statuses(headers=None)
DatabricksAPI.managed_library.cluster_status(cluster_id,headers=None,)
DatabricksAPI.managed_library.install_libraries(cluster_id,libraries=None,headers=None,)
DatabricksAPI.managed_library.uninstall_libraries(cluster_id,libraries=None,headers=None,)

databricksapi.dbfs

DatabricksAPI.dbfs.add_block(handle,data,headers=None,)
DatabricksAPI.dbfs.close(handle,headers=None,)
DatabricksAPI.dbfs.create(path,overwrite=None,headers=None,)
DatabricksAPI.dbfs.delete(path,recursive=None,headers=None,)
DatabricksAPI.dbfs.get_status(path,headers=None,)
DatabricksAPI.dbfs.list(path,headers=None,)
DatabricksAPI.dbfs.mkdirs(path,headers=None,)
DatabricksAPI.dbfs.move(source_path,destination_path,headers=None,)
DatabricksAPI.dbfs.put(path,contents=None,overwrite=None,headers=None,)
DatabricksAPI.dbfs.read(path,offset=None,length=None,headers=None,)

databricksapi.workspace

DatabricksAPI.workspace.delete(path,recursive=None,headers=None,)
DatabricksAPI.workspace.export_workspace(path,format=None,direct_download=None,headers=None,)
DatabricksAPI.workspace.get_status(path,headers=None,)
DatabricksAPI.workspace.import_workspace(path,format=None,language=None,content=None,overwrite=None,headers=None,)
DatabricksAPI.workspace.list(path,headers=None,)
DatabricksAPI.workspace.mkdirs(path,headers=None,)

databricksapi.secret

DatabricksAPI.secret.create_scope(scope,initial_manage_principal=None,scope_backend_type=None,headers=None,)
DatabricksAPI.secret.delete_acl(scope,principal,headers=None,)
DatabricksAPI.secret.delete_scope(scope,headers=None,)
DatabricksAPI.secret.delete_secret(scope,key,headers=None,)
DatabricksAPI.secret.get_acl(scope,principal,headers=None,)
DatabricksAPI.secret.list_acls(scope,headers=None,)
DatabricksAPI.secret.list_scopes(headers=None)
DatabricksAPI.secret.list_secrets(scope,headers=None,)
DatabricksAPI.secret.put_acl(scope,principal,permission,headers=None,)
DatabricksAPI.secret.put_secret(scope,key,string_value=None,bytes_value=None,headers=None,)

databricksapi.groups

DatabricksAPI.groups.add_to_group(parent_name,user_name=None,group_name=None,headers=None,)
DatabricksAPI.groups.create_group(group_name,headers=None,)
DatabricksAPI.groups.get_group_members(group_name,headers=None,)
DatabricksAPI.groups.get_groups(headers=None)
DatabricksAPI.groups.get_groups_for_principal(user_name=None,group_name=None,headers=None,)
DatabricksAPI.groups.remove_from_group(parent_name,user_name=None,group_name=None,headers=None,)
DatabricksAPI.groups.remove_group(group_name,headers=None,)

databricksapi.instance_pool

DatabricksAPI.instance_pool.create_instance_pool(instance_pool_name=None,min_idle_instances=None,max_capacity=None,aws_attributes=None,node_type_id=None,custom_tags=None,idle_instance_autotermination_minutes=None,enable_elastic_disk=None,disk_spec=None,preloaded_spark_versions=None,headers=None,)
DatabricksAPI.instance_pool.delete_instance_pool(instance_pool_id=None,headers=None,)
DatabricksAPI.instance_pool.edit_instance_pool(instance_pool_id,instance_pool_name=None,min_idle_instances=None,max_capacity=None,aws_attributes=None,node_type_id=None,custom_tags=None,idle_instance_autotermination_minutes=None,enable_elastic_disk=None,disk_spec=None,preloaded_spark_versions=None,headers=None,)
DatabricksAPI.instance_pool.get_instance_pool(instance_pool_id=None,headers=None,)
DatabricksAPI.instance_pool.list_instance_pools(headers=None)

欢迎加入QQ群-->: 979659372 Python中文网_新手群

推荐PyPI第三方库


热门话题
java我的int值在SharedReferences中不被记住   java如何编辑Spring可分页对象?   java如何在gradle中单独调用任务   jvm以编程方式设置最大java堆大小   java如果满足多个条件,如何使用If语句计算数字?   如何在java中从json文件中获取特定值   如何在Sphinx4中为Java语音识别添加自定义语法?   java int[]copy=data;//当数据是数组时会发生什么?   java豪猪管理器。停下来。destroy()不起作用   安卓在API级别28中找不到画布变量   基于SOLID的java冗余   用于talend作业的java Liferay和portlet   从java到安卓的视频流   java获取在控件的类定义中添加自定义控件的场景大小   awt Java IndexOutOfBoundsException   java如何使用Spring JmsTemplate更改MQ头   java遍历数组并打印每个对象   java Google Map api v2标记在我旋转手机和地图“刷新”之前不会移动