Skip to content

Commit 9aa0779

Browse files
Update SDK to 2b7168368e19535c8b6ffedd47cbe4ea5b39852b
1 parent cc0e2ec commit 9aa0779

File tree

13 files changed

+56
-75
lines changed

13 files changed

+56
-75
lines changed

.codegen/_openapi_sha

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
6700bbe74746844cbb713c810c90e9187056b894
1+
2b7168368e19535c8b6ffedd47cbe4ea5b39852b

docs/account/iam/workspace_assignment.rst

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -43,9 +43,9 @@
4343
4444
a = AccountClient()
4545
46-
workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
46+
workspace_id = os.environ["TEST_WORKSPACE_ID"]
4747
48-
all = a.workspace_assignment.list(workspace_id=workspace_id)
48+
all = a.workspace_assignment.list(list=workspace_id)
4949
5050
Get the permission assignments for the specified Databricks account and Databricks workspace.
5151

@@ -74,9 +74,9 @@
7474
7575
spn_id = spn.id
7676
77-
workspace_id = os.environ["TEST_WORKSPACE_ID"]
77+
workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
7878
79-
a.workspace_assignment.update(
79+
_ = a.workspace_assignment.update(
8080
workspace_id=workspace_id,
8181
principal_id=spn_id,
8282
permissions=[iam.WorkspacePermission.USER],

docs/account/provisioning/credentials.rst

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,15 +24,15 @@
2424
2525
a = AccountClient()
2626
27-
creds = a.credentials.create(
27+
role = a.credentials.create(
2828
credentials_name=f"sdk-{time.time_ns()}",
2929
aws_credentials=provisioning.CreateCredentialAwsCredentials(
30-
sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
30+
sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
3131
),
3232
)
3333
3434
# cleanup
35-
a.credentials.delete(credentials_id=creds.credentials_id)
35+
a.credentials.delete(credentials_id=role.credentials_id)
3636
3737
Creates a Databricks credential configuration that represents cloud cross-account credentials for a
3838
specified account. Databricks uses this to set up network infrastructure properly to host Databricks

docs/account/provisioning/storage.rst

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,20 +16,21 @@
1616

1717
.. code-block::
1818
19+
import os
1920
import time
2021
2122
from databricks.sdk import AccountClient
2223
from databricks.sdk.service import provisioning
2324
2425
a = AccountClient()
2526
26-
bucket = a.storage.create(
27+
storage = a.storage.create(
2728
storage_configuration_name=f"sdk-{time.time_ns()}",
28-
root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
29+
root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]),
2930
)
3031
3132
# cleanup
32-
a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)
33+
a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
3334
3435
Creates a Databricks storage configuration for an account.
3536

docs/workspace/catalog/external_locations.rst

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -107,20 +107,20 @@
107107
108108
credential = w.storage_credentials.create(
109109
name=f"sdk-{time.time_ns()}",
110-
aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
110+
aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
111111
)
112112
113113
created = w.external_locations.create(
114114
name=f"sdk-{time.time_ns()}",
115115
credential_name=credential.name,
116-
url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
116+
url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
117117
)
118118
119-
_ = w.external_locations.get(name=created.name)
119+
_ = w.external_locations.get(get=created.name)
120120
121121
# cleanup
122-
w.storage_credentials.delete(name=credential.name)
123-
w.external_locations.delete(name=created.name)
122+
w.storage_credentials.delete(delete=credential.name)
123+
w.external_locations.delete(delete=created.name)
124124
125125
Gets an external location from the metastore. The caller must be either a metastore admin, the owner
126126
of the external location, or a user that has some privilege on the external location.
@@ -193,24 +193,24 @@
193193
194194
credential = w.storage_credentials.create(
195195
name=f"sdk-{time.time_ns()}",
196-
aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
196+
aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
197197
)
198198
199199
created = w.external_locations.create(
200200
name=f"sdk-{time.time_ns()}",
201201
credential_name=credential.name,
202-
url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
202+
url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
203203
)
204204
205205
_ = w.external_locations.update(
206206
name=created.name,
207207
credential_name=credential.name,
208-
url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
208+
url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
209209
)
210210
211211
# cleanup
212-
w.storage_credentials.delete(delete=credential.name)
213-
w.external_locations.delete(delete=created.name)
212+
w.storage_credentials.delete(name=credential.name)
213+
w.external_locations.delete(name=created.name)
214214
215215
Updates an external location in the metastore. The caller must be the owner of the external location,
216216
or be a metastore admin. In the second case, the admin can only update the name of the external

docs/workspace/catalog/storage_credentials.rst

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -99,13 +99,13 @@
9999
100100
created = w.storage_credentials.create(
101101
name=f"sdk-{time.time_ns()}",
102-
aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
102+
aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
103103
)
104104
105-
by_name = w.storage_credentials.get(name=created.name)
105+
by_name = w.storage_credentials.get(get=created.name)
106106
107107
# cleanup
108-
w.storage_credentials.delete(name=created.name)
108+
w.storage_credentials.delete(delete=created.name)
109109
110110
Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
111111
storage credential, or have some permission on the storage credential.
@@ -124,11 +124,10 @@
124124
.. code-block::
125125
126126
from databricks.sdk import WorkspaceClient
127-
from databricks.sdk.service import catalog
128127
129128
w = WorkspaceClient()
130129
131-
all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest())
130+
all = w.storage_credentials.list()
132131
133132
Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to
134133
only those storage credentials the caller has permission to access. If the caller is a metastore

docs/workspace/catalog/tables.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@
156156
157157
created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
158158
159-
all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name)
159+
summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name)
160160
161161
# cleanup
162162
w.schemas.delete(full_name=created_schema.full_name)

docs/workspace/compute/clusters.rst

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -645,10 +645,11 @@
645645
.. code-block::
646646
647647
from databricks.sdk import WorkspaceClient
648+
from databricks.sdk.service import compute
648649
649650
w = WorkspaceClient()
650651
651-
nodes = w.clusters.list_node_types()
652+
all = w.clusters.list(compute.ListClustersRequest())
652653
653654
Return information about all pinned and active clusters, and all clusters terminated within the last
654655
30 days. Clusters terminated prior to this period are not included.

docs/workspace/iam/permissions.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@
4444
4545
obj = w.workspace.get_status(path=notebook_path)
4646
47-
_ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
47+
levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
4848
4949
Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
5050
object.

docs/workspace/jobs/jobs.rst

Lines changed: 1 addition & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -524,37 +524,11 @@
524524

525525
.. code-block::
526526
527-
import os
528-
import time
529-
530527
from databricks.sdk import WorkspaceClient
531-
from databricks.sdk.service import jobs
532528
533529
w = WorkspaceClient()
534530
535-
notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
536-
537-
cluster_id = (
538-
w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
539-
)
540-
541-
created_job = w.jobs.create(
542-
name=f"sdk-{time.time_ns()}",
543-
tasks=[
544-
jobs.Task(
545-
description="test",
546-
existing_cluster_id=cluster_id,
547-
notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
548-
task_key="test",
549-
timeout_seconds=0,
550-
)
551-
],
552-
)
553-
554-
run_list = w.jobs.list_runs(job_id=created_job.job_id)
555-
556-
# cleanup
557-
w.jobs.delete(job_id=created_job.job_id)
531+
job_list = w.jobs.list(expand_tasks=False)
558532
559533
List jobs.
560534

0 commit comments

Comments (0)