From b843ac0b485c0b0e450604a475f4a3374965986b Mon Sep 17 00:00:00 2001
From: Pieter Noordhuis
Date: Thu, 12 Oct 2023 14:30:48 +0200
Subject: [PATCH] Update OpenAPI spec to 12 Oct 2023 (#165)

## Changes

Notable changes:

* Remove fields that cannot be edited from instance pool update APIs
* Add workspace binding APIs

## Tests

Integration tests pass in all environments.

---
 .codegen/_openapi_sha | 2 +-
 .gitattributes | 19 ++-
 .../com/databricks/sdk/AccountClient.java | 23 ---
 .../com/databricks/sdk/WorkspaceClient.java | 21 ++-
 .../sdk/service/billing/BillableUsageAPI.java | 8 +-
 .../service/billing/BillableUsageImpl.java | 7 +-
 .../service/billing/BillableUsageService.java | 2 +-
 .../sdk/service/billing/DownloadResponse.java | 41 ++++++
 .../catalog/AccountStorageCredentialsAPI.java | 21 ++-
 .../AccountStorageCredentialsImpl.java | 18 ++-
 .../AccountsUpdateStorageCredential.java | 16 +-
 ...DeleteAccountStorageCredentialRequest.java | 17 ++-
 .../GetAccountStorageCredentialRequest.java | 17 ++-
 .../service/catalog/GetBindingsRequest.java | 57 ++++++++
 .../catalog/StorageCredentialsAPI.java | 13 +-
 .../catalog/StorageCredentialsService.java | 13 +-
 .../sdk/service/catalog/UpdateConnection.java | 19 ++-
 .../UpdateWorkspaceBindingsParameters.java | 88 +++++++++++
 .../sdk/service/catalog/WorkspaceBinding.java | 59 ++++++++
 .../catalog/WorkspaceBindingBindingType.java | 11 ++
 .../service/catalog/WorkspaceBindingsAPI.java | 52 ++++++-
 .../catalog/WorkspaceBindingsImpl.java | 23 +++
 .../catalog/WorkspaceBindingsResponse.java | 44 ++++++
 .../catalog/WorkspaceBindingsService.java | 37 ++++-
 .../sdk/service/compute/ClusterDetails.java | 21 +++
 .../sdk/service/compute/ClusterSpec.java | 18 ++-
 .../sdk/service/compute/CreateCluster.java | 2 +-
 .../sdk/service/compute/EditCluster.java | 2 +-
 .../sdk/service/compute/EditInstancePool.java | 137 +-----------------
 .../sdk/service/jobs/CreateJob.java | 48 ++++--
 .../sdk/service/jobs/CreateJobUiState.java | 17 +++
 .../sdk/service/jobs/JobDeployment.java | 63 ++++++++
 .../sdk/service/jobs/JobDeploymentKind.java | 15 ++
 .../sdk/service/jobs/JobSettings.java | 48 ++++--
 .../sdk/service/jobs/JobSettingsUiState.java | 17 +++
 .../databricks/sdk/service/jobs/JobsAPI.java | 6 +-
 .../sdk/service/jobs/JobsService.java | 6 +-
 .../sdk/service/jobs/RunOutput.java | 18 +--
 .../sdk/service/jobs/SubmitRun.java | 17 +--
 .../sdk/service/jobs/SubmitTask.java | 9 +-
 .../com/databricks/sdk/service/jobs/Task.java | 41 ++++--
 .../sdk/service/pipelines/PipelinesAPI.java | 8 +-
 .../service/pipelines/PipelinesService.java | 8 +-
 .../sdk/service/serving/ServedModelInput.java | 25 +++-
 .../service/serving/ServedModelOutput.java | 25 +++-
 .../settings/AccountIpAccessListsImpl.java | 14 +-
 .../settings/AccountNetworkPolicyAPI.java | 75 ----------
 .../settings/AccountNetworkPolicyImpl.java | 54 -------
 .../settings/AccountNetworkPolicyMessage.java | 49 -------
 .../settings/AccountNetworkPolicyService.java | 44 ------
 .../DeleteAccountNetworkPolicyRequest.java | 50 -------
 .../DeleteAccountNetworkPolicyResponse.java | 49 -------
 .../settings/GetIpAccessListResponse.java | 19 ++-
 .../service/settings/IpAccessListsImpl.java | 4 +-
 .../settings/IpAccessListsService.java | 2 +-
 .../settings/ListIpAccessListResponse.java | 45 ++++++
 .../ReadAccountNetworkPolicyRequest.java | 50 -------
 .../UpdateAccountNetworkPolicyRequest.java | 59 --------
 .../sdk/service/sql/ChannelName.java | 1 -
 .../sdk/service/workspace/Import.java | 11 +-
 .../sdk/service/workspace/ImportFormat.java | 10 +-
.../sdk/service/workspace/WorkspaceAPI.java | 4 +- .../service/workspace/WorkspaceService.java | 4 +- 63 files changed, 924 insertions(+), 799 deletions(-) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobUiState.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsUiState.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyAPI.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyImpl.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyMessage.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyService.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadAccountNetworkPolicyRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountNetworkPolicyRequest.java diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 7d4ee2a67..e36ae5312 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -bcbf6e851e3d82fd910940910dd31c10c059746c \ No newline at end of file +493a76554afd3afdd15dc858773d01643f80352a \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 552ff9d42..e37601b60 100755 --- a/.gitattributes +++ b/.gitattributes @@ -15,6 +15,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDe databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java linguist-generated=true @@ -143,6 +144,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountM databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true @@ -262,6 +264,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStora databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResult.java linguist-generated=true @@ -272,8 +275,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType. 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfile.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScale.java linguist-generated=true @@ -602,6 +608,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTask. databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTaskOp.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobUiState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java linguist-generated=true @@ -626,6 +633,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessContr databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompute.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameter.java linguist-generated=true @@ -637,6 +646,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissions databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java 
linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsUiState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSource.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSourceDirtyState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java linguist-generated=true @@ -1105,10 +1115,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConf databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyAPI.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyMessage.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java linguist-generated=true @@ -1123,8 +1129,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Credential databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java linguist-generated=true @@ -1146,6 +1150,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessLi databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListType.java linguist-generated=true @@ -1154,7 +1159,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalCo databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessageEnum.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadAccountNetworkPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadPersonalComputeSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java linguist-generated=true @@ -1178,7 +1182,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType. databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountNetworkPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java index 0efa4e933..63f4bc6bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java @@ -53,8 +53,6 @@ import com.databricks.sdk.service.provisioning.WorkspacesService; import com.databricks.sdk.service.settings.AccountIpAccessListsAPI; import com.databricks.sdk.service.settings.AccountIpAccessListsService; -import com.databricks.sdk.service.settings.AccountNetworkPolicyAPI; -import com.databricks.sdk.service.settings.AccountNetworkPolicyService; import com.databricks.sdk.service.settings.AccountSettingsAPI; import com.databricks.sdk.service.settings.AccountSettingsService; import com.databricks.sdk.support.Generated; @@ -76,7 +74,6 @@ public class AccountClient { private LogDeliveryAPI logDeliveryAPI; private AccountMetastoreAssignmentsAPI metastoreAssignmentsAPI; private AccountMetastoresAPI metastoresAPI; - private AccountNetworkPolicyAPI networkPolicyAPI; private NetworksAPI networksAPI; private OAuthEnrollmentAPI oAuthEnrollmentAPI; private OAuthPublishedAppsAPI 
oAuthPublishedAppsAPI; @@ -111,7 +108,6 @@ public AccountClient(DatabricksConfig config) { logDeliveryAPI = new LogDeliveryAPI(apiClient); metastoreAssignmentsAPI = new AccountMetastoreAssignmentsAPI(apiClient); metastoresAPI = new AccountMetastoresAPI(apiClient); - networkPolicyAPI = new AccountNetworkPolicyAPI(apiClient); networksAPI = new NetworksAPI(apiClient); oAuthEnrollmentAPI = new OAuthEnrollmentAPI(apiClient); oAuthPublishedAppsAPI = new OAuthPublishedAppsAPI(apiClient); @@ -309,19 +305,6 @@ public AccountMetastoresAPI metastores() { return metastoresAPI; } - /** - * Network policy is a set of rules that defines what can be accessed from your Databricks - * network. E.g.: You can choose to block your SQL UDF to access internet from your Databricks - * serverless clusters. - * - *
<p>
There is only one instance of this setting per account. Since this setting has a default - * value, this setting is present on all accounts even though it's never set on a given account. - * Deletion reverts the value of the setting back to the default value. - */ - public AccountNetworkPolicyAPI networkPolicy() { - return networkPolicyAPI; - } - /** * These APIs manage network configurations for customer-managed VPCs (optional). Its ID is used * when creating a new workspace if you use customer-managed VPCs. @@ -534,12 +517,6 @@ public AccountClient withMetastoresImpl(AccountMetastoresService accountMetastor return this; } - /** Override AccountNetworkPolicyAPI with mock */ - public AccountClient withNetworkPolicyImpl(AccountNetworkPolicyService accountNetworkPolicy) { - networkPolicyAPI = new AccountNetworkPolicyAPI(accountNetworkPolicy); - return this; - } - /** Override NetworksAPI with mock */ public AccountClient withNetworksImpl(NetworksService networks) { networksAPI = new NetworksAPI(networks); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index d8edd7c19..79cee2a8a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -1212,12 +1212,23 @@ public WorkspaceAPI workspace() { } /** - * A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ catalog can - * be accessed from any workspace, while an __ISOLATED__ catalog can only be access from a - * configured list of workspaces. + * A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable + * can be accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a + * configured list of workspaces. This API allows you to configure (bind) securables to + * workspaces. * - *
<p>
A catalog's workspace bindings can be configured by a metastore admin or the owner of the - * catalog. + *
<p>
NOTE: The __isolation_mode__ is configured for the securable itself (using its Update + * method) and the workspace bindings are only consulted when the securable's __isolation_mode__ + * is set to __ISOLATED__. + * + *
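The note means a binding list has no effect until the securable itself is isolated. A minimal sketch of flipping a catalog to __ISOLATED__ first, assuming `UpdateCatalog` exposes `setIsolationMode` and the `IsolationMode` enum as in SDK versions of this vintage (the catalog name is a placeholder):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.IsolationMode;
import com.databricks.sdk.service.catalog.UpdateCatalog;

public class IsolateCatalog {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Bindings are only consulted once isolation_mode is ISOLATED;
    // setIsolationMode is assumed here, per the securable's Update method.
    w.catalogs()
        .update(
            new UpdateCatalog().setName("my_catalog").setIsolationMode(IsolationMode.ISOLATED));
  }
}
```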
<p>
A securable's workspace bindings can be configured by a metastore admin or the owner of the + * securable. + * + *
<p>
The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. + * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) + * which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). + * + *
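A minimal sketch of binding a catalog to a single workspace in READ_ONLY mode through the new path, using only the types added in this patch (the catalog name and workspace ID are placeholders):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.UpdateWorkspaceBindingsParameters;
import com.databricks.sdk.service.catalog.WorkspaceBinding;
import com.databricks.sdk.service.catalog.WorkspaceBindingBindingType;
import java.util.Collections;

public class BindCatalogReadOnly {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // PATCH /api/2.1/unity-catalog/bindings/catalog/my_catalog
    w.workspaceBindings()
        .updateBindings(
            new UpdateWorkspaceBindingsParameters()
                .setSecurableType("catalog")
                .setSecurableName("my_catalog")
                .setAdd(
                    Collections.singletonList(
                        new WorkspaceBinding()
                            .setWorkspaceId(1234567890L)
                            .setBindingType(
                                WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY))));
  }
}
```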
<p>
Securables that support binding: - catalog */ public WorkspaceBindingsAPI workspaceBindings() { return workspaceBindingsAPI; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java index 0357d7223..c5c8e08ab 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java @@ -26,8 +26,8 @@ public BillableUsageAPI(BillableUsageService mock) { impl = mock; } - public void download(String startMonth, String endMonth) { - download(new DownloadRequest().setStartMonth(startMonth).setEndMonth(endMonth)); + public DownloadResponse download(String startMonth, String endMonth) { + return download(new DownloadRequest().setStartMonth(startMonth).setEndMonth(endMonth)); } /** @@ -45,8 +45,8 @@ public void download(String startMonth, String endMonth) { *
<p>
[CSV file schema]: * https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema */ - public void download(DownloadRequest request) { - impl.download(request); + public DownloadResponse download(DownloadRequest request) { + return impl.download(request); } public BillableUsageService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java index bf243b185..1330ece71 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import java.io.InputStream; import java.util.HashMap; import java.util.Map; @@ -16,10 +17,12 @@ public BillableUsageImpl(ApiClient apiClient) { } @Override - public void download(DownloadRequest request) { + public DownloadResponse download(DownloadRequest request) { String path = String.format("/api/2.0/accounts/%s/usage/download", apiClient.configuredAccountID()); Map headers = new HashMap<>(); - apiClient.GET(path, request, Void.class, headers); + headers.put("Accept", "text/plain"); + InputStream response = apiClient.GET(path, request, InputStream.class, headers); + return new DownloadResponse().setContents(response); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageService.java index e10e02545..c4726ccf2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageService.java @@ -28,5 +28,5 @@ public interface BillableUsageService { *
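With this change, `download` returns a `DownloadResponse` that wraps the `text/plain` body as an `InputStream` instead of discarding it. A minimal sketch of saving the CSV to disk, assuming the account client's `billableUsage()` accessor (months and output path are placeholders):

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.billing.DownloadResponse;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

public class DownloadUsage {
  public static void main(String[] args) throws Exception {
    AccountClient a = new AccountClient();
    DownloadResponse response = a.billableUsage().download("2023-01", "2023-03");
    // Stream the CSV body straight to a file.
    try (InputStream contents = response.getContents()) {
      Files.copy(contents, Paths.get("usage.csv"));
    }
  }
}
```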
<p>
[CSV file schema]: * https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema */ - void download(DownloadRequest downloadRequest); + DownloadResponse download(DownloadRequest downloadRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java new file mode 100755 index 000000000..3bd0e773e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.io.InputStream; +import java.util.Objects; + +@Generated +public class DownloadResponse { + /** */ + private InputStream contents; + + public DownloadResponse setContents(InputStream contents) { + this.contents = contents; + return this; + } + + public InputStream getContents() { + return contents; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DownloadResponse that = (DownloadResponse) o; + return Objects.equals(contents, that.contents); + } + + @Override + public int hashCode() { + return Objects.hash(contents); + } + + @Override + public String toString() { + return new ToStringer(DownloadResponse.class).add("contents", contents).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java index d80ec6a29..c7fc0c681 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java @@ -42,8 +42,11 @@ public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential requ return impl.create(request); } - public void delete(String metastoreId, String name) { - delete(new DeleteAccountStorageCredentialRequest().setMetastoreId(metastoreId).setName(name)); + public void delete(String metastoreId, String storageCredentialName) { + delete( + new DeleteAccountStorageCredentialRequest() + .setMetastoreId(metastoreId) + .setStorageCredentialName(storageCredentialName)); } /** @@ -56,8 +59,11 @@ public void delete(DeleteAccountStorageCredentialRequest request) { impl.delete(request); } - public AccountsStorageCredentialInfo get(String metastoreId, String name) { - return get(new GetAccountStorageCredentialRequest().setMetastoreId(metastoreId).setName(name)); + public AccountsStorageCredentialInfo get(String metastoreId, String storageCredentialName) { + return get( + new GetAccountStorageCredentialRequest() + .setMetastoreId(metastoreId) + .setStorageCredentialName(storageCredentialName)); } /** @@ -83,8 +89,11 @@ public Iterable list(ListAccountStorageCredentialsRequest return impl.list(request); } - public AccountsStorageCredentialInfo update(String metastoreId, String name) { - return update(new AccountsUpdateStorageCredential().setMetastoreId(metastoreId).setName(name)); + public AccountsStorageCredentialInfo update(String metastoreId, String storageCredentialName) { + return update( + new AccountsUpdateStorageCredential() + 
.setMetastoreId(metastoreId) + .setStorageCredentialName(storageCredentialName)); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java index adf7f272e..3c6ff119e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java @@ -32,8 +32,10 @@ public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential requ public void delete(DeleteAccountStorageCredentialRequest request) { String path = String.format( - "/api/2.0/accounts/%s/metastores/%s/storage-credentials/", - apiClient.configuredAccountID(), request.getMetastoreId()); + "/api/2.0/accounts/%s/metastores/%s/storage-credentials/%s", + apiClient.configuredAccountID(), + request.getMetastoreId(), + request.getStorageCredentialName()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); apiClient.DELETE(path, request, Void.class, headers); @@ -43,8 +45,10 @@ public void delete(DeleteAccountStorageCredentialRequest request) { public AccountsStorageCredentialInfo get(GetAccountStorageCredentialRequest request) { String path = String.format( - "/api/2.0/accounts/%s/metastores/%s/storage-credentials/", - apiClient.configuredAccountID(), request.getMetastoreId()); + "/api/2.0/accounts/%s/metastores/%s/storage-credentials/%s", + apiClient.configuredAccountID(), + request.getMetastoreId(), + request.getStorageCredentialName()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); return apiClient.GET(path, request, AccountsStorageCredentialInfo.class, headers); @@ -65,8 +69,10 @@ public Collection list(ListAccountStorageCredentialsReque public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential request) { String path = String.format( - "/api/2.0/accounts/%s/metastores/%s/storage-credentials/", - apiClient.configuredAccountID(), request.getMetastoreId()); + "/api/2.0/accounts/%s/metastores/%s/storage-credentials/%s", + apiClient.configuredAccountID(), + request.getMetastoreId(), + request.getStorageCredentialName()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java index 9db563977..ecd83cafc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountsUpdateStorageCredential.java @@ -17,7 +17,7 @@ public class AccountsUpdateStorageCredential { private String metastoreId; /** Name of the storage credential. 
*/ - private String name; + private String storageCredentialName; public AccountsUpdateStorageCredential setCredentialInfo(UpdateStorageCredential credentialInfo) { this.credentialInfo = credentialInfo; @@ -37,13 +37,13 @@ public String getMetastoreId() { return metastoreId; } - public AccountsUpdateStorageCredential setName(String name) { - this.name = name; + public AccountsUpdateStorageCredential setStorageCredentialName(String storageCredentialName) { + this.storageCredentialName = storageCredentialName; return this; } - public String getName() { - return name; + public String getStorageCredentialName() { + return storageCredentialName; } @Override @@ -53,12 +53,12 @@ public boolean equals(Object o) { AccountsUpdateStorageCredential that = (AccountsUpdateStorageCredential) o; return Objects.equals(credentialInfo, that.credentialInfo) && Objects.equals(metastoreId, that.metastoreId) - && Objects.equals(name, that.name); + && Objects.equals(storageCredentialName, that.storageCredentialName); } @Override public int hashCode() { - return Objects.hash(credentialInfo, metastoreId, name); + return Objects.hash(credentialInfo, metastoreId, storageCredentialName); } @Override @@ -66,7 +66,7 @@ public String toString() { return new ToStringer(AccountsUpdateStorageCredential.class) .add("credentialInfo", credentialInfo) .add("metastoreId", metastoreId) - .add("name", name) + .add("storageCredentialName", storageCredentialName) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java index 2ad343042..5e951e34d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAccountStorageCredentialRequest.java @@ -18,7 +18,7 @@ public class DeleteAccountStorageCredentialRequest { private String metastoreId; /** Name of the storage credential. 
*/ - private String name; + private String storageCredentialName; public DeleteAccountStorageCredentialRequest setForce(Boolean force) { this.force = force; @@ -38,13 +38,14 @@ public String getMetastoreId() { return metastoreId; } - public DeleteAccountStorageCredentialRequest setName(String name) { - this.name = name; + public DeleteAccountStorageCredentialRequest setStorageCredentialName( + String storageCredentialName) { + this.storageCredentialName = storageCredentialName; return this; } - public String getName() { - return name; + public String getStorageCredentialName() { + return storageCredentialName; } @Override @@ -54,12 +55,12 @@ public boolean equals(Object o) { DeleteAccountStorageCredentialRequest that = (DeleteAccountStorageCredentialRequest) o; return Objects.equals(force, that.force) && Objects.equals(metastoreId, that.metastoreId) - && Objects.equals(name, that.name); + && Objects.equals(storageCredentialName, that.storageCredentialName); } @Override public int hashCode() { - return Objects.hash(force, metastoreId, name); + return Objects.hash(force, metastoreId, storageCredentialName); } @Override @@ -67,7 +68,7 @@ public String toString() { return new ToStringer(DeleteAccountStorageCredentialRequest.class) .add("force", force) .add("metastoreId", metastoreId) - .add("name", name) + .add("storageCredentialName", storageCredentialName) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java index 2b36d5c27..57e3b81c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java @@ -13,7 +13,7 @@ public class GetAccountStorageCredentialRequest { private String metastoreId; /** Name of the storage credential. 
*/ - private String name; + private String storageCredentialName; public GetAccountStorageCredentialRequest setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; @@ -24,13 +24,13 @@ public String getMetastoreId() { return metastoreId; } - public GetAccountStorageCredentialRequest setName(String name) { - this.name = name; + public GetAccountStorageCredentialRequest setStorageCredentialName(String storageCredentialName) { + this.storageCredentialName = storageCredentialName; return this; } - public String getName() { - return name; + public String getStorageCredentialName() { + return storageCredentialName; } @Override @@ -38,19 +38,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetAccountStorageCredentialRequest that = (GetAccountStorageCredentialRequest) o; - return Objects.equals(metastoreId, that.metastoreId) && Objects.equals(name, that.name); + return Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(storageCredentialName, that.storageCredentialName); } @Override public int hashCode() { - return Objects.hash(metastoreId, name); + return Objects.hash(metastoreId, storageCredentialName); } @Override public String toString() { return new ToStringer(GetAccountStorageCredentialRequest.class) .add("metastoreId", metastoreId) - .add("name", name) + .add("storageCredentialName", storageCredentialName) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java new file mode 100755 index 000000000..e9f7f6652 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get securable workspace bindings */ +@Generated +public class GetBindingsRequest { + /** The name of the securable. */ + private String securableName; + + /** The type of the securable. 
*/ + private String securableType; + + public GetBindingsRequest setSecurableName(String securableName) { + this.securableName = securableName; + return this; + } + + public String getSecurableName() { + return securableName; + } + + public GetBindingsRequest setSecurableType(String securableType) { + this.securableType = securableType; + return this; + } + + public String getSecurableType() { + return securableType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetBindingsRequest that = (GetBindingsRequest) o; + return Objects.equals(securableName, that.securableName) + && Objects.equals(securableType, that.securableType); + } + + @Override + public int hashCode() { + return Objects.hash(securableName, securableType); + } + + @Override + public String toString() { + return new ToStringer(GetBindingsRequest.class) + .add("securableName", securableName) + .add("securableType", securableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java index 2fca84e47..268bf4ab0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java @@ -42,14 +42,7 @@ public StorageCredentialInfo create(String name) { /** * Create a storage credential. * - *
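The account-level storage credential requests earlier in this patch now identify the credential by `storageCredentialName`, which the Impl classes append to the request path. A minimal sketch of fetching one, assuming the account client's `storageCredentials()` accessor (metastore ID and credential name are placeholders):

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.catalog.AccountsStorageCredentialInfo;

public class GetAccountStorageCredential {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();
    // GET /api/2.0/accounts/{account_id}/metastores/{metastore_id}/storage-credentials/{name}
    AccountsStorageCredentialInfo info =
        a.storageCredentials().get("11111111-2222-3333-4444-555555555555", "my-credential");
    System.out.println(info);
  }
}
```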
<p>
Creates a new storage credential. The request object is specific to the cloud: - * - *
<p>
* **AwsIamRole** for AWS credentials. * **AzureServicePrincipal** for Azure credentials. * - * **AzureManagedIdentity** for Azure managed credentials. * **DatabricksGcpServiceAccount** for - * GCP managed credentials. - * - *
<p>
The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on - * the metastore. + *
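The doc string loses the per-cloud enumeration, but the request object remains cloud-specific. A minimal sketch of creating an AWS credential, assuming `CreateStorageCredential.setAwsIamRole` and `AwsIamRole.setRoleArn` from the catalog model classes (credential name and role ARN are placeholders):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.AwsIamRole;
import com.databricks.sdk.service.catalog.CreateStorageCredential;
import com.databricks.sdk.service.catalog.StorageCredentialInfo;

public class CreateAwsStorageCredential {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // AwsIamRole carries the AWS-specific fields; Azure and GCP have their own shapes.
    StorageCredentialInfo info =
        w.storageCredentials()
            .create(
                new CreateStorageCredential()
                    .setName("my-aws-credential")
                    .setAwsIamRole(
                        new AwsIamRole().setRoleArn("arn:aws:iam::123456789012:role/my-role")));
    System.out.println(info);
  }
}
```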
<p>
Creates a new storage credential. */ public StorageCredentialInfo create(CreateStorageCredential request) { return impl.create(request); @@ -102,9 +95,7 @@ public StorageCredentialInfo update(String name) { /** * Update a credential. * - *
<p>
Updates a storage credential on the metastore. The caller must be the owner of the storage - * credential or a metastore admin. If the caller is a metastore admin, only the __owner__ - * credential can be changed. + *
<p>
Updates a storage credential on the metastore. */ public StorageCredentialInfo update(UpdateStorageCredential request) { return impl.update(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java index 4910a8198..39f43a059 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java @@ -25,14 +25,7 @@ public interface StorageCredentialsService { /** * Create a storage credential. * - *
<p>
Creates a new storage credential. The request object is specific to the cloud: - * - *
<p>
* **AwsIamRole** for AWS credentials. * **AzureServicePrincipal** for Azure credentials. * - * **AzureManagedIdentity** for Azure managed credentials. * **DatabricksGcpServiceAccount** for - * GCP managed credentials. - * - *
<p>
The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on - * the metastore. + *
<p>
Creates a new storage credential. */ StorageCredentialInfo create(CreateStorageCredential createStorageCredential); @@ -65,9 +58,7 @@ public interface StorageCredentialsService { /** * Update a credential. * - *
<p>
Updates a storage credential on the metastore. The caller must be the owner of the storage - * credential or a metastore admin. If the caller is a metastore admin, only the __owner__ - * credential can be changed. + *
<p>
Updates a storage credential on the metastore. */ StorageCredentialInfo update(UpdateStorageCredential updateStorageCredential); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java index b29163ece..06f0edb2c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java @@ -21,6 +21,10 @@ public class UpdateConnection { @JsonProperty("options") private Map options; + /** Username of current owner of the connection. */ + @JsonProperty("owner") + private String owner; + public UpdateConnection setName(String name) { this.name = name; return this; @@ -48,6 +52,15 @@ public Map getOptions() { return options; } + public UpdateConnection setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -55,12 +68,13 @@ public boolean equals(Object o) { UpdateConnection that = (UpdateConnection) o; return Objects.equals(name, that.name) && Objects.equals(nameArg, that.nameArg) - && Objects.equals(options, that.options); + && Objects.equals(options, that.options) + && Objects.equals(owner, that.owner); } @Override public int hashCode() { - return Objects.hash(name, nameArg, options); + return Objects.hash(name, nameArg, options, owner); } @Override @@ -69,6 +83,7 @@ public String toString() { .add("name", name) .add("nameArg", nameArg) .add("options", options) + .add("owner", owner) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java new file mode 100755 index 000000000..f6d2c21f8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class UpdateWorkspaceBindingsParameters { + /** List of workspace bindings */ + @JsonProperty("add") + private Collection add; + + /** List of workspace bindings */ + @JsonProperty("remove") + private Collection remove; + + /** The name of the securable. */ + private String securableName; + + /** The type of the securable. 
*/ + private String securableType; + + public UpdateWorkspaceBindingsParameters setAdd(Collection add) { + this.add = add; + return this; + } + + public Collection getAdd() { + return add; + } + + public UpdateWorkspaceBindingsParameters setRemove(Collection remove) { + this.remove = remove; + return this; + } + + public Collection getRemove() { + return remove; + } + + public UpdateWorkspaceBindingsParameters setSecurableName(String securableName) { + this.securableName = securableName; + return this; + } + + public String getSecurableName() { + return securableName; + } + + public UpdateWorkspaceBindingsParameters setSecurableType(String securableType) { + this.securableType = securableType; + return this; + } + + public String getSecurableType() { + return securableType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceBindingsParameters that = (UpdateWorkspaceBindingsParameters) o; + return Objects.equals(add, that.add) + && Objects.equals(remove, that.remove) + && Objects.equals(securableName, that.securableName) + && Objects.equals(securableType, that.securableType); + } + + @Override + public int hashCode() { + return Objects.hash(add, remove, securableName, securableType); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceBindingsParameters.class) + .add("add", add) + .add("remove", remove) + .add("securableName", securableName) + .add("securableType", securableType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java new file mode 100755 index 000000000..f7c28ff2a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class WorkspaceBinding { + /** */ + @JsonProperty("binding_type") + private WorkspaceBindingBindingType bindingType; + + /** */ + @JsonProperty("workspace_id") + private Long workspaceId; + + public WorkspaceBinding setBindingType(WorkspaceBindingBindingType bindingType) { + this.bindingType = bindingType; + return this; + } + + public WorkspaceBindingBindingType getBindingType() { + return bindingType; + } + + public WorkspaceBinding setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceBinding that = (WorkspaceBinding) o; + return Objects.equals(bindingType, that.bindingType) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(bindingType, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceBinding.class) + .add("bindingType", bindingType) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java new file mode 100755 index 000000000..23ffd7f73 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum WorkspaceBindingBindingType { + BINDING_TYPE_READ_ONLY, + BINDING_TYPE_READ_WRITE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java index 05cc59f58..4012db49b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java @@ -7,12 +7,22 @@ import org.slf4j.LoggerFactory; /** - * A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ catalog can be - * accessed from any workspace, while an __ISOLATED__ catalog can only be access from a configured - * list of workspaces. + * A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable + * can be accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a + * configured list of workspaces. This API allows you to configure (bind) securables to workspaces. * - *
<p>
A catalog's workspace bindings can be configured by a metastore admin or the owner of the - * catalog. + *
<p>
NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) + * and the workspace bindings are only consulted when the securable's __isolation_mode__ is set to + * __ISOLATED__. + * + *
<p>
A securable's workspace bindings can be configured by a metastore admin or the owner of the + * securable. + * + *
<p>
The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. + * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which + * introduces the ability to bind a securable in READ_ONLY mode (catalogs only). + * + *
<p>
Securables that support binding: - catalog */ @Generated public class WorkspaceBindingsAPI { @@ -44,6 +54,21 @@ public CurrentWorkspaceBindings get(GetWorkspaceBindingRequest request) { return impl.get(request); } + public WorkspaceBindingsResponse getBindings(String securableType, String securableName) { + return getBindings( + new GetBindingsRequest().setSecurableType(securableType).setSecurableName(securableName)); + } + + /** + * Get securable workspace bindings. + * + *
<p>
Gets workspace bindings of the securable. The caller must be a metastore admin or an owner + * of the securable. + */ + public WorkspaceBindingsResponse getBindings(GetBindingsRequest request) { + return impl.getBindings(request); + } + public CurrentWorkspaceBindings update(String name) { return update(new UpdateWorkspaceBindings().setName(name)); } @@ -58,6 +83,23 @@ public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) { return impl.update(request); } + public WorkspaceBindingsResponse updateBindings(String securableType, String securableName) { + return updateBindings( + new UpdateWorkspaceBindingsParameters() + .setSecurableType(securableType) + .setSecurableName(securableName)); + } + + /** + * Update securable workspace bindings. + * + *
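A minimal sketch of reading bindings back through the new `getBindings` convenience overload added above (the securable name is a placeholder; `getBindings()` on the response may be null when nothing is bound):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.WorkspaceBinding;
import com.databricks.sdk.service.catalog.WorkspaceBindingsResponse;

public class ListCatalogBindings {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // GET /api/2.1/unity-catalog/bindings/catalog/my_catalog
    WorkspaceBindingsResponse response =
        w.workspaceBindings().getBindings("catalog", "my_catalog");
    if (response.getBindings() != null) {
      for (WorkspaceBinding binding : response.getBindings()) {
        System.out.printf(
            "workspace %d: %s%n", binding.getWorkspaceId(), binding.getBindingType());
      }
    }
  }
}
```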
<p>
Updates workspace bindings of the securable. The caller must be a metastore admin or an + * owner of the securable. + */ + public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) { + return impl.updateBindings(request); + } + public WorkspaceBindingsService impl() { return impl; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java index 1c0721e03..8e5f5114b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java @@ -24,6 +24,17 @@ public CurrentWorkspaceBindings get(GetWorkspaceBindingRequest request) { return apiClient.GET(path, request, CurrentWorkspaceBindings.class, headers); } + @Override + public WorkspaceBindingsResponse getBindings(GetBindingsRequest request) { + String path = + String.format( + "/api/2.1/unity-catalog/bindings/%s/%s", + request.getSecurableType(), request.getSecurableName()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, WorkspaceBindingsResponse.class, headers); + } + @Override public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) { String path = @@ -33,4 +44,16 @@ public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) { headers.put("Content-Type", "application/json"); return apiClient.PATCH(path, request, CurrentWorkspaceBindings.class, headers); } + + @Override + public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) { + String path = + String.format( + "/api/2.1/unity-catalog/bindings/%s/%s", + request.getSecurableType(), request.getSecurableName()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, WorkspaceBindingsResponse.class, headers); + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java new file mode 100755 index 000000000..ca5e4e112 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Currently assigned workspace bindings */ +@Generated +public class WorkspaceBindingsResponse { + /** List of workspace bindings */ + @JsonProperty("bindings") + private Collection bindings; + + public WorkspaceBindingsResponse setBindings(Collection bindings) { + this.bindings = bindings; + return this; + } + + public Collection getBindings() { + return bindings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceBindingsResponse that = (WorkspaceBindingsResponse) o; + return Objects.equals(bindings, that.bindings); + } + + @Override + public int hashCode() { + return Objects.hash(bindings); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceBindingsResponse.class).add("bindings", bindings).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java index 6f4c0fb7b..e2e879766 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java @@ -4,12 +4,22 @@ import com.databricks.sdk.support.Generated; /** - * A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ catalog can be - * accessed from any workspace, while an __ISOLATED__ catalog can only be access from a configured - * list of workspaces. + * A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable + * can be accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a + * configured list of workspaces. This API allows you to configure (bind) securables to workspaces. * - *
<p>
A catalog's workspace bindings can be configured by a metastore admin or the owner of the - * catalog. + *
<p>
NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) + * and the workspace bindings are only consulted when the securable's __isolation_mode__ is set to + * __ISOLATED__. + * + *
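A minimal sketch of flipping a catalog to __ISOLATED__ before configuring bindings, assuming a configured WorkspaceClient and that UpdateCatalog exposes the isolation mode; the catalog name `main` is illustrative:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.IsolationMode;
import com.databricks.sdk.service.catalog.UpdateCatalog;

public class IsolateCatalog {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Bindings are only consulted once the securable itself is ISOLATED.
    w.catalogs()
        .update(new UpdateCatalog().setName("main").setIsolationMode(IsolationMode.ISOLATED));
  }
}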
<p>
A securable's workspace bindings can be configured by a metastore admin or the owner of the + * securable. + * + *
<p>
The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. + * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which + * introduces the ability to bind a securable in READ_ONLY mode (catalogs only). + * + *
<p>
Securables that support binding: - catalog * *
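A usage sketch of the two new endpoints under the replacement path, assuming a configured WorkspaceClient; the catalog name and workspace ID are illustrative:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.GetBindingsRequest;
import com.databricks.sdk.service.catalog.UpdateWorkspaceBindingsParameters;
import com.databricks.sdk.service.catalog.WorkspaceBinding;
import com.databricks.sdk.service.catalog.WorkspaceBindingBindingType;
import com.databricks.sdk.service.catalog.WorkspaceBindingsResponse;
import java.util.Arrays;

public class BindCatalog {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Bind workspace 1234567890 to catalog `main` in READ_ONLY mode
    // (PATCH /api/2.1/unity-catalog/bindings/catalog/main).
    w.workspaceBindings()
        .updateBindings(
            new UpdateWorkspaceBindingsParameters()
                .setSecurableType("catalog")
                .setSecurableName("main")
                .setAdd(
                    Arrays.asList(
                        new WorkspaceBinding()
                            .setWorkspaceId(1234567890L)
                            .setBindingType(WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY))));

    // Read the bindings back through a GET on the same path.
    WorkspaceBindingsResponse current =
        w.workspaceBindings()
            .getBindings(
                new GetBindingsRequest().setSecurableType("catalog").setSecurableName("main"));
    current.getBindings().forEach(b -> System.out.println(b.getWorkspaceId()));
  }
}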
<p>
This is the high-level interface, that contains generated methods. * @@ -25,6 +35,14 @@ public interface WorkspaceBindingsService { */ CurrentWorkspaceBindings get(GetWorkspaceBindingRequest getWorkspaceBindingRequest); + /** + * Get securable workspace bindings. + * + *
<p>
Gets workspace bindings of the securable. The caller must be a metastore admin or an owner + * of the securable. + */ + WorkspaceBindingsResponse getBindings(GetBindingsRequest getBindingsRequest); + /** * Update catalog workspace bindings. * @@ -32,4 +50,13 @@ public interface WorkspaceBindingsService { * of the catalog. */ CurrentWorkspaceBindings update(UpdateWorkspaceBindings updateWorkspaceBindings); + + /** + * Update securable workspace bindings. + * + *
<p>
Updates workspace bindings of the securable. The caller must be a metastore admin or an + * owner of the securable. + */ + WorkspaceBindingsResponse updateBindings( + UpdateWorkspaceBindingsParameters updateWorkspaceBindingsParameters); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java index 666efc7aa..5ca111cc3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java @@ -293,6 +293,15 @@ public class ClusterDetails { @JsonProperty("spark_version") private String sparkVersion; + /** + * `spec` contains a snapshot of the field values that were used to create or edit this cluster. + * The contents of `spec` can be used in the body of a create cluster request. This field might + * not be populated for older clusters. Note: not included in the response of the ListClusters + * API. + */ + @JsonProperty("spec") + private CreateCluster spec; + /** * SSH public key contents that will be added to each Spark node in this cluster. The * corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up @@ -667,6 +676,15 @@ public String getSparkVersion() { return sparkVersion; } + public ClusterDetails setSpec(CreateCluster spec) { + this.spec = spec; + return this; + } + + public CreateCluster getSpec() { + return spec; + } + public ClusterDetails setSshPublicKeys(Collection sshPublicKeys) { this.sshPublicKeys = sshPublicKeys; return this; @@ -772,6 +790,7 @@ public boolean equals(Object o) { && Objects.equals(sparkContextId, that.sparkContextId) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) + && Objects.equals(spec, that.spec) && Objects.equals(sshPublicKeys, that.sshPublicKeys) && Objects.equals(startTime, that.startTime) && Objects.equals(state, that.state) @@ -821,6 +840,7 @@ public int hashCode() { sparkContextId, sparkEnvVars, sparkVersion, + spec, sshPublicKeys, startTime, state, @@ -870,6 +890,7 @@ public String toString() { .add("sparkContextId", sparkContextId) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) + .add("spec", spec) .add("sshPublicKeys", sshPublicKeys) .add("startTime", startTime) .add("state", state) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java index 8ed020000..d64e22ab1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java @@ -11,6 +11,10 @@ @Generated public class ClusterSpec { + /** */ + @JsonProperty("apply_policy_default_values") + private Boolean applyPolicyDefaultValues; + /** * Parameters needed in order to automatically scale clusters up and down based on load. Note: * autoscaling works best with DB runtime versions 3.0 or later. 
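A sketch of reusing the new `spec` snapshot on ClusterDetails to clone a cluster, assuming a configured WorkspaceClient; the cluster ID is illustrative and, per the field docs, `spec` may be null for older clusters:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.ClusterDetails;
import com.databricks.sdk.service.compute.CreateCluster;
import com.databricks.sdk.service.compute.GetClusterRequest;

public class CloneCluster {
  public static void main(String[] args) throws Exception {
    WorkspaceClient w = new WorkspaceClient();
    ClusterDetails details =
        w.clusters().get(new GetClusterRequest().setClusterId("0123-456789-abcdef00"));
    CreateCluster spec = details.getSpec();
    if (spec != null) {
      // The snapshot is a valid create-cluster body; rename it before reuse.
      spec.setClusterName(details.getClusterName() + "-clone");
      w.clusters().create(spec).get(); // wait for the clone to start
    }
  }
}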
@@ -225,6 +229,15 @@ public class ClusterSpec { @JsonProperty("workload_type") private WorkloadType workloadType; + public ClusterSpec setApplyPolicyDefaultValues(Boolean applyPolicyDefaultValues) { + this.applyPolicyDefaultValues = applyPolicyDefaultValues; + return this; + } + + public Boolean getApplyPolicyDefaultValues() { + return applyPolicyDefaultValues; + } + public ClusterSpec setAutoscale(AutoScale autoscale) { this.autoscale = autoscale; return this; @@ -473,7 +486,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ClusterSpec that = (ClusterSpec) o; - return Objects.equals(autoscale, that.autoscale) + return Objects.equals(applyPolicyDefaultValues, that.applyPolicyDefaultValues) + && Objects.equals(autoscale, that.autoscale) && Objects.equals(autoterminationMinutes, that.autoterminationMinutes) && Objects.equals(awsAttributes, that.awsAttributes) && Objects.equals(azureAttributes, that.azureAttributes) @@ -505,6 +519,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + applyPolicyDefaultValues, autoscale, autoterminationMinutes, awsAttributes, @@ -537,6 +552,7 @@ public int hashCode() { @Override public String toString() { return new ToStringer(ClusterSpec.class) + .add("applyPolicyDefaultValues", applyPolicyDefaultValues) .add("autoscale", autoscale) .add("autoterminationMinutes", autoterminationMinutes) .add("awsAttributes", awsAttributes) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java index bb4b30823..dc294c91b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java @@ -11,7 +11,7 @@ @Generated public class CreateCluster { - /** Note: This field won't be true for webapp requests. Only API users will check this field. */ + /** */ @JsonProperty("apply_policy_default_values") private Boolean applyPolicyDefaultValues; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index e3d4e92db..1113ae904 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -11,7 +11,7 @@ @Generated public class EditCluster { - /** Note: This field won't be true for webapp requests. Only API users will check this field. 
*/ + /** */ @JsonProperty("apply_policy_default_values") private Boolean applyPolicyDefaultValues; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java index 11103d8ac..5f1c2d218 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java @@ -5,26 +5,11 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated public class EditInstancePool { - /** - * Attributes related to instance pools running on Amazon Web Services. If not specified at pool - * creation, a set of default values will be used. - */ - @JsonProperty("aws_attributes") - private InstancePoolAwsAttributes awsAttributes; - - /** - * Attributes related to instance pools running on Azure. If not specified at pool creation, a set - * of default values will be used. - */ - @JsonProperty("azure_attributes") - private InstancePoolAzureAttributes azureAttributes; - /** * Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances * and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -34,26 +19,6 @@ public class EditInstancePool { @JsonProperty("custom_tags") private Map customTags; - /** Defines the specification of the disks that will be attached to all spark containers. */ - @JsonProperty("disk_spec") - private DiskSpec diskSpec; - - /** - * Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire - * additional disk space when its Spark workers are running low on disk space. In AWS, this - * feature requires specific AWS permissions to function correctly - refer to the User Guide for - * more details. - */ - @JsonProperty("enable_elastic_disk") - private Boolean enableElasticDisk; - - /** - * Attributes related to instance pools running on Google Cloud Platform. If not specified at pool - * creation, a set of default values will be used. - */ - @JsonProperty("gcp_attributes") - private InstancePoolGcpAttributes gcpAttributes; - /** * Automatically terminates the extra instances in the pool cache after they are inactive for this * time in minutes if min_idle_instances requirement is already met. If not set, the extra pool @@ -96,36 +61,6 @@ public class EditInstancePool { @JsonProperty("node_type_id") private String nodeTypeId; - /** Custom Docker Image BYOC */ - @JsonProperty("preloaded_docker_images") - private Collection preloadedDockerImages; - - /** - * A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters - * started with the preloaded Spark version will start faster. A list of available Spark versions - * can be retrieved by using the :method:clusters/sparkVersions API call. 
- */ - @JsonProperty("preloaded_spark_versions") - private Collection preloadedSparkVersions; - - public EditInstancePool setAwsAttributes(InstancePoolAwsAttributes awsAttributes) { - this.awsAttributes = awsAttributes; - return this; - } - - public InstancePoolAwsAttributes getAwsAttributes() { - return awsAttributes; - } - - public EditInstancePool setAzureAttributes(InstancePoolAzureAttributes azureAttributes) { - this.azureAttributes = azureAttributes; - return this; - } - - public InstancePoolAzureAttributes getAzureAttributes() { - return azureAttributes; - } - public EditInstancePool setCustomTags(Map customTags) { this.customTags = customTags; return this; @@ -135,33 +70,6 @@ public Map getCustomTags() { return customTags; } - public EditInstancePool setDiskSpec(DiskSpec diskSpec) { - this.diskSpec = diskSpec; - return this; - } - - public DiskSpec getDiskSpec() { - return diskSpec; - } - - public EditInstancePool setEnableElasticDisk(Boolean enableElasticDisk) { - this.enableElasticDisk = enableElasticDisk; - return this; - } - - public Boolean getEnableElasticDisk() { - return enableElasticDisk; - } - - public EditInstancePool setGcpAttributes(InstancePoolGcpAttributes gcpAttributes) { - this.gcpAttributes = gcpAttributes; - return this; - } - - public InstancePoolGcpAttributes getGcpAttributes() { - return gcpAttributes; - } - public EditInstancePool setIdleInstanceAutoterminationMinutes( Long idleInstanceAutoterminationMinutes) { this.idleInstanceAutoterminationMinutes = idleInstanceAutoterminationMinutes; @@ -217,82 +125,43 @@ public String getNodeTypeId() { return nodeTypeId; } - public EditInstancePool setPreloadedDockerImages(Collection preloadedDockerImages) { - this.preloadedDockerImages = preloadedDockerImages; - return this; - } - - public Collection getPreloadedDockerImages() { - return preloadedDockerImages; - } - - public EditInstancePool setPreloadedSparkVersions(Collection preloadedSparkVersions) { - this.preloadedSparkVersions = preloadedSparkVersions; - return this; - } - - public Collection getPreloadedSparkVersions() { - return preloadedSparkVersions; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; EditInstancePool that = (EditInstancePool) o; - return Objects.equals(awsAttributes, that.awsAttributes) - && Objects.equals(azureAttributes, that.azureAttributes) - && Objects.equals(customTags, that.customTags) - && Objects.equals(diskSpec, that.diskSpec) - && Objects.equals(enableElasticDisk, that.enableElasticDisk) - && Objects.equals(gcpAttributes, that.gcpAttributes) + return Objects.equals(customTags, that.customTags) && Objects.equals( idleInstanceAutoterminationMinutes, that.idleInstanceAutoterminationMinutes) && Objects.equals(instancePoolId, that.instancePoolId) && Objects.equals(instancePoolName, that.instancePoolName) && Objects.equals(maxCapacity, that.maxCapacity) && Objects.equals(minIdleInstances, that.minIdleInstances) - && Objects.equals(nodeTypeId, that.nodeTypeId) - && Objects.equals(preloadedDockerImages, that.preloadedDockerImages) - && Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions); + && Objects.equals(nodeTypeId, that.nodeTypeId); } @Override public int hashCode() { return Objects.hash( - awsAttributes, - azureAttributes, customTags, - diskSpec, - enableElasticDisk, - gcpAttributes, idleInstanceAutoterminationMinutes, instancePoolId, instancePoolName, maxCapacity, minIdleInstances, - nodeTypeId, - preloadedDockerImages, - 
preloadedSparkVersions); + nodeTypeId); } @Override public String toString() { return new ToStringer(EditInstancePool.class) - .add("awsAttributes", awsAttributes) - .add("azureAttributes", azureAttributes) .add("customTags", customTags) - .add("diskSpec", diskSpec) - .add("enableElasticDisk", enableElasticDisk) - .add("gcpAttributes", gcpAttributes) .add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes) .add("instancePoolId", instancePoolId) .add("instancePoolName", instancePoolName) .add("maxCapacity", maxCapacity) .add("minIdleInstances", minIdleInstances) .add("nodeTypeId", nodeTypeId) - .add("preloadedDockerImages", preloadedDockerImages) - .add("preloadedSparkVersions", preloadedSparkVersions) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index fa49f5216..30f050824 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -26,9 +26,13 @@ public class CreateJob { @JsonProperty("continuous") private Continuous continuous; + /** Deployment information for jobs managed by external sources. */ + @JsonProperty("deployment") + private JobDeployment deployment; + /** * An optional set of email addresses that is notified when runs of this job begin or complete as - * well as when this job is deleted. The default behavior is to not send any emails. + * well as when this job is deleted. */ @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; @@ -79,8 +83,7 @@ public class CreateJob { * active runs. However, from then on, new runs are skipped unless there are fewer than 3 active * runs. * - *
<p>
This value cannot exceed 1000\. Setting this value to 0 causes all new runs to be skipped. - * The default behavior is to allow only 1 concurrent run. + *
<p>
This value cannot exceed 1000\. Setting this value to `0` causes all new runs to be skipped. */ @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; @@ -134,10 +137,7 @@ public class CreateJob { @JsonProperty("tasks") private Collection tasks; - /** - * An optional timeout applied to each run of this job. The default behavior is to have no - * timeout. - */ + /** An optional timeout applied to each run of this job. A value of `0` means no timeout. */ @JsonProperty("timeout_seconds") private Long timeoutSeconds; @@ -150,9 +150,15 @@ public class CreateJob { private TriggerSettings trigger; /** - * A collection of system notification IDs to notify when the run begins or completes. The default - * behavior is to not send any system notifications. + * State of the job in UI. + * + *
<p>
* `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in + * an editable state and can be modified. */ + @JsonProperty("ui_state") + private CreateJobUiState uiState; + + /** A collection of system notification IDs to notify when runs of this job begin or complete. */ @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; @@ -184,6 +190,15 @@ public Continuous getContinuous() { return continuous; } + public CreateJob setDeployment(JobDeployment deployment) { + this.deployment = deployment; + return this; + } + + public JobDeployment getDeployment() { + return deployment; + } + public CreateJob setEmailNotifications(JobEmailNotifications emailNotifications) { this.emailNotifications = emailNotifications; return this; @@ -328,6 +343,15 @@ public TriggerSettings getTrigger() { return trigger; } + public CreateJob setUiState(CreateJobUiState uiState) { + this.uiState = uiState; + return this; + } + + public CreateJobUiState getUiState() { + return uiState; + } + public CreateJob setWebhookNotifications(WebhookNotifications webhookNotifications) { this.webhookNotifications = webhookNotifications; return this; @@ -345,6 +369,7 @@ public boolean equals(Object o) { return Objects.equals(accessControlList, that.accessControlList) && Objects.equals(compute, that.compute) && Objects.equals(continuous, that.continuous) + && Objects.equals(deployment, that.deployment) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(format, that.format) && Objects.equals(gitSource, that.gitSource) @@ -361,6 +386,7 @@ public boolean equals(Object o) { && Objects.equals(tasks, that.tasks) && Objects.equals(timeoutSeconds, that.timeoutSeconds) && Objects.equals(trigger, that.trigger) + && Objects.equals(uiState, that.uiState) && Objects.equals(webhookNotifications, that.webhookNotifications); } @@ -370,6 +396,7 @@ public int hashCode() { accessControlList, compute, continuous, + deployment, emailNotifications, format, gitSource, @@ -386,6 +413,7 @@ public int hashCode() { tasks, timeoutSeconds, trigger, + uiState, webhookNotifications); } @@ -395,6 +423,7 @@ public String toString() { .add("accessControlList", accessControlList) .add("compute", compute) .add("continuous", continuous) + .add("deployment", deployment) .add("emailNotifications", emailNotifications) .add("format", format) .add("gitSource", gitSource) @@ -411,6 +440,7 @@ public String toString() { .add("tasks", tasks) .add("timeoutSeconds", timeoutSeconds) .add("trigger", trigger) + .add("uiState", uiState) .add("webhookNotifications", webhookNotifications) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobUiState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobUiState.java new file mode 100755 index 000000000..3a1ad123d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobUiState.java @@ -0,0 +1,17 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +/** + * State of the job in UI. + * + *
<p>
* `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in + * an editable state and can be modified. + */ +@Generated +public enum CreateJobUiState { + EDITABLE, // The job is in an editable state and can be modified. + LOCKED, // The job is in a locked state and cannot be modified. +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java new file mode 100755 index 000000000..1487a8cb2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class JobDeployment { + /** + * The kind of deployment that manages the job. + * + *
<p>
* `BUNDLE`: The job is managed by Databricks Asset Bundle. + */ + @JsonProperty("kind") + private JobDeploymentKind kind; + + /** Path of the file that contains deployment metadata. */ + @JsonProperty("metadata_file_path") + private String metadataFilePath; + + public JobDeployment setKind(JobDeploymentKind kind) { + this.kind = kind; + return this; + } + + public JobDeploymentKind getKind() { + return kind; + } + + public JobDeployment setMetadataFilePath(String metadataFilePath) { + this.metadataFilePath = metadataFilePath; + return this; + } + + public String getMetadataFilePath() { + return metadataFilePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobDeployment that = (JobDeployment) o; + return Objects.equals(kind, that.kind) + && Objects.equals(metadataFilePath, that.metadataFilePath); + } + + @Override + public int hashCode() { + return Objects.hash(kind, metadataFilePath); + } + + @Override + public String toString() { + return new ToStringer(JobDeployment.class) + .add("kind", kind) + .add("metadataFilePath", metadataFilePath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java new file mode 100755 index 000000000..52683b09a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +/** + * The kind of deployment that manages the job. + * + *
<p>
* `BUNDLE`: The job is managed by Databricks Asset Bundle. + */ +@Generated +public enum JobDeploymentKind { + BUNDLE, // The job is managed by Databricks Asset Bundle. +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index e968aea91..f916b5423 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -22,9 +22,13 @@ public class JobSettings { @JsonProperty("continuous") private Continuous continuous; + /** Deployment information for jobs managed by external sources. */ + @JsonProperty("deployment") + private JobDeployment deployment; + /** * An optional set of email addresses that is notified when runs of this job begin or complete as - * well as when this job is deleted. The default behavior is to not send any emails. + * well as when this job is deleted. */ @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; @@ -75,8 +79,7 @@ public class JobSettings { * active runs. However, from then on, new runs are skipped unless there are fewer than 3 active * runs. * - *
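A sketch of the new deployment/ui_state fields on job create, assuming a configured WorkspaceClient; the metadata path, notebook path, and cluster ID are illustrative:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.jobs.CreateJob;
import com.databricks.sdk.service.jobs.CreateJobUiState;
import com.databricks.sdk.service.jobs.CreateResponse;
import com.databricks.sdk.service.jobs.JobDeployment;
import com.databricks.sdk.service.jobs.JobDeploymentKind;
import com.databricks.sdk.service.jobs.NotebookTask;
import com.databricks.sdk.service.jobs.Task;
import java.util.Arrays;

public class CreateBundleJob {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    CreateResponse created =
        w.jobs()
            .create(
                new CreateJob()
                    .setName("bundle-managed-job")
                    // Mark the job as managed by a Databricks Asset Bundle and lock it in the UI.
                    .setDeployment(
                        new JobDeployment()
                            .setKind(JobDeploymentKind.BUNDLE)
                            .setMetadataFilePath("/Workspace/.bundle/metadata.json"))
                    .setUiState(CreateJobUiState.LOCKED)
                    .setMaxConcurrentRuns(1L)
                    .setTasks(
                        Arrays.asList(
                            new Task()
                                .setTaskKey("main")
                                .setExistingClusterId("0123-456789-abcdef00")
                                .setNotebookTask(
                                    new NotebookTask().setNotebookPath("/Users/someone/etl")))));
    System.out.println("job_id=" + created.getJobId());
  }
}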
<p>
This value cannot exceed 1000\. Setting this value to 0 causes all new runs to be skipped. - * The default behavior is to allow only 1 concurrent run. + *
<p>
This value cannot exceed 1000\. Setting this value to `0` causes all new runs to be skipped. */ @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; @@ -130,10 +133,7 @@ public class JobSettings { @JsonProperty("tasks") private Collection tasks; - /** - * An optional timeout applied to each run of this job. The default behavior is to have no - * timeout. - */ + /** An optional timeout applied to each run of this job. A value of `0` means no timeout. */ @JsonProperty("timeout_seconds") private Long timeoutSeconds; @@ -146,9 +146,15 @@ public class JobSettings { private TriggerSettings trigger; /** - * A collection of system notification IDs to notify when the run begins or completes. The default - * behavior is to not send any system notifications. + * State of the job in UI. + * + *
<p>
* `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in + * an editable state and can be modified. */ + @JsonProperty("ui_state") + private JobSettingsUiState uiState; + + /** A collection of system notification IDs to notify when runs of this job begin or complete. */ @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; @@ -170,6 +176,15 @@ public Continuous getContinuous() { return continuous; } + public JobSettings setDeployment(JobDeployment deployment) { + this.deployment = deployment; + return this; + } + + public JobDeployment getDeployment() { + return deployment; + } + public JobSettings setEmailNotifications(JobEmailNotifications emailNotifications) { this.emailNotifications = emailNotifications; return this; @@ -314,6 +329,15 @@ public TriggerSettings getTrigger() { return trigger; } + public JobSettings setUiState(JobSettingsUiState uiState) { + this.uiState = uiState; + return this; + } + + public JobSettingsUiState getUiState() { + return uiState; + } + public JobSettings setWebhookNotifications(WebhookNotifications webhookNotifications) { this.webhookNotifications = webhookNotifications; return this; @@ -330,6 +354,7 @@ public boolean equals(Object o) { JobSettings that = (JobSettings) o; return Objects.equals(compute, that.compute) && Objects.equals(continuous, that.continuous) + && Objects.equals(deployment, that.deployment) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(format, that.format) && Objects.equals(gitSource, that.gitSource) @@ -346,6 +371,7 @@ public boolean equals(Object o) { && Objects.equals(tasks, that.tasks) && Objects.equals(timeoutSeconds, that.timeoutSeconds) && Objects.equals(trigger, that.trigger) + && Objects.equals(uiState, that.uiState) && Objects.equals(webhookNotifications, that.webhookNotifications); } @@ -354,6 +380,7 @@ public int hashCode() { return Objects.hash( compute, continuous, + deployment, emailNotifications, format, gitSource, @@ -370,6 +397,7 @@ public int hashCode() { tasks, timeoutSeconds, trigger, + uiState, webhookNotifications); } @@ -378,6 +406,7 @@ public String toString() { return new ToStringer(JobSettings.class) .add("compute", compute) .add("continuous", continuous) + .add("deployment", deployment) .add("emailNotifications", emailNotifications) .add("format", format) .add("gitSource", gitSource) @@ -394,6 +423,7 @@ public String toString() { .add("tasks", tasks) .add("timeoutSeconds", timeoutSeconds) .add("trigger", trigger) + .add("uiState", uiState) .add("webhookNotifications", webhookNotifications) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsUiState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsUiState.java new file mode 100755 index 000000000..3aa479c9f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsUiState.java @@ -0,0 +1,17 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +/** + * State of the job in UI. + * + *
<p>
* `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in + * an editable state and can be modified. + */ +@Generated +public enum JobSettingsUiState { + EDITABLE, // The job is in an editable state and can be modified. + LOCKED, // The job is in a locked state and cannot be modified. +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java index db119ba68..3d3d7f5d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java @@ -306,10 +306,10 @@ public void reset(long jobId, JobSettings newSettings) { } /** - * Overwrites all settings for a job. + * Overwrite all settings for a job. * - *
<p>
Overwrites all the settings for a specific job. Use the Update endpoint to update job - * settings partially. + *
<p>
Overwrite all settings for the given job. Use the Update endpoint to update job settings + * partially. */ public void reset(ResetJob request) { impl.reset(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java index 748e31ec4..6b14958ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java @@ -139,10 +139,10 @@ GetJobPermissionLevelsResponse getPermissionLevels( RepairRunResponse repairRun(RepairRun repairRun); /** - * Overwrites all settings for a job. + * Overwrite all settings for a job. * - *
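A sketch contrasting reset (full overwrite) with update (partial change), assuming a configured WorkspaceClient; the job ID and settings are illustrative:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.jobs.JobSettings;
import com.databricks.sdk.service.jobs.ResetJob;
import com.databricks.sdk.service.jobs.UpdateJob;

public class ResetVsUpdate {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    long jobId = 123L;

    // reset: the provided settings replace the job's settings entirely.
    w.jobs()
        .reset(
            new ResetJob()
                .setJobId(jobId)
                .setNewSettings(new JobSettings().setName("nightly-etl").setTimeoutSeconds(3600L)));

    // update: only the listed fields change; everything else is preserved.
    w.jobs()
        .update(
            new UpdateJob().setJobId(jobId).setNewSettings(new JobSettings().setMaxConcurrentRuns(2L)));
  }
}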
<p>
Overwrites all the settings for a specific job. Use the Update endpoint to update job - * settings partially. + *
<p>
Overwrite all settings for the given job. Use the Update endpoint to update job settings + * partially. */ void reset(ResetJob resetJob); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java index f2f522345..05c16ca6b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java @@ -9,10 +9,6 @@ @Generated public class RunOutput { - /** The output of a condition task, if available. */ - @JsonProperty("condition_task") - private Object conditionTask; - /** The output of a dbt task, if available. */ @JsonProperty("dbt_output") private DbtOutput dbtOutput; @@ -66,15 +62,6 @@ public class RunOutput { @JsonProperty("sql_output") private SqlOutput sqlOutput; - public RunOutput setConditionTask(Object conditionTask) { - this.conditionTask = conditionTask; - return this; - } - - public Object getConditionTask() { - return conditionTask; - } - public RunOutput setDbtOutput(DbtOutput dbtOutput) { this.dbtOutput = dbtOutput; return this; @@ -161,8 +148,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RunOutput that = (RunOutput) o; - return Objects.equals(conditionTask, that.conditionTask) - && Objects.equals(dbtOutput, that.dbtOutput) + return Objects.equals(dbtOutput, that.dbtOutput) && Objects.equals(error, that.error) && Objects.equals(errorTrace, that.errorTrace) && Objects.equals(logs, that.logs) @@ -176,7 +162,6 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - conditionTask, dbtOutput, error, errorTrace, @@ -191,7 +176,6 @@ public int hashCode() { @Override public String toString() { return new ToStringer(RunOutput.class) - .add("conditionTask", conditionTask) .add("dbtOutput", dbtOutput) .add("error", error) .add("errorTrace", errorTrace) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java index 2fd0664bb..5b547374f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java @@ -14,10 +14,7 @@ public class SubmitRun { @JsonProperty("access_control_list") private Collection accessControlList; - /** - * An optional set of email addresses notified when the run begins or completes. The default - * behavior is to not send any emails. - */ + /** An optional set of email addresses notified when the run begins or completes. */ @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; @@ -59,7 +56,7 @@ public class SubmitRun { /** * Optional notification settings that are used when sending notifications to each of the - * `webhook_notifications` for this run. + * `email_notifications` and `webhook_notifications` for this run. */ @JsonProperty("notification_settings") private JobNotificationSettings notificationSettings; @@ -76,17 +73,11 @@ public class SubmitRun { @JsonProperty("tasks") private Collection tasks; - /** - * An optional timeout applied to each run of this job. The default behavior is to have no - * timeout. - */ + /** An optional timeout applied to each run of this job. A value of `0` means no timeout. 
*/ @JsonProperty("timeout_seconds") private Long timeoutSeconds; - /** - * A collection of system notification IDs to notify when the run begins or completes. The default - * behavior is to not send any system notifications. - */ + /** A collection of system notification IDs to notify when the run begins or completes. */ @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 463c6a743..b65af624c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -64,8 +64,8 @@ public class SubmitTask { private NotebookTask notebookTask; /** - * Optional notification settings that are used when sending email notifications for this task - * run. + * Optional notification settings that are used when sending notifications to each of the + * `email_notifications` and `webhook_notifications` for this task run. */ @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; @@ -118,10 +118,7 @@ public class SubmitTask { @JsonProperty("task_key") private String taskKey; - /** - * An optional timeout applied to each run of this job task. The default behavior is to have no - * timeout. - */ + /** An optional timeout applied to each run of this job task. A value of `0` means no timeout. */ @JsonProperty("timeout_seconds") private Long timeoutSeconds; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 7f8a82bdf..b87453fd9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -81,8 +81,8 @@ public class Task { /** * An optional maximum number of times to retry an unsuccessful run. A run is considered to be * unsuccessful if it completes with the `FAILED` result_state or `INTERNAL_ERROR` - * `life_cycle_state`. The value -1 means to retry indefinitely and the value 0 means to never - * retry. The default behavior is to never retry. + * `life_cycle_state`. The value `-1` means to retry indefinitely and the value `0` means to never + * retry. */ @JsonProperty("max_retries") private Long maxRetries; @@ -107,7 +107,7 @@ public class Task { /** * Optional notification settings that are used when sending notifications to each of the - * `email_notifications` for this task. + * `email_notifications` and `webhook_notifications` for this task. */ @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; @@ -120,16 +120,13 @@ public class Task { @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; - /** - * An optional policy to specify whether to retry a task when it times out. The default behavior - * is to not retry on timeout. - */ + /** An optional policy to specify whether to retry a task when it times out. */ @JsonProperty("retry_on_timeout") private Boolean retryOnTimeout; /** * An optional value specifying the condition determining whether the task is run once its - * dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. + * dependencies have been completed. * *
<p>
* `ALL_SUCCESS`: All dependencies have executed and succeeded * `AT_LEAST_ONE_SUCCESS`: At * least one dependency has succeeded * `NONE_FAILED`: None of the dependencies have failed and at @@ -184,13 +181,17 @@ public class Task { @JsonProperty("task_key") private String taskKey; - /** - * An optional timeout applied to each run of this job task. The default behavior is to have no - * timeout. - */ + /** An optional timeout applied to each run of this job task. A value of `0` means no timeout. */ @JsonProperty("timeout_seconds") private Long timeoutSeconds; + /** + * A collection of system notification IDs to notify when runs of this task begin or complete. The + * default behavior is to not send any system notifications. + */ + @JsonProperty("webhook_notifications") + private WebhookNotifications webhookNotifications; + public Task setComputeKey(String computeKey) { this.computeKey = computeKey; return this; @@ -425,6 +426,15 @@ public Long getTimeoutSeconds() { return timeoutSeconds; } + public Task setWebhookNotifications(WebhookNotifications webhookNotifications) { + this.webhookNotifications = webhookNotifications; + return this; + } + + public WebhookNotifications getWebhookNotifications() { + return webhookNotifications; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -455,7 +465,8 @@ public boolean equals(Object o) { && Objects.equals(sparkSubmitTask, that.sparkSubmitTask) && Objects.equals(sqlTask, that.sqlTask) && Objects.equals(taskKey, that.taskKey) - && Objects.equals(timeoutSeconds, that.timeoutSeconds); + && Objects.equals(timeoutSeconds, that.timeoutSeconds) + && Objects.equals(webhookNotifications, that.webhookNotifications); } @Override @@ -486,7 +497,8 @@ public int hashCode() { sparkSubmitTask, sqlTask, taskKey, - timeoutSeconds); + timeoutSeconds, + webhookNotifications); } @Override @@ -518,6 +530,7 @@ public String toString() { .add("sqlTask", sqlTask) .add("taskKey", taskKey) .add("timeoutSeconds", timeoutSeconds) + .add("webhookNotifications", webhookNotifications) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java index 588b7321b..f440e99f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java @@ -292,9 +292,10 @@ public StartUpdateResponse startUpdate(String pipelineId) { } /** - * Queue a pipeline update. + * Start a pipeline. * - *
<p>
Starts or queues a pipeline update. + *
<p>
Starts a new update for the pipeline. If there is already an active update for the pipeline, + * the request will fail and the active update will remain running. */ public StartUpdateResponse startUpdate(StartUpdate request) { return impl.startUpdate(request); @@ -307,7 +308,8 @@ public Wait stop(String pipelineId) { /** * Stop a pipeline. * - *
<p>
Stops a pipeline. + *
<p>
Stops the pipeline by canceling the active update. If there is no active update for the + * pipeline, this request is a no-op. */ public Wait stop(StopRequest request) { impl.stop(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java index 3044ee910..fa25d4582 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java @@ -102,16 +102,18 @@ ListPipelineEventsResponse listPipelineEvents( PipelinePermissions setPermissions(PipelinePermissionsRequest pipelinePermissionsRequest); /** - * Queue a pipeline update. + * Start a pipeline. * - *
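A sketch of the clarified semantics, assuming a configured WorkspaceClient; the pipeline ID is illustrative. Per the docs above, startUpdate fails if an update is already active, and stop cancels the active update (or is a no-op if there is none):

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.pipelines.StartUpdate;
import com.databricks.sdk.service.pipelines.StartUpdateResponse;

public class PipelineUpdate {
  public static void main(String[] args) throws Exception {
    WorkspaceClient w = new WorkspaceClient();
    String pipelineId = "a12cd3e4-0ab1-1abc-1a2b-1a2bcd3e4fg5";

    StartUpdateResponse update =
        w.pipelines().startUpdate(new StartUpdate().setPipelineId(pipelineId));
    System.out.println("started update " + update.getUpdateId());

    // Cancel the active update and wait until the pipeline is idle.
    w.pipelines().stop(pipelineId).get();
  }
}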
<p>
Starts or queues a pipeline update. + *
<p>
Starts a new update for the pipeline. If there is already an active update for the pipeline, + * the request will fail and the active update will remain running. */ StartUpdateResponse startUpdate(StartUpdate startUpdate); /** * Stop a pipeline. * - *
<p>
Stops a pipeline. + *
<p>
Stops the pipeline by canceling the active update. If there is no active update for the + * pipeline, this request is a no-op. */ void stop(StopRequest stopRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java index f94e57878..469c7303b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java @@ -58,6 +58,15 @@ public class ServedModelInput { @JsonProperty("workload_size") private String workloadSize; + /** + * The workload type of the served model. The workload type selects which type of compute to use + * in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, + * GPU acceleration is available by selecting workload types like GPU_SMALL and others. See + * documentation for all options. + */ + @JsonProperty("workload_type") + private String workloadType; + public ServedModelInput setEnvironmentVars(Map environmentVars) { this.environmentVars = environmentVars; return this; @@ -121,6 +130,15 @@ public String getWorkloadSize() { return workloadSize; } + public ServedModelInput setWorkloadType(String workloadType) { + this.workloadType = workloadType; + return this; + } + + public String getWorkloadType() { + return workloadType; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -132,7 +150,8 @@ public boolean equals(Object o) { && Objects.equals(modelVersion, that.modelVersion) && Objects.equals(name, that.name) && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled) - && Objects.equals(workloadSize, that.workloadSize); + && Objects.equals(workloadSize, that.workloadSize) + && Objects.equals(workloadType, that.workloadType); } @Override @@ -144,7 +163,8 @@ public int hashCode() { modelVersion, name, scaleToZeroEnabled, - workloadSize); + workloadSize, + workloadType); } @Override @@ -157,6 +177,7 @@ public String toString() { .add("name", name) .add("scaleToZeroEnabled", scaleToZeroEnabled) .add("workloadSize", workloadSize) + .add("workloadType", workloadType) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java index 4a94f2d2c..897e42afa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java @@ -65,6 +65,15 @@ public class ServedModelOutput { @JsonProperty("workload_size") private String workloadSize; + /** + * The workload type of the served model. The workload type selects which type of compute to use + * in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, + * GPU acceleration is available by selecting workload types like GPU_SMALL and others. See + * documentation for all options. 
+ */ + @JsonProperty("workload_type") + private String workloadType; + public ServedModelOutput setCreationTimestamp(Long creationTimestamp) { this.creationTimestamp = creationTimestamp; return this; @@ -155,6 +164,15 @@ public String getWorkloadSize() { return workloadSize; } + public ServedModelOutput setWorkloadType(String workloadType) { + this.workloadType = workloadType; + return this; + } + + public String getWorkloadType() { + return workloadType; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -169,7 +187,8 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(scaleToZeroEnabled, that.scaleToZeroEnabled) && Objects.equals(state, that.state) - && Objects.equals(workloadSize, that.workloadSize); + && Objects.equals(workloadSize, that.workloadSize) + && Objects.equals(workloadType, that.workloadType); } @Override @@ -184,7 +203,8 @@ public int hashCode() { name, scaleToZeroEnabled, state, - workloadSize); + workloadSize, + workloadType); } @Override @@ -200,6 +220,7 @@ public String toString() { .add("scaleToZeroEnabled", scaleToZeroEnabled) .add("state", state) .add("workloadSize", workloadSize) + .add("workloadType", workloadType) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java index 429d4c6f9..4fab39ade 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java @@ -18,8 +18,7 @@ public AccountIpAccessListsImpl(ApiClient apiClient) { @Override public CreateIpAccessListResponse create(CreateIpAccessList request) { String path = - String.format( - "/api/2.0/preview/accounts/%s/ip-access-lists", apiClient.configuredAccountID()); + String.format("/api/2.0/accounts/%s/ip-access-lists", apiClient.configuredAccountID()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); @@ -30,7 +29,7 @@ public CreateIpAccessListResponse create(CreateIpAccessList request) { public void delete(DeleteAccountIpAccessListRequest request) { String path = String.format( - "/api/2.0/preview/accounts/%s/ip-access-lists/%s", + "/api/2.0/accounts/%s/ip-access-lists/%s", apiClient.configuredAccountID(), request.getIpAccessListId()); Map headers = new HashMap<>(); apiClient.DELETE(path, request, Void.class, headers); @@ -40,7 +39,7 @@ public void delete(DeleteAccountIpAccessListRequest request) { public GetIpAccessListResponse get(GetAccountIpAccessListRequest request) { String path = String.format( - "/api/2.0/preview/accounts/%s/ip-access-lists/%s", + "/api/2.0/accounts/%s/ip-access-lists/%s", apiClient.configuredAccountID(), request.getIpAccessListId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); @@ -50,8 +49,7 @@ public GetIpAccessListResponse get(GetAccountIpAccessListRequest request) { @Override public GetIpAccessListsResponse list() { String path = - String.format( - "/api/2.0/preview/accounts/%s/ip-access-lists", apiClient.configuredAccountID()); + String.format("/api/2.0/accounts/%s/ip-access-lists", apiClient.configuredAccountID()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); return apiClient.GET(path, GetIpAccessListsResponse.class, headers); @@ -61,7 +59,7 @@ public 
GetIpAccessListsResponse list() { public void replace(ReplaceIpAccessList request) { String path = String.format( - "/api/2.0/preview/accounts/%s/ip-access-lists/%s", + "/api/2.0/accounts/%s/ip-access-lists/%s", apiClient.configuredAccountID(), request.getIpAccessListId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); @@ -73,7 +71,7 @@ public void replace(ReplaceIpAccessList request) { public void update(UpdateIpAccessList request) { String path = String.format( - "/api/2.0/preview/accounts/%s/ip-access-lists/%s", + "/api/2.0/accounts/%s/ip-access-lists/%s", apiClient.configuredAccountID(), request.getIpAccessListId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyAPI.java deleted file mode 100755 index d94d0e103..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyAPI.java +++ /dev/null @@ -1,75 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.support.Generated; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Network policy is a set of rules that defines what can be accessed from your Databricks network. - * E.g.: You can choose to block your SQL UDF to access internet from your Databricks serverless - * clusters. - * - *
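The account-level IP access list implementation above now targets the GA path (/api/2.0/accounts/{account_id}/ip-access-lists instead of the /preview/ prefix). A call sketch, assuming a configured AccountClient; the label and CIDR range are illustrative:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.CreateIpAccessList;
import com.databricks.sdk.service.settings.CreateIpAccessListResponse;
import com.databricks.sdk.service.settings.ListType;
import java.util.Arrays;

public class AccountIpAccessListExample {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();
    // Creates an allow list at the account level via the non-preview path.
    CreateIpAccessListResponse created =
        a.ipAccessLists()
            .create(
                new CreateIpAccessList()
                    .setLabel("office")
                    .setListType(ListType.ALLOW)
                    .setIpAddresses(Arrays.asList("10.0.10.0/24")));
    System.out.println(created.getIpAccessList().getListId());
  }
}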
<p>
There is only one instance of this setting per account. Since this setting has a default - * value, this setting is present on all accounts even though it's never set on a given account. - * Deletion reverts the value of the setting back to the default value. - */ -@Generated -public class AccountNetworkPolicyAPI { - private static final Logger LOG = LoggerFactory.getLogger(AccountNetworkPolicyAPI.class); - - private final AccountNetworkPolicyService impl; - - /** Regular-use constructor */ - public AccountNetworkPolicyAPI(ApiClient apiClient) { - impl = new AccountNetworkPolicyImpl(apiClient); - } - - /** Constructor for mocks */ - public AccountNetworkPolicyAPI(AccountNetworkPolicyService mock) { - impl = mock; - } - - public DeleteAccountNetworkPolicyResponse deleteAccountNetworkPolicy(String etag) { - return deleteAccountNetworkPolicy(new DeleteAccountNetworkPolicyRequest().setEtag(etag)); - } - - /** - * Delete Account Network Policy. - * - *
<p>
Reverts back all the account network policies back to default. - */ - public DeleteAccountNetworkPolicyResponse deleteAccountNetworkPolicy( - DeleteAccountNetworkPolicyRequest request) { - return impl.deleteAccountNetworkPolicy(request); - } - - public AccountNetworkPolicyMessage readAccountNetworkPolicy(String etag) { - return readAccountNetworkPolicy(new ReadAccountNetworkPolicyRequest().setEtag(etag)); - } - - /** - * Get Account Network Policy. - * - *
<p>
Gets the value of Account level Network Policy. - */ - public AccountNetworkPolicyMessage readAccountNetworkPolicy( - ReadAccountNetworkPolicyRequest request) { - return impl.readAccountNetworkPolicy(request); - } - - /** - * Update Account Network Policy. - * - *
<p>
Updates the policy content of Account level Network Policy. - */ - public AccountNetworkPolicyMessage updateAccountNetworkPolicy( - UpdateAccountNetworkPolicyRequest request) { - return impl.updateAccountNetworkPolicy(request); - } - - public AccountNetworkPolicyService impl() { - return impl; - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyImpl.java deleted file mode 100755 index 4a8f39d83..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyImpl.java +++ /dev/null @@ -1,54 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; - -/** Package-local implementation of AccountNetworkPolicy */ -@Generated -class AccountNetworkPolicyImpl implements AccountNetworkPolicyService { - private final ApiClient apiClient; - - public AccountNetworkPolicyImpl(ApiClient apiClient) { - this.apiClient = apiClient; - } - - @Override - public DeleteAccountNetworkPolicyResponse deleteAccountNetworkPolicy( - DeleteAccountNetworkPolicyRequest request) { - String path = - String.format( - "/api/2.0/accounts/%s/settings/types/network_policy/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE(path, request, DeleteAccountNetworkPolicyResponse.class, headers); - } - - @Override - public AccountNetworkPolicyMessage readAccountNetworkPolicy( - ReadAccountNetworkPolicyRequest request) { - String path = - String.format( - "/api/2.0/accounts/%s/settings/types/network_policy/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, AccountNetworkPolicyMessage.class, headers); - } - - @Override - public AccountNetworkPolicyMessage updateAccountNetworkPolicy( - UpdateAccountNetworkPolicyRequest request) { - String path = - String.format( - "/api/2.0/accounts/%s/settings/types/network_policy/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, AccountNetworkPolicyMessage.class, headers); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyMessage.java deleted file mode 100755 index 2a4dd5c56..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyMessage.java +++ /dev/null @@ -1,49 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class AccountNetworkPolicyMessage { - /** - * Whether or not serverless UDF can access the internet. When false, access to the internet will - * be blocked from serverless clusters. 
Trusted traffic required by clusters for basic - * functionality will not be affected. - */ - @JsonProperty("serverless_internet_access_enabled") - private Boolean serverlessInternetAccessEnabled; - - public AccountNetworkPolicyMessage setServerlessInternetAccessEnabled( - Boolean serverlessInternetAccessEnabled) { - this.serverlessInternetAccessEnabled = serverlessInternetAccessEnabled; - return this; - } - - public Boolean getServerlessInternetAccessEnabled() { - return serverlessInternetAccessEnabled; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AccountNetworkPolicyMessage that = (AccountNetworkPolicyMessage) o; - return Objects.equals(serverlessInternetAccessEnabled, that.serverlessInternetAccessEnabled); - } - - @Override - public int hashCode() { - return Objects.hash(serverlessInternetAccessEnabled); - } - - @Override - public String toString() { - return new ToStringer(AccountNetworkPolicyMessage.class) - .add("serverlessInternetAccessEnabled", serverlessInternetAccessEnabled) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyService.java deleted file mode 100755 index 1c5cc813d..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyService.java +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; - -/** - * Network policy is a set of rules that defines what can be accessed from your Databricks network. - * E.g.: You can choose to block your SQL UDF to access internet from your Databricks serverless - * clusters. - * - *
<p>There is only one instance of this setting per account. Since this setting has a default
- * value, this setting is present on all accounts even though it's never set on a given account.
- * Deletion reverts the value of the setting back to the default value.
- *
- *
<p>This is the high-level interface, that contains generated methods.
- *
<p>Evolving: this interface is under development. Method signatures may change.
- */
-@Generated
-public interface AccountNetworkPolicyService {
- /**
- * Delete Account Network Policy.
- *
- *
<p>Reverts back all the account network policies back to default.
- */
- DeleteAccountNetworkPolicyResponse deleteAccountNetworkPolicy(
- DeleteAccountNetworkPolicyRequest deleteAccountNetworkPolicyRequest);
-
- /**
- * Get Account Network Policy.
- *
- *
<p>Gets the value of Account level Network Policy.
- */
- AccountNetworkPolicyMessage readAccountNetworkPolicy(
- ReadAccountNetworkPolicyRequest readAccountNetworkPolicyRequest);
-
- /**
- * Update Account Network Policy.
- *
- *
<p>
Updates the policy content of Account level Network Policy. - */ - AccountNetworkPolicyMessage updateAccountNetworkPolicy( - UpdateAccountNetworkPolicyRequest updateAccountNetworkPolicyRequest); -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyRequest.java deleted file mode 100755 index 83f6c285a..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyRequest.java +++ /dev/null @@ -1,50 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import java.util.Objects; - -/** Delete Account Network Policy */ -@Generated -public class DeleteAccountNetworkPolicyRequest { - /** - * etag used for versioning. The response is at least as fresh as the eTag provided. This is used - * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting - * overwriting each other. It is strongly suggested that systems make use of the etag in the read - * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get - * an etag from a GET request, and pass it with the DELETE request to identify the rule set - * version you are deleting. - */ - @QueryParam("etag") - private String etag; - - public DeleteAccountNetworkPolicyRequest setEtag(String etag) { - this.etag = etag; - return this; - } - - public String getEtag() { - return etag; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteAccountNetworkPolicyRequest that = (DeleteAccountNetworkPolicyRequest) o; - return Objects.equals(etag, that.etag); - } - - @Override - public int hashCode() { - return Objects.hash(etag); - } - - @Override - public String toString() { - return new ToStringer(DeleteAccountNetworkPolicyRequest.class).add("etag", etag).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyResponse.java deleted file mode 100755 index 3a9a5981b..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyResponse.java +++ /dev/null @@ -1,49 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class DeleteAccountNetworkPolicyResponse { - /** - * etag used for versioning. The response is at least as fresh as the eTag provided. This is used - * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting - * overwriting each other. It is strongly suggested that systems make use of the etag in the read - * -> update pattern to perform setting updates in order to avoid race conditions. 
That is, get an - * etag from a GET request, and pass it with the PATCH request to identify the setting version you - * are updating. - */ - @JsonProperty("etag") - private String etag; - - public DeleteAccountNetworkPolicyResponse setEtag(String etag) { - this.etag = etag; - return this; - } - - public String getEtag() { - return etag; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteAccountNetworkPolicyResponse that = (DeleteAccountNetworkPolicyResponse) o; - return Objects.equals(etag, that.etag); - } - - @Override - public int hashCode() { - return Objects.hash(etag); - } - - @Override - public String toString() { - return new ToStringer(DeleteAccountNetworkPolicyResponse.class).add("etag", etag).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java index 596b15dc4..47925b4bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java @@ -5,22 +5,21 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Objects; @Generated public class GetIpAccessListResponse { /** */ - @JsonProperty("ip_access_lists") - private Collection ipAccessLists; + @JsonProperty("ip_access_list") + private IpAccessListInfo ipAccessList; - public GetIpAccessListResponse setIpAccessLists(Collection ipAccessLists) { - this.ipAccessLists = ipAccessLists; + public GetIpAccessListResponse setIpAccessList(IpAccessListInfo ipAccessList) { + this.ipAccessList = ipAccessList; return this; } - public Collection getIpAccessLists() { - return ipAccessLists; + public IpAccessListInfo getIpAccessList() { + return ipAccessList; } @Override @@ -28,18 +27,18 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetIpAccessListResponse that = (GetIpAccessListResponse) o; - return Objects.equals(ipAccessLists, that.ipAccessLists); + return Objects.equals(ipAccessList, that.ipAccessList); } @Override public int hashCode() { - return Objects.hash(ipAccessLists); + return Objects.hash(ipAccessList); } @Override public String toString() { return new ToStringer(GetIpAccessListResponse.class) - .add("ipAccessLists", ipAccessLists) + .add("ipAccessList", ipAccessList) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java index 95aa2bffa..54bf24590 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java @@ -40,11 +40,11 @@ public FetchIpAccessListResponse get(GetIpAccessListRequest request) { } @Override - public GetIpAccessListResponse list() { + public ListIpAccessListResponse list() { String path = "/api/2.0/ip-access-lists"; Map headers = new HashMap<>(); headers.put("Accept", "application/json"); - return apiClient.GET(path, GetIpAccessListResponse.class, headers); + return 
apiClient.GET(path, ListIpAccessListResponse.class, headers); } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java index d90c7b6da..08791e243 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java @@ -69,7 +69,7 @@ public interface IpAccessListsService { * *
<p>Gets all IP access lists for the specified workspace. */
- GetIpAccessListResponse list();
+ ListIpAccessListResponse list();

 /**
 * Replace access list.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java
new file mode 100755
index 000000000..9f31d07f0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListIpAccessListResponse {
+ /** */
+ @JsonProperty("ip_access_lists")
+ private Collection<IpAccessListInfo> ipAccessLists;
+
+ public ListIpAccessListResponse setIpAccessLists(Collection<IpAccessListInfo> ipAccessLists) {
+ this.ipAccessLists = ipAccessLists;
+ return this;
+ }
+
+ public Collection<IpAccessListInfo> getIpAccessLists() {
+ return ipAccessLists;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListIpAccessListResponse that = (ListIpAccessListResponse) o;
+ return Objects.equals(ipAccessLists, that.ipAccessLists);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(ipAccessLists);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListIpAccessListResponse.class)
+ .add("ipAccessLists", ipAccessLists)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadAccountNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadAccountNetworkPolicyRequest.java
deleted file mode 100755
index f7d9aaf21..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadAccountNetworkPolicyRequest.java
+++ /dev/null
@@ -1,50 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.QueryParam;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-/** Get Account Network Policy */
-@Generated
-public class ReadAccountNetworkPolicyRequest {
- /**
- * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
- * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
- * overwriting each other. It is strongly suggested that systems make use of the etag in the read
- * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
- * an etag from a GET request, and pass it with the DELETE request to identify the rule set
- * version you are deleting.
- */ - @QueryParam("etag") - private String etag; - - public ReadAccountNetworkPolicyRequest setEtag(String etag) { - this.etag = etag; - return this; - } - - public String getEtag() { - return etag; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ReadAccountNetworkPolicyRequest that = (ReadAccountNetworkPolicyRequest) o; - return Objects.equals(etag, that.etag); - } - - @Override - public int hashCode() { - return Objects.hash(etag); - } - - @Override - public String toString() { - return new ToStringer(ReadAccountNetworkPolicyRequest.class).add("etag", etag).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountNetworkPolicyRequest.java deleted file mode 100755 index e1feb9229..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountNetworkPolicyRequest.java +++ /dev/null @@ -1,59 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -/** Update Account Network Policy */ -@Generated -public class UpdateAccountNetworkPolicyRequest { - /** This should always be set to true for Settings RPCs. Added for AIP compliance. */ - @JsonProperty("allow_missing") - private Boolean allowMissing; - - /** */ - @JsonProperty("setting") - private AccountNetworkPolicyMessage setting; - - public UpdateAccountNetworkPolicyRequest setAllowMissing(Boolean allowMissing) { - this.allowMissing = allowMissing; - return this; - } - - public Boolean getAllowMissing() { - return allowMissing; - } - - public UpdateAccountNetworkPolicyRequest setSetting(AccountNetworkPolicyMessage setting) { - this.setting = setting; - return this; - } - - public AccountNetworkPolicyMessage getSetting() { - return setting; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - UpdateAccountNetworkPolicyRequest that = (UpdateAccountNetworkPolicyRequest) o; - return Objects.equals(allowMissing, that.allowMissing) && Objects.equals(setting, that.setting); - } - - @Override - public int hashCode() { - return Objects.hash(allowMissing, setting); - } - - @Override - public String toString() { - return new ToStringer(UpdateAccountNetworkPolicyRequest.class) - .add("allowMissing", allowMissing) - .add("setting", setting) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java index 8e0f647a1..3b9d4cbb0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** Name of the channel */ @Generated public enum ChannelName { CHANNEL_NAME_CURRENT, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java index 49737db93..ee39ba51d 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java @@ -25,10 +25,11 @@ public class Import { * *
<p>
- `AUTO`: The item is imported depending on an analysis of the item's extension and the * header content provided in the request. If the item is imported as a notebook, then the item's - * extension is automatically removed. - `SOURCE`: The notebook is imported as source code. - - * `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a - * Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive format. - * Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format. + * extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source + * code. - `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported + * as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive + * format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown + * format. */ @JsonProperty("format") private ImportFormat format; @@ -46,7 +47,7 @@ public class Import { /** * The absolute path of the object or directory. Importing a directory is only supported for the - * `DBC` format. + * `DBC` and `SOURCE` formats. */ @JsonProperty("path") private String path; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java index b71f3db60..0448adcf4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java @@ -11,10 +11,10 @@ * *
<p>
- `AUTO`: The item is imported depending on an analysis of the item's extension and the header * content provided in the request. If the item is imported as a notebook, then the item's extension - * is automatically removed. - `SOURCE`: The notebook is imported as source code. - `HTML`: The - * notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a Jupyter/IPython - * Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for - * directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format. + * is automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - + * `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a + * Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive format. + * Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format. */ @Generated public enum ImportFormat { @@ -24,5 +24,5 @@ public enum ImportFormat { HTML, // The notebook is imported as an HTML file. JUPYTER, // The notebook is imported as a Jupyter/IPython Notebook file. R_MARKDOWN, // The notebook is imported from R Markdown format. - SOURCE, // The notebook is imported as source code. + SOURCE, // The notebook or directory is imported as source code. } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java index 161c75621..8214fca08 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java @@ -123,7 +123,9 @@ public void importContent(String path) { * *
<p>
Imports a workspace object (for example, a notebook or file) or the contents of an entire * directory. If `path` already exists and `overwrite` is set to `false`, this call returns an - * error `RESOURCE_ALREADY_EXISTS`. One can only use `DBC` format to import a directory. + * error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or + * the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you + * must set the `language` field. */ public void importContent(Import request) { impl.importContent(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java index 2f679a53c..e6e7be354 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java @@ -69,7 +69,9 @@ WorkspaceObjectPermissions getPermissions( * *
<p>
Imports a workspace object (for example, a notebook or file) or the contents of an entire * directory. If `path` already exists and `overwrite` is set to `false`, this call returns an - * error `RESOURCE_ALREADY_EXISTS`. One can only use `DBC` format to import a directory. + * error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or + * the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you + * must set the `language` field. */ void importContent(Import importContent);
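Usage note (editor's sketch, not generated code): the updated `importContent` docs above distinguish single-file and directory imports with `SOURCE`. A minimal illustration against the SDK types touched in this patch, assuming a `WorkspaceClient` configured from the environment; the paths are placeholders, and the directory payload packaging is not specified by this diff:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.workspace.Import;
import com.databricks.sdk.service.workspace.ImportFormat;
import com.databricks.sdk.service.workspace.Language;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class SourceImportSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // credentials resolved from the environment

    // Workspace import content is base64-encoded.
    String notebook =
        Base64.getEncoder().encodeToString("print(1)".getBytes(StandardCharsets.UTF_8));

    // Single file as SOURCE: the `language` field must be set.
    w.workspace()
        .importContent(
            new Import()
                .setPath("/Users/someone@example.com/single-notebook") // placeholder
                .setFormat(ImportFormat.SOURCE)
                .setLanguage(Language.PYTHON)
                .setContent(notebook));

    // Directory as SOURCE: leave `language` unset; `DBC` remains supported as before.
    w.workspace()
        .importContent(
            new Import()
                .setPath("/Users/someone@example.com/my-project") // placeholder
                .setFormat(ImportFormat.SOURCE)
                .setContent(notebook)); // stand-in payload for illustration only
  }
}
```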
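Similarly for the settings change earlier in this patch: `list()` now returns the dedicated `ListIpAccessListResponse` (plural `ip_access_lists` field), while `GetIpAccessListResponse` now carries a single `ip_access_list`. A sketch written against the service-level signatures shown in this diff; how the concrete `IpAccessListsService` instance is obtained is left abstract here:

```java
import com.databricks.sdk.service.settings.IpAccessListInfo;
import com.databricks.sdk.service.settings.IpAccessListsService;
import com.databricks.sdk.service.settings.ListIpAccessListResponse;

public class ListIpAccessListsSketch {
  // Prints the label of every IP access list in the workspace.
  static void printLabels(IpAccessListsService service) {
    // Before this change, list() returned GetIpAccessListResponse.
    ListIpAccessListResponse response = service.list();
    if (response.getIpAccessLists() != null) {
      for (IpAccessListInfo info : response.getIpAccessLists()) {
        System.out.println(info.getLabel());
      }
    }
  }
}
```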