diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 7d4ee2a67..e36ae5312 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-bcbf6e851e3d82fd910940910dd31c10c059746c
\ No newline at end of file
+493a76554afd3afdd15dc858773d01643f80352a
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 552ff9d42..e37601b60 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -15,6 +15,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDe
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeleteBudgetRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java linguist-generated=true
@@ -143,6 +144,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountM
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetArtifactAllowlistRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true
@@ -262,6 +264,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStora
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTableRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResult.java linguist-generated=true
@@ -272,8 +275,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBinding.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingBindingType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AddInstanceProfile.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AutoScale.java linguist-generated=true
@@ -602,6 +608,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTask.
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTaskOp.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobUiState.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java linguist-generated=true
@@ -626,6 +633,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessContr
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCluster.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompute.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettings.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameter.java linguist-generated=true
@@ -637,6 +646,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissions
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsUiState.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSource.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSourceDirtyState.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java linguist-generated=true
@@ -1105,10 +1115,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/TrafficConf
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsService.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyAPI.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyImpl.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyMessage.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java linguist-generated=true
@@ -1123,8 +1129,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Credential
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyRequest.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java linguist-generated=true
@@ -1146,6 +1150,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessLi
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListType.java linguist-generated=true
@@ -1154,7 +1159,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalCo
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessageEnum.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSetting.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfo.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadAccountNetworkPolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadPersonalComputeSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java linguist-generated=true
@@ -1178,7 +1182,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAccountNetworkPolicyRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
index 0efa4e933..63f4bc6bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
@@ -53,8 +53,6 @@ import com.databricks.sdk.service.provisioning.WorkspacesService;
 import com.databricks.sdk.service.settings.AccountIpAccessListsAPI;
 import com.databricks.sdk.service.settings.AccountIpAccessListsService;
-import com.databricks.sdk.service.settings.AccountNetworkPolicyAPI;
-import com.databricks.sdk.service.settings.AccountNetworkPolicyService;
 import com.databricks.sdk.service.settings.AccountSettingsAPI;
 import com.databricks.sdk.service.settings.AccountSettingsService;
 import com.databricks.sdk.support.Generated;
@@ -76,7 +74,6 @@ public class AccountClient {
   private LogDeliveryAPI logDeliveryAPI;
   private AccountMetastoreAssignmentsAPI metastoreAssignmentsAPI;
   private AccountMetastoresAPI metastoresAPI;
-  private AccountNetworkPolicyAPI networkPolicyAPI;
   private NetworksAPI networksAPI;
   private OAuthEnrollmentAPI oAuthEnrollmentAPI;
   private OAuthPublishedAppsAPI oAuthPublishedAppsAPI;
@@ -111,7 +108,6 @@ public AccountClient(DatabricksConfig config) {
     logDeliveryAPI = new LogDeliveryAPI(apiClient);
     metastoreAssignmentsAPI = new AccountMetastoreAssignmentsAPI(apiClient);
     metastoresAPI = new AccountMetastoresAPI(apiClient);
-    networkPolicyAPI = new AccountNetworkPolicyAPI(apiClient);
     networksAPI = new NetworksAPI(apiClient);
     oAuthEnrollmentAPI = new OAuthEnrollmentAPI(apiClient);
     oAuthPublishedAppsAPI = new OAuthPublishedAppsAPI(apiClient);
@@ -309,19 +305,6 @@ public AccountMetastoresAPI metastores() {
     return metastoresAPI;
   }

-  /**
-   * Network policy is a set of rules that defines what can be accessed from your Databricks
-   * network. E.g.: You can choose to block your SQL UDF to access internet from your Databricks
-   * serverless clusters.
-   *
-   * There is only one instance of this setting per account. Since this setting has a default
-   * value, this setting is present on all accounts even though it's never set on a given account.
-   * Deletion reverts the value of the setting back to the default value.
-   */
-  public AccountNetworkPolicyAPI networkPolicy() {
-    return networkPolicyAPI;
-  }
-
   /**
    * These APIs manage network configurations for customer-managed VPCs (optional). Its ID is used
    * when creating a new workspace if you use customer-managed VPCs.
@@ -534,12 +517,6 @@ public AccountClient withMetastoresImpl(AccountMetastoresService accountMetastor
     return this;
   }

-  /** Override AccountNetworkPolicyAPI with mock */
-  public AccountClient withNetworkPolicyImpl(AccountNetworkPolicyService accountNetworkPolicy) {
-    networkPolicyAPI = new AccountNetworkPolicyAPI(accountNetworkPolicy);
-    return this;
-  }
-
   /** Override NetworksAPI with mock */
   public AccountClient withNetworksImpl(NetworksService networks) {
     networksAPI = new NetworksAPI(networks);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index d8edd7c19..79cee2a8a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -1212,12 +1212,23 @@ public WorkspaceAPI workspace() {
   }

   /**
-   * A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ catalog can
-   * be accessed from any workspace, while an __ISOLATED__ catalog can only be access from a
-   * configured list of workspaces.
+   * A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable
+   * can be accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a
+   * configured list of workspaces. This API allows you to configure (bind) securables to
+   * workspaces.
    *
-   * A catalog's workspace bindings can be configured by a metastore admin or the owner of the
-   * catalog.
+   * NOTE: The __isolation_mode__ is configured for the securable itself (using its Update
+   * method) and the workspace bindings are only consulted when the securable's __isolation_mode__
+   * is set to __ISOLATED__.
+   *
+   * A securable's workspace bindings can be configured by a metastore admin or the owner of the
+   * securable.
+   *
+   * The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated.
+   * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name})
+   * which introduces the ability to bind a securable in READ_ONLY mode (catalogs only).
+   *
+   * Securables that support binding: - catalog
    */
   public WorkspaceBindingsAPI workspaceBindings() {
     return workspaceBindingsAPI;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java
index 0357d7223..c5c8e08ab 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java
@@ -26,8 +26,8 @@ public BillableUsageAPI(BillableUsageService mock) {
     impl = mock;
   }

-  public void download(String startMonth, String endMonth) {
-    download(new DownloadRequest().setStartMonth(startMonth).setEndMonth(endMonth));
+  public DownloadResponse download(String startMonth, String endMonth) {
+    return download(new DownloadRequest().setStartMonth(startMonth).setEndMonth(endMonth));
   }

   /**
@@ -45,8 +45,8 @@ public void download(String startMonth, String endMonth) {
    * [CSV file schema]:
* https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
*/
- public void download(DownloadRequest request) {
- impl.download(request);
+ public DownloadResponse download(DownloadRequest request) {
+ return impl.download(request);
}
public BillableUsageService impl() {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java
index bf243b185..1330ece71 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
@@ -16,10 +17,12 @@ public BillableUsageImpl(ApiClient apiClient) {
}
@Override
- public void download(DownloadRequest request) {
+ public DownloadResponse download(DownloadRequest request) {
String path =
String.format("/api/2.0/accounts/%s/usage/download", apiClient.configuredAccountID());
    Map<String, String> headers = new HashMap<>();
 * [CSV file schema]:
* https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
*/
- void download(DownloadRequest downloadRequest);
+ DownloadResponse download(DownloadRequest downloadRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java
new file mode 100755
index 000000000..3bd0e773e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DownloadResponse.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.io.InputStream;
+import java.util.Objects;
+
+@Generated
+public class DownloadResponse {
+ /** */
+ private InputStream contents;
+
+ public DownloadResponse setContents(InputStream contents) {
+ this.contents = contents;
+ return this;
+ }
+
+ public InputStream getContents() {
+ return contents;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DownloadResponse that = (DownloadResponse) o;
+ return Objects.equals(contents, that.contents);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(contents);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DownloadResponse.class).add("contents", contents).toString();
+ }
+}
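A minimal usage sketch for the new return type above, assuming account-level auth is configured via the usual DATABRICKS_* environment variables; the month arguments are illustrative:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.billing.DownloadResponse;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class UsageDownloadExample {
  public static void main(String[] args) throws Exception {
    // Reads DATABRICKS_HOST, DATABRICKS_ACCOUNT_ID, and credentials from the environment.
    AccountClient account = new AccountClient();

    // download(...) now returns the usage CSV as a stream instead of returning void.
    DownloadResponse response = account.billableUsage().download("2023-01", "2023-02");
    try (BufferedReader reader =
        new BufferedReader(
            new InputStreamReader(response.getContents(), StandardCharsets.UTF_8))) {
      System.out.println(reader.readLine()); // header row of the usage CSV
    }
  }
}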
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
index d80ec6a29..c7fc0c681 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsAPI.java
@@ -42,8 +42,11 @@ public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential requ
return impl.create(request);
}
- public void delete(String metastoreId, String name) {
- delete(new DeleteAccountStorageCredentialRequest().setMetastoreId(metastoreId).setName(name));
+ public void delete(String metastoreId, String storageCredentialName) {
+ delete(
+ new DeleteAccountStorageCredentialRequest()
+ .setMetastoreId(metastoreId)
+ .setStorageCredentialName(storageCredentialName));
}
/**
@@ -56,8 +59,11 @@ public void delete(DeleteAccountStorageCredentialRequest request) {
impl.delete(request);
}
- public AccountsStorageCredentialInfo get(String metastoreId, String name) {
- return get(new GetAccountStorageCredentialRequest().setMetastoreId(metastoreId).setName(name));
+ public AccountsStorageCredentialInfo get(String metastoreId, String storageCredentialName) {
+ return get(
+ new GetAccountStorageCredentialRequest()
+ .setMetastoreId(metastoreId)
+ .setStorageCredentialName(storageCredentialName));
}
/**
@@ -83,8 +89,11 @@ public Iterable
- * Creates a new storage credential. The request object is specific to the cloud:
- *
- * * **AwsIamRole** for AWS credentials. * **AzureServicePrincipal** for Azure credentials. *
- * **AzureManagedIdentity** for Azure managed credentials. * **DatabricksGcpServiceAccount** for
- * GCP managed credentials.
- *
- * The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on
- * the metastore.
+ * Creates a new storage credential.
*/
public StorageCredentialInfo create(CreateStorageCredential request) {
return impl.create(request);
@@ -102,9 +95,7 @@ public StorageCredentialInfo update(String name) {
/**
* Update a credential.
*
- * Updates a storage credential on the metastore. The caller must be the owner of the storage
- * credential or a metastore admin. If the caller is a metastore admin, only the __owner__
- * credential can be changed.
+ * Updates a storage credential on the metastore.
*/
public StorageCredentialInfo update(UpdateStorageCredential request) {
return impl.update(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
index 4910a8198..39f43a059 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java
@@ -25,14 +25,7 @@ public interface StorageCredentialsService {
/**
* Create a storage credential.
*
- * Creates a new storage credential. The request object is specific to the cloud:
- *
- * * **AwsIamRole** for AWS credentials. * **AzureServicePrincipal** for Azure credentials. *
- * **AzureManagedIdentity** for Azure managed credentials. * **DatabricksGcpServiceAccount** for
- * GCP managed credentials.
- *
- * The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on
- * the metastore.
+ * Creates a new storage credential.
*/
StorageCredentialInfo create(CreateStorageCredential createStorageCredential);
@@ -65,9 +58,7 @@ public interface StorageCredentialsService {
/**
* Update a credential.
*
- * Updates a storage credential on the metastore. The caller must be the owner of the storage
- * credential or a metastore admin. If the caller is a metastore admin, only the __owner__
- * credential can be changed.
+ * Updates a storage credential on the metastore.
*/
StorageCredentialInfo update(UpdateStorageCredential updateStorageCredential);
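The removed doc text enumerated the cloud-specific request objects (AwsIamRole, AzureServicePrincipal, AzureManagedIdentity, DatabricksGcpServiceAccount). A hedged sketch of the AWS variant; the credential name and role ARN are placeholders:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.AwsIamRole;
import com.databricks.sdk.service.catalog.CreateStorageCredential;
import com.databricks.sdk.service.catalog.StorageCredentialInfo;

public class StorageCredentialExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // One cloud-specific object per credential; AwsIamRole shown here.
    StorageCredentialInfo info =
        w.storageCredentials()
            .create(
                new CreateStorageCredential()
                    .setName("my_credential")
                    .setAwsIamRole(
                        new AwsIamRole().setRoleArn("arn:aws:iam::123456789012:role/my-uc-role")));
    System.out.println(info.getId());
  }
}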
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java
index b29163ece..06f0edb2c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java
@@ -21,6 +21,10 @@ public class UpdateConnection {
@JsonProperty("options")
  private Map<String, String> options;
- * A catalog's workspace bindings can be configured by a metastore admin or the owner of the
- * catalog.
+ * NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method)
+ * and the workspace bindings are only consulted when the securable's __isolation_mode__ is set to
+ * __ISOLATED__.
+ *
+ * A securable's workspace bindings can be configured by a metastore admin or the owner of the
+ * securable.
+ *
+ * The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated.
+ * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which
+ * introduces the ability to bind a securable in READ_ONLY mode (catalogs only).
+ *
+ * Securables that support binding: - catalog
*/
@Generated
public class WorkspaceBindingsAPI {
@@ -44,6 +54,21 @@ public CurrentWorkspaceBindings get(GetWorkspaceBindingRequest request) {
return impl.get(request);
}
+ public WorkspaceBindingsResponse getBindings(String securableType, String securableName) {
+ return getBindings(
+ new GetBindingsRequest().setSecurableType(securableType).setSecurableName(securableName));
+ }
+
+ /**
+ * Get securable workspace bindings.
+ *
+ * Gets workspace bindings of the securable. The caller must be a metastore admin or an owner
+ * of the securable.
+ */
+ public WorkspaceBindingsResponse getBindings(GetBindingsRequest request) {
+ return impl.getBindings(request);
+ }
+
public CurrentWorkspaceBindings update(String name) {
return update(new UpdateWorkspaceBindings().setName(name));
}
@@ -58,6 +83,23 @@ public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) {
return impl.update(request);
}
+ public WorkspaceBindingsResponse updateBindings(String securableType, String securableName) {
+ return updateBindings(
+ new UpdateWorkspaceBindingsParameters()
+ .setSecurableType(securableType)
+ .setSecurableName(securableName));
+ }
+
+ /**
+ * Update securable workspace bindings.
+ *
+ * Updates workspace bindings of the securable. The caller must be a metastore admin or an
+ * owner of the securable.
+ */
+ public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameters request) {
+ return impl.updateBindings(request);
+ }
+
public WorkspaceBindingsService impl() {
return impl;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java
index 1c0721e03..8e5f5114b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java
@@ -24,6 +24,17 @@ public CurrentWorkspaceBindings get(GetWorkspaceBindingRequest request) {
return apiClient.GET(path, request, CurrentWorkspaceBindings.class, headers);
}
+ @Override
+ public WorkspaceBindingsResponse getBindings(GetBindingsRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/unity-catalog/bindings/%s/%s",
+ request.getSecurableType(), request.getSecurableName());
+    Map<String, String> headers = new HashMap<>();
- * A catalog's workspace bindings can be configured by a metastore admin or the owner of the
- * catalog.
+ * NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method)
+ * and the workspace bindings are only consulted when the securable's __isolation_mode__ is set to
+ * __ISOLATED__.
+ *
+ * A securable's workspace bindings can be configured by a metastore admin or the owner of the
+ * securable.
+ *
+ * The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated.
+ * Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which
+ * introduces the ability to bind a securable in READ_ONLY mode (catalogs only).
+ *
+ * Securables that support binding: - catalog
*
* This is the high-level interface, that contains generated methods.
*
@@ -25,6 +35,14 @@ public interface WorkspaceBindingsService {
*/
CurrentWorkspaceBindings get(GetWorkspaceBindingRequest getWorkspaceBindingRequest);
+ /**
+ * Get securable workspace bindings.
+ *
+ * Gets workspace bindings of the securable. The caller must be a metastore admin or an owner
+ * of the securable.
+ */
+ WorkspaceBindingsResponse getBindings(GetBindingsRequest getBindingsRequest);
+
/**
* Update catalog workspace bindings.
*
@@ -32,4 +50,13 @@ public interface WorkspaceBindingsService {
* of the catalog.
*/
CurrentWorkspaceBindings update(UpdateWorkspaceBindings updateWorkspaceBindings);
+
+ /**
+ * Update securable workspace bindings.
+ *
+ * Updates workspace bindings of the securable. The caller must be a metastore admin or an
+ * owner of the securable.
+ */
+ WorkspaceBindingsResponse updateBindings(
+ UpdateWorkspaceBindingsParameters updateWorkspaceBindingsParameters);
}
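A sketch of the securable-scoped calls added above. getBindings and updateBindings appear in this diff; the WorkspaceBinding setters and the enum constant follow the generated naming pattern but are not shown here, so treat them as assumptions. The catalog name and workspace ID are placeholders:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.UpdateWorkspaceBindingsParameters;
import com.databricks.sdk.service.catalog.WorkspaceBinding;
import com.databricks.sdk.service.catalog.WorkspaceBindingBindingType;
import com.databricks.sdk.service.catalog.WorkspaceBindingsResponse;
import java.util.Collections;

public class BindingsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // New path: GET /api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}
    WorkspaceBindingsResponse current = w.workspaceBindings().getBindings("catalog", "main");
    System.out.println(current);

    // Bind a workspace to the catalog in READ_ONLY mode (catalogs only);
    // setAdd/setWorkspaceId/setBindingType are assumed generated setters.
    w.workspaceBindings()
        .updateBindings(
            new UpdateWorkspaceBindingsParameters()
                .setSecurableType("catalog")
                .setSecurableName("main")
                .setAdd(
                    Collections.singletonList(
                        new WorkspaceBinding()
                            .setWorkspaceId(1234567890L)
                            .setBindingType(WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY))));
  }
}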
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java
index 666efc7aa..5ca111cc3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java
@@ -293,6 +293,15 @@ public class ClusterDetails {
@JsonProperty("spark_version")
private String sparkVersion;
+ /**
+ * `spec` contains a snapshot of the field values that were used to create or edit this cluster.
+ * The contents of `spec` can be used in the body of a create cluster request. This field might
+ * not be populated for older clusters. Note: not included in the response of the ListClusters
+ * API.
+ */
+ @JsonProperty("spec")
+ private CreateCluster spec;
+
/**
* SSH public key contents that will be added to each Spark node in this cluster. The
* corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up
@@ -667,6 +676,15 @@ public String getSparkVersion() {
return sparkVersion;
}
+ public ClusterDetails setSpec(CreateCluster spec) {
+ this.spec = spec;
+ return this;
+ }
+
+ public CreateCluster getSpec() {
+ return spec;
+ }
+
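Because `spec` mirrors a create request, it can seed a clone of an existing cluster. A minimal sketch; the cluster ID is a placeholder, and as the field doc above notes, `spec` may be null for older clusters and is not returned by ListClusters:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.ClusterDetails;
import com.databricks.sdk.service.compute.CreateCluster;

public class CloneClusterExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    ClusterDetails details = w.clusters().get("1234-567890-abcde123");

    CreateCluster spec = details.getSpec();
    if (spec != null) {
      spec.setClusterName(spec.getClusterName() + " (clone)");
      w.clusters().create(spec); // returns a Wait; call .get() to block until RUNNING
    }
  }
}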
  public ClusterDetails setSshPublicKeys(Collection<String> sshPublicKeys) {
- * This value cannot exceed 1000\. Setting this value to 0 causes all new runs to be skipped.
- * The default behavior is to allow only 1 concurrent run.
+ * This value cannot exceed 1000\. Setting this value to `0` causes all new runs to be skipped.
*/
@JsonProperty("max_concurrent_runs")
private Long maxConcurrentRuns;
@@ -134,10 +137,7 @@ public class CreateJob {
@JsonProperty("tasks")
  private Collection<Task> tasks;
+  /**
+   * State of the job in UI.
+   *
+   * * `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in
+ * an editable state and can be modified.
*/
+ @JsonProperty("ui_state")
+ private CreateJobUiState uiState;
+
+ /** A collection of system notification IDs to notify when runs of this job begin or complete. */
@JsonProperty("webhook_notifications")
private WebhookNotifications webhookNotifications;
@@ -184,6 +190,15 @@ public Continuous getContinuous() {
return continuous;
}
+ public CreateJob setDeployment(JobDeployment deployment) {
+ this.deployment = deployment;
+ return this;
+ }
+
+ public JobDeployment getDeployment() {
+ return deployment;
+ }
+
public CreateJob setEmailNotifications(JobEmailNotifications emailNotifications) {
this.emailNotifications = emailNotifications;
return this;
@@ -328,6 +343,15 @@ public TriggerSettings getTrigger() {
return trigger;
}
+ public CreateJob setUiState(CreateJobUiState uiState) {
+ this.uiState = uiState;
+ return this;
+ }
+
+ public CreateJobUiState getUiState() {
+ return uiState;
+ }
+
public CreateJob setWebhookNotifications(WebhookNotifications webhookNotifications) {
this.webhookNotifications = webhookNotifications;
return this;
@@ -345,6 +369,7 @@ public boolean equals(Object o) {
return Objects.equals(accessControlList, that.accessControlList)
&& Objects.equals(compute, that.compute)
&& Objects.equals(continuous, that.continuous)
+ && Objects.equals(deployment, that.deployment)
&& Objects.equals(emailNotifications, that.emailNotifications)
&& Objects.equals(format, that.format)
&& Objects.equals(gitSource, that.gitSource)
@@ -361,6 +386,7 @@ public boolean equals(Object o) {
&& Objects.equals(tasks, that.tasks)
&& Objects.equals(timeoutSeconds, that.timeoutSeconds)
&& Objects.equals(trigger, that.trigger)
+ && Objects.equals(uiState, that.uiState)
&& Objects.equals(webhookNotifications, that.webhookNotifications);
}
@@ -370,6 +396,7 @@ public int hashCode() {
accessControlList,
compute,
continuous,
+ deployment,
emailNotifications,
format,
gitSource,
@@ -386,6 +413,7 @@ public int hashCode() {
tasks,
timeoutSeconds,
trigger,
+ uiState,
webhookNotifications);
}
@@ -395,6 +423,7 @@ public String toString() {
.add("accessControlList", accessControlList)
.add("compute", compute)
.add("continuous", continuous)
+ .add("deployment", deployment)
.add("emailNotifications", emailNotifications)
.add("format", format)
.add("gitSource", gitSource)
@@ -411,6 +440,7 @@ public String toString() {
.add("tasks", tasks)
.add("timeoutSeconds", timeoutSeconds)
.add("trigger", trigger)
+ .add("uiState", uiState)
.add("webhookNotifications", webhookNotifications)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobUiState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobUiState.java
new file mode 100755
index 000000000..3a1ad123d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobUiState.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * State of the job in UI.
+ *
+ * * `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in
+ * an editable state and can be modified.
+ */
+@Generated
+public enum CreateJobUiState {
+ EDITABLE, // The job is in an editable state and can be modified.
+ LOCKED, // The job is in a locked state and cannot be modified.
+}
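A short sketch of setting the new field at job creation; the job name is a placeholder:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.jobs.CreateJob;
import com.databricks.sdk.service.jobs.CreateJobUiState;

public class LockedJobExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Mark the job read-only in the Jobs UI.
    long jobId =
        w.jobs()
            .create(new CreateJob().setName("locked-job").setUiState(CreateJobUiState.LOCKED))
            .getJobId();
    System.out.println(jobId);
  }
}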
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java
new file mode 100755
index 000000000..1487a8cb2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class JobDeployment {
+ /**
+ * The kind of deployment that manages the job.
+ *
+ * * `BUNDLE`: The job is managed by Databricks Asset Bundle.
+ */
+ @JsonProperty("kind")
+ private JobDeploymentKind kind;
+
+ /** Path of the file that contains deployment metadata. */
+ @JsonProperty("metadata_file_path")
+ private String metadataFilePath;
+
+ public JobDeployment setKind(JobDeploymentKind kind) {
+ this.kind = kind;
+ return this;
+ }
+
+ public JobDeploymentKind getKind() {
+ return kind;
+ }
+
+ public JobDeployment setMetadataFilePath(String metadataFilePath) {
+ this.metadataFilePath = metadataFilePath;
+ return this;
+ }
+
+ public String getMetadataFilePath() {
+ return metadataFilePath;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ JobDeployment that = (JobDeployment) o;
+ return Objects.equals(kind, that.kind)
+ && Objects.equals(metadataFilePath, that.metadataFilePath);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(kind, metadataFilePath);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(JobDeployment.class)
+ .add("kind", kind)
+ .add("metadataFilePath", metadataFilePath)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java
new file mode 100755
index 000000000..52683b09a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * The kind of deployment that manages the job.
+ *
+ * * `BUNDLE`: The job is managed by Databricks Asset Bundle.
+ */
+@Generated
+public enum JobDeploymentKind {
+ BUNDLE, // The job is managed by Databricks Asset Bundle.
+}
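A sketch of tagging job settings as bundle-managed using the two new types above; the metadata file path is a hypothetical example:

import com.databricks.sdk.service.jobs.JobDeployment;
import com.databricks.sdk.service.jobs.JobDeploymentKind;
import com.databricks.sdk.service.jobs.JobSettings;

public class BundleDeploymentExample {
  public static void main(String[] args) {
    // Record that this job is managed by a Databricks Asset Bundle.
    JobSettings settings =
        new JobSettings()
            .setName("bundle-managed-job")
            .setDeployment(
                new JobDeployment()
                    .setKind(JobDeploymentKind.BUNDLE)
                    .setMetadataFilePath("/Workspace/.bundle/my_bundle/default/metadata.json"));
    System.out.println(settings);
  }
}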
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
index e968aea91..f916b5423 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
@@ -22,9 +22,13 @@ public class JobSettings {
@JsonProperty("continuous")
private Continuous continuous;
+ /** Deployment information for jobs managed by external sources. */
+ @JsonProperty("deployment")
+ private JobDeployment deployment;
+
/**
* An optional set of email addresses that is notified when runs of this job begin or complete as
- * well as when this job is deleted. The default behavior is to not send any emails.
+ * well as when this job is deleted.
*/
@JsonProperty("email_notifications")
private JobEmailNotifications emailNotifications;
@@ -75,8 +79,7 @@ public class JobSettings {
* active runs. However, from then on, new runs are skipped unless there are fewer than 3 active
* runs.
*
- * This value cannot exceed 1000\. Setting this value to 0 causes all new runs to be skipped.
- * The default behavior is to allow only 1 concurrent run.
+ * This value cannot exceed 1000\. Setting this value to `0` causes all new runs to be skipped.
*/
@JsonProperty("max_concurrent_runs")
private Long maxConcurrentRuns;
@@ -130,10 +133,7 @@ public class JobSettings {
@JsonProperty("tasks")
  private Collection<Task> tasks;
+  /**
+   * State of the job in UI.
+   *
+   * * `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in
+ * an editable state and can be modified.
*/
+ @JsonProperty("ui_state")
+ private JobSettingsUiState uiState;
+
+ /** A collection of system notification IDs to notify when runs of this job begin or complete. */
@JsonProperty("webhook_notifications")
private WebhookNotifications webhookNotifications;
@@ -170,6 +176,15 @@ public Continuous getContinuous() {
return continuous;
}
+ public JobSettings setDeployment(JobDeployment deployment) {
+ this.deployment = deployment;
+ return this;
+ }
+
+ public JobDeployment getDeployment() {
+ return deployment;
+ }
+
public JobSettings setEmailNotifications(JobEmailNotifications emailNotifications) {
this.emailNotifications = emailNotifications;
return this;
@@ -314,6 +329,15 @@ public TriggerSettings getTrigger() {
return trigger;
}
+ public JobSettings setUiState(JobSettingsUiState uiState) {
+ this.uiState = uiState;
+ return this;
+ }
+
+ public JobSettingsUiState getUiState() {
+ return uiState;
+ }
+
public JobSettings setWebhookNotifications(WebhookNotifications webhookNotifications) {
this.webhookNotifications = webhookNotifications;
return this;
@@ -330,6 +354,7 @@ public boolean equals(Object o) {
JobSettings that = (JobSettings) o;
return Objects.equals(compute, that.compute)
&& Objects.equals(continuous, that.continuous)
+ && Objects.equals(deployment, that.deployment)
&& Objects.equals(emailNotifications, that.emailNotifications)
&& Objects.equals(format, that.format)
&& Objects.equals(gitSource, that.gitSource)
@@ -346,6 +371,7 @@ public boolean equals(Object o) {
&& Objects.equals(tasks, that.tasks)
&& Objects.equals(timeoutSeconds, that.timeoutSeconds)
&& Objects.equals(trigger, that.trigger)
+ && Objects.equals(uiState, that.uiState)
&& Objects.equals(webhookNotifications, that.webhookNotifications);
}
@@ -354,6 +380,7 @@ public int hashCode() {
return Objects.hash(
compute,
continuous,
+ deployment,
emailNotifications,
format,
gitSource,
@@ -370,6 +397,7 @@ public int hashCode() {
tasks,
timeoutSeconds,
trigger,
+ uiState,
webhookNotifications);
}
@@ -378,6 +406,7 @@ public String toString() {
return new ToStringer(JobSettings.class)
.add("compute", compute)
.add("continuous", continuous)
+ .add("deployment", deployment)
.add("emailNotifications", emailNotifications)
.add("format", format)
.add("gitSource", gitSource)
@@ -394,6 +423,7 @@ public String toString() {
.add("tasks", tasks)
.add("timeoutSeconds", timeoutSeconds)
.add("trigger", trigger)
+ .add("uiState", uiState)
.add("webhookNotifications", webhookNotifications)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsUiState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsUiState.java
new file mode 100755
index 000000000..3aa479c9f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsUiState.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * State of the job in UI.
+ *
+ * * `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in
+ * an editable state and can be modified.
+ */
+@Generated
+public enum JobSettingsUiState {
+ EDITABLE, // The job is in an editable state and can be modified.
+ LOCKED, // The job is in a locked state and cannot be modified.
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
index db119ba68..3d3d7f5d0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
@@ -306,10 +306,10 @@ public void reset(long jobId, JobSettings newSettings) {
}
/**
- * Overwrites all settings for a job.
+ * Overwrite all settings for a job.
*
- * Overwrites all the settings for a specific job. Use the Update endpoint to update job
- * settings partially.
+ * Overwrite all settings for the given job. Use the Update endpoint to update job settings
+ * partially.
*/
public void reset(ResetJob request) {
impl.reset(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
index 748e31ec4..6b14958ec 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
@@ -139,10 +139,10 @@ GetJobPermissionLevelsResponse getPermissionLevels(
RepairRunResponse repairRun(RepairRun repairRun);
/**
- * Overwrites all settings for a job.
+ * Overwrite all settings for a job.
*
- * Overwrites all the settings for a specific job. Use the Update endpoint to update job
- * settings partially.
+ * Overwrite all settings for the given job. Use the Update endpoint to update job settings
+ * partially.
*/
void reset(ResetJob resetJob);
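A sketch contrasting the two calls described above; the job ID is a placeholder, and UpdateJob is assumed to follow the generated request pattern (it is not shown in this diff):

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.jobs.JobSettings;
import com.databricks.sdk.service.jobs.ResetJob;
import com.databricks.sdk.service.jobs.UpdateJob;

public class ResetVsUpdateExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    long jobId = 123L; // placeholder

    // reset() replaces the job's settings wholesale; a real call would carry the
    // complete desired settings, not just a name.
    w.jobs().reset(new ResetJob().setJobId(jobId).setNewSettings(new JobSettings().setName("nightly-etl")));

    // update() patches only the fields provided and leaves the rest untouched.
    w.jobs()
        .update(new UpdateJob().setJobId(jobId).setNewSettings(new JobSettings().setTimeoutSeconds(3600L)));
  }
}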
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java
index f2f522345..05c16ca6b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java
@@ -9,10 +9,6 @@
@Generated
public class RunOutput {
- /** The output of a condition task, if available. */
- @JsonProperty("condition_task")
- private Object conditionTask;
-
/** The output of a dbt task, if available. */
@JsonProperty("dbt_output")
private DbtOutput dbtOutput;
@@ -66,15 +62,6 @@ public class RunOutput {
@JsonProperty("sql_output")
private SqlOutput sqlOutput;
- public RunOutput setConditionTask(Object conditionTask) {
- this.conditionTask = conditionTask;
- return this;
- }
-
- public Object getConditionTask() {
- return conditionTask;
- }
-
public RunOutput setDbtOutput(DbtOutput dbtOutput) {
this.dbtOutput = dbtOutput;
return this;
@@ -161,8 +148,7 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RunOutput that = (RunOutput) o;
- return Objects.equals(conditionTask, that.conditionTask)
- && Objects.equals(dbtOutput, that.dbtOutput)
+ return Objects.equals(dbtOutput, that.dbtOutput)
&& Objects.equals(error, that.error)
&& Objects.equals(errorTrace, that.errorTrace)
&& Objects.equals(logs, that.logs)
@@ -176,7 +162,6 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- conditionTask,
dbtOutput,
error,
errorTrace,
@@ -191,7 +176,6 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(RunOutput.class)
- .add("conditionTask", conditionTask)
.add("dbtOutput", dbtOutput)
.add("error", error)
.add("errorTrace", errorTrace)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java
index 2fd0664bb..5b547374f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java
@@ -14,10 +14,7 @@ public class SubmitRun {
@JsonProperty("access_control_list")
  private Collection<AccessControlRequest> accessControlList;
 * `ALL_SUCCESS`: All dependencies have executed and succeeded * `AT_LEAST_ONE_SUCCESS`: At
* least one dependency has succeeded * `NONE_FAILED`: None of the dependencies have failed and at
@@ -184,13 +181,17 @@ public class Task {
@JsonProperty("task_key")
private String taskKey;
- /**
- * An optional timeout applied to each run of this job task. The default behavior is to have no
- * timeout.
- */
+ /** An optional timeout applied to each run of this job task. A value of `0` means no timeout. */
@JsonProperty("timeout_seconds")
private Long timeoutSeconds;
+ /**
+ * A collection of system notification IDs to notify when runs of this task begin or complete. The
+ * default behavior is to not send any system notifications.
+ */
+ @JsonProperty("webhook_notifications")
+ private WebhookNotifications webhookNotifications;
+
public Task setComputeKey(String computeKey) {
this.computeKey = computeKey;
return this;
@@ -425,6 +426,15 @@ public Long getTimeoutSeconds() {
return timeoutSeconds;
}
+ public Task setWebhookNotifications(WebhookNotifications webhookNotifications) {
+ this.webhookNotifications = webhookNotifications;
+ return this;
+ }
+
+ public WebhookNotifications getWebhookNotifications() {
+ return webhookNotifications;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -455,7 +465,8 @@ public boolean equals(Object o) {
&& Objects.equals(sparkSubmitTask, that.sparkSubmitTask)
&& Objects.equals(sqlTask, that.sqlTask)
&& Objects.equals(taskKey, that.taskKey)
- && Objects.equals(timeoutSeconds, that.timeoutSeconds);
+ && Objects.equals(timeoutSeconds, that.timeoutSeconds)
+ && Objects.equals(webhookNotifications, that.webhookNotifications);
}
@Override
@@ -486,7 +497,8 @@ public int hashCode() {
sparkSubmitTask,
sqlTask,
taskKey,
- timeoutSeconds);
+ timeoutSeconds,
+ webhookNotifications);
}
@Override
@@ -518,6 +530,7 @@ public String toString() {
.add("sqlTask", sqlTask)
.add("taskKey", taskKey)
.add("timeoutSeconds", timeoutSeconds)
+ .add("webhookNotifications", webhookNotifications)
.toString();
}
}
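With the field added above, webhook notifications can be attached per task rather than only at the job level. A hedged sketch; Webhook and its setId follow the generated pattern, and the destination ID is a placeholder:

import com.databricks.sdk.service.jobs.Task;
import com.databricks.sdk.service.jobs.Webhook;
import com.databricks.sdk.service.jobs.WebhookNotifications;
import java.util.Collections;

public class TaskWebhookExample {
  public static void main(String[] args) {
    // Notify a webhook destination when this one task fails.
    Task task =
        new Task()
            .setTaskKey("main")
            .setWebhookNotifications(
                new WebhookNotifications()
                    .setOnFailure(
                        Collections.singletonList(new Webhook().setId("notification-dest-id"))));
    System.out.println(task);
  }
}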
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
index 588b7321b..f440e99f3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
@@ -292,9 +292,10 @@ public StartUpdateResponse startUpdate(String pipelineId) {
}
/**
- * Queue a pipeline update.
+ * Start a pipeline.
*
- * Starts or queues a pipeline update.
+ * Starts a new update for the pipeline. If there is already an active update for the pipeline,
+ * the request will fail and the active update will remain running.
*/
public StartUpdateResponse startUpdate(StartUpdate request) {
return impl.startUpdate(request);
@@ -307,7 +308,8 @@ public Wait
- * Stops a pipeline.
+ * Stops the pipeline by canceling the active update. If there is no active update for the
+ * pipeline, this request is a no-op.
*/
  public Wait
- * Starts or queues a pipeline update.
+ * Starts a new update for the pipeline. If there is already an active update for the pipeline,
+ * the request will fail and the active update will remain running.
*/
StartUpdateResponse startUpdate(StartUpdate startUpdate);
/**
* Stop a pipeline.
*
- * Stops a pipeline.
+ * Stops the pipeline by canceling the active update. If there is no active update for the
+ * pipeline, this request is a no-op.
*/
void stop(StopRequest stopRequest);
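A sketch of the clarified semantics; the pipeline ID is a placeholder, and the stop(String) convenience overload is assumed from the generated pattern:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.pipelines.StartUpdate;
import com.databricks.sdk.service.pipelines.StartUpdateResponse;

public class PipelineUpdateExample {
  public static void main(String[] args) throws Exception {
    WorkspaceClient w = new WorkspaceClient();
    String pipelineId = "..."; // placeholder

    // Fails if an update is already active for this pipeline.
    StartUpdateResponse update =
        w.pipelines().startUpdate(new StartUpdate().setPipelineId(pipelineId));
    System.out.println(update.getUpdateId());

    // Cancels the active update; a no-op when nothing is running.
    w.pipelines().stop(pipelineId).get();
  }
}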
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java
index f94e57878..469c7303b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java
@@ -58,6 +58,15 @@ public class ServedModelInput {
@JsonProperty("workload_size")
private String workloadSize;
+ /**
+ * The workload type of the served model. The workload type selects which type of compute to use
+ * in the endpoint. The default value for this parameter is "CPU". For deep learning workloads,
+ * GPU acceleration is available by selecting workload types like GPU_SMALL and others. See
+ * documentation for all options.
+ */
+ @JsonProperty("workload_type")
+ private String workloadType;
+
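A sketch of selecting GPU compute through the new field; the model name, version, and workload values are placeholders taken from the field doc above:

import com.databricks.sdk.service.serving.ServedModelInput;

public class GpuServedModelExample {
  public static void main(String[] args) {
    // workload_type defaults to "CPU"; GPU_SMALL selects GPU acceleration.
    ServedModelInput served =
        new ServedModelInput()
            .setModelName("my_model")
            .setModelVersion("1")
            .setWorkloadSize("Small")
            .setWorkloadType("GPU_SMALL")
            .setScaleToZeroEnabled(true);
    System.out.println(served);
  }
}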
  public ServedModelInput setEnvironmentVars(Map<String, String> environmentVars) {
- * There is only one instance of this setting per account. Since this setting has a default
- * value, this setting is present on all accounts even though it's never set on a given account.
- * Deletion reverts the value of the setting back to the default value.
- */
-@Generated
-public class AccountNetworkPolicyAPI {
- private static final Logger LOG = LoggerFactory.getLogger(AccountNetworkPolicyAPI.class);
-
- private final AccountNetworkPolicyService impl;
-
- /** Regular-use constructor */
- public AccountNetworkPolicyAPI(ApiClient apiClient) {
- impl = new AccountNetworkPolicyImpl(apiClient);
- }
-
- /** Constructor for mocks */
- public AccountNetworkPolicyAPI(AccountNetworkPolicyService mock) {
- impl = mock;
- }
-
- public DeleteAccountNetworkPolicyResponse deleteAccountNetworkPolicy(String etag) {
- return deleteAccountNetworkPolicy(new DeleteAccountNetworkPolicyRequest().setEtag(etag));
- }
-
- /**
- * Delete Account Network Policy.
- *
- * Reverts back all the account network policies back to default.
- */
- public DeleteAccountNetworkPolicyResponse deleteAccountNetworkPolicy(
- DeleteAccountNetworkPolicyRequest request) {
- return impl.deleteAccountNetworkPolicy(request);
- }
-
- public AccountNetworkPolicyMessage readAccountNetworkPolicy(String etag) {
- return readAccountNetworkPolicy(new ReadAccountNetworkPolicyRequest().setEtag(etag));
- }
-
- /**
- * Get Account Network Policy.
- *
- * Gets the value of Account level Network Policy.
- */
- public AccountNetworkPolicyMessage readAccountNetworkPolicy(
- ReadAccountNetworkPolicyRequest request) {
- return impl.readAccountNetworkPolicy(request);
- }
-
- /**
- * Update Account Network Policy.
- *
- * Updates the policy content of Account level Network Policy.
- */
- public AccountNetworkPolicyMessage updateAccountNetworkPolicy(
- UpdateAccountNetworkPolicyRequest request) {
- return impl.updateAccountNetworkPolicy(request);
- }
-
- public AccountNetworkPolicyService impl() {
- return impl;
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyImpl.java
deleted file mode 100755
index 4a8f39d83..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountNetworkPolicyImpl.java
+++ /dev/null
@@ -1,54 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.core.ApiClient;
-import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
-
-/** Package-local implementation of AccountNetworkPolicy */
-@Generated
-class AccountNetworkPolicyImpl implements AccountNetworkPolicyService {
- private final ApiClient apiClient;
-
- public AccountNetworkPolicyImpl(ApiClient apiClient) {
- this.apiClient = apiClient;
- }
-
- @Override
- public DeleteAccountNetworkPolicyResponse deleteAccountNetworkPolicy(
- DeleteAccountNetworkPolicyRequest request) {
- String path =
- String.format(
- "/api/2.0/accounts/%s/settings/types/network_policy/names/default",
- apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
- * There is only one instance of this setting per account. Since this setting has a default
- * value, this setting is present on all accounts even though it's never set on a given account.
- * Deletion reverts the value of the setting back to the default value.
- *
- * This is the high-level interface, that contains generated methods.
- *
- * Evolving: this interface is under development. Method signatures may change.
- */
-@Generated
-public interface AccountNetworkPolicyService {
- /**
- * Delete Account Network Policy.
- *
- * Reverts back all the account network policies back to default.
- */
- DeleteAccountNetworkPolicyResponse deleteAccountNetworkPolicy(
- DeleteAccountNetworkPolicyRequest deleteAccountNetworkPolicyRequest);
-
- /**
- * Get Account Network Policy.
- *
- * Gets the value of Account level Network Policy.
- */
- AccountNetworkPolicyMessage readAccountNetworkPolicy(
- ReadAccountNetworkPolicyRequest readAccountNetworkPolicyRequest);
-
- /**
- * Update Account Network Policy.
- *
- * Updates the policy content of Account level Network Policy.
- */
- AccountNetworkPolicyMessage updateAccountNetworkPolicy(
- UpdateAccountNetworkPolicyRequest updateAccountNetworkPolicyRequest);
-}
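
Because the Service layer is a plain interface, a caller could also supply a hand-written implementation, for example a test stub, which is the main practical use of this seam. A sketch against the interface deleted above (the canned values are arbitrary):

import com.databricks.sdk.service.settings.AccountNetworkPolicyMessage;
import com.databricks.sdk.service.settings.AccountNetworkPolicyService;
import com.databricks.sdk.service.settings.DeleteAccountNetworkPolicyRequest;
import com.databricks.sdk.service.settings.DeleteAccountNetworkPolicyResponse;
import com.databricks.sdk.service.settings.ReadAccountNetworkPolicyRequest;
import com.databricks.sdk.service.settings.UpdateAccountNetworkPolicyRequest;

// Test stub: answers with canned values instead of calling the REST API.
class FakeAccountNetworkPolicyService implements AccountNetworkPolicyService {
  @Override
  public DeleteAccountNetworkPolicyResponse deleteAccountNetworkPolicy(
      DeleteAccountNetworkPolicyRequest request) {
    return new DeleteAccountNetworkPolicyResponse().setEtag("etag-after-delete");
  }

  @Override
  public AccountNetworkPolicyMessage readAccountNetworkPolicy(
      ReadAccountNetworkPolicyRequest request) {
    return new AccountNetworkPolicyMessage();
  }

  @Override
  public AccountNetworkPolicyMessage updateAccountNetworkPolicy(
      UpdateAccountNetworkPolicyRequest request) {
    return new AccountNetworkPolicyMessage();
  }
}
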
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyRequest.java
deleted file mode 100755
index 83f6c285a..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyRequest.java
+++ /dev/null
@@ -1,50 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.QueryParam;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-/** Delete Account Network Policy */
-@Generated
-public class DeleteAccountNetworkPolicyRequest {
- /**
- * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
- * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
- * overwriting each other. It is strongly suggested that systems make use of the etag in the read
- * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
- * an etag from a GET request, and pass it with the DELETE request to identify the rule set
- * version you are deleting.
- */
- @QueryParam("etag")
- private String etag;
-
- public DeleteAccountNetworkPolicyRequest setEtag(String etag) {
- this.etag = etag;
- return this;
- }
-
- public String getEtag() {
- return etag;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- DeleteAccountNetworkPolicyRequest that = (DeleteAccountNetworkPolicyRequest) o;
- return Objects.equals(etag, that.etag);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(etag);
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteAccountNetworkPolicyRequest.class).add("etag", etag).toString();
- }
-}
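
The etag contract described in the field comment is plain optimistic concurrency: GET to learn the current version, then make the DELETE conditional on it. A minimal sketch, assuming the caller captured the etag from a prior read (how the read surfaces that etag is not shown in this diff):

import com.databricks.sdk.service.settings.AccountNetworkPolicyAPI;
import com.databricks.sdk.service.settings.DeleteAccountNetworkPolicyRequest;
import com.databricks.sdk.service.settings.DeleteAccountNetworkPolicyResponse;

public class EtagDeleteFlow {
  /** Reverts the account policy, guarded by the etag from an earlier GET. */
  static DeleteAccountNetworkPolicyResponse revertToDefault(
      AccountNetworkPolicyAPI policies, String etagFromRead) {
    // With the etag attached, a write that raced in between makes the server
    // reject this delete instead of silently discarding the other writer's change.
    return policies.deleteAccountNetworkPolicy(
        new DeleteAccountNetworkPolicyRequest().setEtag(etagFromRead));
  }
}

The DeleteAccountNetworkPolicyResponse below carries a fresh etag in turn, which is what its read -> update wording expects callers to feed into the next PATCH.
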
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyResponse.java
deleted file mode 100755
index 3a9a5981b..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountNetworkPolicyResponse.java
+++ /dev/null
@@ -1,49 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class DeleteAccountNetworkPolicyResponse {
- /**
- * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
- * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
- * overwriting each other. It is strongly suggested that systems make use of the etag in the read
- * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
- * etag from a GET request, and pass it with the PATCH request to identify the setting version you
- * are updating.
- */
- @JsonProperty("etag")
- private String etag;
-
- public DeleteAccountNetworkPolicyResponse setEtag(String etag) {
- this.etag = etag;
- return this;
- }
-
- public String getEtag() {
- return etag;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- DeleteAccountNetworkPolicyResponse that = (DeleteAccountNetworkPolicyResponse) o;
- return Objects.equals(etag, that.etag);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(etag);
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteAccountNetworkPolicyResponse.class).add("etag", etag).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java
index 596b15dc4..47925b4bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListResponse.java
@@ -5,22 +5,21 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Collection;
import java.util.Objects;
@Generated
public class GetIpAccessListResponse {
/** */
- @JsonProperty("ip_access_lists")
- private Collection<IpAccessListInfo> ipAccessLists;
+ @JsonProperty("ip_access_list")
+ private IpAccessListInfo ipAccessList;

- public GetIpAccessListResponse setIpAccessLists(Collection<IpAccessListInfo> ipAccessLists) {
- this.ipAccessLists = ipAccessLists;
+ public GetIpAccessListResponse setIpAccessList(IpAccessListInfo ipAccessList) {
+ this.ipAccessList = ipAccessList;
return this;
}

- public Collection<IpAccessListInfo> getIpAccessLists() {
- return ipAccessLists;
+ public IpAccessListInfo getIpAccessList() {
+ return ipAccessList;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java
* Gets all IP access lists for the specified workspace.
*/
- GetIpAccessListResponse list();
+ ListIpAccessListResponse list();
/**
* Replace access list.
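
For callers, the visible change is the return type of list(); the rest is mechanical. A migration sketch, assuming the IpAccessListsAPI wrapper (whose hunk is not shown here) mirrors this signature:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.IpAccessListInfo;
import com.databricks.sdk.service.settings.ListIpAccessListResponse;

public class ListAccessLists {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Previously typed as GetIpAccessListResponse; the payload field is the same.
    ListIpAccessListResponse response = w.ipAccessLists().list();
    if (response.getIpAccessLists() != null) {
      for (IpAccessListInfo info : response.getIpAccessLists()) {
        System.out.println(info.getListId() + " " + info.getLabel());
      }
    }
  }
}
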
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java
new file mode 100755
index 000000000..9f31d07f0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListIpAccessListResponse {
+ /** */
+ @JsonProperty("ip_access_lists")
+ private Collection<IpAccessListInfo> ipAccessLists;
+
+ public ListIpAccessListResponse setIpAccessLists(Collection<IpAccessListInfo> ipAccessLists) {
+ this.ipAccessLists = ipAccessLists;
+ return this;
+ }
+
+ public Collection<IpAccessListInfo> getIpAccessLists() {
+ return ipAccessLists;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListIpAccessListResponse that = (ListIpAccessListResponse) o;
+ return Objects.equals(ipAccessLists, that.ipAccessLists);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(ipAccessLists);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListIpAccessListResponse.class)
+ .add("ipAccessLists", ipAccessLists)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Import.java
* - `AUTO`: The item is imported depending on an analysis of the item's extension and the
* header content provided in the request. If the item is imported as a notebook, then the item's
- * extension is automatically removed. - `SOURCE`: The notebook is imported as source code. -
- * `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a
- * Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive format.
- * Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format.
+ * extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source
+ * code. - `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported
+ * as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive
+ * format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown
+ * format.
*/
@JsonProperty("format")
private ImportFormat format;
@@ -46,7 +47,7 @@ public class Import {
/**
* The absolute path of the object or directory. Importing a directory is only supported for the
- * `DBC` format.
+ * `DBC` and `SOURCE` formats.
*/
@JsonProperty("path")
private String path;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java
index b71f3db60..0448adcf4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ImportFormat.java
@@ -11,10 +11,10 @@
*
* - `AUTO`: The item is imported depending on an analysis of the item's extension and the header
* content provided in the request. If the item is imported as a notebook, then the item's extension
- * is automatically removed. - `SOURCE`: The notebook is imported as source code. - `HTML`: The
- * notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a Jupyter/IPython
- * Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for
- * directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format.
+ * is automatically removed. - `SOURCE`: The notebook or directory is imported as source code. -
+ * `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a
+ * Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive format.
+ * Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format.
*/
@Generated
public enum ImportFormat {
@@ -24,5 +24,5 @@ public enum ImportFormat {
HTML, // The notebook is imported as an HTML file.
JUPYTER, // The notebook is imported as a Jupyter/IPython Notebook file.
R_MARKDOWN, // The notebook is imported from R Markdown format.
- SOURCE, // The notebook is imported as source code.
+ SOURCE, // The notebook or directory is imported as source code.
}
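
As a client-side illustration of these constants (an assumption-laden helper, not SDK behavior; the server's AUTO analysis remains authoritative):

import com.databricks.sdk.service.workspace.ImportFormat;

public class FormatPicker {
  // Illustrative mapping from a local file name to an ImportFormat.
  static ImportFormat formatFor(String fileName) {
    if (fileName.endsWith(".ipynb")) return ImportFormat.JUPYTER;
    if (fileName.endsWith(".html")) return ImportFormat.HTML;
    if (fileName.endsWith(".dbc")) return ImportFormat.DBC;
    if (fileName.endsWith(".Rmd")) return ImportFormat.R_MARKDOWN;
    // SOURCE would need an explicit Language for single files, so fall back to AUTO.
    return ImportFormat.AUTO;
  }
}
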
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java
index 161c75621..8214fca08 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java
@@ -123,7 +123,9 @@ public void importContent(String path) {
*
* Imports a workspace object (for example, a notebook or file) or the contents of an entire
* directory. If `path` already exists and `overwrite` is set to `false`, this call returns an
- * error `RESOURCE_ALREADY_EXISTS`. One can only use `DBC` format to import a directory.
+ * error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or
+ * the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you
+ * must set the `language` field.
*/
public void importContent(Import request) {
impl.importContent(request);
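
Concretely, the two SOURCE cases called out in this javadoc look like this (paths and the base64 payload are illustrative; how directory content must be packaged is defined by the REST API, not by this diff):

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.workspace.Import;
import com.databricks.sdk.service.workspace.ImportFormat;
import com.databricks.sdk.service.workspace.Language;

public class ImportExamples {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String base64Payload = "..."; // illustrative base64-encoded request content

    // Directory import: SOURCE format with the language field left unset.
    w.workspace().importContent(
        new Import()
            .setPath("/Users/someone@example.com/my-project")
            .setFormat(ImportFormat.SOURCE)
            .setContent(base64Payload)
            .setOverwrite(true));

    // Single file as SOURCE: the language field must be set.
    w.workspace().importContent(
        new Import()
            .setPath("/Users/someone@example.com/my-notebook")
            .setFormat(ImportFormat.SOURCE)
            .setLanguage(Language.PYTHON)
            .setContent(base64Payload)
            .setOverwrite(true));
  }
}
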
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java
index 2f679a53c..e6e7be354 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java
@@ -69,7 +69,9 @@ WorkspaceObjectPermissions getPermissions(
*
* Imports a workspace object (for example, a notebook or file) or the contents of an entire
* directory. If `path` already exists and `overwrite` is set to `false`, this call returns an
- * error `RESOURCE_ALREADY_EXISTS`. One can only use `DBC` format to import a directory.
+ * error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or
+ * the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you
+ * must set the `language` field.
*/
void importContent(Import importContent);