diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 23aa44735..7c42f6dc2 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-5903bb39137fd76ac384b2044e425f9c56840e00
\ No newline at end of file
+e7b127cb07af8dd4d8c61c7cc045c8910cdbb02a
\ No newline at end of file
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 6ff2f4712..b8ef77bd1 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -1,70 +1,98 @@
 import databricks.sdk.core as client
 import databricks.sdk.dbutils as dbutils
-from databricks.sdk.mixins.compute import ClustersExt
+
 from databricks.sdk.mixins.files import DbfsExt
+from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
-from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
-                                            LogDeliveryAPI)
-from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
-                                            AccountMetastoresAPI,
-                                            AccountStorageCredentialsAPI,
-                                            ArtifactAllowlistsAPI, CatalogsAPI,
-                                            ConnectionsAPI,
-                                            ExternalLocationsAPI, FunctionsAPI,
-                                            GrantsAPI, MetastoresAPI,
-                                            ModelVersionsAPI,
-                                            RegisteredModelsAPI, SchemasAPI,
-                                            StorageCredentialsAPI,
-                                            SystemSchemasAPI,
-                                            TableConstraintsAPI, TablesAPI,
-                                            VolumesAPI, WorkspaceBindingsAPI)
-from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
-                                            CommandExecutionAPI,
-                                            GlobalInitScriptsAPI,
-                                            InstancePoolsAPI,
-                                            InstanceProfilesAPI, LibrariesAPI,
-                                            PolicyFamiliesAPI)
-from databricks.sdk.service.files import DbfsAPI, FilesAPI
-from databricks.sdk.service.iam import (AccountAccessControlAPI,
-                                        AccountAccessControlProxyAPI,
-                                        AccountGroupsAPI,
-                                        AccountServicePrincipalsAPI,
-                                        AccountUsersAPI, CurrentUserAPI,
-                                        GroupsAPI, PermissionsAPI,
-                                        ServicePrincipalsAPI, UsersAPI,
-                                        WorkspaceAssignmentAPI)
+from databricks.sdk.service.iam import AccountAccessControlAPI
+from databricks.sdk.service.iam import AccountAccessControlProxyAPI
+from databricks.sdk.service.sql import AlertsAPI
+from databricks.sdk.service.serving import AppsAPI
+from databricks.sdk.service.catalog import ArtifactAllowlistsAPI
+from databricks.sdk.service.billing import BillableUsageAPI
+from databricks.sdk.service.billing import BudgetsAPI
+from databricks.sdk.service.catalog import CatalogsAPI
+from databricks.sdk.service.sharing import CleanRoomsAPI
+from databricks.sdk.service.compute import ClusterPoliciesAPI
+from databricks.sdk.service.compute import ClustersAPI
+from databricks.sdk.service.compute import CommandExecutionAPI
+from databricks.sdk.service.catalog import ConnectionsAPI
+from databricks.sdk.service.provisioning import CredentialsAPI
+from databricks.sdk.service.settings import CredentialsManagerAPI
+from databricks.sdk.service.iam import CurrentUserAPI
+from databricks.sdk.service.oauth2 import CustomAppIntegrationAPI
+from databricks.sdk.service.sql import DashboardWidgetsAPI
+from databricks.sdk.service.sql import DashboardsAPI
+from databricks.sdk.service.sql import DataSourcesAPI
+from databricks.sdk.service.files import DbfsAPI
+from databricks.sdk.service.sql import DbsqlPermissionsAPI
+from databricks.sdk.service.provisioning import EncryptionKeysAPI
+from databricks.sdk.service.ml import ExperimentsAPI
+from databricks.sdk.service.catalog import ExternalLocationsAPI
+from databricks.sdk.service.files import FilesAPI
+from databricks.sdk.service.catalog import FunctionsAPI
+from databricks.sdk.service.workspace import GitCredentialsAPI
+from databricks.sdk.service.compute import GlobalInitScriptsAPI
+from databricks.sdk.service.catalog import GrantsAPI
+from databricks.sdk.service.iam import GroupsAPI
+from databricks.sdk.service.iam import AccountGroupsAPI
+from databricks.sdk.service.compute import InstancePoolsAPI
+from databricks.sdk.service.compute import InstanceProfilesAPI
+from databricks.sdk.service.settings import IpAccessListsAPI
+from databricks.sdk.service.settings import AccountIpAccessListsAPI
 from databricks.sdk.service.jobs import JobsAPI
-from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI
-from databricks.sdk.service.oauth2 import (CustomAppIntegrationAPI,
-                                           OAuthEnrollmentAPI,
-                                           OAuthPublishedAppsAPI,
-                                           PublishedAppIntegrationAPI,
-                                           ServicePrincipalSecretsAPI)
+from databricks.sdk.service.compute import LibrariesAPI
+from databricks.sdk.service.billing import LogDeliveryAPI
+from databricks.sdk.service.catalog import AccountMetastoreAssignmentsAPI
+from databricks.sdk.service.catalog import MetastoresAPI
+from databricks.sdk.service.catalog import AccountMetastoresAPI
+from databricks.sdk.service.ml import ModelRegistryAPI
+from databricks.sdk.service.catalog import ModelVersionsAPI
+from databricks.sdk.service.settings import NetworkConnectivityAPI
+from databricks.sdk.service.settings import AccountNetworkPolicyAPI
+from databricks.sdk.service.provisioning import NetworksAPI
+from databricks.sdk.service.oauth2 import OAuthPublishedAppsAPI
+from databricks.sdk.service.iam import PermissionsAPI
 from databricks.sdk.service.pipelines import PipelinesAPI
-from databricks.sdk.service.provisioning import (CredentialsAPI,
-                                                 EncryptionKeysAPI,
-                                                 NetworksAPI, PrivateAccessAPI,
-                                                 StorageAPI, VpcEndpointsAPI,
-                                                 WorkspacesAPI)
-from databricks.sdk.service.serving import AppsAPI, ServingEndpointsAPI
-from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
-                                             AccountNetworkPolicyAPI,
-                                             AccountSettingsAPI,
-                                             CredentialsManagerAPI,
-                                             IpAccessListsAPI, SettingsAPI,
-                                             TokenManagementAPI, TokensAPI,
-                                             WorkspaceConfAPI)
-from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI,
-                                            RecipientActivationAPI,
-                                            RecipientsAPI, SharesAPI)
-from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI,
-                                        DashboardWidgetsAPI, DataSourcesAPI,
-                                        DbsqlPermissionsAPI, QueriesAPI,
-                                        QueryHistoryAPI,
-                                        QueryVisualizationsAPI,
-                                        StatementExecutionAPI, WarehousesAPI)
-from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
-                                              SecretsAPI, WorkspaceAPI)
+from databricks.sdk.service.compute import PolicyFamiliesAPI
+from databricks.sdk.service.provisioning import PrivateAccessAPI
+from databricks.sdk.service.sharing import ProvidersAPI
+from databricks.sdk.service.oauth2 import PublishedAppIntegrationAPI
+from databricks.sdk.service.sql import QueriesAPI
+from databricks.sdk.service.sql import QueryHistoryAPI
+from databricks.sdk.service.sql import QueryVisualizationsAPI
+from databricks.sdk.service.sharing import RecipientActivationAPI
+from databricks.sdk.service.sharing import RecipientsAPI
+from databricks.sdk.service.catalog import RegisteredModelsAPI
+from databricks.sdk.service.workspace import ReposAPI
+from databricks.sdk.service.catalog import SchemasAPI
+from databricks.sdk.service.workspace import SecretsAPI
+from databricks.sdk.service.oauth2 import ServicePrincipalSecretsAPI
+from databricks.sdk.service.iam import ServicePrincipalsAPI
+from databricks.sdk.service.iam import AccountServicePrincipalsAPI
+from databricks.sdk.service.serving import ServingEndpointsAPI
+from databricks.sdk.service.settings import SettingsAPI
+from databricks.sdk.service.settings import AccountSettingsAPI
+from databricks.sdk.service.sharing import SharesAPI
+from databricks.sdk.service.sql import StatementExecutionAPI
+from databricks.sdk.service.provisioning import StorageAPI
+from databricks.sdk.service.catalog import StorageCredentialsAPI
+from databricks.sdk.service.catalog import AccountStorageCredentialsAPI
+from databricks.sdk.service.catalog import SystemSchemasAPI
+from databricks.sdk.service.catalog import TableConstraintsAPI
+from databricks.sdk.service.catalog import TablesAPI
+from databricks.sdk.service.settings import TokenManagementAPI
+from databricks.sdk.service.settings import TokensAPI
+from databricks.sdk.service.iam import UsersAPI
+from databricks.sdk.service.iam import AccountUsersAPI
+from databricks.sdk.service.catalog import VolumesAPI
+from databricks.sdk.service.provisioning import VpcEndpointsAPI
+from databricks.sdk.service.sql import WarehousesAPI
+from databricks.sdk.service.workspace import WorkspaceAPI
+from databricks.sdk.service.iam import WorkspaceAssignmentAPI
+from databricks.sdk.service.catalog import WorkspaceBindingsAPI
+from databricks.sdk.service.settings import WorkspaceConfAPI
+from databricks.sdk.service.provisioning import WorkspacesAPI
 
 
 def _make_dbutils(config: client.Config):
@@ -260,9 +288,9 @@ def __init__(self,
         self.log_delivery = LogDeliveryAPI(self.api_client)
         self.metastore_assignments = AccountMetastoreAssignmentsAPI(self.api_client)
         self.metastores = AccountMetastoresAPI(self.api_client)
+        self.network_connectivity = NetworkConnectivityAPI(self.api_client)
         self.network_policy = AccountNetworkPolicyAPI(self.api_client)
         self.networks = NetworksAPI(self.api_client)
-        self.o_auth_enrollment = OAuthEnrollmentAPI(self.api_client)
        self.o_auth_published_apps = OAuthPublishedAppsAPI(self.api_client)
         self.private_access = PrivateAccessAPI(self.api_client)
         self.published_app_integration = PublishedAppIntegrationAPI(self.api_client)
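# Illustrative sketch (not part of the diff): after this change AccountClient
# wires up the new Network Connectivity API, and the former `o_auth_enrollment`
# accessor is gone along with OAuthEnrollmentAPI. Credentials are assumed to
# come from the environment.
from databricks.sdk import AccountClient

a = AccountClient()
ncc = a.network_connectivity      # new: NetworkConnectivityAPI
# a.o_auth_enrollment             # removed in this change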
""" @@ -705,7 +708,7 @@ def list(self, parsed = WrappedLogDeliveryConfigurations.from_dict(json).log_delivery_configurations return parsed if parsed is not None else [] - def patch_status(self, status: LogDeliveryConfigStatus, log_delivery_configuration_id: str): + def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryConfigStatus): """Enable or disable log delivery configuration. Enables or disables a log delivery configuration. Deletion of delivery configurations is not @@ -713,13 +716,13 @@ def patch_status(self, status: LogDeliveryConfigStatus, log_delivery_configurati re-enable a delivery configuration if this would violate the delivery configuration limits described under [Create log delivery](:method:LogDelivery/Create). + :param log_delivery_configuration_id: str + Databricks log delivery configuration ID :param status: :class:`LogDeliveryConfigStatus` Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed. - :param log_delivery_configuration_id: str - Databricks log delivery configuration ID """ diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 94c78dda3..68dc91301 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -1,11 +1,14 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Any, Dict, Iterator, List, Optional - -from ._internal import _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') @@ -675,13 +678,13 @@ class CreateFunction: name: str catalog_name: str schema_name: str - input_params: 'List[FunctionParameterInfo]' + input_params: 'FunctionParameterInfos' data_type: 'ColumnTypeName' full_data_type: str - return_params: 'List[FunctionParameterInfo]' + return_params: 'FunctionParameterInfos' routine_body: 'CreateFunctionRoutineBody' routine_definition: str - routine_dependencies: 'List[Dependency]' + routine_dependencies: 'DependencyList' parameter_style: 'CreateFunctionParameterStyle' is_deterministic: bool sql_data_access: 'CreateFunctionSqlDataAccess' @@ -691,7 +694,7 @@ class CreateFunction: comment: Optional[str] = None external_language: Optional[str] = None external_name: Optional[str] = None - properties: Optional['Dict[str,str]'] = None + properties: Optional[str] = None sql_path: Optional[str] = None def as_dict(self) -> dict: @@ -702,17 +705,16 @@ def as_dict(self) -> dict: if self.external_language is not None: body['external_language'] = self.external_language if self.external_name is not None: body['external_name'] = self.external_name if self.full_data_type is not None: body['full_data_type'] = self.full_data_type - if self.input_params: body['input_params'] = [v.as_dict() for v in self.input_params] + if self.input_params: body['input_params'] = self.input_params.as_dict() if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic if self.is_null_call is not None: 
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 94c78dda3..68dc91301 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -1,11 +1,14 @@
 # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
 
-import logging
 from dataclasses import dataclass
+from datetime import timedelta
 from enum import Enum
-from typing import Any, Dict, Iterator, List, Optional
-
-from ._internal import _enum, _from_dict, _repeated
+from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO
+import time
+import random
+import logging
+from ..errors import OperationTimeout, OperationFailed
+from ._internal import _enum, _from_dict, _repeated, Wait
 
 _LOG = logging.getLogger('databricks.sdk')
@@ -675,13 +678,13 @@ class CreateFunction:
     name: str
     catalog_name: str
     schema_name: str
-    input_params: 'List[FunctionParameterInfo]'
+    input_params: 'FunctionParameterInfos'
     data_type: 'ColumnTypeName'
     full_data_type: str
-    return_params: 'List[FunctionParameterInfo]'
+    return_params: 'FunctionParameterInfos'
     routine_body: 'CreateFunctionRoutineBody'
     routine_definition: str
-    routine_dependencies: 'List[Dependency]'
+    routine_dependencies: 'DependencyList'
     parameter_style: 'CreateFunctionParameterStyle'
     is_deterministic: bool
     sql_data_access: 'CreateFunctionSqlDataAccess'
@@ -691,7 +694,7 @@ class CreateFunction:
     comment: Optional[str] = None
     external_language: Optional[str] = None
     external_name: Optional[str] = None
-    properties: Optional['Dict[str,str]'] = None
+    properties: Optional[str] = None
     sql_path: Optional[str] = None
 
     def as_dict(self) -> dict:
@@ -702,17 +705,16 @@ def as_dict(self) -> dict:
         if self.external_language is not None: body['external_language'] = self.external_language
         if self.external_name is not None: body['external_name'] = self.external_name
         if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
-        if self.input_params: body['input_params'] = [v.as_dict() for v in self.input_params]
+        if self.input_params: body['input_params'] = self.input_params.as_dict()
         if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
         if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
         if self.name is not None: body['name'] = self.name
         if self.parameter_style is not None: body['parameter_style'] = self.parameter_style.value
-        if self.properties: body['properties'] = self.properties
-        if self.return_params: body['return_params'] = [v.as_dict() for v in self.return_params]
+        if self.properties is not None: body['properties'] = self.properties
+        if self.return_params: body['return_params'] = self.return_params.as_dict()
         if self.routine_body is not None: body['routine_body'] = self.routine_body.value
         if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.routine_dependencies:
-            body['routine_dependencies'] = [v.as_dict() for v in self.routine_dependencies]
+        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies.as_dict()
         if self.schema_name is not None: body['schema_name'] = self.schema_name
         if self.security_type is not None: body['security_type'] = self.security_type.value
         if self.specific_name is not None: body['specific_name'] = self.specific_name
@@ -728,16 +730,16 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateFunction':
                    external_language=d.get('external_language', None),
                    external_name=d.get('external_name', None),
                    full_data_type=d.get('full_data_type', None),
-                   input_params=_repeated(d, 'input_params', FunctionParameterInfo),
+                   input_params=_from_dict(d, 'input_params', FunctionParameterInfos),
                    is_deterministic=d.get('is_deterministic', None),
                    is_null_call=d.get('is_null_call', None),
                    name=d.get('name', None),
                    parameter_style=_enum(d, 'parameter_style', CreateFunctionParameterStyle),
                    properties=d.get('properties', None),
-                   return_params=_repeated(d, 'return_params', FunctionParameterInfo),
+                   return_params=_from_dict(d, 'return_params', FunctionParameterInfos),
                    routine_body=_enum(d, 'routine_body', CreateFunctionRoutineBody),
                    routine_definition=d.get('routine_definition', None),
-                   routine_dependencies=_repeated(d, 'routine_dependencies', Dependency),
+                   routine_dependencies=_from_dict(d, 'routine_dependencies', DependencyList),
                    schema_name=d.get('schema_name', None),
                    security_type=_enum(d, 'security_type', CreateFunctionSecurityType),
                    specific_name=d.get('specific_name', None),
@@ -751,6 +753,20 @@ class CreateFunctionParameterStyle(Enum):
     S = 'S'
 
 
+@dataclass
+class CreateFunctionRequest:
+    function_info: 'CreateFunction'
+
+    def as_dict(self) -> dict:
+        body = {}
+        if self.function_info: body['function_info'] = self.function_info.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> 'CreateFunctionRequest':
+        return cls(function_info=_from_dict(d, 'function_info', CreateFunction))
+
+
 class CreateFunctionRoutineBody(Enum):
     """Function language.
 
     When **EXTERNAL** is used, the language of the routine function should be specified in the
     __external_language__ field, and the __return_params__ of the function cannot
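# Illustrative sketch (not part of the diff): the create-function payload is now
# nested under a single `function_info` key, which the new CreateFunctionRequest
# dataclass models. Values below are placeholders.
from databricks.sdk.service.catalog import CreateFunctionRequest

req = CreateFunctionRequest.from_dict(
    {'function_info': {'name': 'my_fn', 'catalog_name': 'main', 'schema_name': 'default'}})
assert req.as_dict()['function_info']['name'] == 'my_fn'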
@@ -778,8 +794,8 @@ class CreateFunctionSqlDataAccess(Enum):
 @dataclass
 class CreateMetastore:
     name: str
-    storage_root: str
     region: Optional[str] = None
+    storage_root: Optional[str] = None
 
     def as_dict(self) -> dict:
         body = {}
@@ -1039,6 +1055,22 @@ def from_dict(cls, d: Dict[str, any]) -> 'Dependency':
                    table=_from_dict(d, 'table', TableDependency))
 
 
+@dataclass
+class DependencyList:
+    """A list of dependencies."""
+
+    dependencies: Optional['List[Dependency]'] = None
+
+    def as_dict(self) -> dict:
+        body = {}
+        if self.dependencies: body['dependencies'] = [v.as_dict() for v in self.dependencies]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> 'DependencyList':
+        return cls(dependencies=_repeated(d, 'dependencies', Dependency))
+
+
 class DisableSchemaName(Enum):
 
     ACCESS = 'access'
@@ -1264,18 +1296,18 @@ class FunctionInfo:
     full_data_type: Optional[str] = None
     full_name: Optional[str] = None
     function_id: Optional[str] = None
-    input_params: Optional['List[FunctionParameterInfo]'] = None
+    input_params: Optional['FunctionParameterInfos'] = None
     is_deterministic: Optional[bool] = None
     is_null_call: Optional[bool] = None
     metastore_id: Optional[str] = None
     name: Optional[str] = None
     owner: Optional[str] = None
     parameter_style: Optional['FunctionInfoParameterStyle'] = None
-    properties: Optional['Dict[str,str]'] = None
-    return_params: Optional['List[FunctionParameterInfo]'] = None
+    properties: Optional[str] = None
+    return_params: Optional['FunctionParameterInfos'] = None
     routine_body: Optional['FunctionInfoRoutineBody'] = None
     routine_definition: Optional[str] = None
-    routine_dependencies: Optional['List[Dependency]'] = None
+    routine_dependencies: Optional['DependencyList'] = None
     schema_name: Optional[str] = None
     security_type: Optional['FunctionInfoSecurityType'] = None
     specific_name: Optional[str] = None
@@ -1296,19 +1328,18 @@ def as_dict(self) -> dict:
         if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
         if self.full_name is not None: body['full_name'] = self.full_name
         if self.function_id is not None: body['function_id'] = self.function_id
-        if self.input_params: body['input_params'] = [v.as_dict() for v in self.input_params]
+        if self.input_params: body['input_params'] = self.input_params.as_dict()
         if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
         if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
         if self.owner is not None: body['owner'] = self.owner
         if self.parameter_style is not None: body['parameter_style'] = self.parameter_style.value
-        if self.properties: body['properties'] = self.properties
-        if self.return_params: body['return_params'] = [v.as_dict() for v in self.return_params]
+        if self.properties is not None: body['properties'] = self.properties
+        if self.return_params: body['return_params'] = self.return_params.as_dict()
         if self.routine_body is not None: body['routine_body'] = self.routine_body.value
         if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.routine_dependencies:
-            body['routine_dependencies'] = [v.as_dict() for v in self.routine_dependencies]
+        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies.as_dict()
         if self.schema_name is not None: body['schema_name'] = self.schema_name
         if self.security_type is not None: body['security_type'] = self.security_type.value
         if self.specific_name is not None: body['specific_name'] = self.specific_name
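# Hedged round-trip sketch: dependency collections are now wrapped in the new
# DependencyList message ({'dependencies': [...]}) instead of a bare JSON array.
# The table name is a placeholder.
from databricks.sdk.service.catalog import Dependency, DependencyList, TableDependency

deps = DependencyList(
    dependencies=[Dependency(table=TableDependency(table_full_name='main.default.orders'))])
assert deps.as_dict() == {'dependencies': [{'table': {'table_full_name': 'main.default.orders'}}]}
assert DependencyList.from_dict(deps.as_dict()).dependencies[0].table.table_full_name == 'main.default.orders'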
@@ -1330,7 +1361,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'FunctionInfo':
                    full_data_type=d.get('full_data_type', None),
                    full_name=d.get('full_name', None),
                    function_id=d.get('function_id', None),
-                   input_params=_repeated(d, 'input_params', FunctionParameterInfo),
+                   input_params=_from_dict(d, 'input_params', FunctionParameterInfos),
                    is_deterministic=d.get('is_deterministic', None),
                    is_null_call=d.get('is_null_call', None),
                    metastore_id=d.get('metastore_id', None),
@@ -1338,10 +1369,10 @@ def from_dict(cls, d: Dict[str, any]) -> 'FunctionInfo':
                    owner=d.get('owner', None),
                    parameter_style=_enum(d, 'parameter_style', FunctionInfoParameterStyle),
                    properties=d.get('properties', None),
-                   return_params=_repeated(d, 'return_params', FunctionParameterInfo),
+                   return_params=_from_dict(d, 'return_params', FunctionParameterInfos),
                    routine_body=_enum(d, 'routine_body', FunctionInfoRoutineBody),
                    routine_definition=d.get('routine_definition', None),
-                   routine_dependencies=_repeated(d, 'routine_dependencies', Dependency),
+                   routine_dependencies=_from_dict(d, 'routine_dependencies', DependencyList),
                    schema_name=d.get('schema_name', None),
                    security_type=_enum(d, 'security_type', FunctionInfoSecurityType),
                    specific_name=d.get('specific_name', None),
@@ -1428,6 +1459,20 @@ def from_dict(cls, d: Dict[str, any]) -> 'FunctionParameterInfo':
                    type_text=d.get('type_text', None))
 
 
+@dataclass
+class FunctionParameterInfos:
+    parameters: Optional['List[FunctionParameterInfo]'] = None
+
+    def as_dict(self) -> dict:
+        body = {}
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> 'FunctionParameterInfos':
+        return cls(parameters=_repeated(d, 'parameters', FunctionParameterInfo))
+
+
 class FunctionParameterMode(Enum):
     """The mode of the function parameter."""
 
@@ -1855,7 +1900,7 @@ class ModelVersionInfo:
     id: Optional[str] = None
     metastore_id: Optional[str] = None
     model_name: Optional[str] = None
-    model_version_dependencies: Optional['List[Dependency]'] = None
+    model_version_dependencies: Optional['DependencyList'] = None
     run_id: Optional[str] = None
     run_workspace_id: Optional[int] = None
     schema_name: Optional[str] = None
@@ -1876,7 +1921,7 @@ def as_dict(self) -> dict:
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.model_name is not None: body['model_name'] = self.model_name
         if self.model_version_dependencies:
-            body['model_version_dependencies'] = [v.as_dict() for v in self.model_version_dependencies]
+            body['model_version_dependencies'] = self.model_version_dependencies.as_dict()
         if self.run_id is not None: body['run_id'] = self.run_id
         if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id
         if self.schema_name is not None: body['schema_name'] = self.schema_name
@@ -1897,7 +1942,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'ModelVersionInfo':
                    id=d.get('id', None),
                    metastore_id=d.get('metastore_id', None),
                    model_name=d.get('model_name', None),
-                   model_version_dependencies=_repeated(d, 'model_version_dependencies', Dependency),
+                   model_version_dependencies=_from_dict(d, 'model_version_dependencies', DependencyList),
                    run_id=d.get('run_id', None),
                    run_workspace_id=d.get('run_workspace_id', None),
                    schema_name=d.get('schema_name', None),
@@ -2461,7 +2506,7 @@ class TableInfo:
     updated_at: Optional[int] = None
     updated_by: Optional[str] = None
     view_definition: Optional[str] = None
-    view_dependencies: Optional['List[Dependency]'] = None
+    view_dependencies: Optional['DependencyList'] = None
 
     def as_dict(self) -> dict:
         body = {}
@@ -2501,7 +2546,7 @@ def as_dict(self) -> dict:
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         if self.view_definition is not None: body['view_definition'] = self.view_definition
-        if self.view_dependencies: body['view_dependencies'] = [v.as_dict() for v in self.view_dependencies]
+        if self.view_dependencies: body['view_dependencies'] = self.view_dependencies.as_dict()
         return body
 
     @classmethod
@@ -2538,7 +2583,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'TableInfo':
                    updated_at=d.get('updated_at', None),
                    updated_by=d.get('updated_by', None),
                    view_definition=d.get('view_definition', None),
-                   view_dependencies=_repeated(d, 'view_dependencies', Dependency))
+                   view_dependencies=_from_dict(d, 'view_dependencies', DependencyList))
 
 
 @dataclass
@@ -2641,6 +2686,7 @@ class UpdateExternalLocation:
     name: Optional[str] = None
     owner: Optional[str] = None
     read_only: Optional[bool] = None
+    skip_validation: Optional[bool] = None
     url: Optional[str] = None
 
     def as_dict(self) -> dict:
@@ -2653,6 +2699,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.owner is not None: body['owner'] = self.owner
         if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         if self.url is not None: body['url'] = self.url
         return body
 
@@ -2666,6 +2713,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdateExternalLocation':
                    name=d.get('name', None),
                    owner=d.get('owner', None),
                    read_only=d.get('read_only', None),
+                   skip_validation=d.get('skip_validation', None),
                    url=d.get('url', None))
 
 
@@ -3552,18 +3600,18 @@ def get(self, artifact_type: ArtifactType) -> ArtifactAllowlistInfo:
                            headers=headers)
         return ArtifactAllowlistInfo.from_dict(res)
 
-    def update(self, artifact_matchers: List[ArtifactMatcher],
-               artifact_type: ArtifactType) -> ArtifactAllowlistInfo:
+    def update(self, artifact_type: ArtifactType,
+               artifact_matchers: List[ArtifactMatcher]) -> ArtifactAllowlistInfo:
         """Set an artifact allowlist.
 
         Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with
         the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on
         the metastore.
 
-        :param artifact_matchers: List[:class:`ArtifactMatcher`]
-          A list of allowed artifact match patterns.
         :param artifact_type: :class:`ArtifactType`
           The artifact type of the allowlist.
+        :param artifact_matchers: List[:class:`ArtifactMatcher`]
+          A list of allowed artifact match patterns.
 
         :returns: :class:`ArtifactAllowlistInfo`
         """
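# Hedged sketch of the reordered setter: the artifact type now comes first,
# matching the other ID-first updates in this change. The path is a placeholder.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import ArtifactMatcher, ArtifactType, MatchType

w = WorkspaceClient()
# Previously: update(artifact_matchers, artifact_type)
w.artifact_allowlists.update(
    ArtifactType.LIBRARY_JAR,
    [ArtifactMatcher(artifact='/Volumes/main/default/jars/', match_type=MatchType.PREFIX_MATCH)])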
@@ -3820,21 +3868,21 @@ def list(self) -> Iterator['ConnectionInfo']:
         return parsed if parsed is not None else []
 
     def update(self,
+               name_arg: str,
                name: str,
                options: Dict[str, str],
-               name_arg: str,
                *,
                owner: Optional[str] = None) -> ConnectionInfo:
         """Update a connection.
 
         Updates the connection that matches the supplied name.
 
+        :param name_arg: str
+          Name of the connection.
         :param name: str
           Name of the connection.
         :param options: Dict[str,str]
           A map of key-value properties attached to the securable.
-        :param name_arg: str
-          Name of the connection.
         :param owner: str (optional)
           Username of current owner of the connection.
 
@@ -3978,6 +4026,7 @@ def update(self,
                force: Optional[bool] = None,
                owner: Optional[str] = None,
                read_only: Optional[bool] = None,
+               skip_validation: Optional[bool] = None,
                url: Optional[str] = None) -> ExternalLocationInfo:
         """Update an external location.
 
@@ -4001,6 +4050,8 @@ def update(self,
           The owner of the external location.
         :param read_only: bool (optional)
           Indicates whether the external location is read-only.
+        :param skip_validation: bool (optional)
+          Skips validation of the storage credential associated with the external location.
         :param url: str (optional)
           Path URL of the external location.
 
@@ -4014,6 +4065,7 @@ def update(self,
         if force is not None: body['force'] = force
         if owner is not None: body['owner'] = owner
         if read_only is not None: body['read_only'] = read_only
+        if skip_validation is not None: body['skip_validation'] = skip_validation
         if url is not None: body['url'] = url
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
         res = self._api.do('PATCH',
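# Hedged sketch: the URL path parameter `name_arg` now leads in
# connections.update(), and external-location updates can skip credential
# validation. Names are placeholders, and the first positional argument of
# external_locations.update() is assumed to be the location name.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Previously: update(name, options, name_arg, ...)
w.connections.update('my_conn', name='my_conn', options={'host': 'db.example.com'})
w.external_locations.update('my_location', skip_validation=True)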
@@ -4033,29 +4085,7 @@ class FunctionsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               catalog_name: str,
-               schema_name: str,
-               input_params: List[FunctionParameterInfo],
-               data_type: ColumnTypeName,
-               full_data_type: str,
-               return_params: List[FunctionParameterInfo],
-               routine_body: CreateFunctionRoutineBody,
-               routine_definition: str,
-               routine_dependencies: List[Dependency],
-               parameter_style: CreateFunctionParameterStyle,
-               is_deterministic: bool,
-               sql_data_access: CreateFunctionSqlDataAccess,
-               is_null_call: bool,
-               security_type: CreateFunctionSecurityType,
-               specific_name: str,
-               *,
-               comment: Optional[str] = None,
-               external_language: Optional[str] = None,
-               external_name: Optional[str] = None,
-               properties: Optional[Dict[str, str]] = None,
-               sql_path: Optional[str] = None) -> FunctionInfo:
+    def create(self, function_info: CreateFunction) -> FunctionInfo:
         """Create a function.
 
         Creates a new function
@@ -4064,77 +4094,13 @@ def create(self,
         **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the
         function's parent schema
 
-        :param name: str
-          Name of function, relative to parent schema.
-        :param catalog_name: str
-          Name of parent catalog.
-        :param schema_name: str
-          Name of parent schema relative to its parent catalog.
-        :param input_params: List[:class:`FunctionParameterInfo`]
-          The array of __FunctionParameterInfo__ definitions of the function's parameters.
-        :param data_type: :class:`ColumnTypeName`
-          Scalar function return data type.
-        :param full_data_type: str
-          Pretty printed function data type.
-        :param return_params: List[:class:`FunctionParameterInfo`]
-          Table function return parameters.
-        :param routine_body: :class:`CreateFunctionRoutineBody`
-          Function language. When **EXTERNAL** is used, the language of the routine function should be
-          specified in the __external_language__ field, and the __return_params__ of the function cannot be
-          used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
-          **NO_SQL**.
-        :param routine_definition: str
-          Function body.
-        :param routine_dependencies: List[:class:`Dependency`]
-          Function dependencies.
-        :param parameter_style: :class:`CreateFunctionParameterStyle`
-          Function parameter style. **S** is the value for SQL.
-        :param is_deterministic: bool
-          Whether the function is deterministic.
-        :param sql_data_access: :class:`CreateFunctionSqlDataAccess`
-          Function SQL data access.
-        :param is_null_call: bool
-          Function null call.
-        :param security_type: :class:`CreateFunctionSecurityType`
-          Function security type.
-        :param specific_name: str
-          Specific name of the function; Reserved for future use.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param external_language: str (optional)
-          External function language.
-        :param external_name: str (optional)
-          External function name.
-        :param properties: Dict[str,str] (optional)
-          A map of key-value properties attached to the securable.
-        :param sql_path: str (optional)
-          List of schemes whose objects can be referenced without qualification.
+        :param function_info: :class:`CreateFunction`
+          Partial __FunctionInfo__ specifying the function to be created.
 
         :returns: :class:`FunctionInfo`
         """
         body = {}
-        if catalog_name is not None: body['catalog_name'] = catalog_name
-        if comment is not None: body['comment'] = comment
-        if data_type is not None: body['data_type'] = data_type.value
-        if external_language is not None: body['external_language'] = external_language
-        if external_name is not None: body['external_name'] = external_name
-        if full_data_type is not None: body['full_data_type'] = full_data_type
-        if input_params is not None: body['input_params'] = [v.as_dict() for v in input_params]
-        if is_deterministic is not None: body['is_deterministic'] = is_deterministic
-        if is_null_call is not None: body['is_null_call'] = is_null_call
-        if name is not None: body['name'] = name
-        if parameter_style is not None: body['parameter_style'] = parameter_style.value
-        if properties is not None: body['properties'] = properties
-        if return_params is not None: body['return_params'] = [v.as_dict() for v in return_params]
-        if routine_body is not None: body['routine_body'] = routine_body.value
-        if routine_definition is not None: body['routine_definition'] = routine_definition
-        if routine_dependencies is not None:
-            body['routine_dependencies'] = [v.as_dict() for v in routine_dependencies]
-        if schema_name is not None: body['schema_name'] = schema_name
-        if security_type is not None: body['security_type'] = security_type.value
-        if specific_name is not None: body['specific_name'] = specific_name
-        if sql_data_access is not None: body['sql_data_access'] = sql_data_access.value
-        if sql_path is not None: body['sql_path'] = sql_path
+        if function_info is not None: body['function_info'] = function_info.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
         res = self._api.do('POST', '/api/2.1/unity-catalog/functions', body=body, headers=headers)
         return FunctionInfo.from_dict(res)
@@ -4348,19 +4314,19 @@ class MetastoresAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def assign(self, metastore_id: str, default_catalog_name: str, workspace_id: int):
+    def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str):
         """Create an assignment.
 
         Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be
         overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account
         admin.
 
+        :param workspace_id: int
+          A workspace ID.
         :param metastore_id: str
           The unique ID of the metastore.
         :param default_catalog_name: str
           The name of the default catalog in the metastore.
-        :param workspace_id: int
-          A workspace ID.
         """
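# Hedged sketch: functions.create() now takes one CreateFunction payload, and
# `properties` is a JSON-encoded string rather than a dict. Building the payload
# via from_dict leaves the elided required fields as None; values are placeholders.
import json
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import CreateFunction

w = WorkspaceClient()
fn = CreateFunction.from_dict({
    'name': 'my_fn', 'catalog_name': 'main', 'schema_name': 'default',
    'routine_definition': 'SELECT 1', 'properties': json.dumps({'team': 'data'}),
})
created = w.functions.create(fn)
# MetastoresAPI.assign was reordered the same way:
# assign(workspace_id, metastore_id, default_catalog_name)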
""" @@ -4373,18 +4339,25 @@ def assign(self, metastore_id: str, default_catalog_name: str, workspace_id: int body=body, headers=headers) - def create(self, name: str, storage_root: str, *, region: Optional[str] = None) -> MetastoreInfo: + def create(self, + name: str, + *, + region: Optional[str] = None, + storage_root: Optional[str] = None) -> MetastoreInfo: """Create a metastore. - Creates a new metastore based on a provided name and storage root path. + Creates a new metastore based on a provided name and optional storage root path. By default (if the + __owner__ field is not set), the owner of the new metastore is the user calling the + __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is + assigned to the System User instead. :param name: str The user-specified name of the metastore. - :param storage_root: str - The storage root URL for metastore :param region: str (optional) Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). If this field is omitted, the region of the workspace receiving the request will be used. + :param storage_root: str (optional) + The storage root URL for metastore :returns: :class:`MetastoreInfo` """ @@ -4521,7 +4494,8 @@ def update(self, storage_root_credential_id: Optional[str] = None) -> MetastoreInfo: """Update a metastore. - Updates information for a specific metastore. The caller must be a metastore admin. + Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__ + field is set to the empty string (**""**), the ownership is updated to the System User. :param id: str Unique ID of the metastore. diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index dc61df102..55d473513 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -1,50 +1,46 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index dc61df102..55d473513 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -1,50 +1,46 @@
 # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
 
-import logging
-import random
-import time
 from dataclasses import dataclass
 from datetime import timedelta
 from enum import Enum
-from typing import Any, Callable, Dict, Iterator, List, Optional
-
-from ..errors import OperationFailed
-from ._internal import Wait, _enum, _from_dict, _repeated
+from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO
+import time
+import random
+import logging
+from ..errors import OperationTimeout, OperationFailed
+from ._internal import _enum, _from_dict, _repeated, Wait
 
 _LOG = logging.getLogger('databricks.sdk')
 
-# all definitions in this file are in alphabetical order
+# all definitions in this file are in alphabetical order
+
 
 @dataclass
 class AddInstanceProfile:
     instance_profile_arn: str
     iam_role_arn: Optional[str] = None
     is_meta_instance_profile: Optional[bool] = None
     skip_validation: Optional[bool] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn
         if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.is_meta_instance_profile is not None:
-            body['is_meta_instance_profile'] = self.is_meta_instance_profile
+        if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'AddInstanceProfile':
-        return cls(iam_role_arn=d.get('iam_role_arn', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   is_meta_instance_profile=d.get('is_meta_instance_profile', None),
-                   skip_validation=d.get('skip_validation', None))
+        return cls(iam_role_arn=d.get('iam_role_arn', None), instance_profile_arn=d.get('instance_profile_arn', None), is_meta_instance_profile=d.get('is_meta_instance_profile', None), skip_validation=d.get('skip_validation', None))
+
+
 
 
 @dataclass
 class AutoScale:
     min_workers: int
     max_workers: int
-
     def as_dict(self) -> dict:
         body = {}
         if self.max_workers is not None: body['max_workers'] = self.max_workers
@@ -54,6 +50,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'AutoScale':
         return cls(max_workers=d.get('max_workers', None), min_workers=d.get('min_workers', None))
+
+
 
 
 @dataclass
@@ -68,7 +66,6 @@ class AwsAttributes:
     instance_profile_arn: Optional[str] = None
     spot_bid_price_percent: Optional[int] = None
     zone_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.availability is not None: body['availability'] = self.availability.value
@@ -79,42 +76,32 @@ def as_dict(self) -> dict:
         if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value
         if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand
         if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.spot_bid_price_percent is not None:
-            body['spot_bid_price_percent'] = self.spot_bid_price_percent
+        if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent
         if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'AwsAttributes':
-        return cls(availability=_enum(d, 'availability', AwsAvailability),
-                   ebs_volume_count=d.get('ebs_volume_count', None),
-                   ebs_volume_iops=d.get('ebs_volume_iops', None),
-                   ebs_volume_size=d.get('ebs_volume_size', None),
-                   ebs_volume_throughput=d.get('ebs_volume_throughput', None),
-                   ebs_volume_type=_enum(d, 'ebs_volume_type', EbsVolumeType),
-                   first_on_demand=d.get('first_on_demand', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   spot_bid_price_percent=d.get('spot_bid_price_percent', None),
-                   zone_id=d.get('zone_id', None))
+        return cls(availability=_enum(d, 'availability', AwsAvailability), ebs_volume_count=d.get('ebs_volume_count', None), ebs_volume_iops=d.get('ebs_volume_iops', None), ebs_volume_size=d.get('ebs_volume_size', None), ebs_volume_throughput=d.get('ebs_volume_throughput', None), ebs_volume_type=_enum(d, 'ebs_volume_type', EbsVolumeType), first_on_demand=d.get('first_on_demand', None), instance_profile_arn=d.get('instance_profile_arn', None), spot_bid_price_percent=d.get('spot_bid_price_percent', None), zone_id=d.get('zone_id', None))
+
+
 
 
 class AwsAvailability(Enum):
     """Availability type used for all subsequent nodes past the `first_on_demand` ones.
 
     Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster."""
-
+    
     ON_DEMAND = 'ON_DEMAND'
     SPOT = 'SPOT'
     SPOT_WITH_FALLBACK = 'SPOT_WITH_FALLBACK'
 
-
 @dataclass
 class AzureAttributes:
     availability: Optional['AzureAvailability'] = None
     first_on_demand: Optional[int] = None
     log_analytics_info: Optional['LogAnalyticsInfo'] = None
     spot_bid_max_price: Optional[float] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.availability is not None: body['availability'] = self.availability.value
@@ -125,28 +112,25 @@ def as_dict(self) -> dict:
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'AzureAttributes':
-        return cls(availability=_enum(d, 'availability', AzureAvailability),
-                   first_on_demand=d.get('first_on_demand', None),
-                   log_analytics_info=_from_dict(d, 'log_analytics_info', LogAnalyticsInfo),
-                   spot_bid_max_price=d.get('spot_bid_max_price', None))
+        return cls(availability=_enum(d, 'availability', AzureAvailability), first_on_demand=d.get('first_on_demand', None), log_analytics_info=_from_dict(d, 'log_analytics_info', LogAnalyticsInfo), spot_bid_max_price=d.get('spot_bid_max_price', None))
+
+
 
 
 class AzureAvailability(Enum):
     """Availability type used for all subsequent nodes past the `first_on_demand` ones.
     Note: If `first_on_demand` is zero (which only happens on pool clusters), this availability type
     will be used for the entire cluster."""
-
+    
     ON_DEMAND_AZURE = 'ON_DEMAND_AZURE'
     SPOT_AZURE = 'SPOT_AZURE'
     SPOT_WITH_FALLBACK_AZURE = 'SPOT_WITH_FALLBACK_AZURE'
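# Behavior-preservation sketch (not part of the diff): the hunks above only
# collapse generated line wrapping, so as_dict()/from_dict() still round-trip
# exactly as before.
from databricks.sdk.service.compute import AwsAttributes, AwsAvailability

aws = AwsAttributes(availability=AwsAvailability.SPOT_WITH_FALLBACK, first_on_demand=1, zone_id='auto')
assert AwsAttributes.from_dict(aws.as_dict()).availability is AwsAvailability.SPOT_WITH_FALLBACK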
 
-
 @dataclass
 class CancelCommand:
     cluster_id: Optional[str] = None
     command_id: Optional[str] = None
     context_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['clusterId'] = self.cluster_id
@@ -156,16 +140,15 @@ def as_dict(self) -> dict:
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CancelCommand':
-        return cls(cluster_id=d.get('clusterId', None),
-                   command_id=d.get('commandId', None),
-                   context_id=d.get('contextId', None))
+        return cls(cluster_id=d.get('clusterId', None), command_id=d.get('commandId', None), context_id=d.get('contextId', None))
+
+
 
 
 @dataclass
 class ChangeClusterOwner:
     cluster_id: str
     owner_username: str
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
@@ -175,13 +158,14 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ChangeClusterOwner':
         return cls(cluster_id=d.get('cluster_id', None), owner_username=d.get('owner_username', None))
+
+
 
 
 @dataclass
 class ClientsTypes:
     jobs: Optional[bool] = None
     notebooks: Optional[bool] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.jobs is not None: body['jobs'] = self.jobs
@@ -191,12 +175,13 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClientsTypes':
         return cls(jobs=d.get('jobs', None), notebooks=d.get('notebooks', None))
+
+
 
 
 @dataclass
 class CloudProviderNodeInfo:
     status: Optional['List[CloudProviderNodeStatus]'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.status: body['status'] = [v.value for v in self.status]
@@ -205,36 +190,35 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CloudProviderNodeInfo':
         return cls(status=d.get('status', None))
+
 
-class CloudProviderNodeStatus(Enum):
+class CloudProviderNodeStatus(Enum):
+
+
     NOT_AVAILABLE_IN_REGION = 'NotAvailableInRegion'
     NOT_ENABLED_ON_SUBSCRIPTION = 'NotEnabledOnSubscription'
 
-
 @dataclass
 class ClusterAccessControlRequest:
     group_name: Optional[str] = None
     permission_level: Optional['ClusterPermissionLevel'] = None
     service_principal_name: Optional[str] = None
     user_name: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.group_name is not None: body['group_name'] = self.group_name
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterAccessControlRequest':
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ClusterPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+
 
 
 @dataclass
@@ -244,24 +228,20 @@ class ClusterAccessControlResponse:
     group_name: Optional[str] = None
     service_principal_name: Optional[str] = None
     user_name: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
         if self.display_name is not None: body['display_name'] = self.display_name
         if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterAccessControlResponse':
-        return cls(all_permissions=_repeated(d, 'all_permissions', ClusterPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(all_permissions=_repeated(d, 'all_permissions', ClusterPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+
 
 
 @dataclass
@@ -291,11 +271,9 @@ class ClusterAttributes:
     spark_env_vars: Optional['Dict[str,str]'] = None
     ssh_public_keys: Optional['List[str]'] = None
     workload_type: Optional['WorkloadType'] = None
-
     def as_dict(self) -> dict:
         body = {}
-        if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
         if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
@@ -304,12 +282,10 @@ def as_dict(self) -> dict:
         if self.custom_tags: body['custom_tags'] = self.custom_tags
         if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
         if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
-        if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
         if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
         if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
@@ -326,31 +302,9 @@ def as_dict(self) -> dict:
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterAttributes':
-        return cls(autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   cluster_source=_enum(d, 'cluster_source', ClusterSource),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   node_type_id=d.get('node_type_id', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), cluster_source=_enum(d, 'cluster_source', ClusterSource), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), node_type_id=d.get('node_type_id', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), workload_type=_from_dict(d, 'workload_type', WorkloadType))
+
+
 
 
 @dataclass
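# Same idea for the larger cluster payloads (hedged, not part of the diff): only
# the generated formatting of ClusterAttributes changed, so nested serialization
# behaves as before. The Spark version string is a placeholder.
from databricks.sdk.service.compute import ClusterAttributes, DataSecurityMode

attrs = ClusterAttributes(spark_version='13.3.x-scala2.12',
                          data_security_mode=DataSecurityMode.SINGLE_USER)
assert ClusterAttributes.from_dict(attrs.as_dict()).spark_version == '13.3.x-scala2.12'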
@@ -400,12 +354,10 @@ class ClusterDetails:
     terminated_time: Optional[int] = None
     termination_reason: Optional['TerminationReason'] = None
     workload_type: Optional['WorkloadType'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
         if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores
@@ -421,12 +373,10 @@ def as_dict(self) -> dict:
         if self.default_tags: body['default_tags'] = self.default_tags
         if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
         if self.driver: body['driver'] = self.driver.as_dict()
-        if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
         if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
         if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
         if self.executors: body['executors'] = [v.as_dict() for v in self.executors]
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
@@ -455,51 +405,9 @@ def as_dict(self) -> dict:
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterDetails':
-        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_cores=d.get('cluster_cores', None),
-                   cluster_id=d.get('cluster_id', None),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_log_status=_from_dict(d, 'cluster_log_status', LogSyncStatus),
-                   cluster_memory_mb=d.get('cluster_memory_mb', None),
-                   cluster_name=d.get('cluster_name', None),
-                   cluster_source=_enum(d, 'cluster_source', ClusterSource),
-                   creator_user_name=d.get('creator_user_name', None),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   default_tags=d.get('default_tags', None),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver=_from_dict(d, 'driver', SparkNode),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   executors=_repeated(d, 'executors', SparkNode),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   jdbc_port=d.get('jdbc_port', None),
-                   last_restarted_time=d.get('last_restarted_time', None),
-                   last_state_loss_time=d.get('last_state_loss_time', None),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_context_id=d.get('spark_context_id', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   spec=_from_dict(d, 'spec', CreateCluster),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   start_time=d.get('start_time', None),
-                   state=_enum(d, 'state', State),
-                   state_message=d.get('state_message', None),
-                   terminated_time=d.get('terminated_time', None),
-                   termination_reason=_from_dict(d, 'termination_reason', TerminationReason),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_cores=d.get('cluster_cores', None), cluster_id=d.get('cluster_id', None), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_log_status=_from_dict(d, 'cluster_log_status', LogSyncStatus), cluster_memory_mb=d.get('cluster_memory_mb', None), cluster_name=d.get('cluster_name', None), cluster_source=_enum(d, 'cluster_source', ClusterSource), creator_user_name=d.get('creator_user_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), default_tags=d.get('default_tags', None), docker_image=_from_dict(d, 'docker_image', DockerImage), driver=_from_dict(d, 'driver', SparkNode), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), executors=_repeated(d, 'executors', SparkNode), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), jdbc_port=d.get('jdbc_port', None), last_restarted_time=d.get('last_restarted_time', None), last_state_loss_time=d.get('last_state_loss_time', None), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_context_id=d.get('spark_context_id', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), spec=_from_dict(d, 'spec', CreateCluster), ssh_public_keys=d.get('ssh_public_keys', None), start_time=d.get('start_time', None), state=_enum(d, 'state', State), state_message=d.get('state_message', None), terminated_time=d.get('terminated_time', None), termination_reason=_from_dict(d, 'termination_reason', TerminationReason), workload_type=_from_dict(d, 'workload_type', WorkloadType))
+
+
 
 
 @dataclass
@@ -509,12 +417,10 @@ class ClusterEvent:
     details: Optional['EventDetails'] = None
     timestamp: Optional[int] = None
     type: Optional['EventType'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.data_plane_event_details:
-            body['data_plane_event_details'] = self.data_plane_event_details.as_dict()
+        if self.data_plane_event_details: body['data_plane_event_details'] = self.data_plane_event_details.as_dict()
         if self.details: body['details'] = self.details.as_dict()
         if self.timestamp is not None: body['timestamp'] = self.timestamp
         if self.type is not None: body['type'] = self.type.value
@@ -522,18 +428,15 @@ def as_dict(self) -> dict:
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterEvent':
-        return cls(cluster_id=d.get('cluster_id', None),
-                   data_plane_event_details=_from_dict(d, 'data_plane_event_details', DataPlaneEventDetails),
-                   details=_from_dict(d, 'details', EventDetails),
-                   timestamp=d.get('timestamp', None),
-                   type=_enum(d, 'type', EventType))
+        return cls(cluster_id=d.get('cluster_id', None), data_plane_event_details=_from_dict(d, 'data_plane_event_details', DataPlaneEventDetails), details=_from_dict(d, 'details', EventDetails), timestamp=d.get('timestamp', None), type=_enum(d, 'type', EventType))
 @dataclass
 class ClusterLibraryStatuses:
     cluster_id: Optional[str] = None
     library_statuses: Optional['List[LibraryFullStatus]'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
@@ -542,15 +445,15 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterLibraryStatuses':
-        return cls(cluster_id=d.get('cluster_id', None),
-                   library_statuses=_repeated(d, 'library_statuses', LibraryFullStatus))
+        return cls(cluster_id=d.get('cluster_id', None), library_statuses=_repeated(d, 'library_statuses', LibraryFullStatus))
+
+

 @dataclass
 class ClusterLogConf:
     dbfs: Optional['DbfsStorageInfo'] = None
     s3: Optional['S3StorageInfo'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.dbfs: body['dbfs'] = self.dbfs.as_dict()
@@ -560,6 +463,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterLogConf':
         return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo))
+
+

 @dataclass
@@ -567,7 +472,6 @@ class ClusterPermission:
     inherited: Optional[bool] = None
     inherited_from_object: Optional['List[str]'] = None
     permission_level: Optional['ClusterPermissionLevel'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.inherited is not None: body['inherited'] = self.inherited
@@ -577,45 +481,41 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPermission':
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPermissionLevel))
+        return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', ClusterPermissionLevel))
+
+

 class ClusterPermissionLevel(Enum):
     """Permission level"""
-
+
     CAN_ATTACH_TO = 'CAN_ATTACH_TO'
     CAN_MANAGE = 'CAN_MANAGE'
     CAN_RESTART = 'CAN_RESTART'
-

 @dataclass
 class ClusterPermissions:
     access_control_list: Optional['List[ClusterAccessControlResponse]'] = None
     object_id: Optional[str] = None
     object_type: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
-        if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
         if self.object_id is not None: body['object_id'] = self.object_id
         if self.object_type is not None: body['object_type'] = self.object_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPermissions':
-        return cls(access_control_list=_repeated(d, 'access_control_list', ClusterAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(access_control_list=_repeated(d, 'access_control_list', ClusterAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
+
+

 @dataclass
 class ClusterPermissionsDescription:
     description: Optional[str] = None
     permission_level: Optional['ClusterPermissionLevel'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.description is not None: body['description'] = self.description
@@ -624,26 +524,26 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPermissionsDescription':
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPermissionLevel))
+        return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ClusterPermissionLevel))
+
+

 @dataclass
 class ClusterPermissionsRequest:
     access_control_list: Optional['List[ClusterAccessControlRequest]'] = None
     cluster_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
-        if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPermissionsRequest':
-        return cls(access_control_list=_repeated(d, 'access_control_list', ClusterAccessControlRequest),
-                   cluster_id=d.get('cluster_id', None))
+        return cls(access_control_list=_repeated(d, 'access_control_list', ClusterAccessControlRequest), cluster_id=d.get('cluster_id', None))
+
+

 @dataclass
@@ -652,22 +552,19 @@ class ClusterPolicyAccessControlRequest:
     permission_level: Optional['ClusterPolicyPermissionLevel'] = None
     service_principal_name: Optional[str] = None
     user_name: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.group_name is not None: body['group_name'] = self.group_name
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
         if self.user_name is not None: body['user_name'] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPolicyAccessControlRequest':
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+

 @dataclass
@@ -677,24 +574,20 @@ class ClusterPolicyAccessControlResponse:
     group_name: Optional[str] = None
     service_principal_name: Optional[str] = None
     user_name: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
         if self.display_name is not None: body['display_name'] = self.display_name
         if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
         if self.user_name is not None: body['user_name'] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPolicyAccessControlResponse':
-        return cls(all_permissions=_repeated(d, 'all_permissions', ClusterPolicyPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(all_permissions=_repeated(d, 'all_permissions', ClusterPolicyPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+

 @dataclass
@@ -702,7 +595,6 @@ class ClusterPolicyPermission:
     inherited: Optional[bool] = None
     inherited_from_object: Optional['List[str]'] = None
     permission_level: Optional['ClusterPolicyPermissionLevel'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.inherited is not None: body['inherited'] = self.inherited
@@ -712,44 +604,39 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPolicyPermission':
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel))
+        return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel))
+
+

 class ClusterPolicyPermissionLevel(Enum):
     """Permission level"""
-
+
     CAN_USE = 'CAN_USE'
-

 @dataclass
 class ClusterPolicyPermissions:
     access_control_list: Optional['List[ClusterPolicyAccessControlResponse]'] = None
     object_id: Optional[str] = None
     object_type: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
-        if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
         if self.object_id is not None: body['object_id'] = self.object_id
         if self.object_type is not None: body['object_type'] = self.object_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPolicyPermissions':
-        return cls(access_control_list=_repeated(d, 'access_control_list',
-                                                 ClusterPolicyAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(access_control_list=_repeated(d, 'access_control_list', ClusterPolicyAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
+
+

 @dataclass
 class ClusterPolicyPermissionsDescription:
     description: Optional[str] = None
     permission_level: Optional['ClusterPolicyPermissionLevel'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.description is not None: body['description'] = self.description
@@ -758,33 +645,32 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPolicyPermissionsDescription':
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel))
+        return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel))
+
+
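Reviewer note: a sketch of how the ACL dataclasses above compose into a request body; the group name is a placeholder, and CAN_USE is the single level defined by ClusterPolicyPermissionLevel:

from databricks.sdk.service.compute import (ClusterPolicyAccessControlRequest,
                                            ClusterPolicyPermissionLevel)

acl = ClusterPolicyAccessControlRequest(group_name='data-engineers',
                                        permission_level=ClusterPolicyPermissionLevel.CAN_USE)
# Only the fields that are set are serialized; the enum is emitted by value.
assert acl.as_dict() == {'group_name': 'data-engineers', 'permission_level': 'CAN_USE'}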
 @dataclass
 class ClusterPolicyPermissionsRequest:
     access_control_list: Optional['List[ClusterPolicyAccessControlRequest]'] = None
     cluster_policy_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
-        if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
         if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterPolicyPermissionsRequest':
-        return cls(access_control_list=_repeated(d, 'access_control_list',
-                                                 ClusterPolicyAccessControlRequest),
-                   cluster_policy_id=d.get('cluster_policy_id', None))
+        return cls(access_control_list=_repeated(d, 'access_control_list', ClusterPolicyAccessControlRequest), cluster_policy_id=d.get('cluster_policy_id', None))
+
+

 @dataclass
 class ClusterSize:
     autoscale: Optional['AutoScale'] = None
     num_workers: Optional[int] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
@@ -794,12 +680,14 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterSize':
         return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), num_workers=d.get('num_workers', None))
+
+

 class ClusterSource(Enum):
     """Determines whether the cluster was created by a user through the UI, created by the Databricks
     Jobs Scheduler, or through an API request. This is the same as cluster_creator, but read only."""
-
+
     API = 'API'
     JOB = 'JOB'
     MODELS = 'MODELS'
@@ -808,7 +696,6 @@ class ClusterSource(Enum):
     SQL = 'SQL'
     UI = 'UI'
-

 @dataclass
 class ClusterSpec:
     apply_policy_default_values: Optional[bool] = None
@@ -839,14 +726,11 @@ class ClusterSpec:
     spark_version: Optional[str] = None
     ssh_public_keys: Optional['List[str]'] = None
     workload_type: Optional['WorkloadType'] = None
-
     def as_dict(self) -> dict:
         body = {}
-        if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values
         if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
         if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
@@ -855,12 +739,10 @@ def as_dict(self) -> dict:
         if self.custom_tags: body['custom_tags'] = self.custom_tags
         if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
         if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
-        if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
         if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
         if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
@@ -878,34 +760,12 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ClusterSpec':
-        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
-                   autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   cluster_source=_enum(d, 'cluster_source', ClusterSource),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), cluster_source=_enum(d, 'cluster_source', ClusterSource), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), workload_type=_from_dict(d, 'workload_type', WorkloadType))
+
+
+
+
+

 @dataclass
@@ -914,7 +774,6 @@ class Command:
     command: Optional[str] = None
     context_id: Optional[str] = None
     language: Optional['Language'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['clusterId'] = self.cluster_id
@@ -925,14 +784,14 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'Command':
-        return cls(cluster_id=d.get('clusterId', None),
-                   command=d.get('command', None),
-                   context_id=d.get('contextId', None),
-                   language=_enum(d, 'language', Language))
+        return cls(cluster_id=d.get('clusterId', None), command=d.get('command', None), context_id=d.get('contextId', None), language=_enum(d, 'language', Language))
+

-class CommandStatus(Enum):
+class CommandStatus(Enum):
+
+
     CANCELLED = 'Cancelled'
     CANCELLING = 'Cancelling'
     ERROR = 'Error'
@@ -941,12 +800,13 @@ class CommandStatus(Enum):
     RUNNING = 'Running'
+
+

 @dataclass
 class CommandStatusResponse:
     id: Optional[str] = None
     results: Optional['Results'] = None
     status: Optional['CommandStatus'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.id is not None: body['id'] = self.id
@@ -956,15 +816,14 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CommandStatusResponse':
-        return cls(id=d.get('id', None),
-                   results=_from_dict(d, 'results', Results),
-                   status=_enum(d, 'status', CommandStatus))
+        return cls(id=d.get('id', None), results=_from_dict(d, 'results', Results), status=_enum(d, 'status', CommandStatus))
+
+

 @dataclass
 class ComputeSpec:
     kind: Optional['ComputeSpecKind'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.kind is not None: body['kind'] = self.kind.value
@@ -973,26 +832,29 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ComputeSpec':
         return cls(kind=_enum(d, 'kind', ComputeSpecKind))
+
+

 class ComputeSpecKind(Enum):
     """The kind of compute described by this compute specification."""
-
+
     SERVERLESS_PREVIEW = 'SERVERLESS_PREVIEW'
-

 class ContextStatus(Enum):
-
+
+
     ERROR = 'Error'
     PENDING = 'Pending'
     RUNNING = 'Running'
+
+

 @dataclass
 class ContextStatusResponse:
     id: Optional[str] = None
     status: Optional['ContextStatus'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.id is not None: body['id'] = self.id
@@ -1002,6 +864,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'ContextStatusResponse':
         return cls(id=d.get('id', None), status=_enum(d, 'status', ContextStatus))
+
+
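Reviewer note: the command-execution types above are the one place in this module that uses camelCase wire keys ('clusterId'/'contextId'), and their status enums carry capitalized string values. A sketch, with Language assumed from earlier in this module and all ids illustrative:

from databricks.sdk.service.compute import Command, CommandStatus, CommandStatusResponse, Language

cmd = Command(cluster_id='0123-456789-abcdefgh', context_id='ctx-1',
              language=Language.PYTHON, command='print(1)')
assert cmd.as_dict()['clusterId'] == '0123-456789-abcdefgh'   # camelCase, not cluster_id

status = CommandStatusResponse.from_dict({'id': 'cmd-1', 'status': 'Running'})
assert status.status == CommandStatus.RUNNING                 # 'Running', not 'RUNNING'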
 @dataclass
@@ -1034,14 +898,11 @@ class CreateCluster:
     spark_env_vars: Optional['Dict[str,str]'] = None
     ssh_public_keys: Optional['List[str]'] = None
     workload_type: Optional['WorkloadType'] = None
-
     def as_dict(self) -> dict:
         body = {}
-        if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values
         if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
         if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
@@ -1050,12 +911,10 @@ def as_dict(self) -> dict:
         if self.custom_tags: body['custom_tags'] = self.custom_tags
         if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
         if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
-        if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
         if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
         if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
@@ -1073,40 +932,14 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CreateCluster':
-        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
-                   autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   cluster_source=_enum(d, 'cluster_source', ClusterSource),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), cluster_source=_enum(d, 'cluster_source', ClusterSource), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), workload_type=_from_dict(d, 'workload_type', WorkloadType))
+
+

 @dataclass
 class CreateClusterResponse:
     cluster_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
@@ -1115,13 +948,14 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CreateClusterResponse':
         return cls(cluster_id=d.get('cluster_id', None))
+
+

 @dataclass
 class CreateContext:
     cluster_id: Optional[str] = None
     language: Optional['Language'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['clusterId'] = self.cluster_id
@@ -1131,6 +965,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CreateContext':
         return cls(cluster_id=d.get('clusterId', None), language=_enum(d, 'language', Language))
+
+
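Reviewer note: a minimal CreateCluster payload as a sketch; AutoScale is defined earlier in this module, and all names and versions are illustrative rather than taken from this diff:

from databricks.sdk.service.compute import AutoScale, CreateCluster

req = CreateCluster(cluster_name='sdk-demo',
                    spark_version='13.3.x-scala2.12',
                    node_type_id='i3.xlarge',
                    autoscale=AutoScale(min_workers=1, max_workers=4),
                    autotermination_minutes=60)
body = req.as_dict()   # nested dataclasses are flattened via their own as_dict()
assert body['autoscale'] == {'max_workers': 4, 'min_workers': 1}
assert CreateCluster.from_dict(body).cluster_name == 'sdk-demo'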
 @dataclass
@@ -1148,7 +984,6 @@ class CreateInstancePool:
     min_idle_instances: Optional[int] = None
     preloaded_docker_images: Optional['List[DockerImage]'] = None
     preloaded_spark_versions: Optional['List[str]'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
@@ -1157,39 +992,25 @@ def as_dict(self) -> dict:
         if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict()
         if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
         if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
         if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
         if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.preloaded_docker_images:
-            body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
-        if self.preloaded_spark_versions:
-            body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
+        if self.preloaded_docker_images: body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
+        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CreateInstancePool':
-        return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes),
-                   custom_tags=d.get('custom_tags', None),
-                   disk_spec=_from_dict(d, 'disk_spec', DiskSpec),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes),
-                   idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                   instance_pool_name=d.get('instance_pool_name', None),
-                   max_capacity=d.get('max_capacity', None),
-                   min_idle_instances=d.get('min_idle_instances', None),
-                   node_type_id=d.get('node_type_id', None),
-                   preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage),
-                   preloaded_spark_versions=d.get('preloaded_spark_versions', None))
+        return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), custom_tags=d.get('custom_tags', None), disk_spec=_from_dict(d, 'disk_spec', DiskSpec), enable_elastic_disk=d.get('enable_elastic_disk', None), gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None), preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), preloaded_spark_versions=d.get('preloaded_spark_versions', None))
+
+

 @dataclass
 class CreateInstancePoolResponse:
     instance_pool_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
@@ -1198,6 +1019,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CreateInstancePoolResponse':
         return cls(instance_pool_id=d.get('instance_pool_id', None))
+
+

 @dataclass
@@ -1205,35 +1028,31 @@ class CreatePolicy:
     name: str
     definition: Optional[str] = None
     description: Optional[str] = None
+    libraries: Optional['List[Library]'] = None
     max_clusters_per_user: Optional[int] = None
     policy_family_definition_overrides: Optional[str] = None
     policy_family_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.definition is not None: body['definition'] = self.definition
         if self.description is not None: body['description'] = self.description
+        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
         if self.name is not None: body['name'] = self.name
-        if self.policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
         if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CreatePolicy':
-        return cls(definition=d.get('definition', None),
-                   description=d.get('description', None),
-                   max_clusters_per_user=d.get('max_clusters_per_user', None),
-                   name=d.get('name', None),
-                   policy_family_definition_overrides=d.get('policy_family_definition_overrides', None),
-                   policy_family_id=d.get('policy_family_id', None))
+        return cls(definition=d.get('definition', None), description=d.get('description', None), libraries=_repeated(d, 'libraries', Library), max_clusters_per_user=d.get('max_clusters_per_user', None), name=d.get('name', None), policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), policy_family_id=d.get('policy_family_id', None))
+
+
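Reviewer note: `libraries` is the field this change actually adds to CreatePolicy (and EditPolicy below). A sketch of how it serializes; Library and PythonPyPiLibrary are assumed from elsewhere in this module, and the package pin is illustrative:

from databricks.sdk.service.compute import CreatePolicy, Library, PythonPyPiLibrary

policy = CreatePolicy(name='team-policy',
                      definition='{"spark_version": {"type": "fixed", "value": "13.3.x-scala2.12"}}',
                      libraries=[Library(pypi=PythonPyPiLibrary(package='requests==2.31.0'))])
# Each Library is serialized through its own as_dict(), matching _repeated() on the way back in.
assert policy.as_dict()['libraries'] == [{'pypi': {'package': 'requests==2.31.0'}}]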
 @dataclass
 class CreatePolicyResponse:
     policy_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -1242,12 +1061,13 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CreatePolicyResponse':
         return cls(policy_id=d.get('policy_id', None))
+
+

 @dataclass
 class CreateResponse:
     script_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.script_id is not None: body['script_id'] = self.script_id
@@ -1256,12 +1076,13 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'CreateResponse':
         return cls(script_id=d.get('script_id', None))
+
+

 @dataclass
 class Created:
     id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.id is not None: body['id'] = self.id
@@ -1270,6 +1091,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'Created':
         return cls(id=d.get('id', None))
+
+

 @dataclass
@@ -1278,7 +1101,6 @@ class DataPlaneEventDetails:
     executor_failures: Optional[int] = None
     host_id: Optional[str] = None
    timestamp: Optional[int] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.event_type is not None: body['event_type'] = self.event_type.value
@@ -1289,19 +1111,17 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DataPlaneEventDetails':
-        return cls(event_type=_enum(d, 'event_type', DataPlaneEventDetailsEventType),
-                   executor_failures=d.get('executor_failures', None),
-                   host_id=d.get('host_id', None),
-                   timestamp=d.get('timestamp', None))
+        return cls(event_type=_enum(d, 'event_type', DataPlaneEventDetailsEventType), executor_failures=d.get('executor_failures', None), host_id=d.get('host_id', None), timestamp=d.get('timestamp', None))
+
+

 class DataPlaneEventDetailsEventType(Enum):
     """"""
-
+
     NODE_BLACKLISTED = 'NODE_BLACKLISTED'
     NODE_EXCLUDED_DECOMMISSIONED = 'NODE_EXCLUDED_DECOMMISSIONED'
-

 class DataSecurityMode(Enum):
     """Data security mode decides what data governance model to use when accessing data from a cluster.
@@ -1316,7 +1136,7 @@ class DataSecurityMode(Enum):
     `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
     clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
     standard clusters."""
-
+
     LEGACY_PASSTHROUGH = 'LEGACY_PASSTHROUGH'
     LEGACY_SINGLE_USER = 'LEGACY_SINGLE_USER'
     LEGACY_TABLE_ACL = 'LEGACY_TABLE_ACL'
@@ -1324,11 +1144,9 @@ class DataSecurityMode(Enum):
     SINGLE_USER = 'SINGLE_USER'
     USER_ISOLATION = 'USER_ISOLATION'
-

 @dataclass
 class DbfsStorageInfo:
     destination: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.destination is not None: body['destination'] = self.destination
@@ -1337,12 +1155,13 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DbfsStorageInfo':
         return cls(destination=d.get('destination', None))
+
+

 @dataclass
 class DeleteCluster:
     cluster_id: str
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
@@ -1351,12 +1170,16 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DeleteCluster':
         return cls(cluster_id=d.get('cluster_id', None))
+
+
+
+
+

 @dataclass
 class DeleteInstancePool:
     instance_pool_id: str
-
     def as_dict(self) -> dict:
         body = {}
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
@@ -1365,12 +1188,13 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DeleteInstancePool':
         return cls(instance_pool_id=d.get('instance_pool_id', None))
+
+

 @dataclass
 class DeletePolicy:
     policy_id: str
-
     def as_dict(self) -> dict:
         body = {}
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -1379,13 +1203,14 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DeletePolicy':
         return cls(policy_id=d.get('policy_id', None))
+
+

 @dataclass
 class DestroyContext:
     cluster_id: str
     context_id: str
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['clusterId'] = self.cluster_id
@@ -1395,6 +1220,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DestroyContext':
         return cls(cluster_id=d.get('clusterId', None), context_id=d.get('contextId', None))
+
+

 @dataclass
@@ -1404,7 +1231,6 @@ class DiskSpec:
     disk_size: Optional[int] = None
     disk_throughput: Optional[int] = None
     disk_type: Optional['DiskType'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.disk_count is not None: body['disk_count'] = self.disk_count
@@ -1416,48 +1242,44 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DiskSpec':
-        return cls(disk_count=d.get('disk_count', None),
-                   disk_iops=d.get('disk_iops', None),
-                   disk_size=d.get('disk_size', None),
-                   disk_throughput=d.get('disk_throughput', None),
-                   disk_type=_from_dict(d, 'disk_type', DiskType))
+        return cls(disk_count=d.get('disk_count', None), disk_iops=d.get('disk_iops', None), disk_size=d.get('disk_size', None), disk_throughput=d.get('disk_throughput', None), disk_type=_from_dict(d, 'disk_type', DiskType))
+
+
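Reviewer note: a sketch of how DiskSpec nests a DiskType, whose enum members are emitted by value (sizes and counts are illustrative):

from databricks.sdk.service.compute import DiskSpec, DiskType, DiskTypeEbsVolumeType

spec = DiskSpec(disk_count=1, disk_size=100,
                disk_type=DiskType(ebs_volume_type=DiskTypeEbsVolumeType.GENERAL_PURPOSE_SSD))
# The nested DiskType serializes to its enum's string value.
assert spec.as_dict()['disk_type'] == {'ebs_volume_type': 'GENERAL_PURPOSE_SSD'}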
 @dataclass
 class DiskType:
     azure_disk_volume_type: Optional['DiskTypeAzureDiskVolumeType'] = None
     ebs_volume_type: Optional['DiskTypeEbsVolumeType'] = None
-
     def as_dict(self) -> dict:
         body = {}
-        if self.azure_disk_volume_type is not None:
-            body['azure_disk_volume_type'] = self.azure_disk_volume_type.value
+        if self.azure_disk_volume_type is not None: body['azure_disk_volume_type'] = self.azure_disk_volume_type.value
         if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DiskType':
-        return cls(azure_disk_volume_type=_enum(d, 'azure_disk_volume_type', DiskTypeAzureDiskVolumeType),
-                   ebs_volume_type=_enum(d, 'ebs_volume_type', DiskTypeEbsVolumeType))
+        return cls(azure_disk_volume_type=_enum(d, 'azure_disk_volume_type', DiskTypeAzureDiskVolumeType), ebs_volume_type=_enum(d, 'ebs_volume_type', DiskTypeEbsVolumeType))
+

-class DiskTypeAzureDiskVolumeType(Enum):
+class DiskTypeAzureDiskVolumeType(Enum):
+
+
     PREMIUM_LRS = 'PREMIUM_LRS'
     STANDARD_LRS = 'STANDARD_LRS'
-

 class DiskTypeEbsVolumeType(Enum):
-
+
+
     GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD'
     THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD'
-

 @dataclass
 class DockerBasicAuth:
     password: Optional[str] = None
     username: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.password is not None: body['password'] = self.password
@@ -1467,13 +1289,14 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DockerBasicAuth':
         return cls(password=d.get('password', None), username=d.get('username', None))
+
+

 @dataclass
 class DockerImage:
     basic_auth: Optional['DockerBasicAuth'] = None
     url: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.basic_auth: body['basic_auth'] = self.basic_auth.as_dict()
@@ -1483,15 +1306,16 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'DockerImage':
         return cls(basic_auth=_from_dict(d, 'basic_auth', DockerBasicAuth), url=d.get('url', None))
+
+

 class EbsVolumeType(Enum):
     """The type of EBS volumes that will be launched with this cluster."""
-
+
     GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD'
     THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD'
-
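Reviewer note: a sketch of a custom container spec built from DockerImage and DockerBasicAuth; the registry URL and credentials are placeholders only:

from databricks.sdk.service.compute import DockerBasicAuth, DockerImage

image = DockerImage(url='my-registry.example.com/runtime:13.3',
                    basic_auth=DockerBasicAuth(username='token', password='<secret>'))
assert image.as_dict()['basic_auth']['username'] == 'token'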
 @dataclass
 class EditCluster:
     cluster_id: str
@@ -1523,14 +1347,11 @@ class EditCluster:
     spark_env_vars: Optional['Dict[str,str]'] = None
     ssh_public_keys: Optional['List[str]'] = None
     workload_type: Optional['WorkloadType'] = None
-
     def as_dict(self) -> dict:
         body = {}
-        if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values
         if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
         if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
         if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
@@ -1540,12 +1361,10 @@ def as_dict(self) -> dict:
         if self.custom_tags: body['custom_tags'] = self.custom_tags
         if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
         if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
-        if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
         if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
         if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
@@ -1563,35 +1382,9 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'EditCluster':
-        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
-                   autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_id=d.get('cluster_id', None),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   cluster_source=_enum(d, 'cluster_source', ClusterSource),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_id=d.get('cluster_id', None), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), cluster_source=_enum(d, 'cluster_source', ClusterSource), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), workload_type=_from_dict(d, 'workload_type', WorkloadType))
+
+

 @dataclass
@@ -1603,12 +1396,10 @@ class EditInstancePool:
     idle_instance_autotermination_minutes: Optional[int] = None
     max_capacity: Optional[int] = None
     min_idle_instances: Optional[int] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
         if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
@@ -1618,13 +1409,9 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'EditInstancePool':
-        return cls(custom_tags=d.get('custom_tags', None),
-                   idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   instance_pool_name=d.get('instance_pool_name', None),
-                   max_capacity=d.get('max_capacity', None),
-                   min_idle_instances=d.get('min_idle_instances', None),
-                   node_type_id=d.get('node_type_id', None))
+        return cls(custom_tags=d.get('custom_tags', None), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_id=d.get('instance_pool_id', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None))
+
+

 @dataclass
@@ -1633,31 +1420,27 @@ class EditPolicy:
     name: str
     definition: Optional[str] = None
     description: Optional[str] = None
+    libraries: Optional['List[Library]'] = None
     max_clusters_per_user: Optional[int] = None
     policy_family_definition_overrides: Optional[str] = None
     policy_family_id: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.definition is not None: body['definition'] = self.definition
         if self.description is not None: body['description'] = self.description
+        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
         if self.name is not None: body['name'] = self.name
-        if self.policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
         if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'EditPolicy':
-        return cls(definition=d.get('definition', None),
-                   description=d.get('description', None),
-                   max_clusters_per_user=d.get('max_clusters_per_user', None),
-                   name=d.get('name', None),
-                   policy_family_definition_overrides=d.get('policy_family_definition_overrides', None),
-                   policy_family_id=d.get('policy_family_id', None),
-                   policy_id=d.get('policy_id', None))
+        return cls(definition=d.get('definition', None), description=d.get('description', None), libraries=_repeated(d, 'libraries', Library), max_clusters_per_user=d.get('max_clusters_per_user', None), name=d.get('name', None), policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), policy_family_id=d.get('policy_family_id', None), policy_id=d.get('policy_id', None))
+
+

 @dataclass
@@ -1672,6 +1455,7 @@ class EventDetails:
     driver_state_message: Optional[str] = None
     enable_termination_for_node_blocklisted: Optional[bool] = None
     free_space: Optional[int] = None
+    init_scripts: Optional['InitScriptEventDetails'] = None
     instance_id: Optional[str] = None
     job_run_name: Optional[str] = None
     previous_attributes: Optional['ClusterAttributes'] = None
@@ -1681,7 +1465,6 @@ class EventDetails:
     target_num_vcpus: Optional[int] = None
     target_num_workers: Optional[int] = None
     user: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.attributes: body['attributes'] = self.attributes.as_dict()
@@ -1692,9 +1475,9 @@ def as_dict(self) -> dict:
         if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason
         if self.disk_size is not None: body['disk_size'] = self.disk_size
         if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message
-        if self.enable_termination_for_node_blocklisted is not None:
-            body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted
+        if self.enable_termination_for_node_blocklisted is not None: body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted
         if self.free_space is not None: body['free_space'] = self.free_space
+        if self.init_scripts: body['init_scripts'] = self.init_scripts.as_dict()
         if self.instance_id is not None: body['instance_id'] = self.instance_id
         if self.job_run_name is not None: body['job_run_name'] = self.job_run_name
         if self.previous_attributes: body['previous_attributes'] = self.previous_attributes.as_dict()
@@ -1708,39 +1491,22 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'EventDetails':
-        return cls(attributes=_from_dict(d, 'attributes', ClusterAttributes),
-                   cause=_enum(d, 'cause', EventDetailsCause),
-                   cluster_size=_from_dict(d, 'cluster_size', ClusterSize),
-                   current_num_vcpus=d.get('current_num_vcpus', None),
-                   current_num_workers=d.get('current_num_workers', None),
-                   did_not_expand_reason=d.get('did_not_expand_reason', None),
-                   disk_size=d.get('disk_size', None),
-                   driver_state_message=d.get('driver_state_message', None),
-                   enable_termination_for_node_blocklisted=d.get('enable_termination_for_node_blocklisted',
-                                                                 None),
-                   free_space=d.get('free_space', None),
-                   instance_id=d.get('instance_id', None),
-                   job_run_name=d.get('job_run_name', None),
-                   previous_attributes=_from_dict(d, 'previous_attributes', ClusterAttributes),
-                   previous_cluster_size=_from_dict(d, 'previous_cluster_size', ClusterSize),
-                   previous_disk_size=d.get('previous_disk_size', None),
-                   reason=_from_dict(d, 'reason', TerminationReason),
-                   target_num_vcpus=d.get('target_num_vcpus', None),
-                   target_num_workers=d.get('target_num_workers', None),
-                   user=d.get('user', None))
+        return cls(attributes=_from_dict(d, 'attributes', ClusterAttributes), cause=_enum(d, 'cause', EventDetailsCause), cluster_size=_from_dict(d, 'cluster_size', ClusterSize), current_num_vcpus=d.get('current_num_vcpus', None), current_num_workers=d.get('current_num_workers', None), did_not_expand_reason=d.get('did_not_expand_reason', None), disk_size=d.get('disk_size', None), driver_state_message=d.get('driver_state_message', None), enable_termination_for_node_blocklisted=d.get('enable_termination_for_node_blocklisted', None), free_space=d.get('free_space', None), init_scripts=_from_dict(d, 'init_scripts', InitScriptEventDetails), instance_id=d.get('instance_id', None), job_run_name=d.get('job_run_name', None), previous_attributes=_from_dict(d, 'previous_attributes', ClusterAttributes), previous_cluster_size=_from_dict(d, 'previous_cluster_size', ClusterSize), previous_disk_size=d.get('previous_disk_size', None), reason=_from_dict(d, 'reason', TerminationReason), target_num_vcpus=d.get('target_num_vcpus', None), target_num_workers=d.get('target_num_workers', None), user=d.get('user', None))
+
+

 class EventDetailsCause(Enum):
     """The cause of a change in target size."""
-
+
     AUTORECOVERY = 'AUTORECOVERY'
     AUTOSCALE = 'AUTOSCALE'
     REPLACE_BAD_NODES = 'REPLACE_BAD_NODES'
     USER_REQUEST = 'USER_REQUEST'
-

 class EventType(Enum):
-
+
+
     AUTOSCALING_STATS_REPORT = 'AUTOSCALING_STATS_REPORT'
     CREATING = 'CREATING'
     DBFS_DOWN = 'DBFS_DOWN'
@@ -1767,44 +1533,41 @@ class EventType(Enum):
     UNPINNED = 'UNPINNED'
     UPSIZE_COMPLETED = 'UPSIZE_COMPLETED'
-

 @dataclass
 class GcpAttributes:
     availability: Optional['GcpAvailability'] = None
     boot_disk_size: Optional[int] = None
     google_service_account: Optional[str] = None
     local_ssd_count: Optional[int] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.availability is not None: body['availability'] = self.availability.value
         if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size
-        if self.google_service_account is not None:
-            body['google_service_account'] = self.google_service_account
+        if self.google_service_account is not None: body['google_service_account'] = self.google_service_account
         if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'GcpAttributes':
-        return cls(availability=_enum(d, 'availability', GcpAvailability),
-                   boot_disk_size=d.get('boot_disk_size', None),
-                   google_service_account=d.get('google_service_account', None),
-                   local_ssd_count=d.get('local_ssd_count', None))
+        return cls(availability=_enum(d, 'availability', GcpAvailability), boot_disk_size=d.get('boot_disk_size', None), google_service_account=d.get('google_service_account', None), local_ssd_count=d.get('local_ssd_count', None))
+
+

 class GcpAvailability(Enum):
     """This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or
     preemptible VMs with a fallback to on-demand VMs if the former is unavailable."""
-
+
     ON_DEMAND_GCP = 'ON_DEMAND_GCP'
     PREEMPTIBLE_GCP = 'PREEMPTIBLE_GCP'
     PREEMPTIBLE_WITH_FALLBACK_GCP = 'PREEMPTIBLE_WITH_FALLBACK_GCP'
+
+
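Reviewer note: a sketch of the fallback semantics described by the GcpAvailability docstring above; the service account is a placeholder:

from databricks.sdk.service.compute import GcpAttributes, GcpAvailability

attrs = GcpAttributes(availability=GcpAvailability.PREEMPTIBLE_WITH_FALLBACK_GCP,
                      google_service_account='sa@project.iam.gserviceaccount.com')
assert attrs.as_dict()['availability'] == 'PREEMPTIBLE_WITH_FALLBACK_GCP'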
 @dataclass
 class GetClusterPermissionLevelsResponse:
     permission_levels: Optional['List[ClusterPermissionsDescription]'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
@@ -1813,12 +1576,19 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'GetClusterPermissionLevelsResponse':
         return cls(permission_levels=_repeated(d, 'permission_levels', ClusterPermissionsDescription))
+
+
+
+
+
+
+
+

 @dataclass
 class GetClusterPolicyPermissionLevelsResponse:
     permission_levels: Optional['List[ClusterPolicyPermissionsDescription]'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
@@ -1827,10 +1597,21 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'GetClusterPolicyPermissionLevelsResponse':
         return cls(permission_levels=_repeated(d, 'permission_levels', ClusterPolicyPermissionsDescription))
+

-@dataclass
-class GetEvents:
+
+
+
+
+
+
+
+
+
+
+@dataclass
+class GetEvents:
     cluster_id: str
     end_time: Optional[int] = None
     event_types: Optional['List[EventType]'] = None
@@ -1838,7 +1619,6 @@ class GetEvents:
     offset: Optional[int] = None
     order: Optional['GetEventsOrder'] = None
     start_time: Optional[int] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
@@ -1852,28 +1632,22 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'GetEvents':
-        return cls(cluster_id=d.get('cluster_id', None),
-                   end_time=d.get('end_time', None),
-                   event_types=d.get('event_types', None),
-                   limit=d.get('limit', None),
-                   offset=d.get('offset', None),
-                   order=_enum(d, 'order', GetEventsOrder),
-                   start_time=d.get('start_time', None))
+        return cls(cluster_id=d.get('cluster_id', None), end_time=d.get('end_time', None), event_types=d.get('event_types', None), limit=d.get('limit', None), offset=d.get('offset', None), order=_enum(d, 'order', GetEventsOrder), start_time=d.get('start_time', None))
+
+

 class GetEventsOrder(Enum):
     """The order to list events in; either "ASC" or "DESC". Defaults to "DESC"."""
-
+
     ASC = 'ASC'
     DESC = 'DESC'
-

 @dataclass
 class GetEventsResponse:
     events: Optional['List[ClusterEvent]'] = None
     next_page: Optional['GetEvents'] = None
     total_count: Optional[int] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.events: body['events'] = [v.as_dict() for v in self.events]
@@ -1883,9 +1657,12 @@ def as_dict(self) -> dict:

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'GetEventsResponse':
-        return cls(events=_repeated(d, 'events', ClusterEvent),
-                   next_page=_from_dict(d, 'next_page', GetEvents),
-                   total_count=d.get('total_count', None))
+        return cls(events=_repeated(d, 'events', ClusterEvent), next_page=_from_dict(d, 'next_page', GetEvents), total_count=d.get('total_count', None))
+
+
+
+
+
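Reviewer note: GetEventsResponse carries offset pagination in-band, with next_page holding a ready-made GetEvents request for the following page. A decoding sketch (ids and counts illustrative):

from databricks.sdk.service.compute import GetEvents, GetEventsResponse

page = GetEventsResponse.from_dict({'events': [],
                                    'next_page': {'cluster_id': '0123-456789-abcdefgh', 'offset': 50},
                                    'total_count': 120})
nxt = page.next_page   # decoded via _from_dict(d, 'next_page', GetEvents)
assert isinstance(nxt, GetEvents) and nxt.offset == 50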
self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: - body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: - body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] + if self.preloaded_docker_images: body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] if self.state is not None: body['state'] = self.state.value if self.stats: body['stats'] = self.stats.as_dict() if self.status: body['status'] = self.status.as_dict() @@ -1936,30 +1709,17 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'GetInstancePool': - return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), - custom_tags=d.get('custom_tags', None), - default_tags=d.get('default_tags', None), - disk_spec=_from_dict(d, 'disk_spec', DiskSpec), - enable_elastic_disk=d.get('enable_elastic_disk', None), - gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), - idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), - instance_pool_id=d.get('instance_pool_id', None), - instance_pool_name=d.get('instance_pool_name', None), - max_capacity=d.get('max_capacity', None), - min_idle_instances=d.get('min_idle_instances', None), - node_type_id=d.get('node_type_id', None), - preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), - preloaded_spark_versions=d.get('preloaded_spark_versions', None), - state=_enum(d, 'state', InstancePoolState), - stats=_from_dict(d, 'stats', InstancePoolStats), - status=_from_dict(d, 'status', InstancePoolStatus)) + return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), custom_tags=d.get('custom_tags', None), default_tags=d.get('default_tags', None), disk_spec=_from_dict(d, 'disk_spec', DiskSpec), enable_elastic_disk=d.get('enable_elastic_disk', None), gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_id=d.get('instance_pool_id', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None), preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), preloaded_spark_versions=d.get('preloaded_spark_versions', None), state=_enum(d, 'state', InstancePoolState), stats=_from_dict(d, 'stats', InstancePoolStats), status=_from_dict(d, 'status', InstancePoolStatus)) + + + + + @dataclass class GetInstancePoolPermissionLevelsResponse: permission_levels: Optional['List[InstancePoolPermissionsDescription]'] = None - def as_dict(self) -> dict: body = {} if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] @@ -1968,12 +1728,22 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'GetInstancePoolPermissionLevelsResponse': return cls(permission_levels=_repeated(d, 
'permission_levels', InstancePoolPermissionsDescription)) + + + + + + + + + + + @dataclass class GetSparkVersionsResponse: versions: Optional['List[SparkVersion]'] = None - def as_dict(self) -> dict: body = {} if self.versions: body['versions'] = [v.as_dict() for v in self.versions] @@ -1982,6 +1752,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'GetSparkVersionsResponse': return cls(versions=_repeated(d, 'versions', SparkVersion)) + + @dataclass @@ -1990,7 +1762,6 @@ class GlobalInitScriptCreateRequest: script: str enabled: Optional[bool] = None position: Optional[int] = None - def as_dict(self) -> dict: body = {} if self.enabled is not None: body['enabled'] = self.enabled @@ -2001,10 +1772,9 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptCreateRequest': - return cls(enabled=d.get('enabled', None), - name=d.get('name', None), - position=d.get('position', None), - script=d.get('script', None)) + return cls(enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script=d.get('script', None)) + + @dataclass @@ -2017,7 +1787,6 @@ class GlobalInitScriptDetails: script_id: Optional[str] = None updated_at: Optional[int] = None updated_by: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.created_at is not None: body['created_at'] = self.created_at @@ -2032,14 +1801,9 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptDetails': - return cls(created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - enabled=d.get('enabled', None), - name=d.get('name', None), - position=d.get('position', None), - script_id=d.get('script_id', None), - updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) + return cls(created_at=d.get('created_at', None), created_by=d.get('created_by', None), enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script_id=d.get('script_id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + @dataclass @@ -2053,7 +1817,6 @@ class GlobalInitScriptDetailsWithContent: script_id: Optional[str] = None updated_at: Optional[int] = None updated_by: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.created_at is not None: body['created_at'] = self.created_at @@ -2069,15 +1832,9 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptDetailsWithContent': - return cls(created_at=d.get('created_at', None), - created_by=d.get('created_by', None), - enabled=d.get('enabled', None), - name=d.get('name', None), - position=d.get('position', None), - script=d.get('script', None), - script_id=d.get('script_id', None), - updated_at=d.get('updated_at', None), - updated_by=d.get('updated_by', None)) + return cls(created_at=d.get('created_at', None), created_by=d.get('created_by', None), enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script=d.get('script', None), script_id=d.get('script_id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + @dataclass @@ -2087,7 +1844,6 @@ class GlobalInitScriptUpdateRequest: enabled: Optional[bool] = None position: Optional[int] = None script_id: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.enabled is not None: body['enabled'] = self.enabled @@ -2099,23 +1855,70 @@ def as_dict(self) -> dict: 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'GlobalInitScriptUpdateRequest':
-        return cls(enabled=d.get('enabled', None),
-                   name=d.get('name', None),
-                   position=d.get('position', None),
-                   script=d.get('script', None),
-                   script_id=d.get('script_id', None))
+        return cls(enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script=d.get('script', None), script_id=d.get('script_id', None))
+
+
+
+
+@dataclass
+class InitScriptEventDetails:
+    cluster: Optional['List[InitScriptInfoAndExecutionDetails]'] = None
+    global_: Optional['List[InitScriptInfoAndExecutionDetails]'] = None
+    reported_for_node: Optional[str] = None
+    def as_dict(self) -> dict:
+        body = {}
+        if self.cluster: body['cluster'] = [v.as_dict() for v in self.cluster]
+        if self.global_: body['global'] = [v.as_dict() for v in self.global_]
+        if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> 'InitScriptEventDetails':
+        return cls(cluster=_repeated(d, 'cluster', InitScriptInfoAndExecutionDetails), global_=_repeated(d, 'global', InitScriptInfoAndExecutionDetails), reported_for_node=d.get('reported_for_node', None))
+
+
+@dataclass
+class InitScriptExecutionDetails:
+    error_message: Optional[str] = None
+    execution_duration_seconds: Optional[int] = None
+    status: Optional['InitScriptExecutionDetailsStatus'] = None
+    def as_dict(self) -> dict:
+        body = {}
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.execution_duration_seconds is not None: body['execution_duration_seconds'] = self.execution_duration_seconds
+        if self.status is not None: body['status'] = self.status.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> 'InitScriptExecutionDetails':
+        return cls(error_message=d.get('error_message', None), execution_duration_seconds=d.get('execution_duration_seconds', None), status=_enum(d, 'status', InitScriptExecutionDetailsStatus))
+
+
+
+
+class InitScriptExecutionDetailsStatus(Enum):
+    """The current status of the script"""
+
+    FAILED_EXECUTION = 'FAILED_EXECUTION'
+    FAILED_FETCH = 'FAILED_FETCH'
+    NOT_EXECUTED = 'NOT_EXECUTED'
+    SKIPPED = 'SKIPPED'
+    SUCCEEDED = 'SUCCEEDED'
+    UNKNOWN = 'UNKNOWN'
+
 @dataclass
 class InitScriptInfo:
     dbfs: Optional['DbfsStorageInfo'] = None
+    file: Optional['LocalFileInfo'] = None
     s3: Optional['S3StorageInfo'] = None
     volumes: Optional['VolumesStorageInfo'] = None
     workspace: Optional['WorkspaceStorageInfo'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.dbfs: body['dbfs'] = self.dbfs.as_dict()
+        if self.file: body['file'] = self.file.as_dict()
         if self.s3: body['s3'] = self.s3.as_dict()
         if self.volumes: body['volumes'] = self.volumes.as_dict()
         if self.workspace: body['workspace'] = self.workspace.as_dict()
@@ -2123,17 +1926,32 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'InitScriptInfo':
-        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo),
-                   s3=_from_dict(d, 's3', S3StorageInfo),
-                   volumes=_from_dict(d, 'volumes', VolumesStorageInfo),
-                   workspace=_from_dict(d, 'workspace', WorkspaceStorageInfo))
+        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), file=_from_dict(d, 'file', LocalFileInfo), s3=_from_dict(d, 's3', S3StorageInfo), volumes=_from_dict(d, 'volumes', VolumesStorageInfo), workspace=_from_dict(d, 'workspace', WorkspaceStorageInfo))
+
+
+
+
+@dataclass
+class InitScriptInfoAndExecutionDetails:
+    execution_details:
Optional['InitScriptExecutionDetails'] = None + script: Optional['InitScriptInfo'] = None + def as_dict(self) -> dict: + body = {} + if self.execution_details: body['execution_details'] = self.execution_details.as_dict() + if self.script: body['script'] = self.script.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'InitScriptInfoAndExecutionDetails': + return cls(execution_details=_from_dict(d, 'execution_details', InitScriptExecutionDetails), script=_from_dict(d, 'script', InitScriptInfo)) + + @dataclass class InstallLibraries: cluster_id: str libraries: 'List[Library]' - def as_dict(self) -> dict: body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id @@ -2143,6 +1961,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstallLibraries': return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated(d, 'libraries', Library)) + + @dataclass @@ -2151,22 +1971,19 @@ class InstancePoolAccessControlRequest: permission_level: Optional['InstancePoolPermissionLevel'] = None service_principal_name: Optional[str] = None user_name: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.group_name is not None: body['group_name'] = self.group_name if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: - body['service_principal_name'] = self.service_principal_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAccessControlRequest': - return cls(group_name=d.get('group_name', None), - permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel), - service_principal_name=d.get('service_principal_name', None), - user_name=d.get('user_name', None)) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass @@ -2176,24 +1993,20 @@ class InstancePoolAccessControlResponse: group_name: Optional[str] = None service_principal_name: Optional[str] = None user_name: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] if self.display_name is not None: body['display_name'] = self.display_name if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: - body['service_principal_name'] = self.service_principal_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAccessControlResponse': - return cls(all_permissions=_repeated(d, 'all_permissions', InstancePoolPermission), - display_name=d.get('display_name', None), - group_name=d.get('group_name', None), - service_principal_name=d.get('service_principal_name', None), - user_name=d.get('user_name', None)) + return cls(all_permissions=_repeated(d, 'all_permissions', InstancePoolPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), 
user_name=d.get('user_name', None)) + + @dataclass @@ -2216,7 +2029,6 @@ class InstancePoolAndStats: state: Optional['InstancePoolState'] = None stats: Optional['InstancePoolStats'] = None status: Optional['InstancePoolStatus'] = None - def as_dict(self) -> dict: body = {} if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() @@ -2226,17 +2038,14 @@ def as_dict(self) -> dict: if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() - if self.idle_instance_autotermination_minutes is not None: - body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name if self.max_capacity is not None: body['max_capacity'] = self.max_capacity if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: - body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: - body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] + if self.preloaded_docker_images: body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] if self.state is not None: body['state'] = self.state.value if self.stats: body['stats'] = self.stats.as_dict() if self.status: body['status'] = self.status.as_dict() @@ -2244,24 +2053,9 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAndStats': - return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), - custom_tags=d.get('custom_tags', None), - default_tags=d.get('default_tags', None), - disk_spec=_from_dict(d, 'disk_spec', DiskSpec), - enable_elastic_disk=d.get('enable_elastic_disk', None), - gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), - idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), - instance_pool_id=d.get('instance_pool_id', None), - instance_pool_name=d.get('instance_pool_name', None), - max_capacity=d.get('max_capacity', None), - min_idle_instances=d.get('min_idle_instances', None), - node_type_id=d.get('node_type_id', None), - preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), - preloaded_spark_versions=d.get('preloaded_spark_versions', None), - state=_enum(d, 'state', InstancePoolState), - stats=_from_dict(d, 'stats', InstancePoolStats), - status=_from_dict(d, 'status', InstancePoolStatus)) + return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), custom_tags=d.get('custom_tags', None), default_tags=d.get('default_tags', None), disk_spec=_from_dict(d, 'disk_spec', DiskSpec), 
enable_elastic_disk=d.get('enable_elastic_disk', None), gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_id=d.get('instance_pool_id', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None), preloaded_docker_images=_repeated(d, 'preloaded_docker_images', DockerImage), preloaded_spark_versions=d.get('preloaded_spark_versions', None), state=_enum(d, 'state', InstancePoolState), stats=_from_dict(d, 'stats', InstancePoolStats), status=_from_dict(d, 'status', InstancePoolStatus)) + + @dataclass @@ -2269,36 +2063,32 @@ class InstancePoolAwsAttributes: availability: Optional['InstancePoolAwsAttributesAvailability'] = None spot_bid_price_percent: Optional[int] = None zone_id: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.availability is not None: body['availability'] = self.availability.value - if self.spot_bid_price_percent is not None: - body['spot_bid_price_percent'] = self.spot_bid_price_percent + if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent if self.zone_id is not None: body['zone_id'] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAwsAttributes': - return cls(availability=_enum(d, 'availability', InstancePoolAwsAttributesAvailability), - spot_bid_price_percent=d.get('spot_bid_price_percent', None), - zone_id=d.get('zone_id', None)) + return cls(availability=_enum(d, 'availability', InstancePoolAwsAttributesAvailability), spot_bid_price_percent=d.get('spot_bid_price_percent', None), zone_id=d.get('zone_id', None)) + + class InstancePoolAwsAttributesAvailability(Enum): """Availability type used for the spot nodes. The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability""" - + ON_DEMAND = 'ON_DEMAND' SPOT = 'SPOT' - @dataclass class InstancePoolAzureAttributes: availability: Optional['InstancePoolAzureAttributesAvailability'] = None spot_bid_max_price: Optional[float] = None - def as_dict(self) -> dict: body = {} if self.availability is not None: body['availability'] = self.availability.value @@ -2307,34 +2097,36 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolAzureAttributes': - return cls(availability=_enum(d, 'availability', InstancePoolAzureAttributesAvailability), - spot_bid_max_price=d.get('spot_bid_max_price', None)) + return cls(availability=_enum(d, 'availability', InstancePoolAzureAttributesAvailability), spot_bid_max_price=d.get('spot_bid_max_price', None)) + + class InstancePoolAzureAttributesAvailability(Enum): """Shows the Availability type used for the spot nodes. 
The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability""" - + ON_DEMAND_AZURE = 'ON_DEMAND_AZURE' SPOT_AZURE = 'SPOT_AZURE' - @dataclass class InstancePoolGcpAttributes: gcp_availability: Optional['GcpAvailability'] = None local_ssd_count: Optional[int] = None - + zone_id: Optional[str] = None def as_dict(self) -> dict: body = {} if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability.value if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count + if self.zone_id is not None: body['zone_id'] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolGcpAttributes': - return cls(gcp_availability=_enum(d, 'gcp_availability', GcpAvailability), - local_ssd_count=d.get('local_ssd_count', None)) + return cls(gcp_availability=_enum(d, 'gcp_availability', GcpAvailability), local_ssd_count=d.get('local_ssd_count', None), zone_id=d.get('zone_id', None)) + + @dataclass @@ -2342,7 +2134,6 @@ class InstancePoolPermission: inherited: Optional[bool] = None inherited_from_object: Optional['List[str]'] = None permission_level: Optional['InstancePoolPermissionLevel'] = None - def as_dict(self) -> dict: body = {} if self.inherited is not None: body['inherited'] = self.inherited @@ -2352,44 +2143,40 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolPermission': - return cls(inherited=d.get('inherited', None), - inherited_from_object=d.get('inherited_from_object', None), - permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel)) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel)) + + class InstancePoolPermissionLevel(Enum): """Permission level""" - + CAN_ATTACH_TO = 'CAN_ATTACH_TO' CAN_MANAGE = 'CAN_MANAGE' - @dataclass class InstancePoolPermissions: access_control_list: Optional['List[InstancePoolAccessControlResponse]'] = None object_id: Optional[str] = None object_type: Optional[str] = None - def as_dict(self) -> dict: body = {} - if self.access_control_list: - body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] if self.object_id is not None: body['object_id'] = self.object_id if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolPermissions': - return cls(access_control_list=_repeated(d, 'access_control_list', InstancePoolAccessControlResponse), - object_id=d.get('object_id', None), - object_type=d.get('object_type', None)) + return cls(access_control_list=_repeated(d, 'access_control_list', InstancePoolAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class InstancePoolPermissionsDescription: description: Optional[str] = None permission_level: Optional['InstancePoolPermissionLevel'] = None - def as_dict(self) -> dict: body = {} if self.description is not None: body['description'] = self.description @@ -2398,43 +2185,41 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolPermissionsDescription': - return cls(description=d.get('description', None), - permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel)) + return 
cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel)) + + @dataclass class InstancePoolPermissionsRequest: access_control_list: Optional['List[InstancePoolAccessControlRequest]'] = None instance_pool_id: Optional[str] = None - def as_dict(self) -> dict: body = {} - if self.access_control_list: - body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolPermissionsRequest': - return cls(access_control_list=_repeated(d, 'access_control_list', InstancePoolAccessControlRequest), - instance_pool_id=d.get('instance_pool_id', None)) + return cls(access_control_list=_repeated(d, 'access_control_list', InstancePoolAccessControlRequest), instance_pool_id=d.get('instance_pool_id', None)) + + class InstancePoolState(Enum): """Current state of the instance pool.""" - + ACTIVE = 'ACTIVE' DELETED = 'DELETED' STOPPED = 'STOPPED' - @dataclass class InstancePoolStats: idle_count: Optional[int] = None pending_idle_count: Optional[int] = None pending_used_count: Optional[int] = None used_count: Optional[int] = None - def as_dict(self) -> dict: body = {} if self.idle_count is not None: body['idle_count'] = self.idle_count @@ -2445,25 +2230,24 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolStats': - return cls(idle_count=d.get('idle_count', None), - pending_idle_count=d.get('pending_idle_count', None), - pending_used_count=d.get('pending_used_count', None), - used_count=d.get('used_count', None)) + return cls(idle_count=d.get('idle_count', None), pending_idle_count=d.get('pending_idle_count', None), pending_used_count=d.get('pending_used_count', None), used_count=d.get('used_count', None)) + + @dataclass class InstancePoolStatus: pending_instance_errors: Optional['List[PendingInstanceError]'] = None - def as_dict(self) -> dict: body = {} - if self.pending_instance_errors: - body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors] + if self.pending_instance_errors: body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstancePoolStatus': return cls(pending_instance_errors=_repeated(d, 'pending_instance_errors', PendingInstanceError)) + + @dataclass @@ -2471,29 +2255,27 @@ class InstanceProfile: instance_profile_arn: str iam_role_arn: Optional[str] = None is_meta_instance_profile: Optional[bool] = None - def as_dict(self) -> dict: body = {} if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: - body['is_meta_instance_profile'] = self.is_meta_instance_profile + if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'InstanceProfile': - return cls(iam_role_arn=d.get('iam_role_arn', None), - instance_profile_arn=d.get('instance_profile_arn', None), - is_meta_instance_profile=d.get('is_meta_instance_profile', None)) + return cls(iam_role_arn=d.get('iam_role_arn', None), 
instance_profile_arn=d.get('instance_profile_arn', None), is_meta_instance_profile=d.get('is_meta_instance_profile', None)) + -class Language(Enum): +class Language(Enum): + + PYTHON = 'python' SCALA = 'scala' SQL = 'sql' - @dataclass class Library: cran: Optional['RCranLibrary'] = None @@ -2502,7 +2284,6 @@ class Library: maven: Optional['MavenLibrary'] = None pypi: Optional['PythonPyPiLibrary'] = None whl: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.cran: body['cran'] = self.cran.as_dict() @@ -2515,12 +2296,9 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'Library': - return cls(cran=_from_dict(d, 'cran', RCranLibrary), - egg=d.get('egg', None), - jar=d.get('jar', None), - maven=_from_dict(d, 'maven', MavenLibrary), - pypi=_from_dict(d, 'pypi', PythonPyPiLibrary), - whl=d.get('whl', None)) + return cls(cran=_from_dict(d, 'cran', RCranLibrary), egg=d.get('egg', None), jar=d.get('jar', None), maven=_from_dict(d, 'maven', MavenLibrary), pypi=_from_dict(d, 'pypi', PythonPyPiLibrary), whl=d.get('whl', None)) + + @dataclass @@ -2529,11 +2307,9 @@ class LibraryFullStatus: library: Optional['Library'] = None messages: Optional['List[str]'] = None status: Optional['LibraryFullStatusStatus'] = None - def as_dict(self) -> dict: body = {} - if self.is_library_for_all_clusters is not None: - body['is_library_for_all_clusters'] = self.is_library_for_all_clusters + if self.is_library_for_all_clusters is not None: body['is_library_for_all_clusters'] = self.is_library_for_all_clusters if self.library: body['library'] = self.library.as_dict() if self.messages: body['messages'] = [v for v in self.messages] if self.status is not None: body['status'] = self.status.value @@ -2541,15 +2317,14 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'LibraryFullStatus': - return cls(is_library_for_all_clusters=d.get('is_library_for_all_clusters', None), - library=_from_dict(d, 'library', Library), - messages=d.get('messages', None), - status=_enum(d, 'status', LibraryFullStatusStatus)) + return cls(is_library_for_all_clusters=d.get('is_library_for_all_clusters', None), library=_from_dict(d, 'library', Library), messages=d.get('messages', None), status=_enum(d, 'status', LibraryFullStatusStatus)) + + class LibraryFullStatusStatus(Enum): """Status of installing the library on the cluster.""" - + FAILED = 'FAILED' INSTALLED = 'INSTALLED' INSTALLING = 'INSTALLING' @@ -2558,11 +2333,9 @@ class LibraryFullStatusStatus(Enum): SKIPPED = 'SKIPPED' UNINSTALL_ON_RESTART = 'UNINSTALL_ON_RESTART' - @dataclass class ListAllClusterLibraryStatusesResponse: statuses: Optional['List[ClusterLibraryStatuses]'] = None - def as_dict(self) -> dict: body = {} if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses] @@ -2571,13 +2344,14 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListAllClusterLibraryStatusesResponse': return cls(statuses=_repeated(d, 'statuses', ClusterLibraryStatuses)) + + @dataclass class ListAvailableZonesResponse: default_zone: Optional[str] = None zones: Optional['List[str]'] = None - def as_dict(self) -> dict: body = {} if self.default_zone is not None: body['default_zone'] = self.default_zone @@ -2587,12 +2361,19 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListAvailableZonesResponse': return cls(default_zone=d.get('default_zone', None), zones=d.get('zones', None)) + + + + + + + + @dataclass class ListClustersResponse: clusters: 
Optional['List[ClusterDetails]'] = None - def as_dict(self) -> dict: body = {} if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] @@ -2601,12 +2382,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListClustersResponse': return cls(clusters=_repeated(d, 'clusters', ClusterDetails)) + + @dataclass class ListGlobalInitScriptsResponse: scripts: Optional['List[GlobalInitScriptDetails]'] = None - def as_dict(self) -> dict: body = {} if self.scripts: body['scripts'] = [v.as_dict() for v in self.scripts] @@ -2615,12 +2397,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListGlobalInitScriptsResponse': return cls(scripts=_repeated(d, 'scripts', GlobalInitScriptDetails)) + + @dataclass class ListInstancePools: instance_pools: Optional['List[InstancePoolAndStats]'] = None - def as_dict(self) -> dict: body = {} if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools] @@ -2629,12 +2412,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListInstancePools': return cls(instance_pools=_repeated(d, 'instance_pools', InstancePoolAndStats)) + + @dataclass class ListInstanceProfilesResponse: instance_profiles: Optional['List[InstanceProfile]'] = None - def as_dict(self) -> dict: body = {} if self.instance_profiles: body['instance_profiles'] = [v.as_dict() for v in self.instance_profiles] @@ -2643,12 +2427,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListInstanceProfilesResponse': return cls(instance_profiles=_repeated(d, 'instance_profiles', InstanceProfile)) + + @dataclass class ListNodeTypesResponse: node_types: Optional['List[NodeType]'] = None - def as_dict(self) -> dict: body = {} if self.node_types: body['node_types'] = [v.as_dict() for v in self.node_types] @@ -2657,12 +2442,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListNodeTypesResponse': return cls(node_types=_repeated(d, 'node_types', NodeType)) + + @dataclass class ListPoliciesResponse: policies: Optional['List[Policy]'] = None - def as_dict(self) -> dict: body = {} if self.policies: body['policies'] = [v.as_dict() for v in self.policies] @@ -2671,13 +2457,17 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListPoliciesResponse': return cls(policies=_repeated(d, 'policies', Policy)) + + + + + @dataclass class ListPolicyFamiliesResponse: policy_families: 'List[PolicyFamily]' next_page_token: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.next_page_token is not None: body['next_page_token'] = self.next_page_token @@ -2686,46 +2476,59 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ListPolicyFamiliesResponse': - return cls(next_page_token=d.get('next_page_token', None), - policy_families=_repeated(d, 'policy_families', PolicyFamily)) + return cls(next_page_token=d.get('next_page_token', None), policy_families=_repeated(d, 'policy_families', PolicyFamily)) + -class ListSortColumn(Enum): +class ListSortColumn(Enum): + + POLICY_CREATION_TIME = 'POLICY_CREATION_TIME' POLICY_NAME = 'POLICY_NAME' - class ListSortOrder(Enum): - + + ASC = 'ASC' DESC = 'DESC' +@dataclass +class LocalFileInfo: + destination: Optional[str] = None + def as_dict(self) -> dict: + body = {} + if self.destination is not None: body['destination'] = self.destination + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) 
-> 'LocalFileInfo': + return cls(destination=d.get('destination', None)) + + + @dataclass class LogAnalyticsInfo: log_analytics_primary_key: Optional[str] = None log_analytics_workspace_id: Optional[str] = None - def as_dict(self) -> dict: body = {} - if self.log_analytics_primary_key is not None: - body['log_analytics_primary_key'] = self.log_analytics_primary_key - if self.log_analytics_workspace_id is not None: - body['log_analytics_workspace_id'] = self.log_analytics_workspace_id + if self.log_analytics_primary_key is not None: body['log_analytics_primary_key'] = self.log_analytics_primary_key + if self.log_analytics_workspace_id is not None: body['log_analytics_workspace_id'] = self.log_analytics_workspace_id return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'LogAnalyticsInfo': - return cls(log_analytics_primary_key=d.get('log_analytics_primary_key', None), - log_analytics_workspace_id=d.get('log_analytics_workspace_id', None)) + return cls(log_analytics_primary_key=d.get('log_analytics_primary_key', None), log_analytics_workspace_id=d.get('log_analytics_workspace_id', None)) + + @dataclass class LogSyncStatus: last_attempted: Optional[int] = None last_exception: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.last_attempted is not None: body['last_attempted'] = self.last_attempted @@ -2735,6 +2538,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'LogSyncStatus': return cls(last_attempted=d.get('last_attempted', None), last_exception=d.get('last_exception', None)) + + @dataclass @@ -2742,7 +2547,6 @@ class MavenLibrary: coordinates: str exclusions: Optional['List[str]'] = None repo: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.coordinates is not None: body['coordinates'] = self.coordinates @@ -2752,9 +2556,9 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'MavenLibrary': - return cls(coordinates=d.get('coordinates', None), - exclusions=d.get('exclusions', None), - repo=d.get('repo', None)) + return cls(coordinates=d.get('coordinates', None), exclusions=d.get('exclusions', None), repo=d.get('repo', None)) + + @dataclass @@ -2764,24 +2568,20 @@ class NodeInstanceType: local_disks: Optional[int] = None local_nvme_disk_size_gb: Optional[int] = None local_nvme_disks: Optional[int] = None - def as_dict(self) -> dict: body = {} if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb if self.local_disks is not None: body['local_disks'] = self.local_disks - if self.local_nvme_disk_size_gb is not None: - body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb + if self.local_nvme_disk_size_gb is not None: body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'NodeInstanceType': - return cls(instance_type_id=d.get('instance_type_id', None), - local_disk_size_gb=d.get('local_disk_size_gb', None), - local_disks=d.get('local_disks', None), - local_nvme_disk_size_gb=d.get('local_nvme_disk_size_gb', None), - local_nvme_disks=d.get('local_nvme_disks', None)) + return cls(instance_type_id=d.get('instance_type_id', None), local_disk_size_gb=d.get('local_disk_size_gb', None), local_disks=d.get('local_disks', None), local_nvme_disk_size_gb=d.get('local_nvme_disk_size_gb', None), 
local_nvme_disks=d.get('local_nvme_disks', None)) + + @dataclass @@ -2807,7 +2607,6 @@ class NodeType: support_ebs_volumes: Optional[bool] = None support_port_forwarding: Optional[bool] = None supports_elastic_disk: Optional[bool] = None - def as_dict(self) -> dict: body = {} if self.category is not None: body['category'] = self.category @@ -2815,8 +2614,7 @@ def as_dict(self) -> dict: if self.display_order is not None: body['display_order'] = self.display_order if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated - if self.is_encrypted_in_transit is not None: - body['is_encrypted_in_transit'] = self.is_encrypted_in_transit + if self.is_encrypted_in_transit is not None: body['is_encrypted_in_transit'] = self.is_encrypted_in_transit if self.is_graviton is not None: body['is_graviton'] = self.is_graviton if self.is_hidden is not None: body['is_hidden'] = self.is_hidden if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled @@ -2830,41 +2628,21 @@ def as_dict(self) -> dict: if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable if self.support_cluster_tags is not None: body['support_cluster_tags'] = self.support_cluster_tags if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes - if self.support_port_forwarding is not None: - body['support_port_forwarding'] = self.support_port_forwarding + if self.support_port_forwarding is not None: body['support_port_forwarding'] = self.support_port_forwarding if self.supports_elastic_disk is not None: body['supports_elastic_disk'] = self.supports_elastic_disk return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'NodeType': - return cls(category=d.get('category', None), - description=d.get('description', None), - display_order=d.get('display_order', None), - instance_type_id=d.get('instance_type_id', None), - is_deprecated=d.get('is_deprecated', None), - is_encrypted_in_transit=d.get('is_encrypted_in_transit', None), - is_graviton=d.get('is_graviton', None), - is_hidden=d.get('is_hidden', None), - is_io_cache_enabled=d.get('is_io_cache_enabled', None), - memory_mb=d.get('memory_mb', None), - node_info=_from_dict(d, 'node_info', CloudProviderNodeInfo), - node_instance_type=_from_dict(d, 'node_instance_type', NodeInstanceType), - node_type_id=d.get('node_type_id', None), - num_cores=d.get('num_cores', None), - num_gpus=d.get('num_gpus', None), - photon_driver_capable=d.get('photon_driver_capable', None), - photon_worker_capable=d.get('photon_worker_capable', None), - support_cluster_tags=d.get('support_cluster_tags', None), - support_ebs_volumes=d.get('support_ebs_volumes', None), - support_port_forwarding=d.get('support_port_forwarding', None), - supports_elastic_disk=d.get('supports_elastic_disk', None)) + return cls(category=d.get('category', None), description=d.get('description', None), display_order=d.get('display_order', None), instance_type_id=d.get('instance_type_id', None), is_deprecated=d.get('is_deprecated', None), is_encrypted_in_transit=d.get('is_encrypted_in_transit', None), is_graviton=d.get('is_graviton', None), is_hidden=d.get('is_hidden', None), is_io_cache_enabled=d.get('is_io_cache_enabled', None), memory_mb=d.get('memory_mb', None), node_info=_from_dict(d, 'node_info', CloudProviderNodeInfo), node_instance_type=_from_dict(d, 'node_instance_type', NodeInstanceType), 
node_type_id=d.get('node_type_id', None), num_cores=d.get('num_cores', None), num_gpus=d.get('num_gpus', None), photon_driver_capable=d.get('photon_driver_capable', None), photon_worker_capable=d.get('photon_worker_capable', None), support_cluster_tags=d.get('support_cluster_tags', None), support_ebs_volumes=d.get('support_ebs_volumes', None), support_port_forwarding=d.get('support_port_forwarding', None), supports_elastic_disk=d.get('supports_elastic_disk', None)) + + @dataclass class PendingInstanceError: instance_id: Optional[str] = None message: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.instance_id is not None: body['instance_id'] = self.instance_id @@ -2874,12 +2652,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'PendingInstanceError': return cls(instance_id=d.get('instance_id', None), message=d.get('message', None)) + + @dataclass class PermanentDeleteCluster: cluster_id: str - def as_dict(self) -> dict: body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id @@ -2888,12 +2667,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'PermanentDeleteCluster': return cls(cluster_id=d.get('cluster_id', None)) + + @dataclass class PinCluster: cluster_id: str - def as_dict(self) -> dict: body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id @@ -2902,6 +2682,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'PinCluster': return cls(cluster_id=d.get('cluster_id', None)) + + @dataclass @@ -2911,12 +2693,12 @@ class Policy: definition: Optional[str] = None description: Optional[str] = None is_default: Optional[bool] = None + libraries: Optional['List[Library]'] = None max_clusters_per_user: Optional[int] = None name: Optional[str] = None policy_family_definition_overrides: Optional[str] = None policy_family_id: Optional[str] = None policy_id: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp @@ -2924,26 +2706,19 @@ def as_dict(self) -> dict: if self.definition is not None: body['definition'] = self.definition if self.description is not None: body['description'] = self.description if self.is_default is not None: body['is_default'] = self.is_default + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user if self.name is not None: body['name'] = self.name - if self.policy_family_definition_overrides is not None: - body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id if self.policy_id is not None: body['policy_id'] = self.policy_id return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'Policy': - return cls(created_at_timestamp=d.get('created_at_timestamp', None), - creator_user_name=d.get('creator_user_name', None), - definition=d.get('definition', None), - description=d.get('description', None), - is_default=d.get('is_default', None), - max_clusters_per_user=d.get('max_clusters_per_user', None), - name=d.get('name', None), - policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), - 
policy_family_id=d.get('policy_family_id', None), - policy_id=d.get('policy_id', None)) + return cls(created_at_timestamp=d.get('created_at_timestamp', None), creator_user_name=d.get('creator_user_name', None), definition=d.get('definition', None), description=d.get('description', None), is_default=d.get('is_default', None), libraries=_repeated(d, 'libraries', Library), max_clusters_per_user=d.get('max_clusters_per_user', None), name=d.get('name', None), policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), policy_family_id=d.get('policy_family_id', None), policy_id=d.get('policy_id', None)) + + @dataclass @@ -2952,7 +2727,6 @@ class PolicyFamily: name: str description: str definition: str - def as_dict(self) -> dict: body = {} if self.definition is not None: body['definition'] = self.definition @@ -2963,17 +2737,17 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'PolicyFamily': - return cls(definition=d.get('definition', None), - description=d.get('description', None), - name=d.get('name', None), - policy_family_id=d.get('policy_family_id', None)) + return cls(definition=d.get('definition', None), description=d.get('description', None), name=d.get('name', None), policy_family_id=d.get('policy_family_id', None)) + + + + @dataclass class PythonPyPiLibrary: package: str repo: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.package is not None: body['package'] = self.package @@ -2983,13 +2757,14 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'PythonPyPiLibrary': return cls(package=d.get('package', None), repo=d.get('repo', None)) + + @dataclass class RCranLibrary: package: str repo: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.package is not None: body['package'] = self.package @@ -2999,12 +2774,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'RCranLibrary': return cls(package=d.get('package', None), repo=d.get('repo', None)) + + @dataclass class RemoveInstanceProfile: instance_profile_arn: str - def as_dict(self) -> dict: body = {} if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn @@ -3013,6 +2789,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'RemoveInstanceProfile': return cls(instance_profile_arn=d.get('instance_profile_arn', None)) + + @dataclass @@ -3020,7 +2798,6 @@ class ResizeCluster: cluster_id: str autoscale: Optional['AutoScale'] = None num_workers: Optional[int] = None - def as_dict(self) -> dict: body = {} if self.autoscale: body['autoscale'] = self.autoscale.as_dict() @@ -3030,16 +2807,15 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'ResizeCluster': - return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), - cluster_id=d.get('cluster_id', None), - num_workers=d.get('num_workers', None)) + return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), cluster_id=d.get('cluster_id', None), num_workers=d.get('num_workers', None)) + + @dataclass class RestartCluster: cluster_id: str restart_user: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id @@ -3049,17 +2825,19 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'RestartCluster': return cls(cluster_id=d.get('cluster_id', None), restart_user=d.get('restart_user', None)) + -class ResultType(Enum): +class 
ResultType(Enum):
+
+
     ERROR = 'error'
     IMAGE = 'image'
     IMAGES = 'images'
     TABLE = 'table'
     TEXT = 'text'
-
 @dataclass
 class Results:
     cause: Optional[str] = None
@@ -3072,7 +2850,6 @@ class Results:
     schema: Optional['List[Dict[str,Any]]'] = None
     summary: Optional[str] = None
     truncated: Optional[bool] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.cause is not None: body['cause'] = self.cause
@@ -3089,27 +2866,19 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'Results':
-        return cls(cause=d.get('cause', None),
-                   data=d.get('data', None),
-                   file_name=d.get('fileName', None),
-                   file_names=d.get('fileNames', None),
-                   is_json_schema=d.get('isJsonSchema', None),
-                   pos=d.get('pos', None),
-                   result_type=_enum(d, 'resultType', ResultType),
-                   schema=d.get('schema', None),
-                   summary=d.get('summary', None),
-                   truncated=d.get('truncated', None))
+        return cls(cause=d.get('cause', None), data=d.get('data', None), file_name=d.get('fileName', None), file_names=d.get('fileNames', None), is_json_schema=d.get('isJsonSchema', None), pos=d.get('pos', None), result_type=_enum(d, 'resultType', ResultType), schema=d.get('schema', None), summary=d.get('summary', None), truncated=d.get('truncated', None))
+
+
 class RuntimeEngine(Enum):
     """Decides which runtime engine to be used, e.g. Standard vs. Photon. If unspecified, the runtime
     engine is inferred from spark_version."""
-
+
     NULL = 'NULL'
     PHOTON = 'PHOTON'
     STANDARD = 'STANDARD'
-
 @dataclass
 class S3StorageInfo:
     canned_acl: Optional[str] = None
@@ -3119,7 +2888,6 @@ class S3StorageInfo:
     endpoint: Optional[str] = None
     kms_key: Optional[str] = None
     region: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.canned_acl is not None: body['canned_acl'] = self.canned_acl
@@ -3133,13 +2901,9 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'S3StorageInfo':
-        return cls(canned_acl=d.get('canned_acl', None),
-                   destination=d.get('destination', None),
-                   enable_encryption=d.get('enable_encryption', None),
-                   encryption_type=d.get('encryption_type', None),
-                   endpoint=d.get('endpoint', None),
-                   kms_key=d.get('kms_key', None),
-                   region=d.get('region', None))
+        return cls(canned_acl=d.get('canned_acl', None), destination=d.get('destination', None), enable_encryption=d.get('enable_encryption', None), encryption_type=d.get('encryption_type', None), endpoint=d.get('endpoint', None), kms_key=d.get('kms_key', None), region=d.get('region', None))
+
+
 @dataclass
@@ -3151,7 +2915,6 @@ class SparkNode:
     private_ip: Optional[str] = None
     public_dns: Optional[str] = None
     start_timestamp: Optional[int] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip
@@ -3165,19 +2928,14 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'SparkNode':
-        return cls(host_private_ip=d.get('host_private_ip', None),
-                   instance_id=d.get('instance_id', None),
-                   node_aws_attributes=_from_dict(d, 'node_aws_attributes', SparkNodeAwsAttributes),
-                   node_id=d.get('node_id', None),
-                   private_ip=d.get('private_ip', None),
-                   public_dns=d.get('public_dns', None),
start_timestamp=d.get('start_timestamp', None)) + + @dataclass class SparkNodeAwsAttributes: is_spot: Optional[bool] = None - def as_dict(self) -> dict: body = {} if self.is_spot is not None: body['is_spot'] = self.is_spot @@ -3186,13 +2944,14 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'SparkNodeAwsAttributes': return cls(is_spot=d.get('is_spot', None)) + + @dataclass class SparkVersion: key: Optional[str] = None name: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.key is not None: body['key'] = self.key @@ -3202,12 +2961,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'SparkVersion': return cls(key=d.get('key', None), name=d.get('name', None)) + + @dataclass class StartCluster: cluster_id: str - def as_dict(self) -> dict: body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id @@ -3216,11 +2976,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'StartCluster': return cls(cluster_id=d.get('cluster_id', None)) + + class State(Enum): """Current state of the cluster.""" - + ERROR = 'ERROR' PENDING = 'PENDING' RESIZING = 'RESIZING' @@ -3230,13 +2992,11 @@ class State(Enum): TERMINATING = 'TERMINATING' UNKNOWN = 'UNKNOWN' - @dataclass class TerminationReason: code: Optional['TerminationReasonCode'] = None parameters: Optional['Dict[str,str]'] = None type: Optional['TerminationReasonType'] = None - def as_dict(self) -> dict: body = {} if self.code is not None: body['code'] = self.code.value @@ -3246,14 +3006,14 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'TerminationReason': - return cls(code=_enum(d, 'code', TerminationReasonCode), - parameters=d.get('parameters', None), - type=_enum(d, 'type', TerminationReasonType)) + return cls(code=_enum(d, 'code', TerminationReasonCode), parameters=d.get('parameters', None), type=_enum(d, 'type', TerminationReasonType)) + + class TerminationReasonCode(Enum): """status code indicating why the cluster was terminated""" - + ABUSE_DETECTED = 'ABUSE_DETECTED' ATTACH_PROJECT_FAILURE = 'ATTACH_PROJECT_FAILURE' AWS_AUTHORIZATION_FAILURE = 'AWS_AUTHORIZATION_FAILURE' @@ -3334,21 +3094,18 @@ class TerminationReasonCode(Enum): WORKSPACE_CANCELLED_ERROR = 'WORKSPACE_CANCELLED_ERROR' WORKSPACE_CONFIGURATION_ERROR = 'WORKSPACE_CONFIGURATION_ERROR' - class TerminationReasonType(Enum): """type of the termination""" - + CLIENT_ERROR = 'CLIENT_ERROR' CLOUD_FAILURE = 'CLOUD_FAILURE' SERVICE_FAULT = 'SERVICE_FAULT' SUCCESS = 'SUCCESS' - @dataclass class UninstallLibraries: cluster_id: str libraries: 'List[Library]' - def as_dict(self) -> dict: body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id @@ -3358,12 +3115,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'UninstallLibraries': return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated(d, 'libraries', Library)) + + @dataclass class UnpinCluster: cluster_id: str - def as_dict(self) -> dict: body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id @@ -3372,12 +3130,13 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 'UnpinCluster': return cls(cluster_id=d.get('cluster_id', None)) + + @dataclass class VolumesStorageInfo: destination: Optional[str] = None - def as_dict(self) -> dict: body = {} if self.destination is not None: body['destination'] = self.destination @@ -3386,12 +3145,13 @@ def 
as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'VolumesStorageInfo':
         return cls(destination=d.get('destination', None))
+
+
 @dataclass
 class WorkloadType:
     clients: Optional['ClientsTypes'] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.clients: body['clients'] = self.clients.as_dict()
@@ -3400,12 +3160,13 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'WorkloadType':
         return cls(clients=_from_dict(d, 'clients', ClientsTypes))
+
+
 @dataclass
 class WorkspaceStorageInfo:
     destination: Optional[str] = None
-
     def as_dict(self) -> dict:
         body = {}
         if self.destination is not None: body['destination'] = self.destination
@@ -3414,39 +3175,38 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> 'WorkspaceStorageInfo':
         return cls(destination=d.get('destination', None))
+
+
 class ClusterPoliciesAPI:
-    """Cluster policy limits the ability to configure clusters based on a set of rules. The policy rules limit
-    the attributes or attribute values available for cluster creation. Cluster policies have ACLs that limit
-    their use to specific users and groups.
+    """You can use cluster policies to control users' ability to configure clusters based on a set of rules.
+    These rules specify which attributes or attribute values can be used during cluster creation. Cluster
+    policies have ACLs that limit their use to specific users and groups.

-    Cluster policies let you limit users to create clusters with prescribed settings, simplify the user
-    interface and enable more users to create their own clusters (by fixing and hiding some values), control
-    cost by limiting per cluster maximum cost (by setting limits on attributes whose values contribute to
-    hourly price).
+    With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in
+    the policy's "libraries" field. - Limit users to creating clusters with the prescribed settings. -
+    Simplify the user interface, enabling more users to create clusters, by fixing and hiding some fields. -
+    Manage costs by setting limits on attributes that impact the hourly rate.

     Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user
-    creates a cluster: - A user who has cluster create permission can select the Unrestricted policy and
-    create fully-configurable clusters. - A user who has both cluster create permission and access to cluster
-    policies can select the Unrestricted policy and policies they have access to. - A user that has access to
-    only cluster policies, can select the policies they have access to.
+    creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted
+    policy and create fully-configurable clusters. - A user who has both unrestricted cluster create
+    permission and access to cluster policies can select the Unrestricted policy and policies they have access
+    to. - A user who has access only to cluster policies can select the policies they have access to.

-    If no policies have been created in the workspace, the Policy drop-down does not display.
+    If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create,
+    edit, and delete policies. Admin users also have access to all policies."""

-    Only admin users can create, edit, and delete policies. Admin users also have access to all policies."""
Admin users also have access to all policies.""" - def __init__(self, api_client): self._api = api_client + - def create(self, - name: str, - *, - definition: Optional[str] = None, - description: Optional[str] = None, - max_clusters_per_user: Optional[int] = None, - policy_family_definition_overrides: Optional[str] = None, - policy_family_id: Optional[str] = None) -> CreatePolicyResponse: + + def create(self + , name: str + , * + , definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None) -> CreatePolicyResponse: """Create a new policy. Creates a new policy with prescribed settings. @@ -3458,6 +3218,8 @@ def create(self, Policy definition document expressed in Databricks Cluster Policy Definition Language. :param description: str (optional) Additional human-readable description of the cluster policy. + :param libraries: List[:class:`Library`] (optional) + A list of libraries to be installed on the next cluster restart that uses this policy. :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. @@ -3479,16 +3241,24 @@ def create(self, body = {} if definition is not None: body['definition'] = definition if description is not None: body['description'] = description + if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] if max_clusters_per_user is not None: body['max_clusters_per_user'] = max_clusters_per_user if name is not None: body['name'] = name - if policy_family_definition_overrides is not None: - body['policy_family_definition_overrides'] = policy_family_definition_overrides + if policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = policy_family_definition_overrides if policy_family_id is not None: body['policy_family_id'] = policy_family_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - res = self._api.do('POST', '/api/2.0/policies/clusters/create', body=body, headers=headers) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + res = self._api.do('POST', + '/api/2.0/policies/clusters/create' + , body=body + + , headers=headers) return CreatePolicyResponse.from_dict(res) - def delete(self, policy_id: str): + + + def delete(self + , policy_id: str + ): """Delete a cluster policy. Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited. 
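Reviewer note on the `create` hunk above: the new `libraries` field takes the same `Library` shapes used elsewhere in this module and is serialized into the request body via `as_dict()`. A minimal usage sketch, assuming workspace auth is already configured; the policy name, definition, and package are illustrative:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import Library, PythonPyPiLibrary

w = WorkspaceClient()

# Policy whose governed clusters auto-install a PyPI package on their next restart.
created = w.cluster_policies.create(
    name='autoinstall-demo',
    definition='{"spark_version": {"type": "fixed", "value": "13.3.x-scala2.12"}}',
    libraries=[Library(pypi=PythonPyPiLibrary(package='requests'))])
print(created.policy_id)
```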
@@ -3500,18 +3270,20 @@ def delete(self, policy_id: str): """ body = {} if policy_id is not None: body['policy_id'] = policy_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/policies/clusters/delete', body=body, headers=headers) - - def edit(self, - policy_id: str, - name: str, - *, - definition: Optional[str] = None, - description: Optional[str] = None, - max_clusters_per_user: Optional[int] = None, - policy_family_definition_overrides: Optional[str] = None, - policy_family_id: Optional[str] = None): + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/policies/clusters/delete' + , body=body + + , headers=headers) + + + + + def edit(self + , policy_id: str, name: str + , * + , definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None): """Update a cluster policy. Update an existing policy for cluster. This operation may make some clusters governed by the previous @@ -3526,6 +3298,8 @@ def edit(self, Policy definition document expressed in Databricks Cluster Policy Definition Language. :param description: str (optional) Additional human-readable description of the cluster policy. + :param libraries: List[:class:`Library`] (optional) + A list of libraries to be installed on the next cluster restart that uses this policy. :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. @@ -3547,16 +3321,25 @@ def edit(self, body = {} if definition is not None: body['definition'] = definition if description is not None: body['description'] = description + if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] if max_clusters_per_user is not None: body['max_clusters_per_user'] = max_clusters_per_user if name is not None: body['name'] = name - if policy_family_definition_overrides is not None: - body['policy_family_definition_overrides'] = policy_family_definition_overrides + if policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = policy_family_definition_overrides if policy_family_id is not None: body['policy_family_id'] = policy_family_id if policy_id is not None: body['policy_id'] = policy_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/policies/clusters/edit', body=body, headers=headers) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/policies/clusters/edit' + , body=body + + , headers=headers) + - def get(self, policy_id: str) -> Policy: + + + def get(self + , policy_id: str + ) -> Policy: """Get a cluster policy. Get a cluster policy entity. Creation and editing is available to admins only. 
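The `edit` hunk above gains the same `libraries` field; because `edit` re-sends the full policy, `name` is required alongside `policy_id`. A sketch under the same assumptions (the policy id is hypothetical):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import Library, PythonPyPiLibrary

w = WorkspaceClient()

# Re-send the existing policy with one extra auto-installed library.
policy = w.cluster_policies.get(policy_id='ABC123DEF456')
w.cluster_policies.edit(policy_id=policy.policy_id,
                        name=policy.name,
                        definition=policy.definition,
                        libraries=[Library(pypi=PythonPyPiLibrary(package='urllib3'))])

# Per the delete() docstring: governed clusters keep running after deletion,
# but can no longer be edited.
w.cluster_policies.delete(policy_id=policy.policy_id)
```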
@@ -3566,14 +3349,22 @@ def get(self, policy_id: str) -> Policy: :returns: :class:`Policy` """ - + query = {} if policy_id is not None: query['policy_id'] = policy_id - headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/policies/clusters/get', query=query, headers=headers) + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + '/api/2.0/policies/clusters/get' + , query=query + + , headers=headers) return Policy.from_dict(res) - def get_permission_levels(self, cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse: + + + def get_permission_levels(self + , cluster_policy_id: str + ) -> GetClusterPolicyPermissionLevelsResponse: """Get cluster policy permission levels. Gets the permission levels that a user can have on an object. @@ -3583,14 +3374,20 @@ def get_permission_levels(self, cluster_policy_id: str) -> GetClusterPolicyPermi :returns: :class:`GetClusterPolicyPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json', } + + headers = {'Accept': 'application/json',} res = self._api.do('GET', - f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}/permissionLevels', - headers=headers) + f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}/permissionLevels' + + + , headers=headers) return GetClusterPolicyPermissionLevelsResponse.from_dict(res) - def get_permissions(self, cluster_policy_id: str) -> ClusterPolicyPermissions: + + + def get_permissions(self + , cluster_policy_id: str + ) -> ClusterPolicyPermissions: """Get cluster policy permissions. Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root @@ -3601,17 +3398,21 @@ def get_permissions(self, cluster_policy_id: str) -> ClusterPolicyPermissions: :returns: :class:`ClusterPolicyPermissions` """ - - headers = {'Accept': 'application/json', } + + headers = {'Accept': 'application/json',} res = self._api.do('GET', - f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}', - headers=headers) + f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}' + + + , headers=headers) return ClusterPolicyPermissions.from_dict(res) - def list(self, - *, - sort_column: Optional[ListSortColumn] = None, - sort_order: Optional[ListSortOrder] = None) -> Iterator['Policy']: + + + def list(self + + , * + , sort_column: Optional[ListSortColumn] = None, sort_order: Optional[ListSortOrder] = None) -> Iterator['Policy']: """List cluster policies. Returns a list of policies accessible by the requesting user. @@ -3625,21 +3426,26 @@ def list(self, :returns: Iterator over :class:`Policy` """ - + query = {} if sort_column is not None: query['sort_column'] = sort_column.value if sort_order is not None: query['sort_order'] = sort_order.value - headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.0/policies/clusters/list', query=query, headers=headers) + headers = {'Accept': 'application/json',} + json = self._api.do('GET', + '/api/2.0/policies/clusters/list' + , query=query + + , headers=headers) parsed = ListPoliciesResponse.from_dict(json).policies return parsed if parsed is not None else [] + - def set_permissions( - self, - cluster_policy_id: str, - *, - access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None - ) -> ClusterPolicyPermissions: + + + def set_permissions(self + , cluster_policy_id: str + , * + , access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None) -> ClusterPolicyPermissions: """Set cluster policy permissions. 
Sets permissions on a cluster policy. Cluster policies can inherit permissions from their root object. @@ -3651,21 +3457,21 @@ def set_permissions( :returns: :class:`ClusterPolicyPermissions` """ body = {} - if access_control_list is not None: - body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} res = self._api.do('PUT', - f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}', - body=body, - headers=headers) + f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}' + , body=body + + , headers=headers) return ClusterPolicyPermissions.from_dict(res) - def update_permissions( - self, - cluster_policy_id: str, - *, - access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None - ) -> ClusterPolicyPermissions: + + + def update_permissions(self + , cluster_policy_id: str + , * + , access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None) -> ClusterPolicyPermissions: """Update cluster policy permissions. Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root @@ -3678,16 +3484,17 @@ def update_permissions( :returns: :class:`ClusterPolicyPermissions` """ body = {} - if access_control_list is not None: - body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} res = self._api.do('PATCH', - f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}', - body=body, - headers=headers) + f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}' + , body=body + + , headers=headers) return ClusterPolicyPermissions.from_dict(res) - + + class ClustersAPI: """The Clusters API allows you to create, start, edit, list, terminate, and delete clusters. @@ -3710,73 +3517,71 @@ class ClustersAPI: terminated in the last 30 days and up to 30 job clusters recently terminated by the job scheduler. To keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list.""" - + def __init__(self, api_client): self._api = api_client + + def wait_get_cluster_running(self, cluster_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails: + deadline = time.time() + timeout.total_seconds() + target_states = (State.RUNNING, ) + failure_states = (State.ERROR, State.TERMINATED, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get(cluster_id=cluster_id) + status = poll.state + status_message = poll.state_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach RUNNING, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_get_cluster_terminated(self, cluster_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails: + deadline = time.time() + timeout.total_seconds() + target_states = (State.TERMINATED, ) + failure_states = (State.ERROR, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get(cluster_id=cluster_id) + status = poll.state + status_message = poll.state_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach TERMINATED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + - def wait_get_cluster_running( - self, - cluster_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails: - deadline = time.time() + timeout.total_seconds() - target_states = (State.RUNNING, ) - failure_states = (State.ERROR, State.TERMINATED, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get(cluster_id=cluster_id) - status = poll.state - status_message = poll.state_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach RUNNING, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_get_cluster_terminated( - self, - cluster_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails: - deadline = time.time() + timeout.total_seconds() - target_states = (State.TERMINATED, ) - failure_states = (State.ERROR, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.get(cluster_id=cluster_id) - status = poll.state - status_message = poll.state_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach TERMINATED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def change_owner(self, cluster_id: str, owner_username: str): + + def change_owner(self + , cluster_id: str, owner_username: str + ): """Change cluster owner. Change the owner of the cluster. You must be an admin to perform this operation. @@ -3791,39 +3596,20 @@ def change_owner(self, cluster_id: str, owner_username: str): body = {} if cluster_id is not None: body['cluster_id'] = cluster_id if owner_username is not None: body['owner_username'] = owner_username - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/change-owner', body=body, headers=headers) - - def create(self, - spark_version: str, - *, - apply_policy_default_values: Optional[bool] = None, - autoscale: Optional[AutoScale] = None, - autotermination_minutes: Optional[int] = None, - aws_attributes: Optional[AwsAttributes] = None, - azure_attributes: Optional[AzureAttributes] = None, - cluster_log_conf: Optional[ClusterLogConf] = None, - cluster_name: Optional[str] = None, - cluster_source: Optional[ClusterSource] = None, - custom_tags: Optional[Dict[str, str]] = None, - data_security_mode: Optional[DataSecurityMode] = None, - docker_image: Optional[DockerImage] = None, - driver_instance_pool_id: Optional[str] = None, - driver_node_type_id: Optional[str] = None, - enable_elastic_disk: Optional[bool] = None, - enable_local_disk_encryption: Optional[bool] = None, - gcp_attributes: Optional[GcpAttributes] = None, - init_scripts: Optional[List[InitScriptInfo]] = None, - instance_pool_id: Optional[str] = None, - node_type_id: Optional[str] = None, - num_workers: Optional[int] = None, - policy_id: Optional[str] = None, - runtime_engine: Optional[RuntimeEngine] = None, - single_user_name: Optional[str] = None, - spark_conf: Optional[Dict[str, str]] = None, - spark_env_vars: Optional[Dict[str, str]] = None, - ssh_public_keys: Optional[List[str]] = None, - workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/clusters/change-owner' + , body=body + + , headers=headers) + + + + + def create(self + , spark_version: str + , * + , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, cluster_source: Optional[ClusterSource] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, 
enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: """Create new cluster. Creates a new Spark cluster. This method will acquire new instances from the cloud provider if @@ -3952,8 +3738,7 @@ def create(self, See :method:wait_get_cluster_running for more details. """ body = {} - if apply_policy_default_values is not None: - body['apply_policy_default_values'] = apply_policy_default_values + if apply_policy_default_values is not None: body['apply_policy_default_values'] = apply_policy_default_values if autoscale is not None: body['autoscale'] = autoscale.as_dict() if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict() @@ -3967,8 +3752,7 @@ def create(self, if driver_instance_pool_id is not None: body['driver_instance_pool_id'] = driver_instance_pool_id if driver_node_type_id is not None: body['driver_node_type_id'] = driver_node_type_id if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk - if enable_local_disk_encryption is not None: - body['enable_local_disk_encryption'] = enable_local_disk_encryption + if enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = enable_local_disk_encryption if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts] if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id @@ -3982,74 +3766,26 @@ def create(self, if spark_version is not None: body['spark_version'] = spark_version if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys] if workload_type is not None: body['workload_type'] = workload_type.as_dict() - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/create', body=body, headers=headers) - return Wait(self.wait_get_cluster_running, - response=CreateClusterResponse.from_dict(op_response), - cluster_id=op_response['cluster_id']) - - def create_and_wait( - self, - spark_version: str, - *, - apply_policy_default_values: Optional[bool] = None, - autoscale: Optional[AutoScale] = None, - autotermination_minutes: Optional[int] = None, - aws_attributes: Optional[AwsAttributes] = None, - azure_attributes: Optional[AzureAttributes] = None, - cluster_log_conf: Optional[ClusterLogConf] = None, - cluster_name: Optional[str] = None, - cluster_source: Optional[ClusterSource] = None, - custom_tags: Optional[Dict[str, str]] = None, - data_security_mode: Optional[DataSecurityMode] = None, - docker_image: Optional[DockerImage] = None, - driver_instance_pool_id: Optional[str] = None, - driver_node_type_id: Optional[str] = None, - enable_elastic_disk: Optional[bool] = None, - enable_local_disk_encryption: Optional[bool] = None, - gcp_attributes: Optional[GcpAttributes] = None, - init_scripts: 
Optional[List[InitScriptInfo]] = None, - instance_pool_id: Optional[str] = None, - node_type_id: Optional[str] = None, - num_workers: Optional[int] = None, - policy_id: Optional[str] = None, - runtime_engine: Optional[RuntimeEngine] = None, - single_user_name: Optional[str] = None, - spark_conf: Optional[Dict[str, str]] = None, - spark_env_vars: Optional[Dict[str, str]] = None, - ssh_public_keys: Optional[List[str]] = None, - workload_type: Optional[WorkloadType] = None, + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + op_response = self._api.do('POST', + '/api/2.0/clusters/create' + , body=body + + , headers=headers) + return Wait(self.wait_get_cluster_running + , response = CreateClusterResponse.from_dict(op_response) + , cluster_id=op_response['cluster_id']) + + + def create_and_wait(self, spark_version: str + , * , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, cluster_source: Optional[ClusterSource] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, workload_type: Optional[WorkloadType] = None, timeout=timedelta(minutes=20)) -> ClusterDetails: - return self.create(apply_policy_default_values=apply_policy_default_values, - autoscale=autoscale, - autotermination_minutes=autotermination_minutes, - aws_attributes=aws_attributes, - azure_attributes=azure_attributes, - cluster_log_conf=cluster_log_conf, - cluster_name=cluster_name, - cluster_source=cluster_source, - custom_tags=custom_tags, - data_security_mode=data_security_mode, - docker_image=docker_image, - driver_instance_pool_id=driver_instance_pool_id, - driver_node_type_id=driver_node_type_id, - enable_elastic_disk=enable_elastic_disk, - enable_local_disk_encryption=enable_local_disk_encryption, - gcp_attributes=gcp_attributes, - init_scripts=init_scripts, - instance_pool_id=instance_pool_id, - node_type_id=node_type_id, - num_workers=num_workers, - policy_id=policy_id, - runtime_engine=runtime_engine, - single_user_name=single_user_name, - spark_conf=spark_conf, - spark_env_vars=spark_env_vars, - spark_version=spark_version, - ssh_public_keys=ssh_public_keys, - workload_type=workload_type).result(timeout=timeout) - - def delete(self, cluster_id: str) -> Wait[ClusterDetails]: + return self.create(apply_policy_default_values=apply_policy_default_values, autoscale=autoscale, autotermination_minutes=autotermination_minutes, aws_attributes=aws_attributes, azure_attributes=azure_attributes, cluster_log_conf=cluster_log_conf, cluster_name=cluster_name, cluster_source=cluster_source, 
custom_tags=custom_tags, data_security_mode=data_security_mode, docker_image=docker_image, driver_instance_pool_id=driver_instance_pool_id, driver_node_type_id=driver_node_type_id, enable_elastic_disk=enable_elastic_disk, enable_local_disk_encryption=enable_local_disk_encryption, gcp_attributes=gcp_attributes, init_scripts=init_scripts, instance_pool_id=instance_pool_id, node_type_id=node_type_id, num_workers=num_workers, policy_id=policy_id, runtime_engine=runtime_engine, single_user_name=single_user_name, spark_conf=spark_conf, spark_env_vars=spark_env_vars, spark_version=spark_version, ssh_public_keys=ssh_public_keys, workload_type=workload_type).result(timeout=timeout) + + + def delete(self + , cluster_id: str + ) -> Wait[ClusterDetails]: """Terminate cluster. Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the @@ -4065,44 +3801,27 @@ def delete(self, cluster_id: str) -> Wait[ClusterDetails]: """ body = {} if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/delete', body=body, headers=headers) - return Wait(self.wait_get_cluster_terminated, cluster_id=cluster_id) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/clusters/delete' + , body=body + + , headers=headers) + return Wait(self.wait_get_cluster_terminated + + , cluster_id=cluster_id) - def delete_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: + + def delete_and_wait(self, cluster_id: str + , + timeout=timedelta(minutes=20)) -> ClusterDetails: return self.delete(cluster_id=cluster_id).result(timeout=timeout) - - def edit(self, - cluster_id: str, - spark_version: str, - *, - apply_policy_default_values: Optional[bool] = None, - autoscale: Optional[AutoScale] = None, - autotermination_minutes: Optional[int] = None, - aws_attributes: Optional[AwsAttributes] = None, - azure_attributes: Optional[AzureAttributes] = None, - cluster_log_conf: Optional[ClusterLogConf] = None, - cluster_name: Optional[str] = None, - cluster_source: Optional[ClusterSource] = None, - custom_tags: Optional[Dict[str, str]] = None, - data_security_mode: Optional[DataSecurityMode] = None, - docker_image: Optional[DockerImage] = None, - driver_instance_pool_id: Optional[str] = None, - driver_node_type_id: Optional[str] = None, - enable_elastic_disk: Optional[bool] = None, - enable_local_disk_encryption: Optional[bool] = None, - gcp_attributes: Optional[GcpAttributes] = None, - init_scripts: Optional[List[InitScriptInfo]] = None, - instance_pool_id: Optional[str] = None, - node_type_id: Optional[str] = None, - num_workers: Optional[int] = None, - policy_id: Optional[str] = None, - runtime_engine: Optional[RuntimeEngine] = None, - single_user_name: Optional[str] = None, - spark_conf: Optional[Dict[str, str]] = None, - spark_env_vars: Optional[Dict[str, str]] = None, - ssh_public_keys: Optional[List[str]] = None, - workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: + + + def edit(self + , cluster_id: str, spark_version: str + , * + , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, cluster_source: 
Optional[ClusterSource] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: """Update cluster configuration. Updates the configuration of a cluster to match the provided attributes and size. A cluster can be @@ -4238,8 +3957,7 @@ def edit(self, See :method:wait_get_cluster_running for more details. """ body = {} - if apply_policy_default_values is not None: - body['apply_policy_default_values'] = apply_policy_default_values + if apply_policy_default_values is not None: body['apply_policy_default_values'] = apply_policy_default_values if autoscale is not None: body['autoscale'] = autoscale.as_dict() if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict() @@ -4254,8 +3972,7 @@ def edit(self, if driver_instance_pool_id is not None: body['driver_instance_pool_id'] = driver_instance_pool_id if driver_node_type_id is not None: body['driver_node_type_id'] = driver_node_type_id if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk - if enable_local_disk_encryption is not None: - body['enable_local_disk_encryption'] = enable_local_disk_encryption + if enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = enable_local_disk_encryption if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts] if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id @@ -4269,82 +3986,27 @@ def edit(self, if spark_version is not None: body['spark_version'] = spark_version if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys] if workload_type is not None: body['workload_type'] = workload_type.as_dict() - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/edit', body=body, headers=headers) - return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) - - def edit_and_wait( - self, - cluster_id: str, - spark_version: str, - *, - apply_policy_default_values: Optional[bool] = None, - autoscale: Optional[AutoScale] = None, - autotermination_minutes: Optional[int] = None, - aws_attributes: Optional[AwsAttributes] = None, - azure_attributes: Optional[AzureAttributes] = None, - cluster_log_conf: Optional[ClusterLogConf] = None, - cluster_name: Optional[str] = None, - cluster_source: Optional[ClusterSource] = None, - custom_tags: Optional[Dict[str, str]] = None, - data_security_mode: Optional[DataSecurityMode] = None, - docker_image: Optional[DockerImage] = None, - driver_instance_pool_id: Optional[str] 
= None, - driver_node_type_id: Optional[str] = None, - enable_elastic_disk: Optional[bool] = None, - enable_local_disk_encryption: Optional[bool] = None, - gcp_attributes: Optional[GcpAttributes] = None, - init_scripts: Optional[List[InitScriptInfo]] = None, - instance_pool_id: Optional[str] = None, - node_type_id: Optional[str] = None, - num_workers: Optional[int] = None, - policy_id: Optional[str] = None, - runtime_engine: Optional[RuntimeEngine] = None, - single_user_name: Optional[str] = None, - spark_conf: Optional[Dict[str, str]] = None, - spark_env_vars: Optional[Dict[str, str]] = None, - ssh_public_keys: Optional[List[str]] = None, - workload_type: Optional[WorkloadType] = None, + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/clusters/edit' + , body=body + + , headers=headers) + return Wait(self.wait_get_cluster_running + + , cluster_id=cluster_id) + + + def edit_and_wait(self, cluster_id: str, spark_version: str + , * , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, cluster_source: Optional[ClusterSource] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, workload_type: Optional[WorkloadType] = None, timeout=timedelta(minutes=20)) -> ClusterDetails: - return self.edit(apply_policy_default_values=apply_policy_default_values, - autoscale=autoscale, - autotermination_minutes=autotermination_minutes, - aws_attributes=aws_attributes, - azure_attributes=azure_attributes, - cluster_id=cluster_id, - cluster_log_conf=cluster_log_conf, - cluster_name=cluster_name, - cluster_source=cluster_source, - custom_tags=custom_tags, - data_security_mode=data_security_mode, - docker_image=docker_image, - driver_instance_pool_id=driver_instance_pool_id, - driver_node_type_id=driver_node_type_id, - enable_elastic_disk=enable_elastic_disk, - enable_local_disk_encryption=enable_local_disk_encryption, - gcp_attributes=gcp_attributes, - init_scripts=init_scripts, - instance_pool_id=instance_pool_id, - node_type_id=node_type_id, - num_workers=num_workers, - policy_id=policy_id, - runtime_engine=runtime_engine, - single_user_name=single_user_name, - spark_conf=spark_conf, - spark_env_vars=spark_env_vars, - spark_version=spark_version, - ssh_public_keys=ssh_public_keys, - workload_type=workload_type).result(timeout=timeout) - - def events(self, - cluster_id: str, - *, - end_time: Optional[int] = None, - event_types: Optional[List[EventType]] = None, - limit: Optional[int] = None, - offset: Optional[int] = None, - order: 
Optional[GetEventsOrder] = None, - start_time: Optional[int] = None) -> Iterator['ClusterEvent']: + return self.edit(apply_policy_default_values=apply_policy_default_values, autoscale=autoscale, autotermination_minutes=autotermination_minutes, aws_attributes=aws_attributes, azure_attributes=azure_attributes, cluster_id=cluster_id, cluster_log_conf=cluster_log_conf, cluster_name=cluster_name, cluster_source=cluster_source, custom_tags=custom_tags, data_security_mode=data_security_mode, docker_image=docker_image, driver_instance_pool_id=driver_instance_pool_id, driver_node_type_id=driver_node_type_id, enable_elastic_disk=enable_elastic_disk, enable_local_disk_encryption=enable_local_disk_encryption, gcp_attributes=gcp_attributes, init_scripts=init_scripts, instance_pool_id=instance_pool_id, node_type_id=node_type_id, num_workers=num_workers, policy_id=policy_id, runtime_engine=runtime_engine, single_user_name=single_user_name, spark_conf=spark_conf, spark_env_vars=spark_env_vars, spark_version=spark_version, ssh_public_keys=ssh_public_keys, workload_type=workload_type).result(timeout=timeout) + + + def events(self + , cluster_id: str + , * + , end_time: Optional[int] = None, event_types: Optional[List[EventType]] = None, limit: Optional[int] = None, offset: Optional[int] = None, order: Optional[GetEventsOrder] = None, start_time: Optional[int] = None) -> Iterator['ClusterEvent']: """List cluster activity events. Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more @@ -4378,19 +4040,29 @@ def events(self, if offset is not None: body['offset'] = offset if order is not None: body['order'] = order.value if start_time is not None: body['start_time'] = start_time - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + while True: - json = self._api.do('POST', '/api/2.0/clusters/events', body=body, headers=headers) - if 'events' not in json or not json['events']: - return - for v in json['events']: - yield ClusterEvent.from_dict(v) - if 'next_page' not in json or not json['next_page']: - return - body = json['next_page'] - - def get(self, cluster_id: str) -> ClusterDetails: + json = self._api.do('POST', + '/api/2.0/clusters/events' + , body=body + + , headers=headers) + if 'events' not in json or not json['events']: + return + for v in json['events']: + yield ClusterEvent.from_dict(v) + if 'next_page' not in json or not json['next_page']: + return + body = json['next_page'] + + + + + def get(self + , cluster_id: str + ) -> ClusterDetails: """Get cluster info. Retrieves the information for a cluster given its identifier. Clusters can be described while they are @@ -4401,14 +4073,22 @@ def get(self, cluster_id: str) -> ClusterDetails: :returns: :class:`ClusterDetails` """ - + query = {} if cluster_id is not None: query['cluster_id'] = cluster_id - headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/get', query=query, headers=headers) + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + '/api/2.0/clusters/get' + , query=query + + , headers=headers) return ClusterDetails.from_dict(res) - def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsResponse: + + + def get_permission_levels(self + , cluster_id: str + ) -> GetClusterPermissionLevelsResponse: """Get cluster permission levels. Gets the permission levels that a user can have on an object. 
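The `events` generator in the hunk above pages by request body rather than by URL: each response's `next_page` is literally the next POST body, and iteration stops when `events` or `next_page` is absent. Consuming it is plain iteration; a sketch with a hypothetical cluster id:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import EventType

w = WorkspaceClient()

# The generator keeps POSTing /api/2.0/clusters/events under the hood.
for event in w.clusters.events(cluster_id='0123-456789-abcdefgh',
                               event_types=[EventType.STARTING, EventType.TERMINATING],
                               limit=25):
    print(event.timestamp, event.type, event.details)
```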
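More broadly, the regenerated cluster methods keep the `Wait` pattern: `create`, `edit`, `resize`, `restart`, and `start` return `Wait[ClusterDetails]`, and the `*_and_wait` variants simply chain `.result()`, which drives the `wait_get_cluster_running` poller shown earlier (linear backoff capped at 10s, plus jitter). A sketch with illustrative node type and Runtime version:

```python
from datetime import timedelta
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Blocks until the cluster reaches RUNNING; raises OperationFailed on
# ERROR/TERMINATED and TimeoutError once the deadline passes.
details = w.clusters.create_and_wait(spark_version='13.3.x-scala2.12',
                                     cluster_name='sdk-demo',
                                     node_type_id='i3.xlarge',
                                     num_workers=1,
                                     autotermination_minutes=30,
                                     timeout=timedelta(minutes=25))
print(details.state)  # State.RUNNING
```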
@@ -4418,14 +4098,20 @@ def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsRe :returns: :class:`GetClusterPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json', } + + headers = {'Accept': 'application/json',} res = self._api.do('GET', - f'/api/2.0/permissions/clusters/{cluster_id}/permissionLevels', - headers=headers) + f'/api/2.0/permissions/clusters/{cluster_id}/permissionLevels' + + + , headers=headers) return GetClusterPermissionLevelsResponse.from_dict(res) - def get_permissions(self, cluster_id: str) -> ClusterPermissions: + + + def get_permissions(self + , cluster_id: str + ) -> ClusterPermissions: """Get cluster permissions. Gets the permissions of a cluster. Clusters can inherit permissions from their root object. @@ -4435,12 +4121,21 @@ def get_permissions(self, cluster_id: str) -> ClusterPermissions: :returns: :class:`ClusterPermissions` """ - - headers = {'Accept': 'application/json', } - res = self._api.do('GET', f'/api/2.0/permissions/clusters/{cluster_id}', headers=headers) + + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + f'/api/2.0/permissions/clusters/{cluster_id}' + + + , headers=headers) return ClusterPermissions.from_dict(res) - def list(self, *, can_use_client: Optional[str] = None) -> Iterator['ClusterDetails']: + + + def list(self + + , * + , can_use_client: Optional[str] = None) -> Iterator['ClusterDetails']: """List all clusters. Return information about all pinned clusters, active clusters, up to 200 of the most recently @@ -4459,14 +4154,21 @@ def list(self, *, can_use_client: Optional[str] = None) -> Iterator['ClusterDeta :returns: Iterator over :class:`ClusterDetails` """ - + query = {} if can_use_client is not None: query['can_use_client'] = can_use_client - headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.0/clusters/list', query=query, headers=headers) + headers = {'Accept': 'application/json',} + json = self._api.do('GET', + '/api/2.0/clusters/list' + , query=query + + , headers=headers) parsed = ListClustersResponse.from_dict(json).clusters return parsed if parsed is not None else [] + + + def list_node_types(self) -> ListNodeTypesResponse: """List node types. @@ -4474,11 +4176,16 @@ def list_node_types(self) -> ListNodeTypesResponse: :returns: :class:`ListNodeTypesResponse` """ - - headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/list-node-types', headers=headers) + + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + '/api/2.0/clusters/list-node-types' + + , headers=headers) return ListNodeTypesResponse.from_dict(res) + + def list_zones(self) -> ListAvailableZonesResponse: """List availability zones. @@ -4487,12 +4194,19 @@ def list_zones(self) -> ListAvailableZonesResponse: :returns: :class:`ListAvailableZonesResponse` """ - - headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/list-zones', headers=headers) + + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + '/api/2.0/clusters/list-zones' + + , headers=headers) return ListAvailableZonesResponse.from_dict(res) - def permanent_delete(self, cluster_id: str): + + + def permanent_delete(self + , cluster_id: str + ): """Permanently delete cluster. Permanently deletes a Spark cluster. 
This cluster is terminated and resources are asynchronously @@ -4508,10 +4222,19 @@ def permanent_delete(self, cluster_id: str): """ body = {} if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/permanent-delete', body=body, headers=headers) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/clusters/permanent-delete' + , body=body + + , headers=headers) + - def pin(self, cluster_id: str): + + + def pin(self + , cluster_id: str + ): """Pin cluster. Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a @@ -4524,14 +4247,20 @@ def pin(self, cluster_id: str): """ body = {} if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/pin', body=body, headers=headers) - - def resize(self, - cluster_id: str, - *, - autoscale: Optional[AutoScale] = None, - num_workers: Optional[int] = None) -> Wait[ClusterDetails]: + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/clusters/pin' + , body=body + + , headers=headers) + + + + + def resize(self + , cluster_id: str + , * + , autoscale: Optional[AutoScale] = None, num_workers: Optional[int] = None) -> Wait[ClusterDetails]: """Resize cluster. Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a @@ -4560,20 +4289,27 @@ def resize(self, if autoscale is not None: body['autoscale'] = autoscale.as_dict() if cluster_id is not None: body['cluster_id'] = cluster_id if num_workers is not None: body['num_workers'] = num_workers - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/resize', body=body, headers=headers) - return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) - - def resize_and_wait(self, - cluster_id: str, - *, - autoscale: Optional[AutoScale] = None, - num_workers: Optional[int] = None, - timeout=timedelta(minutes=20)) -> ClusterDetails: - return self.resize(autoscale=autoscale, cluster_id=cluster_id, - num_workers=num_workers).result(timeout=timeout) - - def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wait[ClusterDetails]: + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/clusters/resize' + , body=body + + , headers=headers) + return Wait(self.wait_get_cluster_running + + , cluster_id=cluster_id) + + + def resize_and_wait(self, cluster_id: str + , * , autoscale: Optional[AutoScale] = None, num_workers: Optional[int] = None, + timeout=timedelta(minutes=20)) -> ClusterDetails: + return self.resize(autoscale=autoscale, cluster_id=cluster_id, num_workers=num_workers).result(timeout=timeout) + + + def restart(self + , cluster_id: str + , * + , restart_user: Optional[str] = None) -> Wait[ClusterDetails]: """Restart cluster. Restarts a Spark cluster with the supplied ID. 
If the cluster is not currently in a `RUNNING` state, @@ -4591,22 +4327,27 @@ def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wai body = {} if cluster_id is not None: body['cluster_id'] = cluster_id if restart_user is not None: body['restart_user'] = restart_user - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/restart', body=body, headers=headers) - return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) - - def restart_and_wait(self, - cluster_id: str, - *, - restart_user: Optional[str] = None, - timeout=timedelta(minutes=20)) -> ClusterDetails: - return self.restart(cluster_id=cluster_id, restart_user=restart_user).result(timeout=timeout) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/clusters/restart' + , body=body + + , headers=headers) + return Wait(self.wait_get_cluster_running + + , cluster_id=cluster_id) - def set_permissions( - self, - cluster_id: str, - *, - access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions: + + def restart_and_wait(self, cluster_id: str + , * , restart_user: Optional[str] = None, + timeout=timedelta(minutes=20)) -> ClusterDetails: + return self.restart(cluster_id=cluster_id, restart_user=restart_user).result(timeout=timeout) + + + def set_permissions(self + , cluster_id: str + , * + , access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions: """Set cluster permissions. Sets permissions on a cluster. Clusters can inherit permissions from their root object. @@ -4618,12 +4359,17 @@ def set_permissions( :returns: :class:`ClusterPermissions` """ body = {} - if access_control_list is not None: - body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - res = self._api.do('PUT', f'/api/2.0/permissions/clusters/{cluster_id}', body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + res = self._api.do('PUT', + f'/api/2.0/permissions/clusters/{cluster_id}' + , body=body + + , headers=headers) return ClusterPermissions.from_dict(res) + + def spark_versions(self) -> GetSparkVersionsResponse: """List available Spark versions. @@ -4631,12 +4377,19 @@ def spark_versions(self) -> GetSparkVersionsResponse: :returns: :class:`GetSparkVersionsResponse` """ - - headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/spark-versions', headers=headers) + + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + '/api/2.0/clusters/spark-versions' + + , headers=headers) return GetSparkVersionsResponse.from_dict(res) - def start(self, cluster_id: str) -> Wait[ClusterDetails]: + + + def start(self + , cluster_id: str + ) -> Wait[ClusterDetails]: """Start terminated cluster. Starts a terminated Spark cluster with the supplied ID. 
This works similar to `createCluster` except: @@ -4655,14 +4408,26 @@ def start(self, cluster_id: str) -> Wait[ClusterDetails]: """ body = {} if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/start', body=body, headers=headers) - return Wait(self.wait_get_cluster_running, cluster_id=cluster_id) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/clusters/start' + , body=body + + , headers=headers) + return Wait(self.wait_get_cluster_running + + , cluster_id=cluster_id) - def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: + + def start_and_wait(self, cluster_id: str + , + timeout=timedelta(minutes=20)) -> ClusterDetails: return self.start(cluster_id=cluster_id).result(timeout=timeout) - - def unpin(self, cluster_id: str): + + + def unpin(self + , cluster_id: str + ): """Unpin cluster. Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. @@ -4676,14 +4441,20 @@ def unpin(self, cluster_id: str): """ body = {} if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/unpin', body=body, headers=headers) - - def update_permissions( - self, - cluster_id: str, - *, - access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions: + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/clusters/unpin' + , body=body + + , headers=headers) + + + + + def update_permissions(self + , cluster_id: str + , * + , access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions: """Update cluster permissions. Updates the permissions on a cluster. Clusters can inherit permissions from their root object. @@ -4695,124 +4466,115 @@ def update_permissions( :returns: :class:`ClusterPermissions` """ body = {} - if access_control_list is not None: - body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - res = self._api.do('PATCH', f'/api/2.0/permissions/clusters/{cluster_id}', body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + res = self._api.do('PATCH', + f'/api/2.0/permissions/clusters/{cluster_id}' + , body=body + + , headers=headers) return ClusterPermissions.from_dict(res) - + + class CommandExecutionAPI: """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.""" - + def __init__(self, api_client): self._api = api_client + + def wait_command_status_command_execution_cancelled(self, cluster_id: str, command_id: str, context_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (CommandStatus.CANCELLED, ) + failure_states = (CommandStatus.ERROR, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + status = poll.status + status_message = f'current status: {status}' + if poll.results: + status_message = poll.results.cause + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach Cancelled, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_command_status_command_execution_finished_or_error(self, cluster_id: str, command_id: str, context_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, ) + failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach Finished or Error, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_context_status_command_execution_running(self, cluster_id: str, context_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (ContextStatus.RUNNING, ) + failure_states = (ContextStatus.ERROR, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.context_status(cluster_id=cluster_id, context_id=context_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach Running, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + - def wait_command_status_command_execution_cancelled( - self, - cluster_id: str, - command_id: str, - context_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (CommandStatus.CANCELLED, ) - failure_states = (CommandStatus.ERROR, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) - status = poll.status - status_message = f'current status: {status}' - if poll.results: - status_message = poll.results.cause - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach Cancelled, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_command_status_command_execution_finished_or_error( - self, - cluster_id: str, - command_id: str, - context_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, ) - failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) - status = poll.status - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach Finished or Error, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_context_status_command_execution_running( - self, - cluster_id: str, - context_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (ContextStatus.RUNNING, ) - failure_states = (ContextStatus.ERROR, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.context_status(cluster_id=cluster_id, context_id=context_id) - status = poll.status - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach Running, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def cancel(self, - *, - cluster_id: Optional[str] = None, - command_id: Optional[str] = None, - context_id: Optional[str] = None) -> Wait[CommandStatusResponse]: + + def cancel(self + + , * + , cluster_id: Optional[str] = None, command_id: Optional[str] = None, context_id: Optional[str] = None) -> Wait[CommandStatusResponse]: """Cancel a command. Cancels a currently running command within an execution context. 
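# The generated wait_* helpers above all share one polling shape: poll the status
# endpoint, return on a target state, raise OperationFailed on a failure state,
# and otherwise sleep `attempt` seconds (capped at 10) plus up to 1s of jitter
# until the deadline. A minimal standalone sketch of that pattern, assuming only
# the standard library; poll_fn/is_done/is_failed are hypothetical stand-ins,
# not SDK names:
import random
import time
from datetime import timedelta

def wait_until(poll_fn, is_done, is_failed, timeout=timedelta(minutes=20)):
    deadline = time.time() + timeout.total_seconds()
    attempt = 1
    while time.time() < deadline:
        state = poll_fn()
        if is_done(state):
            return state
        if is_failed(state):
            raise RuntimeError(f'failed, got {state}')
        time.sleep(min(attempt, 10) + random.random())  # capped linear backoff + jitter
        attempt += 1
    raise TimeoutError(f'timed out after {timeout}')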
@@ -4831,24 +4593,26 @@ def cancel(self, if cluster_id is not None: body['clusterId'] = cluster_id if command_id is not None: body['commandId'] = command_id if context_id is not None: body['contextId'] = context_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/1.2/commands/cancel', body=body, headers=headers) - return Wait(self.wait_command_status_command_execution_cancelled, - cluster_id=cluster_id, - command_id=command_id, - context_id=context_id) - - def cancel_and_wait( - self, - *, - cluster_id: Optional[str] = None, - command_id: Optional[str] = None, - context_id: Optional[str] = None, - timeout=timedelta(minutes=20)) -> CommandStatusResponse: - return self.cancel(cluster_id=cluster_id, command_id=command_id, - context_id=context_id).result(timeout=timeout) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/1.2/commands/cancel' + , body=body + + , headers=headers) + return Wait(self.wait_command_status_command_execution_cancelled + + , cluster_id=cluster_id, command_id=command_id, context_id=context_id) - def command_status(self, cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse: + + def cancel_and_wait(self + , * , cluster_id: Optional[str] = None, command_id: Optional[str] = None, context_id: Optional[str] = None, + timeout=timedelta(minutes=20)) -> CommandStatusResponse: + return self.cancel(cluster_id=cluster_id, command_id=command_id, context_id=context_id).result(timeout=timeout) + + + def command_status(self + , cluster_id: str, context_id: str, command_id: str + ) -> CommandStatusResponse: """Get command info. Gets the status of and, if available, the results from a currently executing command. @@ -4861,16 +4625,24 @@ def command_status(self, cluster_id: str, context_id: str, command_id: str) -> C :returns: :class:`CommandStatusResponse` """ - + query = {} if cluster_id is not None: query['clusterId'] = cluster_id if command_id is not None: query['commandId'] = command_id if context_id is not None: query['contextId'] = context_id - headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/1.2/commands/status', query=query, headers=headers) + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + '/api/1.2/commands/status' + , query=query + + , headers=headers) return CommandStatusResponse.from_dict(res) - def context_status(self, cluster_id: str, context_id: str) -> ContextStatusResponse: + + + def context_status(self + , cluster_id: str, context_id: str + ) -> ContextStatusResponse: """Get status. Gets the status for an execution context. 
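# cancel() only enqueues the cancellation and returns a Wait[CommandStatusResponse];
# the blocking happens in .result(), which drives
# wait_command_status_command_execution_cancelled() above. A usage sketch, assuming
# a configured WorkspaceClient `w` and placeholder IDs:
from datetime import timedelta
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
cluster_id, context_id, command_id = "<cluster-id>", "<context-id>", "<command-id>"  # placeholders
waiter = w.command_execution.cancel(cluster_id=cluster_id, context_id=context_id,
                                    command_id=command_id)
cancelled = waiter.result(timeout=timedelta(minutes=5))
# cancel_and_wait() below is the one-call equivalent of cancel(...).result(...)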
@@ -4880,18 +4652,24 @@ def context_status(self, cluster_id: str, context_id: str) -> ContextStatusRespo :returns: :class:`ContextStatusResponse` """ - + query = {} if cluster_id is not None: query['clusterId'] = cluster_id if context_id is not None: query['contextId'] = context_id - headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/1.2/contexts/status', query=query, headers=headers) + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + '/api/1.2/contexts/status' + , query=query + + , headers=headers) return ContextStatusResponse.from_dict(res) - def create(self, - *, - cluster_id: Optional[str] = None, - language: Optional[Language] = None) -> Wait[ContextStatusResponse]: + + + def create(self + + , * + , cluster_id: Optional[str] = None, language: Optional[Language] = None) -> Wait[ContextStatusResponse]: """Create an execution context. Creates an execution context for running cluster commands. @@ -4909,22 +4687,26 @@ def create(self, body = {} if cluster_id is not None: body['clusterId'] = cluster_id if language is not None: body['language'] = language.value - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/1.2/contexts/create', body=body, headers=headers) - return Wait(self.wait_context_status_command_execution_running, - response=Created.from_dict(op_response), - cluster_id=cluster_id, - context_id=op_response['id']) - - def create_and_wait( - self, - *, - cluster_id: Optional[str] = None, - language: Optional[Language] = None, + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + op_response = self._api.do('POST', + '/api/1.2/contexts/create' + , body=body + + , headers=headers) + return Wait(self.wait_context_status_command_execution_running + , response = Created.from_dict(op_response) + , cluster_id=cluster_id, context_id=op_response['id']) + + + def create_and_wait(self + , * , cluster_id: Optional[str] = None, language: Optional[Language] = None, timeout=timedelta(minutes=20)) -> ContextStatusResponse: return self.create(cluster_id=cluster_id, language=language).result(timeout=timeout) - - def destroy(self, cluster_id: str, context_id: str): + + + def destroy(self + , cluster_id: str, context_id: str + ): """Delete an execution context. Deletes an execution context. @@ -4937,15 +4719,20 @@ def destroy(self, cluster_id: str, context_id: str): body = {} if cluster_id is not None: body['clusterId'] = cluster_id if context_id is not None: body['contextId'] = context_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/1.2/contexts/destroy', body=body, headers=headers) - - def execute(self, - *, - cluster_id: Optional[str] = None, - command: Optional[str] = None, - context_id: Optional[str] = None, - language: Optional[Language] = None) -> Wait[CommandStatusResponse]: + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/1.2/contexts/destroy' + , body=body + + , headers=headers) + + + + + def execute(self + + , * + , cluster_id: Optional[str] = None, command: Optional[str] = None, context_id: Optional[str] = None, language: Optional[Language] = None) -> Wait[CommandStatusResponse]: """Run a command. Runs a cluster command in the given execution context, using the provided language. 
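# create_and_wait() blocks until the new execution context reaches Running, and
# execute_and_wait() (defined next) blocks until a command reaches Finished or
# Error. An end-to-end sketch, assuming a configured WorkspaceClient `w`, a
# placeholder cluster ID, and that ContextStatusResponse exposes the context `id`:
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import Language

w = WorkspaceClient()
cluster_id = "<cluster-id>"  # placeholder
ctx = w.command_execution.create_and_wait(cluster_id=cluster_id, language=Language.PYTHON)
try:
    done = w.command_execution.execute_and_wait(cluster_id=cluster_id, context_id=ctx.id,
                                                command="print(1 + 1)", language=Language.PYTHON)
    print(done.status)
finally:
    w.command_execution.destroy(cluster_id=cluster_id, context_id=ctx.id)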
@@ -4969,26 +4756,23 @@ def execute(self, if command is not None: body['command'] = command if context_id is not None: body['contextId'] = context_id if language is not None: body['language'] = language.value - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/1.2/commands/execute', body=body, headers=headers) - return Wait(self.wait_command_status_command_execution_finished_or_error, - response=Created.from_dict(op_response), - cluster_id=cluster_id, - command_id=op_response['id'], - context_id=context_id) - - def execute_and_wait( - self, - *, - cluster_id: Optional[str] = None, - command: Optional[str] = None, - context_id: Optional[str] = None, - language: Optional[Language] = None, - timeout=timedelta(minutes=20)) -> CommandStatusResponse: - return self.execute(cluster_id=cluster_id, command=command, context_id=context_id, - language=language).result(timeout=timeout) - + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + op_response = self._api.do('POST', + '/api/1.2/commands/execute' + , body=body + + , headers=headers) + return Wait(self.wait_command_status_command_execution_finished_or_error + , response = Created.from_dict(op_response) + , cluster_id=cluster_id, command_id=op_response['id'], context_id=context_id) + + def execute_and_wait(self + , * , cluster_id: Optional[str] = None, command: Optional[str] = None, context_id: Optional[str] = None, language: Optional[Language] = None, + timeout=timedelta(minutes=20)) -> CommandStatusResponse: + return self.execute(cluster_id=cluster_id, command=command, context_id=context_id, language=language).result(timeout=timeout) + + class GlobalInitScriptsAPI: """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace. These scripts run on every node in every cluster in the workspace. @@ -4997,16 +4781,16 @@ class GlobalInitScriptsAPI: Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark container fails to launch and init scripts with later position are skipped. If enough containers fail, the entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, - name: str, - script: str, - *, - enabled: Optional[bool] = None, - position: Optional[int] = None) -> CreateResponse: + + def create(self + , name: str, script: str + , * + , enabled: Optional[bool] = None, position: Optional[int] = None) -> CreateResponse: """Create init script. Creates a new global init script in this workspace. @@ -5035,11 +4819,19 @@ def create(self, if name is not None: body['name'] = name if position is not None: body['position'] = position if script is not None: body['script'] = script - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - res = self._api.do('POST', '/api/2.0/global-init-scripts', body=body, headers=headers) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + res = self._api.do('POST', + '/api/2.0/global-init-scripts' + , body=body + + , headers=headers) return CreateResponse.from_dict(res) - def delete(self, script_id: str): + + + def delete(self + , script_id: str + ): """Delete init script. Deletes a global init script. 
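# GlobalInitScriptsAPI.create() expects `script` to be the Base64-encoded script
# body (see the :param script: description above). A sketch, assuming a configured
# WorkspaceClient `w` and that the returned CreateResponse carries the new script ID:
import base64
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
script_text = "#!/bin/bash\necho init-ran >> /tmp/init.log\n"
created = w.global_init_scripts.create(
    name="example-init-script",
    script=base64.b64encode(script_text.encode("utf-8")).decode("ascii"),
    enabled=True,
    position=0)
print(created.script_id)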
@@ -5049,11 +4841,20 @@ def delete(self, script_id: str): """ - + headers = {} - self._api.do('DELETE', f'/api/2.0/global-init-scripts/{script_id}', headers=headers) + self._api.do('DELETE', + f'/api/2.0/global-init-scripts/{script_id}' + + + , headers=headers) + - def get(self, script_id: str) -> GlobalInitScriptDetailsWithContent: + + + def get(self + , script_id: str + ) -> GlobalInitScriptDetailsWithContent: """Get an init script. Gets all the details of a script, including its Base64-encoded contents. @@ -5063,11 +4864,17 @@ def get(self, script_id: str) -> GlobalInitScriptDetailsWithContent: :returns: :class:`GlobalInitScriptDetailsWithContent` """ - - headers = {'Accept': 'application/json', } - res = self._api.do('GET', f'/api/2.0/global-init-scripts/{script_id}', headers=headers) + + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + f'/api/2.0/global-init-scripts/{script_id}' + + + , headers=headers) return GlobalInitScriptDetailsWithContent.from_dict(res) + + def list(self) -> Iterator['GlobalInitScriptDetails']: """Get init scripts. @@ -5077,30 +4884,33 @@ def list(self) -> Iterator['GlobalInitScriptDetails']: :returns: Iterator over :class:`GlobalInitScriptDetails` """ - - headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.0/global-init-scripts', headers=headers) + + headers = {'Accept': 'application/json',} + json = self._api.do('GET', + '/api/2.0/global-init-scripts' + + , headers=headers) parsed = ListGlobalInitScriptsResponse.from_dict(json).scripts return parsed if parsed is not None else [] + - def update(self, - name: str, - script: str, - script_id: str, - *, - enabled: Optional[bool] = None, - position: Optional[int] = None): + + + def update(self + , script_id: str, name: str, script: str + , * + , enabled: Optional[bool] = None, position: Optional[int] = None): """Update init script. Updates a global init script, specifying only the fields to change. All fields are optional. Unspecified fields retain their current value. + :param script_id: str + The ID of the global init script. :param name: str The name of the script :param script: str The Base64-encoded content of the script. - :param script_id: str - The ID of the global init script. :param enabled: bool (optional) Specifies whether the script is enabled. The script runs only if enabled. :param position: int (optional) @@ -5121,10 +4931,16 @@ def update(self, if name is not None: body['name'] = name if position is not None: body['position'] = position if script is not None: body['script'] = script - headers = {'Content-Type': 'application/json', } - self._api.do('PATCH', f'/api/2.0/global-init-scripts/{script_id}', body=body, headers=headers) - + headers = {'Content-Type': 'application/json',} + self._api.do('PATCH', + f'/api/2.0/global-init-scripts/{script_id}' + , body=body + + , headers=headers) + + + class InstancePoolsAPI: """The Instance Pools API is used to create, edit, delete, and list instance pools using ready-to-use cloud instances, which reduces cluster start and auto-scaling times. @@ -5140,25 +4956,16 @@ class InstancePoolsAPI: Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does apply.
See pricing.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, - instance_pool_name: str, - node_type_id: str, - *, - aws_attributes: Optional[InstancePoolAwsAttributes] = None, - azure_attributes: Optional[InstancePoolAzureAttributes] = None, - custom_tags: Optional[Dict[str, str]] = None, - disk_spec: Optional[DiskSpec] = None, - enable_elastic_disk: Optional[bool] = None, - gcp_attributes: Optional[InstancePoolGcpAttributes] = None, - idle_instance_autotermination_minutes: Optional[int] = None, - max_capacity: Optional[int] = None, - min_idle_instances: Optional[int] = None, - preloaded_docker_images: Optional[List[DockerImage]] = None, - preloaded_spark_versions: Optional[List[str]] = None) -> CreateInstancePoolResponse: + + def create(self + , instance_pool_name: str, node_type_id: str + , * + , aws_attributes: Optional[InstancePoolAwsAttributes] = None, azure_attributes: Optional[InstancePoolAzureAttributes] = None, custom_tags: Optional[Dict[str,str]] = None, disk_spec: Optional[DiskSpec] = None, enable_elastic_disk: Optional[bool] = None, gcp_attributes: Optional[InstancePoolGcpAttributes] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, preloaded_docker_images: Optional[List[DockerImage]] = None, preloaded_spark_versions: Optional[List[str]] = None) -> CreateInstancePoolResponse: """Create a new instance pool. Creates a new instance pool using idle and ready-to-use cloud instances. @@ -5219,21 +5026,26 @@ def create(self, if disk_spec is not None: body['disk_spec'] = disk_spec.as_dict() if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() - if idle_instance_autotermination_minutes is not None: - body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes + if idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes if instance_pool_name is not None: body['instance_pool_name'] = instance_pool_name if max_capacity is not None: body['max_capacity'] = max_capacity if min_idle_instances is not None: body['min_idle_instances'] = min_idle_instances if node_type_id is not None: body['node_type_id'] = node_type_id - if preloaded_docker_images is not None: - body['preloaded_docker_images'] = [v.as_dict() for v in preloaded_docker_images] - if preloaded_spark_versions is not None: - body['preloaded_spark_versions'] = [v for v in preloaded_spark_versions] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - res = self._api.do('POST', '/api/2.0/instance-pools/create', body=body, headers=headers) + if preloaded_docker_images is not None: body['preloaded_docker_images'] = [v.as_dict() for v in preloaded_docker_images] + if preloaded_spark_versions is not None: body['preloaded_spark_versions'] = [v for v in preloaded_spark_versions] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + res = self._api.do('POST', + '/api/2.0/instance-pools/create' + , body=body + + , headers=headers) return CreateInstancePoolResponse.from_dict(res) - def delete(self, instance_pool_id: str): + + + def delete(self + , instance_pool_id: str + ): """Delete an instance pool. Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously. 
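# A minimal create() sketch: only the two required fields plus the sizing and
# autotermination knobs documented above. The node type ID is cloud-specific, so
# a placeholder is used; assumes a configured WorkspaceClient `w`:
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
pool = w.instance_pools.create(
    instance_pool_name="example-pool",
    node_type_id="<node-type-id>",  # placeholder; cloud-specific
    min_idle_instances=1,
    max_capacity=10,
    idle_instance_autotermination_minutes=15)
print(pool.instance_pool_id)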
@@ -5245,18 +5057,20 @@ def delete(self, instance_pool_id: str): """ body = {} if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/instance-pools/delete', body=body, headers=headers) - - def edit(self, - instance_pool_id: str, - instance_pool_name: str, - node_type_id: str, - *, - custom_tags: Optional[Dict[str, str]] = None, - idle_instance_autotermination_minutes: Optional[int] = None, - max_capacity: Optional[int] = None, - min_idle_instances: Optional[int] = None): + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/instance-pools/delete' + , body=body + + , headers=headers) + + + + + def edit(self + , instance_pool_id: str, instance_pool_name: str, node_type_id: str + , * + , custom_tags: Optional[Dict[str,str]] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None): """Edit an existing instance pool. Modifies the configuration of an existing instance pool. @@ -5293,17 +5107,25 @@ def edit(self, """ body = {} if custom_tags is not None: body['custom_tags'] = custom_tags - if idle_instance_autotermination_minutes is not None: - body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes + if idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id if instance_pool_name is not None: body['instance_pool_name'] = instance_pool_name if max_capacity is not None: body['max_capacity'] = max_capacity if min_idle_instances is not None: body['min_idle_instances'] = min_idle_instances if node_type_id is not None: body['node_type_id'] = node_type_id - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/instance-pools/edit', body=body, headers=headers) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/instance-pools/edit' + , body=body + + , headers=headers) + - def get(self, instance_pool_id: str) -> GetInstancePool: + + + def get(self + , instance_pool_id: str + ) -> GetInstancePool: """Get instance pool information. Retrieve the information for an instance pool based on its identifier. @@ -5313,14 +5135,22 @@ def get(self, instance_pool_id: str) -> GetInstancePool: :returns: :class:`GetInstancePool` """ - + query = {} if instance_pool_id is not None: query['instance_pool_id'] = instance_pool_id - headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/instance-pools/get', query=query, headers=headers) + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + '/api/2.0/instance-pools/get' + , query=query + + , headers=headers) return GetInstancePool.from_dict(res) - def get_permission_levels(self, instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse: + + + def get_permission_levels(self + , instance_pool_id: str + ) -> GetInstancePoolPermissionLevelsResponse: """Get instance pool permission levels. Gets the permission levels that a user can have on an object. 
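# edit() requires the pool's name and node type again, not just the fields being
# changed, so a common pattern is read-modify-write via get(). A sketch, assuming
# a configured WorkspaceClient `w` and a placeholder pool ID:
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
pool = w.instance_pools.get(instance_pool_id="<pool-id>")
w.instance_pools.edit(
    instance_pool_id=pool.instance_pool_id,
    instance_pool_name=pool.instance_pool_name,
    node_type_id=pool.node_type_id,
    max_capacity=20)  # the only setting actually being changed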
@@ -5330,14 +5160,20 @@ def get_permission_levels(self, instance_pool_id: str) -> GetInstancePoolPermiss :returns: :class:`GetInstancePoolPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json', } + + headers = {'Accept': 'application/json',} res = self._api.do('GET', - f'/api/2.0/permissions/instance-pools/{instance_pool_id}/permissionLevels', - headers=headers) + f'/api/2.0/permissions/instance-pools/{instance_pool_id}/permissionLevels' + + + , headers=headers) return GetInstancePoolPermissionLevelsResponse.from_dict(res) - def get_permissions(self, instance_pool_id: str) -> InstancePoolPermissions: + + + def get_permissions(self + , instance_pool_id: str + ) -> InstancePoolPermissions: """Get instance pool permissions. Gets the permissions of an instance pool. Instance pools can inherit permissions from their root @@ -5348,11 +5184,17 @@ def get_permissions(self, instance_pool_id: str) -> InstancePoolPermissions: :returns: :class:`InstancePoolPermissions` """ - - headers = {'Accept': 'application/json', } - res = self._api.do('GET', f'/api/2.0/permissions/instance-pools/{instance_pool_id}', headers=headers) + + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + f'/api/2.0/permissions/instance-pools/{instance_pool_id}' + + + , headers=headers) return InstancePoolPermissions.from_dict(res) + + def list(self) -> Iterator['InstancePoolAndStats']: """List instance pool info. @@ -5360,18 +5202,22 @@ def list(self) -> Iterator['InstancePoolAndStats']: :returns: Iterator over :class:`InstancePoolAndStats` """ - - headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.0/instance-pools/list', headers=headers) + + headers = {'Accept': 'application/json',} + json = self._api.do('GET', + '/api/2.0/instance-pools/list' + + , headers=headers) parsed = ListInstancePools.from_dict(json).instance_pools return parsed if parsed is not None else [] + - def set_permissions( - self, - instance_pool_id: str, - *, - access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None - ) -> InstancePoolPermissions: + + + def set_permissions(self + , instance_pool_id: str + , * + , access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None) -> InstancePoolPermissions: """Set instance pool permissions. Sets permissions on an instance pool. Instance pools can inherit permissions from their root object. @@ -5383,21 +5229,21 @@ def set_permissions( :returns: :class:`InstancePoolPermissions` """ body = {} - if access_control_list is not None: - body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} res = self._api.do('PUT', - f'/api/2.0/permissions/instance-pools/{instance_pool_id}', - body=body, - headers=headers) + f'/api/2.0/permissions/instance-pools/{instance_pool_id}' + , body=body + + , headers=headers) return InstancePoolPermissions.from_dict(res) - def update_permissions( - self, - instance_pool_id: str, - *, - access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None - ) -> InstancePoolPermissions: + + + def update_permissions(self + , instance_pool_id: str + , * + , access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None) -> InstancePoolPermissions: """Update instance pool permissions. 
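# set_permissions() replaces the full access control list (update_permissions()
# below merges instead). A sketch, assuming a configured WorkspaceClient `w` and
# assuming the access-control request and permission-level types live in
# databricks.sdk.service.compute as named here:
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import (InstancePoolAccessControlRequest,
                                            InstancePoolPermissionLevel)

w = WorkspaceClient()
w.instance_pools.set_permissions(
    instance_pool_id="<pool-id>",  # placeholder
    access_control_list=[
        InstancePoolAccessControlRequest(
            group_name="data-engineers",
            permission_level=InstancePoolPermissionLevel.CAN_ATTACH_TO)])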
Updates the permissions on an instance pool. Instance pools can inherit permissions from their root @@ -5410,32 +5256,33 @@ def update_permissions( :returns: :class:`InstancePoolPermissions` """ body = {} - if access_control_list is not None: - body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} res = self._api.do('PATCH', - f'/api/2.0/permissions/instance-pools/{instance_pool_id}', - body=body, - headers=headers) + f'/api/2.0/permissions/instance-pools/{instance_pool_id}' + , body=body + + , headers=headers) return InstancePoolPermissions.from_dict(res) - + + class InstanceProfilesAPI: """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3 buckets] using instance profiles for more information. [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html""" - + def __init__(self, api_client): self._api = api_client + - def add(self, - instance_profile_arn: str, - *, - iam_role_arn: Optional[str] = None, - is_meta_instance_profile: Optional[bool] = None, - skip_validation: Optional[bool] = None): + + def add(self + , instance_profile_arn: str + , * + , iam_role_arn: Optional[str] = None, is_meta_instance_profile: Optional[bool] = None, skip_validation: Optional[bool] = None): """Register an instance profile. In the UI, you can select the instance profile when launching clusters. This API is only available to @@ -5470,14 +5317,20 @@ def add(self, if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn if is_meta_instance_profile is not None: body['is_meta_instance_profile'] = is_meta_instance_profile if skip_validation is not None: body['skip_validation'] = skip_validation - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/instance-profiles/add', body=body, headers=headers) - - def edit(self, - instance_profile_arn: str, - *, - iam_role_arn: Optional[str] = None, - is_meta_instance_profile: Optional[bool] = None): + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/instance-profiles/add' + , body=body + + , headers=headers) + + + + + def edit(self + , instance_profile_arn: str + , * + , iam_role_arn: Optional[str] = None, is_meta_instance_profile: Optional[bool] = None): """Edit an instance profile. The only supported field to change is the optional IAM role ARN associated with the instance profile. 
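# add() registers an existing AWS instance profile ARN with the workspace; set
# skip_validation=True only when Databricks cannot validate the profile itself.
# A sketch, assuming a configured WorkspaceClient `w` and a placeholder ARN:
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.instance_profiles.add(
    instance_profile_arn="arn:aws:iam::<account-id>:instance-profile/<name>",  # placeholder
    skip_validation=False)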
@@ -5515,9 +5368,16 @@ def edit(self, if iam_role_arn is not None: body['iam_role_arn'] = iam_role_arn if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn if is_meta_instance_profile is not None: body['is_meta_instance_profile'] = is_meta_instance_profile - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/instance-profiles/edit', body=body, headers=headers) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/instance-profiles/edit' + , body=body + + , headers=headers) + + + def list(self) -> Iterator['InstanceProfile']: """List available instance profiles. @@ -5527,13 +5387,21 @@ def list(self) -> Iterator['InstanceProfile']: :returns: Iterator over :class:`InstanceProfile` """ - - headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.0/instance-profiles/list', headers=headers) + + headers = {'Accept': 'application/json',} + json = self._api.do('GET', + '/api/2.0/instance-profiles/list' + + , headers=headers) parsed = ListInstanceProfilesResponse.from_dict(json).instance_profiles return parsed if parsed is not None else [] + - def remove(self, instance_profile_arn: str): + + + def remove(self + , instance_profile_arn: str + ): """Remove the instance profile. Remove the instance profile with the provided ARN. Existing clusters with this instance profile will @@ -5548,10 +5416,16 @@ def remove(self, instance_profile_arn: str): """ body = {} if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/instance-profiles/remove', body=body, headers=headers) - + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/instance-profiles/remove' + , body=body + + , headers=headers) + + + class LibrariesAPI: """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster. @@ -5569,10 +5443,12 @@ class LibrariesAPI: When you uninstall a library from a cluster, the library is removed only when you restart the cluster. Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.""" - + def __init__(self, api_client): self._api = api_client + + def all_cluster_statuses(self) -> ListAllClusterLibraryStatusesResponse: """Get all statuses. @@ -5582,12 +5458,19 @@ def all_cluster_statuses(self) -> ListAllClusterLibraryStatusesResponse: :returns: :class:`ListAllClusterLibraryStatusesResponse` """ - - headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/libraries/all-cluster-statuses', headers=headers) + + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + '/api/2.0/libraries/all-cluster-statuses' + + , headers=headers) return ListAllClusterLibraryStatusesResponse.from_dict(res) - def cluster_status(self, cluster_id: str) -> Iterator['LibraryFullStatus']: + + + def cluster_status(self + , cluster_id: str + ) -> Iterator['LibraryFullStatus']: """Get status. Get the status of libraries on a cluster. 
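# InstanceProfilesAPI.list() yields one InstanceProfile per registered profile;
# a quick inventory sketch, assuming a configured WorkspaceClient `w`:
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for profile in w.instance_profiles.list():
    print(profile.instance_profile_arn)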
A status will be available for all libraries installed on @@ -5608,15 +5491,24 @@ def cluster_status(self, cluster_id: str) -> Iterator['LibraryFullStatus']: :returns: Iterator over :class:`LibraryFullStatus` """ - + query = {} if cluster_id is not None: query['cluster_id'] = cluster_id - headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.0/libraries/cluster-status', query=query, headers=headers) + headers = {'Accept': 'application/json',} + json = self._api.do('GET', + '/api/2.0/libraries/cluster-status' + , query=query + + , headers=headers) parsed = ClusterLibraryStatuses.from_dict(json).library_statuses return parsed if parsed is not None else [] + - def install(self, cluster_id: str, libraries: List[Library]): + + + def install(self + , cluster_id: str, libraries: List[Library] + ): """Add a library. Add libraries to be installed on a cluster. The installation is asynchronous; it happens in the @@ -5635,10 +5527,19 @@ def install(self, cluster_id: str, libraries: List[Library]): body = {} if cluster_id is not None: body['cluster_id'] = cluster_id if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/libraries/install', body=body, headers=headers) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/libraries/install' + , body=body + + , headers=headers) + - def uninstall(self, cluster_id: str, libraries: List[Library]): + + + def uninstall(self + , cluster_id: str, libraries: List[Library] + ): """Uninstall libraries. Set libraries to be uninstalled on a cluster. The libraries won't be uninstalled until the cluster is @@ -5655,10 +5556,16 @@ def uninstall(self, cluster_id: str, libraries: List[Library]): body = {} if cluster_id is not None: body['cluster_id'] = cluster_id if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/libraries/uninstall', body=body, headers=headers) - + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + self._api.do('POST', + '/api/2.0/libraries/uninstall' + , body=body + + , headers=headers) + + + class PolicyFamiliesAPI: """View available policy families. A policy family contains a policy definition providing best practices for configuring clusters for a particular use case. @@ -5669,11 +5576,15 @@ class PolicyFamiliesAPI: Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a policy family. Cluster policies created using a policy family inherit the policy family's policy definition.""" - + def __init__(self, api_client): self._api = api_client + - def get(self, policy_family_id: str) -> PolicyFamily: + + def get(self + , policy_family_id: str + ) -> PolicyFamily: """Get policy family information. Retrieve the information for a policy family based on its identifier.
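# install() returns as soon as the request is accepted; the installation itself
# is asynchronous, so poll cluster_status() above to observe progress. A sketch,
# assuming a configured WorkspaceClient `w` and the Library/PythonPyPiLibrary
# types from databricks.sdk.service.compute:
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import Library, PythonPyPiLibrary

w = WorkspaceClient()
w.libraries.install(
    cluster_id="<cluster-id>",  # placeholder
    libraries=[Library(pypi=PythonPyPiLibrary(package="requests==2.31.0"))])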
@@ -5682,15 +5593,21 @@ def get(self, policy_family_id: str) -> PolicyFamily: :returns: :class:`PolicyFamily` """ - - headers = {'Accept': 'application/json', } - res = self._api.do('GET', f'/api/2.0/policy-families/{policy_family_id}', headers=headers) + + headers = {'Accept': 'application/json',} + res = self._api.do('GET', + f'/api/2.0/policy-families/{policy_family_id}' + + + , headers=headers) return PolicyFamily.from_dict(res) - def list(self, - *, - max_results: Optional[int] = None, - page_token: Optional[str] = None) -> Iterator['PolicyFamily']: + + + def list(self + + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator['PolicyFamily']: """List policy families. Retrieve a list of policy families. This API is paginated. @@ -5702,18 +5619,41 @@ def list(self, :returns: Iterator over :class:`PolicyFamily` """ - + query = {} if max_results is not None: query['max_results'] = max_results if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json', } - + headers = {'Accept': 'application/json',} + + while True: - json = self._api.do('GET', '/api/2.0/policy-families', query=query, headers=headers) - if 'policy_families' not in json or not json['policy_families']: - return - for v in json['policy_families']: - yield PolicyFamily.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] + json = self._api.do('GET', + '/api/2.0/policy-families' + , query=query + + , headers=headers) + if 'policy_families' not in json or not json['policy_families']: + return + for v in json['policy_families']: + yield PolicyFamily.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + + + + + + + + + + + + diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index 20a4fc47f..5fac07cdd 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -1,10 +1,14 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -import logging from dataclasses import dataclass -from typing import BinaryIO, Dict, Iterator, List, Optional - -from ._internal import _repeated +from datetime import timedelta +from enum import Enum +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index dc3ef2f8b..54ef2ec86 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -1,11 +1,14 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
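# PolicyFamiliesAPI.list() wraps the next_page_token loop above in a generator,
# so callers simply iterate; max_results only sets the page size. A sketch,
# assuming a configured WorkspaceClient `w`:
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for family in w.policy_families.list(max_results=50):
    print(family.policy_family_id, family.name)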
-import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Any, Dict, Iterator, List, Optional - -from ._internal import _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') @@ -1066,6 +1069,10 @@ def create(self, :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the group. See [assigning entitlements] for a full list of supported + values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) :param id: str (optional) @@ -1228,6 +1235,10 @@ def update(self, :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the group. See [assigning entitlements] for a full list of supported + values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) :param members: List[:class:`ComplexValue`] (optional) @@ -1288,6 +1299,10 @@ def create(self, :param display_name: str (optional) String that represents a concatenation of given and family names. :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the service principal. See [assigning entitlements] for a full list of + supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) :param id: str (optional) @@ -1453,6 +1468,10 @@ def update(self, :param display_name: str (optional) String that represents a concatenation of given and family names. :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the service principal. See [assigning entitlements] for a full list of + supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) :param roles: List[:class:`ComplexValue`] (optional) @@ -1513,14 +1532,23 @@ def create(self, :param active: bool (optional) If this user is active :param display_name: str (optional) - String that represents a concatenation of given and family names. For example `John Smith`. + String that represents a concatenation of given and family names. For example `John Smith`. This + field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use + Account SCIM APIs to update `displayName`. + + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. 
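# The entitlements documented above are passed as ComplexValue entries whose
# `value` is the entitlement name. A sketch, assuming a configured
# WorkspaceClient `w`; "allow-cluster-create" is one of the documented values:
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.iam import ComplexValue

w = WorkspaceClient()
group = w.groups.create(
    display_name="example-group",
    entitlements=[ComplexValue(value="allow-cluster-create")])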
:param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) + External ID is not currently supported. It is reserved for future use. :param groups: List[:class:`ComplexValue`] (optional) :param id: str (optional) - Databricks user ID. + Databricks user ID. This is automatically set by Databricks. Any value provided by the client will + be ignored. :param name: :class:`Name` (optional) :param roles: List[:class:`ComplexValue`] (optional) Corresponds to AWS instance profile/arn role. @@ -1719,15 +1747,24 @@ def update(self, Replaces a user's information with the data supplied in request. :param id: str - Databricks user ID. + Databricks user ID. This is automatically set by Databricks. Any value provided by the client will + be ignored. :param active: bool (optional) If this user is active :param display_name: str (optional) - String that represents a concatenation of given and family names. For example `John Smith`. + String that represents a concatenation of given and family names. For example `John Smith`. This + field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use + Account SCIM APIs to update `displayName`. + + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) + External ID is not currently supported. It is reserved for future use. :param groups: List[:class:`ComplexValue`] (optional) :param name: :class:`Name` (optional) :param roles: List[:class:`ComplexValue`] (optional) @@ -1805,6 +1842,10 @@ def create(self, :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the group. See [assigning entitlements] for a full list of supported + values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) :param id: str (optional) @@ -1954,6 +1995,10 @@ def update(self, :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the group. See [assigning entitlements] for a full list of supported + values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) :param members: List[:class:`ComplexValue`] (optional) @@ -2154,6 +2199,10 @@ def create(self, :param display_name: str (optional) String that represents a concatenation of given and family names. 
:param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the service principal. See [assigning entitlements] for a full list of + supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) :param id: str (optional) @@ -2306,6 +2355,10 @@ def update(self, :param display_name: str (optional) String that represents a concatenation of given and family names. :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the service principal. See [assigning entitlements] for a full list of + supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) :param roles: List[:class:`ComplexValue`] (optional) @@ -2363,14 +2416,23 @@ def create(self, :param active: bool (optional) If this user is active :param display_name: str (optional) - String that represents a concatenation of given and family names. For example `John Smith`. + String that represents a concatenation of given and family names. For example `John Smith`. This + field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use + Account SCIM APIs to update `displayName`. + + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) + External ID is not currently supported. It is reserved for future use. :param groups: List[:class:`ComplexValue`] (optional) :param id: str (optional) - Databricks user ID. + Databricks user ID. This is automatically set by Databricks. Any value provided by the client will + be ignored. :param name: :class:`Name` (optional) :param roles: List[:class:`ComplexValue`] (optional) Corresponds to AWS instance profile/arn role. @@ -2600,15 +2662,24 @@ def update(self, Replaces a user's information with the data supplied in request. :param id: str - Databricks user ID. + Databricks user ID. This is automatically set by Databricks. Any value provided by the client will + be ignored. :param active: bool (optional) If this user is active :param display_name: str (optional) - String that represents a concatenation of given and family names. For example `John Smith`. + String that represents a concatenation of given and family names. For example `John Smith`. This + field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use + Account SCIM APIs to update `displayName`. + + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) + Entitlements assigned to the user. 
See [assigning entitlements] for a full list of supported values. + + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) + External ID is not currently supported. It is reserved for future use. :param groups: List[:class:`ComplexValue`] (optional) :param name: :class:`Name` (optional) :param roles: List[:class:`ComplexValue`] (optional) @@ -2721,18 +2792,18 @@ def list(self, workspace_id: int) -> Iterator['PermissionAssignment']: parsed = PermissionAssignments.from_dict(json).permission_assignments return parsed if parsed is not None else [] - def update(self, permissions: List[WorkspacePermission], workspace_id: int, principal_id: int): + def update(self, workspace_id: int, principal_id: int, permissions: List[WorkspacePermission]): """Create or update permissions assignment. Creates or updates the workspace permissions assignment in a given account and workspace for the specified principal. - :param permissions: List[:class:`WorkspacePermission`] - Array of permissions assignments to update on the workspace. :param workspace_id: int The workspace ID. :param principal_id: int The ID of the user, service principal, or group. + :param permissions: List[:class:`WorkspacePermission`] + Array of permissions assignments to update on the workspace. """ diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 0721278db..c79afe382 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -1,19 +1,21 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional - -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') -from databricks.sdk.service import compute, iam +from databricks.sdk.service import compute +from databricks.sdk.service import compute +from databricks.sdk.service import compute +from databricks.sdk.service import iam # all definitions in this file are in alphabetical order @@ -257,6 +259,7 @@ class CreateJob: compute: Optional['List[JobCompute]'] = None continuous: Optional['Continuous'] = None deployment: Optional['JobDeployment'] = None + edit_mode: Optional['CreateJobEditMode'] = None email_notifications: Optional['JobEmailNotifications'] = None format: Optional['Format'] = None git_source: Optional['GitSource'] = None @@ -273,7 +276,6 @@ class CreateJob: tasks: Optional['List[Task]'] = None timeout_seconds: Optional[int] = None trigger: Optional['TriggerSettings'] = None - ui_state: Optional['CreateJobUiState'] = None webhook_notifications: Optional['WebhookNotifications'] = None def as_dict(self) -> dict: @@ -283,6 +285,7 @@ def as_dict(self) -> dict: if self.compute: body['compute'] = [v.as_dict() for v in self.compute] if self.continuous: body['continuous'] = self.continuous.as_dict() if self.deployment: body['deployment'] = self.deployment.as_dict() + if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value if self.email_notifications: body['email_notifications'] = 
self.email_notifications.as_dict() if self.format is not None: body['format'] = self.format.value if self.git_source: body['git_source'] = self.git_source.as_dict() @@ -299,7 +302,6 @@ def as_dict(self) -> dict: if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds if self.trigger: body['trigger'] = self.trigger.as_dict() - if self.ui_state is not None: body['ui_state'] = self.ui_state.value if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body @@ -309,6 +311,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateJob': compute=_repeated(d, 'compute', JobCompute), continuous=_from_dict(d, 'continuous', Continuous), deployment=_from_dict(d, 'deployment', JobDeployment), + edit_mode=_enum(d, 'edit_mode', CreateJobEditMode), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), format=_enum(d, 'format', Format), git_source=_from_dict(d, 'git_source', GitSource), @@ -325,18 +328,17 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateJob': tasks=_repeated(d, 'tasks', Task), timeout_seconds=d.get('timeout_seconds', None), trigger=_from_dict(d, 'trigger', TriggerSettings), - ui_state=_enum(d, 'ui_state', CreateJobUiState), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) -class CreateJobUiState(Enum): - """State of the job in UI. +class CreateJobEditMode(Enum): + """Edit mode of the job. - * `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in an - editable state and can be modified.""" + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is + in an editable state and can be modified.""" EDITABLE = 'EDITABLE' - LOCKED = 'LOCKED' + UI_LOCKED = 'UI_LOCKED' @dataclass @@ -911,6 +913,7 @@ class JobSettings: compute: Optional['List[JobCompute]'] = None continuous: Optional['Continuous'] = None deployment: Optional['JobDeployment'] = None + edit_mode: Optional['JobSettingsEditMode'] = None email_notifications: Optional['JobEmailNotifications'] = None format: Optional['Format'] = None git_source: Optional['GitSource'] = None @@ -927,7 +930,6 @@ class JobSettings: tasks: Optional['List[Task]'] = None timeout_seconds: Optional[int] = None trigger: Optional['TriggerSettings'] = None - ui_state: Optional['JobSettingsUiState'] = None webhook_notifications: Optional['WebhookNotifications'] = None def as_dict(self) -> dict: @@ -935,6 +937,7 @@ def as_dict(self) -> dict: if self.compute: body['compute'] = [v.as_dict() for v in self.compute] if self.continuous: body['continuous'] = self.continuous.as_dict() if self.deployment: body['deployment'] = self.deployment.as_dict() + if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() if self.format is not None: body['format'] = self.format.value if self.git_source: body['git_source'] = self.git_source.as_dict() @@ -951,7 +954,6 @@ def as_dict(self) -> dict: if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds if self.trigger: body['trigger'] = self.trigger.as_dict() - if self.ui_state is not None: body['ui_state'] = self.ui_state.value if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body @@ -960,6 +962,7 @@ def 
from_dict(cls, d: Dict[str, any]) -> 'JobSettings': return cls(compute=_repeated(d, 'compute', JobCompute), continuous=_from_dict(d, 'continuous', Continuous), deployment=_from_dict(d, 'deployment', JobDeployment), + edit_mode=_enum(d, 'edit_mode', JobSettingsEditMode), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), format=_enum(d, 'format', Format), git_source=_from_dict(d, 'git_source', GitSource), @@ -976,18 +979,17 @@ def from_dict(cls, d: Dict[str, any]) -> 'JobSettings': tasks=_repeated(d, 'tasks', Task), timeout_seconds=d.get('timeout_seconds', None), trigger=_from_dict(d, 'trigger', TriggerSettings), - ui_state=_enum(d, 'ui_state', JobSettingsUiState), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) -class JobSettingsUiState(Enum): - """State of the job in UI. +class JobSettingsEditMode(Enum): + """Edit mode of the job. - * `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in an - editable state and can be modified.""" + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is + in an editable state and can be modified.""" EDITABLE = 'EDITABLE' - LOCKED = 'LOCKED' + UI_LOCKED = 'UI_LOCKED' @dataclass @@ -2936,6 +2938,7 @@ def create(self, compute: Optional[List[JobCompute]] = None, continuous: Optional[Continuous] = None, deployment: Optional[JobDeployment] = None, + edit_mode: Optional[CreateJobEditMode] = None, email_notifications: Optional[JobEmailNotifications] = None, format: Optional[Format] = None, git_source: Optional[GitSource] = None, @@ -2952,7 +2955,6 @@ def create(self, tasks: Optional[List[Task]] = None, timeout_seconds: Optional[int] = None, trigger: Optional[TriggerSettings] = None, - ui_state: Optional[CreateJobUiState] = None, webhook_notifications: Optional[WebhookNotifications] = None) -> CreateResponse: """Create a new job. @@ -2967,6 +2969,11 @@ def create(self, always one run executing. Only one of `schedule` and `continuous` can be used. :param deployment: :class:`JobDeployment` (optional) Deployment information for jobs managed by external sources. + :param edit_mode: :class:`CreateJobEditMode` (optional) + Edit mode of the job. + + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in + an editable state and can be modified. :param email_notifications: :class:`JobEmailNotifications` (optional) An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted. @@ -3031,11 +3038,6 @@ def create(self, Trigger settings for the job. Can be used to trigger a run when new files arrive in an external location. The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. - :param ui_state: :class:`CreateJobUiState` (optional) - State of the job in UI. - - * `LOCKED`: The job is in a locked state and cannot be modified. * `EDITABLE`: The job is in an - editable state and can be modified. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when runs of this job begin or complete. 
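# edit_mode replaces the removed ui_state field, with UI_LOCKED taking over from
# the old LOCKED value. A create() sketch pinning the new enum, assuming a
# configured WorkspaceClient `w` (task definitions elided for brevity):
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import CreateJobEditMode

w = WorkspaceClient()
job = w.jobs.create(
    name="example-job",
    edit_mode=CreateJobEditMode.UI_LOCKED,
    tasks=[])  # a real job would declare its tasks here
print(job.job_id)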
@@ -3047,6 +3049,7 @@ def create(self, if compute is not None: body['compute'] = [v.as_dict() for v in compute] if continuous is not None: body['continuous'] = continuous.as_dict() if deployment is not None: body['deployment'] = deployment.as_dict() + if edit_mode is not None: body['edit_mode'] = edit_mode.value if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict() if format is not None: body['format'] = format.value if git_source is not None: body['git_source'] = git_source.as_dict() @@ -3063,7 +3066,6 @@ def create(self, if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks] if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds if trigger is not None: body['trigger'] = trigger.as_dict() - if ui_state is not None: body['ui_state'] = ui_state.value if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.1/jobs/create', body=body, headers=headers) @@ -3166,7 +3168,11 @@ def get_permissions(self, job_id: str) -> JobPermissions: res = self._api.do('GET', f'/api/2.0/permissions/jobs/{job_id}', headers=headers) return JobPermissions.from_dict(res) - def get_run(self, run_id: int, *, include_history: Optional[bool] = None) -> Run: + def get_run(self, + run_id: int, + *, + include_history: Optional[bool] = None, + include_resolved_values: Optional[bool] = None) -> Run: """Get a single job run. Retrieve the metadata of a run. @@ -3175,12 +3181,15 @@ def get_run(self, run_id: int, *, include_history: Optional[bool] = None) -> Run The canonical identifier of the run for which to retrieve the metadata. This field is required. :param include_history: bool (optional) Whether to include the repair history in the response. + :param include_resolved_values: bool (optional) + Whether to include resolved parameter values in the response. :returns: :class:`Run` """ query = {} if include_history is not None: query['include_history'] = include_history + if include_resolved_values is not None: query['include_resolved_values'] = include_resolved_values if run_id is not None: query['run_id'] = run_id headers = {'Accept': 'application/json', } res = self._api.do('GET', '/api/2.1/jobs/runs/get', query=query, headers=headers) diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 4a3d5df80..7ea35e172 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -1,11 +1,14 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Dict, Iterator, List, Optional - -from ._internal import _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 8fbb3287f..d831de7a1 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -1,10 +1,14 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
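The jobs changes above rename `ui_state` to `edit_mode` and add `include_resolved_values` to `get_run`. A minimal sketch of both against a configured `WorkspaceClient`; the job name, cluster ID, and run ID are placeholders, not values from this diff:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# Create a job whose settings are locked in the UI; previously this was
# ui_state=CreateJobUiState.LOCKED, now edit_mode=CreateJobEditMode.UI_LOCKED.
created = w.jobs.create(
    name='example-locked-job',  # placeholder job name
    edit_mode=jobs.CreateJobEditMode.UI_LOCKED,
    tasks=[
        jobs.Task(task_key='main',
                  existing_cluster_id='0123-456789-abcdefgh',  # placeholder cluster ID
                  notebook_task=jobs.NotebookTask(notebook_path='/Shared/example'))
    ])

# get_run now accepts include_resolved_values alongside include_history.
run = w.jobs.get_run(run_id=42,  # placeholder run ID
                     include_resolved_values=True)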
-import logging from dataclasses import dataclass -from typing import Dict, Iterator, List, Optional - -from ._internal import _from_dict, _repeated +from datetime import timedelta +from enum import Enum +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') @@ -57,21 +61,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateCustomAppIntegrationOutput': integration_id=d.get('integration_id', None)) -@dataclass -class CreateOAuthEnrollment: - enable_all_published_apps: Optional[bool] = None - - def as_dict(self) -> dict: - body = {} - if self.enable_all_published_apps is not None: - body['enable_all_published_apps'] = self.enable_all_published_apps - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'CreateOAuthEnrollment': - return cls(enable_all_published_apps=d.get('enable_all_published_apps', None)) - - @dataclass class CreatePublishedAppIntegration: app_id: Optional[str] = None @@ -243,20 +232,6 @@ def from_dict(cls, d: Dict[str, any]) -> 'ListServicePrincipalSecretsResponse': return cls(secrets=_repeated(d, 'secrets', SecretInfo)) -@dataclass -class OAuthEnrollmentStatus: - is_enabled: Optional[bool] = None - - def as_dict(self) -> dict: - body = {} - if self.is_enabled is not None: body['is_enabled'] = self.is_enabled - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'OAuthEnrollmentStatus': - return cls(is_enabled=d.get('is_enabled', None)) - - @dataclass class PublishedAppOutput: app_id: Optional[str] = None @@ -500,59 +475,6 @@ def update(self, headers=headers) -class OAuthEnrollmentAPI: - """These APIs enable administrators to enroll OAuth for their accounts, which is required for adding/using - any OAuth published/custom application integration. - - **Note:** Your account must be on the E2 version to use these APIs, this is because OAuth is only - supported on the E2 version.""" - - def __init__(self, api_client): - self._api = api_client - - def create(self, *, enable_all_published_apps: Optional[bool] = None): - """Create OAuth Enrollment request. - - Create an OAuth Enrollment request to enroll OAuth for this account and optionally enable the OAuth - integration for all the partner applications in the account. - - The parter applications are: - Power BI - Tableau Desktop - Databricks CLI - - The enrollment is executed asynchronously, so the API will return 204 immediately. The actual - enrollment take a few minutes, you can check the status via API :method:OAuthEnrollment/get. - - :param enable_all_published_apps: bool (optional) - If true, enable OAuth for all the published applications in the account. - - - """ - body = {} - if enable_all_published_apps is not None: - body['enable_all_published_apps'] = enable_all_published_apps - headers = {'Content-Type': 'application/json', } - self._api.do('POST', - f'/api/2.0/accounts/{self._api.account_id}/oauth2/enrollment', - body=body, - headers=headers) - - def get(self) -> OAuthEnrollmentStatus: - """Get OAuth enrollment status. - - Gets the OAuth enrollment status for this Account. - - You can only add/use the OAuth published/custom application integrations when OAuth enrollment status - is enabled. 
- - :returns: :class:`OAuthEnrollmentStatus` """ - - headers = {'Accept': 'application/json', } - res = self._api.do('GET', - f'/api/2.0/accounts/{self._api.account_id}/oauth2/enrollment', - headers=headers) - return OAuthEnrollmentStatus.from_dict(res) - - class OAuthPublishedAppsAPI: """These APIs enable administrators to view all the available published OAuth applications in Databricks. Administrators can add the published OAuth applications to their account through the OAuth Published App diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index ba68454cc..3510ebdd1 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -1,18 +1,17 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional - -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') from databricks.sdk.service import compute # all definitions in this file are in alphabetical order diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 726b23203..1aa2d51b4 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -1,15 +1,14 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Callable, Dict, Iterator, List, Optional - -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') @@ -789,6 +788,7 @@ class UpdateWorkspaceRequest: credentials_id: Optional[str] = None custom_tags: Optional['Dict[str,str]'] = None managed_services_customer_managed_key_id: Optional[str] = None + network_connectivity_config_id: Optional[str] = None network_id: Optional[str] = None storage_configuration_id: Optional[str] = None storage_customer_managed_key_id: Optional[str] = None @@ -801,6 +801,8 @@ def as_dict(self) -> dict: if self.custom_tags: body['custom_tags'] = self.custom_tags if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_connectivity_config_id is not None: + body['network_connectivity_config_id'] = self.network_connectivity_config_id if self.network_id is not None: body['network_id'] = self.network_id if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id @@ -816,6 +818,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'UpdateWorkspaceRequest': custom_tags=d.get('custom_tags', None), managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id', None), + network_connectivity_config_id=d.get('network_connectivity_config_id', None), network_id=d.get('network_id', None), storage_configuration_id=d.get('storage_configuration_id', None), storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None), @@ -1467,9 +1470,9 @@ def list(self) -> Iterator['PrivateAccessSettings']: return [PrivateAccessSettings.from_dict(v) for v in res] def replace(self, + private_access_settings_id: str, private_access_settings_name: str, region: str, - private_access_settings_id: str, *, allowed_vpc_endpoint_ids: Optional[List[str]] = None, private_access_level: Optional[PrivateAccessLevel] = None, @@ -1494,12 +1497,12 @@ def replace(self, [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html + :param private_access_settings_id: str + Databricks Account API private access settings ID. :param private_access_settings_name: str The human-readable name of the private access settings object. :param region: str The cloud region for workspaces associated with this private access settings object. - :param private_access_settings_id: str - Databricks Account API private access settings ID. :param allowed_vpc_endpoint_ids: List[str] (optional) An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering the VPC endpoint configuration in your Databricks account. 
This is not the ID of the VPC endpoint in @@ -2047,6 +2050,7 @@ def update(self, credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str, str]] = None, managed_services_customer_managed_key_id: Optional[str] = None, + network_connectivity_config_id: Optional[str] = None, network_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]: @@ -2159,6 +2163,9 @@ def update(self, :param managed_services_customer_managed_key_id: str (optional) The ID of the workspace's managed services encryption key configuration object. This parameter is available only for updating failed workspaces. + :param network_connectivity_config_id: str (optional) + The ID of the network connectivity configuration object, which is the parent resource of this + private endpoint rule object. :param network_id: str (optional) The ID of the workspace's network configuration object. Used only if you already use a customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a @@ -2180,6 +2187,8 @@ def update(self, if custom_tags is not None: body['custom_tags'] = custom_tags if managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id + if network_connectivity_config_id is not None: + body['network_connectivity_config_id'] = network_connectivity_config_id if network_id is not None: body['network_id'] = network_id if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id if storage_customer_managed_key_id is not None: @@ -2199,6 +2208,7 @@ def update_and_wait( credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str, str]] = None, managed_services_customer_managed_key_id: Optional[str] = None, + network_connectivity_config_id: Optional[str] = None, network_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, @@ -2207,6 +2217,7 @@ def update_and_wait( credentials_id=credentials_id, custom_tags=custom_tags, managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, + network_connectivity_config_id=network_connectivity_config_id, network_id=network_id, storage_configuration_id=storage_configuration_id, storage_customer_managed_key_id=storage_customer_managed_key_id, diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index c281f3d02..e5f19990a 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -1,21 +1,46 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
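A sketch of the new `network_connectivity_config_id` pass-through on workspace update, assuming an Azure `AccountClient` and the usual leading `workspace_id` parameter (both IDs below are placeholders). Note also that `private_access.replace` now takes `private_access_settings_id` as its first positional parameter:

from databricks.sdk import AccountClient

a = AccountClient()

# Attach a network connectivity configuration (NCC) to a workspace.
a.workspaces.update(workspace_id=123456789,  # placeholder workspace ID
                    network_connectivity_config_id='0123-4567-89ab')  # placeholder NCC ID

# Or block until the update settles with the matching waiter:
# a.workspaces.update_and_wait(workspace_id=123456789,
#                              network_connectivity_config_id='0123-4567-89ab')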
-import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional - -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') # all definitions in this file are in alphabetical order +@dataclass +class AppEvents: + event_name: Optional[str] = None + event_time: Optional[str] = None + event_type: Optional[str] = None + message: Optional[str] = None + service_name: Optional[str] = None + + def as_dict(self) -> dict: + body = {} + if self.event_name is not None: body['event_name'] = self.event_name + if self.event_time is not None: body['event_time'] = self.event_time + if self.event_type is not None: body['event_type'] = self.event_type + if self.message is not None: body['message'] = self.message + if self.service_name is not None: body['service_name'] = self.service_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'AppEvents': + return cls(event_name=d.get('event_name', None), + event_time=d.get('event_time', None), + event_type=d.get('event_type', None), + message=d.get('message', None), + service_name=d.get('service_name', None)) + + @dataclass class AppManifest: dependencies: Optional['List[Any]'] = None @@ -24,7 +49,7 @@ class AppManifest: name: Optional[str] = None registry: Optional[Any] = None services: Optional[Any] = None - version: Optional[int] = None + version: Optional[Any] = None def as_dict(self) -> dict: body = {} @@ -34,7 +59,7 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.registry: body['registry'] = self.registry if self.services: body['services'] = self.services - if self.version is not None: body['version'] = self.version + if self.version: body['version'] = self.version return body @classmethod @@ -48,6 +73,26 @@ def from_dict(cls, d: Dict[str, any]) -> 'AppManifest': version=d.get('version', None)) +@dataclass +class AppServiceStatus: + deployment: Optional[Any] = None + name: Optional[str] = None + template: Optional[Any] = None + + def as_dict(self) -> dict: + body = {} + if self.deployment: body['deployment'] = self.deployment + if self.name is not None: body['name'] = self.name + if self.template: body['template'] = self.template + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'AppServiceStatus': + return cls(deployment=d.get('deployment', None), + name=d.get('name', None), + template=d.get('template', None)) + + @dataclass class BuildLogsResponse: logs: str @@ -100,6 +145,20 @@ def from_dict(cls, d: Dict[str, any]) -> 'DataframeSplitInput': return cls(columns=d.get('columns', None), data=d.get('data', None), index=d.get('index', None)) +@dataclass +class DeleteAppResponse: + name: Optional[str] = None + + def as_dict(self) -> dict: + body = {} + if self.name is not None: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'DeleteAppResponse': + return cls(name=d.get('name', None)) + + @dataclass class DeployAppRequest: manifest: 'AppManifest' @@ -118,18 +177,25 @@ def from_dict(cls, d: Dict[str, any]) -> 'DeployAppRequest': @dataclass class DeploymentStatus: + 
container_logs: Optional['List[Any]'] = None deployment_id: Optional[str] = None + extra_info: Optional[str] = None state: Optional['DeploymentStatusState'] = None def as_dict(self) -> dict: body = {} + if self.container_logs: body['container_logs'] = [v for v in self.container_logs] if self.deployment_id is not None: body['deployment_id'] = self.deployment_id + if self.extra_info is not None: body['extra_info'] = self.extra_info if self.state is not None: body['state'] = self.state.value return body @classmethod def from_dict(cls, d: Dict[str, any]) -> 'DeploymentStatus': - return cls(deployment_id=d.get('deployment_id', None), state=_enum(d, 'state', DeploymentStatusState)) + return cls(container_logs=d.get('container_logs', None), + deployment_id=d.get('deployment_id', None), + extra_info=d.get('extra_info', None), + state=_enum(d, 'state', DeploymentStatusState)) class DeploymentStatusState(Enum): @@ -271,6 +337,29 @@ def from_dict(cls, d: Dict[str, any]) -> 'EndpointTag': return cls(key=d.get('key', None), value=d.get('value', None)) +@dataclass +class GetAppResponse: + current_services: Optional['List[AppServiceStatus]'] = None + name: Optional[str] = None + pending_services: Optional['List[AppServiceStatus]'] = None + url: Optional[str] = None + + def as_dict(self) -> dict: + body = {} + if self.current_services: body['current_services'] = [v.as_dict() for v in self.current_services] + if self.name is not None: body['name'] = self.name + if self.pending_services: body['pending_services'] = [v.as_dict() for v in self.pending_services] + if self.url is not None: body['url'] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'GetAppResponse': + return cls(current_services=_repeated(d, 'current_services', AppServiceStatus), + name=d.get('name', None), + pending_services=_repeated(d, 'pending_services', AppServiceStatus), + url=d.get('url', None)) + + @dataclass class GetServingEndpointPermissionLevelsResponse: permission_levels: Optional['List[ServingEndpointPermissionsDescription]'] = None @@ -285,6 +374,36 @@ def from_dict(cls, d: Dict[str, any]) -> 'GetServingEndpointPermissionLevelsResp return cls(permission_levels=_repeated(d, 'permission_levels', ServingEndpointPermissionsDescription)) +@dataclass +class ListAppEventsResponse: + events: Optional['List[AppEvents]'] = None + + def as_dict(self) -> dict: + body = {} + if self.events: body['events'] = [v.as_dict() for v in self.events] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListAppEventsResponse': + return cls(events=_repeated(d, 'events', AppEvents)) + + +@dataclass +class ListAppsResponse: + apps: Optional['List[Any]'] = None + next_page_token: Optional[str] = None + + def as_dict(self) -> dict: + body = {} + if self.apps: body['apps'] = [v for v in self.apps] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'ListAppsResponse': + return cls(apps=d.get('apps', None), next_page_token=d.get('next_page_token', None)) + + @dataclass class ListEndpointsResponse: endpoints: Optional['List[ServingEndpoint]'] = None @@ -788,7 +907,7 @@ def create(self, manifest: AppManifest, *, resources: Optional[Any] = None) -> D res = self._api.do('POST', '/api/2.0/preview/apps/deployments', body=body, headers=headers) return DeploymentStatus.from_dict(res) - def delete(self, name: str): + def delete_app(self, name: str) -> DeleteAppResponse: """Delete an application. 
Delete an application definition @@ -796,13 +915,14 @@ def delete(self, name: str): :param name: str The name of an application. This field is required. - + :returns: :class:`DeleteAppResponse` """ headers = {'Accept': 'application/json', } - self._api.do('DELETE', f'/api/2.0/preview/apps/instances/{name}', headers=headers) + res = self._api.do('DELETE', f'/api/2.0/preview/apps/instances/{name}', headers=headers) + return DeleteAppResponse.from_dict(res) - def get(self, name: str): + def get_app(self, name: str) -> GetAppResponse: """Get definition for an application. Get an application definition @@ -810,11 +930,64 @@ def get(self, name: str): :param name: str The name of an application. This field is required. + :returns: :class:`GetAppResponse` + """ + + headers = {'Accept': 'application/json', } + res = self._api.do('GET', f'/api/2.0/preview/apps/instances/{name}', headers=headers) + return GetAppResponse.from_dict(res) + + def get_app_deployment_status(self, + deployment_id: str, + *, + include_app_log: Optional[str] = None) -> DeploymentStatus: + """Get deployment status for an application. + + Get deployment status for an application + :param deployment_id: str + The deployment id for an application. This field is required. + :param include_app_log: str (optional) + Boolean flag to include application logs + + :returns: :class:`DeploymentStatus` """ + query = {} + if include_app_log is not None: query['include_app_log'] = include_app_log headers = {'Accept': 'application/json', } - self._api.do('GET', f'/api/2.0/preview/apps/instances/{name}', headers=headers) + res = self._api.do('GET', + f'/api/2.0/preview/apps/deployments/{deployment_id}', + query=query, + headers=headers) + return DeploymentStatus.from_dict(res) + + def get_apps(self) -> ListAppsResponse: + """List all applications. + + List all available applications + + :returns: :class:`ListAppsResponse` + """ + + headers = {'Accept': 'application/json', } + res = self._api.do('GET', '/api/2.0/preview/apps/instances', headers=headers) + return ListAppsResponse.from_dict(res) + + def get_events(self, name: str) -> ListAppEventsResponse: + """Get deployment events for an application. + + Get deployment events for an application + + :param name: str + The name of an application. This field is required. + + :returns: :class:`ListAppEventsResponse` + """ + + headers = {'Accept': 'application/json', } + res = self._api.do('GET', f'/api/2.0/preview/apps/{name}/events', headers=headers) + return ListAppEventsResponse.from_dict(res) class ServingEndpointsAPI: @@ -1111,8 +1284,8 @@ def set_permissions( return ServingEndpointPermissions.from_dict(res) def update_config(self, - served_models: List[ServedModelInput], name: str, + served_models: List[ServedModelInput], *, traffic_config: Optional[TrafficConfig] = None) -> Wait[ServingEndpointDetailed]: """Update a serving endpoint with a new config. @@ -1121,11 +1294,11 @@ def update_config(self, served models, and the endpoint's traffic config. An endpoint that already has an update in progress can not be updated until the current update completes or fails. + :param name: str + The name of the serving endpoint to update. This field is required. :param served_models: List[:class:`ServedModelInput`] A list of served models for the endpoint to serve. A serving endpoint can have up to 10 served models. - :param name: str - The name of the serving endpoint to update. This field is required. 
:param traffic_config: :class:`TrafficConfig` (optional) The traffic config defining how invocations to the serving endpoint should be routed. @@ -1147,8 +1320,8 @@ def update_config(self, def update_config_and_wait( self, - served_models: List[ServedModelInput], name: str, + served_models: List[ServedModelInput], *, traffic_config: Optional[TrafficConfig] = None, timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 54819dc66..e5956c55c 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -1,11 +1,14 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Dict, Iterator, List, Optional - -from ._internal import _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') @@ -61,6 +64,22 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateIpAccessListResponse': return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) +@dataclass +class CreateNetworkConnectivityConfigRequest: + name: str + region: str + + def as_dict(self) -> dict: + body = {} + if self.name is not None: body['name'] = self.name + if self.region is not None: body['region'] = self.region + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreateNetworkConnectivityConfigRequest': + return cls(name=d.get('name', None), region=d.get('region', None)) + + @dataclass class CreateOboTokenRequest: application_id: str @@ -97,6 +116,37 @@ def from_dict(cls, d: Dict[str, any]) -> 'CreateOboTokenResponse': return cls(token_info=_from_dict(d, 'token_info', TokenInfo), token_value=d.get('token_value', None)) +@dataclass +class CreatePrivateEndpointRuleRequest: + resource_id: str + group_id: 'CreatePrivateEndpointRuleRequestGroupId' + network_connectivity_config_id: Optional[str] = None + + def as_dict(self) -> dict: + body = {} + if self.group_id is not None: body['group_id'] = self.group_id.value + if self.network_connectivity_config_id is not None: + body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_id is not None: body['resource_id'] = self.resource_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'CreatePrivateEndpointRuleRequest': + return cls(group_id=_enum(d, 'group_id', CreatePrivateEndpointRuleRequestGroupId), + network_connectivity_config_id=d.get('network_connectivity_config_id', None), + resource_id=d.get('resource_id', None)) + + +class CreatePrivateEndpointRuleRequestGroupId(Enum): + """The sub-resource type (group ID) of the target resource. 
Note that to connect to workspace root + storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.""" + + BLOB = 'blob' + DFS = 'dfs' + MYSQL_SERVER = 'mysqlServer' + SQL_SERVER = 'sqlServer' + + @dataclass class CreateTokenRequest: comment: Optional[str] = None @@ -389,6 +439,192 @@ class ListType(Enum): BLOCK = 'BLOCK' +@dataclass +class NccAzurePrivateEndpointRule: + connection_state: Optional['NccAzurePrivateEndpointRuleConnectionState'] = None + creation_time: Optional[int] = None + deactivated: Optional[bool] = None + deactivated_at: Optional[int] = None + endpoint_name: Optional[str] = None + group_id: Optional['NccAzurePrivateEndpointRuleGroupId'] = None + network_connectivity_config_id: Optional[str] = None + resource_id: Optional[str] = None + rule_id: Optional[str] = None + updated_time: Optional[int] = None + + def as_dict(self) -> dict: + body = {} + if self.connection_state is not None: body['connection_state'] = self.connection_state.value + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.deactivated is not None: body['deactivated'] = self.deactivated + if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.group_id is not None: body['group_id'] = self.group_id.value + if self.network_connectivity_config_id is not None: + body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.rule_id is not None: body['rule_id'] = self.rule_id + if self.updated_time is not None: body['updated_time'] = self.updated_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'NccAzurePrivateEndpointRule': + return cls(connection_state=_enum(d, 'connection_state', NccAzurePrivateEndpointRuleConnectionState), + creation_time=d.get('creation_time', None), + deactivated=d.get('deactivated', None), + deactivated_at=d.get('deactivated_at', None), + endpoint_name=d.get('endpoint_name', None), + group_id=_enum(d, 'group_id', NccAzurePrivateEndpointRuleGroupId), + network_connectivity_config_id=d.get('network_connectivity_config_id', None), + resource_id=d.get('resource_id', None), + rule_id=d.get('rule_id', None), + updated_time=d.get('updated_time', None)) + + +class NccAzurePrivateEndpointRuleConnectionState(Enum): + """The current status of this private endpoint. The private endpoint rules are effective only if + the connection state is `ESTABLISHED`. Remember that you must approve new endpoints on your + resources in the Azure portal before they take effect. + + The possible values are: - INIT: (deprecated) The endpoint has been created and pending + approval. - PENDING: The endpoint has been created and pending approval. - ESTABLISHED: The + endpoint has been approved and is ready to use in your serverless compute resources. - REJECTED: + Connection was rejected by the private link resource owner. - DISCONNECTED: Connection was + removed by the private link resource owner, the private endpoint becomes informative and should + be deleted for clean-up.""" + + DISCONNECTED = 'DISCONNECTED' + ESTABLISHED = 'ESTABLISHED' + INIT = 'INIT' + PENDING = 'PENDING' + REJECTED = 'REJECTED' + + +class NccAzurePrivateEndpointRuleGroupId(Enum): + """The sub-resource type (group ID) of the target resource. 
Note that to connect to workspace root + storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.""" + + BLOB = 'blob' + DFS = 'dfs' + MYSQL_SERVER = 'mysqlServer' + SQL_SERVER = 'sqlServer' + + +@dataclass +class NccAzureServiceEndpointRule: + """The stable Azure service endpoints. You can configure the firewall of your Azure resources to + allow traffic from your Databricks serverless compute resources.""" + + subnets: Optional['List[str]'] = None + target_region: Optional[str] = None + target_services: Optional['List[str]'] = None + + def as_dict(self) -> dict: + body = {} + if self.subnets: body['subnets'] = [v for v in self.subnets] + if self.target_region is not None: body['target_region'] = self.target_region + if self.target_services: body['target_services'] = [v for v in self.target_services] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'NccAzureServiceEndpointRule': + return cls(subnets=d.get('subnets', None), + target_region=d.get('target_region', None), + target_services=d.get('target_services', None)) + + +@dataclass +class NccEgressConfig: + """The network connectivity rules that apply to network traffic from your serverless compute + resources.""" + + default_rules: Optional['NccEgressDefaultRules'] = None + target_rules: Optional['NccEgressTargetRules'] = None + + def as_dict(self) -> dict: + body = {} + if self.default_rules: body['default_rules'] = self.default_rules.as_dict() + if self.target_rules: body['target_rules'] = self.target_rules.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'NccEgressConfig': + return cls(default_rules=_from_dict(d, 'default_rules', NccEgressDefaultRules), + target_rules=_from_dict(d, 'target_rules', NccEgressTargetRules)) + + +@dataclass +class NccEgressDefaultRules: + """The network connectivity rules that are applied by default without resource-specific + configurations. You can find the stable network information of your serverless compute resources + here.""" + + azure_service_endpoint_rule: Optional['NccAzureServiceEndpointRule'] = None + + def as_dict(self) -> dict: + body = {} + if self.azure_service_endpoint_rule: + body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'NccEgressDefaultRules': + return cls(azure_service_endpoint_rule=_from_dict(d, 'azure_service_endpoint_rule', + NccAzureServiceEndpointRule)) + + +@dataclass +class NccEgressTargetRules: + """The network connectivity rules that are configured for each destination.
These rules override + default rules.""" + + azure_private_endpoint_rules: Optional['List[NccAzurePrivateEndpointRule]'] = None + + def as_dict(self) -> dict: + body = {} + if self.azure_private_endpoint_rules: + body['azure_private_endpoint_rules'] = [v.as_dict() for v in self.azure_private_endpoint_rules] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'NccEgressTargetRules': + return cls(azure_private_endpoint_rules=_repeated(d, 'azure_private_endpoint_rules', + NccAzurePrivateEndpointRule)) + + +@dataclass +class NetworkConnectivityConfiguration: + account_id: Optional[str] = None + creation_time: Optional[int] = None + egress_config: Optional['NccEgressConfig'] = None + name: Optional[str] = None + network_connectivity_config_id: Optional[str] = None + region: Optional[str] = None + updated_time: Optional[int] = None + + def as_dict(self) -> dict: + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.egress_config: body['egress_config'] = self.egress_config.as_dict() + if self.name is not None: body['name'] = self.name + if self.network_connectivity_config_id is not None: + body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.region is not None: body['region'] = self.region + if self.updated_time is not None: body['updated_time'] = self.updated_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 'NetworkConnectivityConfiguration': + return cls(account_id=d.get('account_id', None), + creation_time=d.get('creation_time', None), + egress_config=_from_dict(d, 'egress_config', NccEgressConfig), + name=d.get('name', None), + network_connectivity_config_id=d.get('network_connectivity_config_id', None), + region=d.get('region', None), + updated_time=d.get('updated_time', None)) + + @dataclass class PartitionId: workspace_id: Optional[int] = None @@ -840,11 +1076,11 @@ def list(self) -> Iterator['IpAccessListInfo']: return parsed if parsed is not None else [] def replace(self, + ip_access_list_id: str, label: str, list_type: ListType, ip_addresses: List[str], enabled: bool, - ip_access_list_id: str, *, list_id: Optional[str] = None): """Replace access list. @@ -859,6 +1095,8 @@ def replace(self, returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take effect. + :param ip_access_list_id: str + The ID for the corresponding IP access list. :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` @@ -870,8 +1108,6 @@ def replace(self, Array of IP addresses or CIDR values to be added to the IP access list. :param enabled: bool Specifies whether this IP access list is enabled. - :param ip_access_list_id: str - The ID for the corresponding IP access list. :param list_id: str (optional) Universally unique identifier (UUID) of the IP access list. @@ -890,11 +1126,11 @@ def replace(self, headers=headers) def update(self, + ip_access_list_id: str, label: str, list_type: ListType, ip_addresses: List[str], enabled: bool, - ip_access_list_id: str, *, list_id: Optional[str] = None): """Update access list. @@ -913,6 +1149,8 @@ def update(self, It can take a few minutes for the changes to take effect. + :param ip_access_list_id: str + The ID for the corresponding IP access list. :param label: str Label for the IP access list. This **cannot** be empty. 
:param list_type: :class:`ListType` @@ -924,8 +1162,6 @@ def update(self, Array of IP addresses or CIDR values to be added to the IP access list. :param enabled: bool Specifies whether this IP access list is enabled. - :param ip_access_list_id: str - The ID for the corresponding IP access list. :param list_id: str (optional) Universally unique identifier (UUID) of the IP access list. @@ -1257,11 +1493,11 @@ def list(self) -> Iterator['IpAccessListInfo']: return parsed if parsed is not None else [] def replace(self, + ip_access_list_id: str, label: str, list_type: ListType, ip_addresses: List[str], enabled: bool, - ip_access_list_id: str, *, list_id: Optional[str] = None): """Replace access list. @@ -1277,6 +1513,8 @@ def replace(self, effect. Note that your resulting IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus. + :param ip_access_list_id: str + The ID for the corresponding IP access list to modify. :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` @@ -1288,8 +1526,6 @@ def replace(self, Array of IP addresses or CIDR values to be added to the IP access list. :param enabled: bool Specifies whether this IP access list is enabled. - :param ip_access_list_id: str - The ID for the corresponding IP access list to modify. :param list_id: str (optional) Universally unique identifier (UUID) of the IP access list. @@ -1305,11 +1541,11 @@ def replace(self, self._api.do('PUT', f'/api/2.0/ip-access-lists/{ip_access_list_id}', body=body, headers=headers) def update(self, + ip_access_list_id: str, label: str, list_type: ListType, ip_addresses: List[str], enabled: bool, - ip_access_list_id: str, *, list_id: Optional[str] = None): """Update access list. @@ -1329,6 +1565,8 @@ def update(self, It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus. + :param ip_access_list_id: str + The ID for the corresponding IP access list to modify. :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` @@ -1340,8 +1578,6 @@ def update(self, Array of IP addresses or CIDR values to be added to the IP access list. :param enabled: bool Specifies whether this IP access list is enabled. - :param ip_access_list_id: str - The ID for the corresponding IP access list to modify. :param list_id: str (optional) Universally unique identifier (UUID) of the IP access list. @@ -1357,6 +1593,172 @@ def update(self, self._api.do('PATCH', f'/api/2.0/ip-access-lists/{ip_access_list_id}', body=body, headers=headers) +class NetworkConnectivityAPI: + """These APIs provide configurations for the network connectivity of your workspaces for serverless compute + resources. This API provides stable subnets for your workspace so that you can configure your firewalls on + your Azure Storage accounts to allow access from Databricks. You can also use the API to provision private + endpoints for Databricks to privately connect serverless compute resources to your Azure resources using + Azure Private Link. See [configure serverless secure connectivity]. 
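The IP access list mutators above get the same required-parameter reshuffle: `ip_access_list_id` now leads `replace` and `update` in both the account and workspace variants. A sketch with a placeholder list ID; the `ALLOW` member is assumed from the full `ListType` enum, which this hunk truncates:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings

w = WorkspaceClient()

# ip_access_list_id moved to the front of the positional parameters.
w.ip_access_lists.update(ip_access_list_id='0123-abcd',  # placeholder list ID
                         label='office',
                         list_type=settings.ListType.ALLOW,
                         ip_addresses=['192.0.2.0/24'],
                         enabled=True)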
+ + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security""" + + def __init__(self, api_client): + self._api = api_client + + def create_network_connectivity_configuration(self, name: str, + region: str) -> NetworkConnectivityConfiguration: + """Create a network connectivity configuration. + + Creates a network connectivity configuration (NCC), which provides stable Azure service subnets when + accessing your Azure Storage accounts. You can also use a network connectivity configuration to create + Databricks-managed private endpoints so that Databricks serverless compute resources privately access + your resources. + + **IMPORTANT**: After you create the network connectivity configuration, you must assign one or more + workspaces to the new network connectivity configuration. You can share one network connectivity + configuration with multiple workspaces from the same Azure region within the same Databricks account. + See [configure serverless secure connectivity]. + + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security + + :param name: str + The name of the network connectivity configuration. The name can contain alphanumeric characters, + hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the + regular expression `^[0-9a-zA-Z-_]{3,30}$`. + :param region: str + The Azure region for this network connectivity configuration. Only workspaces in the same Azure + region can be attached to this network connectivity configuration. + + :returns: :class:`NetworkConnectivityConfiguration` + """ + body = {} + if name is not None: body['name'] = name + if region is not None: body['region'] = region + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + res = self._api.do('POST', + f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs', + body=body, + headers=headers) + return NetworkConnectivityConfiguration.from_dict(res) + + def create_private_endpoint_rule( + self, network_connectivity_config_id: str, resource_id: str, + group_id: CreatePrivateEndpointRuleRequestGroupId) -> NccAzurePrivateEndpointRule: + """Create a private endpoint rule. + + Create a private endpoint rule for the specified network connectivity config object. Once the object + is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure + resource. + + **IMPORTANT**: You must use the Azure portal or other Azure tools to approve the private endpoint to + complete the connection. To get the information of the private endpoint created, make a `GET` request + on the new private endpoint rule. See [serverless private link]. + + [serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link + + :param network_connectivity_config_id: str + Your Network Connectivity Configuration ID. + :param resource_id: str + The Azure resource ID of the target resource. + :param group_id: :class:`CreatePrivateEndpointRuleRequestGroupId` + The sub-resource type (group ID) of the target resource. Note that to connect to workspace root + storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
+ + :returns: :class:`NccAzurePrivateEndpointRule` + """ + body = {} + if group_id is not None: body['group_id'] = group_id.value + if resource_id is not None: body['resource_id'] = resource_id + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + res = self._api.do( + 'POST', + f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules', + body=body, + headers=headers) + return NccAzurePrivateEndpointRule.from_dict(res) + + def delete_network_connectivity_configuration(self, network_connectivity_config_id: str): + """Delete a network connectivity configuration. + + Deletes a network connectivity configuration. + + :param network_connectivity_config_id: str + Your Network Connectivity Configuration ID. + + + """ + + headers = {'Accept': 'application/json', } + self._api.do( + 'DELETE', + f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}', + headers=headers) + + def delete_private_endpoint_rule(self, network_connectivity_config_id: str, + private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule: + """Delete a private endpoint rule. + + Initiates deleting a private endpoint rule. The private endpoint will be deactivated and will be + purged after seven days of deactivation. When a private endpoint is in the deactivated state, the + `deactivated` field is set to `true` and the private endpoint is not available to your serverless + compute resources. + + :param network_connectivity_config_id: str + Your Network Connectivity Configuration ID. + :param private_endpoint_rule_id: str + Your private endpoint rule ID. + + :returns: :class:`NccAzurePrivateEndpointRule` + """ + + headers = {'Accept': 'application/json', } + res = self._api.do( + 'DELETE', + f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}', + headers=headers) + return NccAzurePrivateEndpointRule.from_dict(res) + + def get_network_connectivity_configuration( + self, network_connectivity_config_id: str) -> NetworkConnectivityConfiguration: + """Get a network connectivity configuration. + + Gets a network connectivity configuration. + + :param network_connectivity_config_id: str + Your Network Connectivity Configuration ID. + + :returns: :class:`NetworkConnectivityConfiguration` + """ + + headers = {'Accept': 'application/json', } + res = self._api.do( + 'GET', + f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}', + headers=headers) + return NetworkConnectivityConfiguration.from_dict(res) + + def get_private_endpoint_rule(self, network_connectivity_config_id: str, + private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule: + """Get a private endpoint rule. + + Gets the private endpoint rule. + + :param network_connectivity_config_id: str + Your Network Connectivity Configuration ID. + :param private_endpoint_rule_id: str + Your private endpoint rule ID.
+ + :returns: :class:`NccAzurePrivateEndpointRule` + """ + + headers = {'Accept': 'application/json', } + res = self._api.do( + 'GET', + f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}', + headers=headers) + return NccAzurePrivateEndpointRule.from_dict(res) + + class SettingsAPI: """// TODO(yuyuan.tang) to add the description for the setting""" diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 66300c438..cbd5bda82 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -1,14 +1,17 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Dict, Iterator, List, Optional - -from ._internal import _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') from databricks.sdk.service import catalog # all definitions in this file are in alphabetical order @@ -1566,18 +1570,18 @@ def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> I parsed = ListRecipientsResponse.from_dict(json).recipients return parsed if parsed is not None else [] - def rotate_token(self, existing_token_expire_in_seconds: int, name: str) -> RecipientInfo: + def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> RecipientInfo: """Rotate a token. Refreshes the specified recipient's delta sharing authentication token with the provided token info. The caller must be the owner of the recipient. + :param name: str + The name of the recipient. :param existing_token_expire_in_seconds: int The expiration time of the bearer token in ISO 8601 format. This can only set the expiration_time of the existing token to a smaller timestamp; it cannot extend the expiration_time. Use 0 to expire the existing token immediately; a negative number returns an error. - :param name: str - The name of the recipient. :returns: :class:`RecipientInfo` """ diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index af3352729..091596352 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -1,15 +1,14 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
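End to end, the new NetworkConnectivityAPI above composes as follows; a sketch assuming the account client exposes the service as `a.network_connectivity`, with the name, region, resource ID, and rule IDs as placeholders. The private endpoint still needs approval in the Azure portal before its connection state reaches `ESTABLISHED`:

from databricks.sdk import AccountClient
from databricks.sdk.service import settings

a = AccountClient()

# 1. Create the parent network connectivity configuration (NCC).
ncc = a.network_connectivity.create_network_connectivity_configuration(
    name='my-ncc', region='westus')  # placeholder name and region

# 2. Read back the stable egress subnets exposed by the default rules.
cfg = a.network_connectivity.get_network_connectivity_configuration(
    ncc.network_connectivity_config_id)
if cfg.egress_config and cfg.egress_config.default_rules:
    rule = cfg.egress_config.default_rules.azure_service_endpoint_rule
    print(rule.subnets, rule.target_services)

# 3. Provision a Databricks-managed private endpoint to a storage account.
pe = a.network_connectivity.create_private_endpoint_rule(
    network_connectivity_config_id=ncc.network_connectivity_config_id,
    resource_id='/subscriptions/.../storageAccounts/mystorage',  # placeholder resource ID
    group_id=settings.CreatePrivateEndpointRuleRequestGroupId.BLOB)

# 4. Deleting deactivates the rule; it is purged after seven days.
a.network_connectivity.delete_private_endpoint_rule(
    network_connectivity_config_id=ncc.network_connectivity_config_id,
    private_endpoint_rule_id=pe.rule_id)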
-import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional - -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') @@ -242,6 +241,7 @@ def from_dict(cls, d: Dict[str, any]) -> 'ChannelInfo': class ChannelName(Enum): + """Name of the channel""" CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT' CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM' @@ -2579,23 +2579,23 @@ def list(self) -> Iterator['Alert']: return [Alert.from_dict(v) for v in res] def update(self, + alert_id: str, name: str, options: AlertOptions, query_id: str, - alert_id: str, *, rearm: Optional[int] = None): """Update an alert. Updates an alert. + :param alert_id: str :param name: str Name of the alert. :param options: :class:`AlertOptions` Alert configuration options. :param query_id: str Query ID. - :param alert_id: str :param rearm: int (optional) Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. @@ -2662,21 +2662,21 @@ def delete(self, id: str): self._api.do('DELETE', f'/api/2.0/preview/sql/widgets/{id}', headers=headers) def update(self, + id: str, dashboard_id: str, options: WidgetOptions, width: int, - id: str, *, text: Optional[str] = None, visualization_id: Optional[str] = None) -> Widget: """Update existing widget. + :param id: str :param dashboard_id: str Dashboard ID returned by :method:dashboards/create. :param options: :class:`WidgetOptions` :param width: int Width of a widget - :param id: str :param text: str (optional) If this is a textbox widget, the application displays this text. This field is ignored if the widget contains a visualization in the `visualization` field. diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index 693b5314c..cb1d117cc 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -1,11 +1,14 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
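The SQL service applies the same ID-first reordering to `alerts.update` and `dashboard_widgets.update`. A sketch with placeholder alert and query IDs; the `AlertOptions` fields shown are the standard column/op/value trigger condition:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# alert_id is now the first positional parameter of update().
w.alerts.update(alert_id='abc-123',  # placeholder alert ID
                name='my-alert',
                options=sql.AlertOptions(column='value', op='>', value='100'),
                query_id='def-456',  # placeholder query ID
                rearm=300)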
-import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Dict, Iterator, List, Optional - -from ._internal import _enum, _from_dict, _repeated +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated, Wait _LOG = logging.getLogger('databricks.sdk') diff --git a/examples/alerts/create_alerts.py b/examples/alerts/create_alerts.py index 72367ca88..9d828cec6 100755 --- a/examples/alerts/create_alerts.py +++ b/examples/alerts/create_alerts.py @@ -1,7 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import sql +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/alerts/get_alerts.py b/examples/alerts/get_alerts.py index 3c24e8566..164ebf091 100755 --- a/examples/alerts/get_alerts.py +++ b/examples/alerts/get_alerts.py @@ -1,7 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import sql +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/alerts/list_alerts.py b/examples/alerts/list_alerts.py index 2009772c5..a09c12336 100755 --- a/examples/alerts/list_alerts.py +++ b/examples/alerts/list_alerts.py @@ -1,4 +1,6 @@ from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, 
catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/alerts/update_alerts.py b/examples/alerts/update_alerts.py index 130f71913..da5fb974c 100755 --- a/examples/alerts/update_alerts.py +++ b/examples/alerts/update_alerts.py @@ -1,7 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import sql +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/billable_usage/download_usage_download.py b/examples/billable_usage/download_usage_download.py index aba474963..9dbe74746 100755 --- a/examples/billable_usage/download_usage_download.py +++ b/examples/billable_usage/download_usage_download.py @@ -1,4 +1,6 @@ from databricks.sdk import AccountClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/budgets/create_budgets.py b/examples/budgets/create_budgets.py index 12f20786a..52149c5b0 100755 --- a/examples/budgets/create_budgets.py +++ b/examples/budgets/create_budgets.py @@ -1,7 +1,6 @@ -import time - from databricks.sdk import AccountClient -from databricks.sdk.service import billing +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/budgets/get_budgets.py 
diff --git a/examples/billable_usage/download_usage_download.py b/examples/billable_usage/download_usage_download.py
index aba474963..9dbe74746 100755
--- a/examples/billable_usage/download_usage_download.py
+++ b/examples/billable_usage/download_usage_download.py
@@ -1,4 +1,6 @@
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/budgets/create_budgets.py b/examples/budgets/create_budgets.py
index 12f20786a..52149c5b0 100755
--- a/examples/budgets/create_budgets.py
+++ b/examples/budgets/create_budgets.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import billing
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/budgets/get_budgets.py b/examples/budgets/get_budgets.py
index 8640fc974..3bd1d6028 100755
--- a/examples/budgets/get_budgets.py
+++ b/examples/budgets/get_budgets.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import billing
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/budgets/list_budgets.py b/examples/budgets/list_budgets.py
index 303690ab7..29d215fbf 100755
--- a/examples/budgets/list_budgets.py
+++ b/examples/budgets/list_budgets.py
@@ -1,4 +1,6 @@
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/budgets/update_budgets.py b/examples/budgets/update_budgets.py
index 1a0193b1d..a773410d2 100755
--- a/examples/budgets/update_budgets.py
+++ b/examples/budgets/update_budgets.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import billing
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
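The budget examples exercise the account-level billing service the same way; a condensed sketch assuming the legacy `billing.Budget` shape (filter, period, and alert values are illustrative):

```python
import time

from databricks.sdk import AccountClient
from databricks.sdk.service import billing

a = AccountClient()

created = a.budgets.create(budget=billing.Budget(
    name=f'sdk-example-{time.time_ns()}',
    filter="tag.tagName = 'all'",
    period='1 month',
    start_date='2022-01-01',
    target_amount='100',
    alerts=[billing.BudgetAlert(email_notifications=['admin@example.com'],
                                min_percentage=50)]))

by_id = a.budgets.get(budget_id=created.budget.budget_id)
a.budgets.delete(budget_id=created.budget.budget_id)
```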
diff --git a/examples/catalogs/create_catalog_workspace_bindings.py b/examples/catalogs/create_catalog_workspace_bindings.py
index 7d746c0bb..aedc8027e 100755
--- a/examples/catalogs/create_catalog_workspace_bindings.py
+++ b/examples/catalogs/create_catalog_workspace_bindings.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/catalogs/create_catalogs.py b/examples/catalogs/create_catalogs.py
index 7d746c0bb..aedc8027e 100755
--- a/examples/catalogs/create_catalogs.py
+++ b/examples/catalogs/create_catalogs.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/catalogs/create_schemas.py b/examples/catalogs/create_schemas.py
index f0d29dea8..af4b398ee 100755
--- a/examples/catalogs/create_schemas.py
+++ b/examples/catalogs/create_schemas.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/catalogs/create_shares.py b/examples/catalogs/create_shares.py
index 85ef22704..ece59b163 100755
--- a/examples/catalogs/create_shares.py
+++ b/examples/catalogs/create_shares.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/catalogs/create_tables.py b/examples/catalogs/create_tables.py
index 85ef22704..ece59b163 100755
--- a/examples/catalogs/create_tables.py
+++ b/examples/catalogs/create_tables.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/catalogs/create_volumes.py b/examples/catalogs/create_volumes.py
index 85ef22704..ece59b163 100755
--- a/examples/catalogs/create_volumes.py
+++ b/examples/catalogs/create_volumes.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/catalogs/get_catalogs.py b/examples/catalogs/get_catalogs.py
index af9c4f660..16006ae09 100755
--- a/examples/catalogs/get_catalogs.py
+++ b/examples/catalogs/get_catalogs.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/catalogs/list_catalogs.py b/examples/catalogs/list_catalogs.py
index e03770a19..092dd6382 100755
--- a/examples/catalogs/list_catalogs.py
+++ b/examples/catalogs/list_catalogs.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/catalogs/update_catalog_workspace_bindings.py b/examples/catalogs/update_catalog_workspace_bindings.py
index cafd374a9..b9aac603c 100755
--- a/examples/catalogs/update_catalog_workspace_bindings.py
+++ b/examples/catalogs/update_catalog_workspace_bindings.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/catalogs/update_catalogs.py b/examples/catalogs/update_catalogs.py
index 6f8e257cd..3f6e0941c 100755
--- a/examples/catalogs/update_catalogs.py
+++ b/examples/catalogs/update_catalogs.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
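The catalog examples reduce to a create/update/delete round trip against Unity Catalog; a minimal sketch:

```python
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

created = w.catalogs.create(name=f'sdk-example-{time.time_ns()}')
w.catalogs.update(name=created.name, comment='updated from the SDK')
# force=True also drops any schemas the catalog still contains
w.catalogs.delete(name=created.name, force=True)
```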
diff --git a/examples/cluster_policies/create_cluster_policies.py b/examples/cluster_policies/create_cluster_policies.py
index 53d6ce96f..a6290009a 100755
--- a/examples/cluster_policies/create_cluster_policies.py
+++ b/examples/cluster_policies/create_cluster_policies.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/cluster_policies/edit_cluster_policies.py b/examples/cluster_policies/edit_cluster_policies.py
index 5c2777e17..429bfe159 100755
--- a/examples/cluster_policies/edit_cluster_policies.py
+++ b/examples/cluster_policies/edit_cluster_policies.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/cluster_policies/get_cluster_policies.py b/examples/cluster_policies/get_cluster_policies.py
index d93196bef..0eb997d17 100755
--- a/examples/cluster_policies/get_cluster_policies.py
+++ b/examples/cluster_policies/get_cluster_policies.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/cluster_policies/list_cluster_policies.py b/examples/cluster_policies/list_cluster_policies.py
index e4ebb60b3..916fae823 100755
--- a/examples/cluster_policies/list_cluster_policies.py
+++ b/examples/cluster_policies/list_cluster_policies.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import compute
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
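The cluster-policy examples follow the same create/get/delete pattern; a minimal sketch (the policy definition is an illustrative JSON document):

```python
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

created = w.cluster_policies.create(
    name=f'sdk-example-{time.time_ns()}',
    definition="""{"spark_conf.spark.databricks.delta.preview.enabled": {"type": "fixed", "value": true}}""")

policy = w.cluster_policies.get(policy_id=created.policy_id)
w.cluster_policies.delete(policy_id=created.policy_id)
```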
diff --git a/examples/clusters/change_owner_clusters_api_integration.py b/examples/clusters/change_owner_clusters_api_integration.py
index bea93c1c3..0ef21defa 100755
--- a/examples/clusters/change_owner_clusters_api_integration.py
+++ b/examples/clusters/change_owner_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/create_clusters_api_integration.py b/examples/clusters/create_clusters_api_integration.py
index 9b6111a5f..868909f3b 100755
--- a/examples/clusters/create_clusters_api_integration.py
+++ b/examples/clusters/create_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
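The clusters integration examples are where the `Wait` pattern shows up most often; a condensed sketch, assuming a `TEST_INSTANCE_POOL_ID` environment variable points at a usable instance pool:

```python
import os
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

latest = w.clusters.select_spark_version(latest=True)

# create(...) returns a Wait; .result() blocks until the cluster is RUNNING.
clstr = w.clusters.create(cluster_name=f'sdk-example-{time.time_ns()}',
                          spark_version=latest,
                          instance_pool_id=os.environ['TEST_INSTANCE_POOL_ID'],
                          autotermination_minutes=15,
                          num_workers=1).result()

w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
```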
diff --git a/examples/clusters/delete_clusters_api_integration.py b/examples/clusters/delete_clusters_api_integration.py
index 3f61f0fe2..1d57c6a67 100755
--- a/examples/clusters/delete_clusters_api_integration.py
+++ b/examples/clusters/delete_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/edit_clusters_api_integration.py b/examples/clusters/edit_clusters_api_integration.py
index d58ad65ea..a77c4e37d 100755
--- a/examples/clusters/edit_clusters_api_integration.py
+++ b/examples/clusters/edit_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/ensure_cluster_is_running_commands_direct_usage.py b/examples/clusters/ensure_cluster_is_running_commands_direct_usage.py
index 4d93f3d3b..14b1fdba3 100755
--- a/examples/clusters/ensure_cluster_is_running_commands_direct_usage.py
+++ b/examples/clusters/ensure_cluster_is_running_commands_direct_usage.py
@@ -1,7 +1,6 @@
-import os
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import compute
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/events_clusters_api_integration.py b/examples/clusters/events_clusters_api_integration.py
index 971f520ea..4fb3c6690 100755
--- a/examples/clusters/events_clusters_api_integration.py
+++ b/examples/clusters/events_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/get_clusters_api_integration.py b/examples/clusters/get_clusters_api_integration.py
index 23e833cfa..9b5e3c073 100755
--- a/examples/clusters/get_clusters_api_integration.py
+++ b/examples/clusters/get_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/list_clusters_api_integration.py b/examples/clusters/list_clusters_api_integration.py
index 8c1992f83..f912dceb2 100755
--- a/examples/clusters/list_clusters_api_integration.py
+++ b/examples/clusters/list_clusters_api_integration.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import compute
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/list_node_types_clusters_api_integration.py b/examples/clusters/list_node_types_clusters_api_integration.py
index 3f087c955..528ddfab9 100755
--- a/examples/clusters/list_node_types_clusters_api_integration.py
+++ b/examples/clusters/list_node_types_clusters_api_integration.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/pin_clusters_api_integration.py b/examples/clusters/pin_clusters_api_integration.py
index b3b57097a..15e856f31 100755
--- a/examples/clusters/pin_clusters_api_integration.py
+++ b/examples/clusters/pin_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/resize_clusters_api_integration.py b/examples/clusters/resize_clusters_api_integration.py
index 96ca09ad1..9a8f74de9 100755
--- a/examples/clusters/resize_clusters_api_integration.py
+++ b/examples/clusters/resize_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/restart_clusters_api_integration.py b/examples/clusters/restart_clusters_api_integration.py
index 9ddd8cd2b..9f9681d00 100755
--- a/examples/clusters/restart_clusters_api_integration.py
+++ b/examples/clusters/restart_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/select_node_type_instance_pools.py b/examples/clusters/select_node_type_instance_pools.py
index 85dc77a82..1fd1f06c7 100755
--- a/examples/clusters/select_node_type_instance_pools.py
+++ b/examples/clusters/select_node_type_instance_pools.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/select_spark_version_clusters_api_integration.py b/examples/clusters/select_spark_version_clusters_api_integration.py
index 28a16baa4..5de066086 100755
--- a/examples/clusters/select_spark_version_clusters_api_integration.py
+++ b/examples/clusters/select_spark_version_clusters_api_integration.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/start_clusters_api_integration.py b/examples/clusters/start_clusters_api_integration.py
index 5870b68a0..aee621eec 100755
--- a/examples/clusters/start_clusters_api_integration.py
+++ b/examples/clusters/start_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/clusters/unpin_clusters_api_integration.py b/examples/clusters/unpin_clusters_api_integration.py
index d671a0e53..b2646933f 100755
--- a/examples/clusters/unpin_clusters_api_integration.py
+++ b/examples/clusters/unpin_clusters_api_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/command_context/execute_commands.py b/examples/command_context/execute_commands.py
index 7c7c45ae4..c3717e8ec 100755
--- a/examples/command_context/execute_commands.py
+++ b/examples/command_context/execute_commands.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/command_execution/create_commands_direct_usage.py b/examples/command_execution/create_commands_direct_usage.py
index fc61e55f3..051ce0105 100755
--- a/examples/command_execution/create_commands_direct_usage.py
+++ b/examples/command_execution/create_commands_direct_usage.py
@@ -1,7 +1,6 @@
-import os
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import compute
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/command_execution/execute_commands_direct_usage.py b/examples/command_execution/execute_commands_direct_usage.py
index 98fa13a19..1d4d6e6a0 100755
--- a/examples/command_execution/execute_commands_direct_usage.py
+++ b/examples/command_execution/execute_commands_direct_usage.py
@@ -1,7 +1,6 @@
-import os
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import compute
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/command_execution/start_commands.py b/examples/command_execution/start_commands.py
index 9901a2f2c..44851bd6b 100755
--- a/examples/command_execution/start_commands.py
+++ b/examples/command_execution/start_commands.py
@@ -1,7 +1,6 @@
-import os
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import compute
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
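The command-execution examples chain three calls: create an execution context on a running cluster, execute a command in it, and destroy it. A condensed sketch, assuming `TEST_DEFAULT_CLUSTER_ID` names a running cluster (the `Language` enum member casing follows the generated `compute` service):

```python
import os

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()

cluster_id = os.environ['TEST_DEFAULT_CLUSTER_ID']

# Both create() and execute() return Wait objects; result() blocks until the
# context is ready / the command has finished.
context = w.command_execution.create(cluster_id=cluster_id,
                                     language=compute.Language.PYTHON).result()

results = w.command_execution.execute(cluster_id=cluster_id,
                                      context_id=context.id,
                                      language=compute.Language.PYTHON,
                                      command='print(1)').result()

w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id)
```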
diff --git a/examples/connections/create_connections.py b/examples/connections/create_connections.py
index 1bd9414aa..fc6cdf398 100755
--- a/examples/connections/create_connections.py
+++ b/examples/connections/create_connections.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/connections/get_connections.py b/examples/connections/get_connections.py
index 4ab7b0108..a783fc024 100755
--- a/examples/connections/get_connections.py
+++ b/examples/connections/get_connections.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/connections/list_connections.py b/examples/connections/list_connections.py
index 1ffe401a5..96e5cf0e4 100755
--- a/examples/connections/list_connections.py
+++ b/examples/connections/list_connections.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/connections/update_connections.py b/examples/connections/update_connections.py
index 32d96ee4d..57b532d19 100755
--- a/examples/connections/update_connections.py
+++ b/examples/connections/update_connections.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
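The connection examples target Unity Catalog connections; a condensed sketch with illustrative option values. The generated client of this vintage appears to expose the path parameter as `name_arg`, which is an assumption here and may differ across versions:

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

conn = w.connections.create(name=f'sdk-example-{time.time_ns()}',
                            connection_type=catalog.ConnectionType.DATABRICKS,
                            options={
                                'host': 'workspace.example.com',  # illustrative
                                'port': '443',
                                'httpPath': '/sql/1.0/warehouses/abcdef',
                                'user': 'token',
                                'password': '{{secrets/scope/key}}',
                            })

fetched = w.connections.get(name_arg=conn.name)
w.connections.delete(name_arg=conn.name)
```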
diff --git a/examples/credentials/create_credentials.py b/examples/credentials/create_credentials.py
index 9885467c5..424be9133 100755
--- a/examples/credentials/create_credentials.py
+++ b/examples/credentials/create_credentials.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/credentials/create_log_delivery.py b/examples/credentials/create_log_delivery.py
index 28b521cd6..b46f2a58a 100755
--- a/examples/credentials/create_log_delivery.py
+++ b/examples/credentials/create_log_delivery.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/credentials/create_workspaces.py b/examples/credentials/create_workspaces.py
index 9885467c5..424be9133 100755
--- a/examples/credentials/create_workspaces.py
+++ b/examples/credentials/create_workspaces.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
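The account credential examples register a cross-account IAM role with the provisioning service; a condensed sketch, assuming `TEST_CROSSACCOUNT_ARN` holds the role ARN:

```python
import os
import time

from databricks.sdk import AccountClient
from databricks.sdk.service import provisioning

a = AccountClient()

role = a.credentials.create(
    credentials_name=f'sdk-example-{time.time_ns()}',
    aws_credentials=provisioning.CreateCredentialAwsCredentials(
        sts_role=provisioning.CreateCredentialStsRole(
            role_arn=os.environ['TEST_CROSSACCOUNT_ARN'])))

by_id = a.credentials.get(credentials_id=role.credentials_id)
a.credentials.delete(credentials_id=role.credentials_id)
```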
diff --git a/examples/credentials/get_credentials.py b/examples/credentials/get_credentials.py
index 847fc70ba..e865318bd 100755
--- a/examples/credentials/get_credentials.py
+++ b/examples/credentials/get_credentials.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/credentials/list_credentials.py b/examples/credentials/list_credentials.py
index fb9e82011..3a5a83aaa 100755
--- a/examples/credentials/list_credentials.py
+++ b/examples/credentials/list_credentials.py
@@ -1,4 +1,6 @@
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/current_user/me_current_user.py b/examples/current_user/me_current_user.py
index 19c07fc8d..ab32ed668 100755
--- a/examples/current_user/me_current_user.py
+++ b/examples/current_user/me_current_user.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/current_user/me_tokens.py b/examples/current_user/me_tokens.py
index 5619145a3..6d7a7b892 100755
--- a/examples/current_user/me_tokens.py
+++ b/examples/current_user/me_tokens.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
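The current-user examples are the simplest of the batch; `me()` returns the caller's SCIM record:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

me = w.current_user.me()
print(me.user_name)
```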
diff --git a/examples/dashboards/create_dashboards.py b/examples/dashboards/create_dashboards.py
index 9ac5670a0..db193d8e2 100755
--- a/examples/dashboards/create_dashboards.py
+++ b/examples/dashboards/create_dashboards.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/dashboards/delete_dashboards.py b/examples/dashboards/delete_dashboards.py
index 8c5806ec6..31338f726 100755
--- a/examples/dashboards/delete_dashboards.py
+++ b/examples/dashboards/delete_dashboards.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/dashboards/get_dashboards.py b/examples/dashboards/get_dashboards.py
index 3aa5ae55f..80eddb5e9 100755
--- a/examples/dashboards/get_dashboards.py
+++ b/examples/dashboards/get_dashboards.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/dashboards/list_dashboards.py b/examples/dashboards/list_dashboards.py
index 10b1c0f34..b4d0e2101 100755
--- a/examples/dashboards/list_dashboards.py
+++ b/examples/dashboards/list_dashboards.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import sql
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/dashboards/restore_dashboards.py b/examples/dashboards/restore_dashboards.py
index 7a34049d4..7adf5eb26 100755
--- a/examples/dashboards/restore_dashboards.py
+++ b/examples/dashboards/restore_dashboards.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
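The dashboard examples run a create/get/delete/restore cycle against the legacy SQL dashboards API; a condensed sketch:

```python
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

dashboard = w.dashboards.create(name=f'sdk-example-{time.time_ns()}')
fetched = w.dashboards.get(dashboard_id=dashboard.id)

# delete() moves a dashboard to the trash; restore() brings it back.
w.dashboards.delete(dashboard_id=dashboard.id)
w.dashboards.restore(dashboard_id=dashboard.id)
```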
diff --git a/examples/data_sources/list_queries.py b/examples/data_sources/list_queries.py
index 24bb54509..d79ca4723 100755
--- a/examples/data_sources/list_queries.py
+++ b/examples/data_sources/list_queries.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/databricks/must_tokens.py b/examples/databricks/must_tokens.py
index 3c1c6c6d8..cd2d59868 100755
--- a/examples/databricks/must_tokens.py
+++ b/examples/databricks/must_tokens.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/encryption_keys/create_encryption_keys.py b/examples/encryption_keys/create_encryption_keys.py
index 83201e135..c1ff6e4af 100755
--- a/examples/encryption_keys/create_encryption_keys.py
+++ b/examples/encryption_keys/create_encryption_keys.py
@@ -1,7 +1,6 @@
-import os
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 a = AccountClient()
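The account-level examples follow the same pattern against AccountClient. A sketch of the encryption-key flow these hunks touch, assuming test credentials arrive via environment variables (the variable names are illustrative):

from databricks.sdk import AccountClient
from databricks.sdk.service import provisioning
import time, base64, os

a = AccountClient()

# register a customer-managed AWS KMS key for managed services
created = a.encryption_keys.create(
    aws_key_info=provisioning.CreateAwsKeyInfo(
        key_arn=os.environ["TEST_MANAGED_KMS_KEY_ARN"],       # illustrative env var
        key_alias=os.environ["TEST_STORAGE_KMS_KEY_ALIAS"]),  # illustrative env var
    use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES])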
diff --git a/examples/encryption_keys/get_encryption_keys.py b/examples/encryption_keys/get_encryption_keys.py
index 9b325dc2c..2b7cc03d2 100755
--- a/examples/encryption_keys/get_encryption_keys.py
+++ b/examples/encryption_keys/get_encryption_keys.py
@@ -1,7 +1,6 @@
-import os
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 a = AccountClient()
diff --git a/examples/encryption_keys/list_encryption_keys.py b/examples/encryption_keys/list_encryption_keys.py
index a1dc5094c..59d4f5de4 100755
--- a/examples/encryption_keys/list_encryption_keys.py
+++ b/examples/encryption_keys/list_encryption_keys.py
@@ -1,4 +1,6 @@
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 a = AccountClient()
diff --git a/examples/experiments/create_experiment_experiments.py b/examples/experiments/create_experiment_experiments.py
index 99ca7488d..aef219949 100755
--- a/examples/experiments/create_experiment_experiments.py
+++ b/examples/experiments/create_experiment_experiments.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/experiments/create_experiment_m_lflow_runs.py b/examples/experiments/create_experiment_m_lflow_runs.py
index 99ca7488d..aef219949 100755
--- a/examples/experiments/create_experiment_m_lflow_runs.py
+++ b/examples/experiments/create_experiment_m_lflow_runs.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/experiments/create_run_m_lflow_runs.py b/examples/experiments/create_run_m_lflow_runs.py
index 2351d4b58..ce82335fa 100755
--- a/examples/experiments/create_run_m_lflow_runs.py
+++ b/examples/experiments/create_run_m_lflow_runs.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import ml
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/experiments/get_experiment_experiments.py b/examples/experiments/get_experiment_experiments.py
index a007e99a9..5ad560fd1 100755
--- a/examples/experiments/get_experiment_experiments.py
+++ b/examples/experiments/get_experiment_experiments.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/experiments/list_experiments_experiments.py b/examples/experiments/list_experiments_experiments.py
index e21d1913a..5af630c95 100755
--- a/examples/experiments/list_experiments_experiments.py
+++ b/examples/experiments/list_experiments_experiments.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import ml
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
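The experiments examples are the consumers of the ml module, which is why it must stay importable from the consolidated list. A sketch of the run-creation flow, trimmed to the one module it needs:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import ml
import time, base64, os

w = WorkspaceClient()

# create an experiment, then log a tagged run against it
experiment = w.experiments.create_experiment(name=f'sdk-{time.time_ns()}')
created = w.experiments.create_run(experiment_id=experiment.experiment_id,
                                   tags=[ml.RunTag(key="foo", value="bar")])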
diff --git a/examples/experiments/update_experiment_experiments.py b/examples/experiments/update_experiment_experiments.py
index 9eba74f56..6c123fe21 100755
--- a/examples/experiments/update_experiment_experiments.py
+++ b/examples/experiments/update_experiment_experiments.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/experiments/update_run_m_lflow_runs.py b/examples/experiments/update_run_m_lflow_runs.py
index aacdd498f..f85e19d06 100755
--- a/examples/experiments/update_run_m_lflow_runs.py
+++ b/examples/experiments/update_run_m_lflow_runs.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import ml
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/external_locations/create_external_locations_on_aws.py b/examples/external_locations/create_external_locations_on_aws.py
index f136e3055..c3ddfd1a8 100755
--- a/examples/external_locations/create_external_locations_on_aws.py
+++ b/examples/external_locations/create_external_locations_on_aws.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/external_locations/create_volumes.py b/examples/external_locations/create_volumes.py
index 6e0027e3c..1fa40fe5f 100755
--- a/examples/external_locations/create_volumes.py
+++ b/examples/external_locations/create_volumes.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/external_locations/get_external_locations_on_aws.py b/examples/external_locations/get_external_locations_on_aws.py
index 8189fc0df..551545f92 100755
--- a/examples/external_locations/get_external_locations_on_aws.py
+++ b/examples/external_locations/get_external_locations_on_aws.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/external_locations/list_external_locations_on_aws.py b/examples/external_locations/list_external_locations_on_aws.py
index d847088b9..e2d552f73 100755
--- a/examples/external_locations/list_external_locations_on_aws.py
+++ b/examples/external_locations/list_external_locations_on_aws.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
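The external-location examples are the catalog-module consumers in this batch. A sketch of the create flow, assuming the IAM role ARN and bucket come from test environment variables (names illustrative):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog
import time, base64, os

w = WorkspaceClient()

# an external location needs a storage credential that can reach the bucket
credential = w.storage_credentials.create(
    name=f'sdk-{time.time_ns()}',
    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))  # illustrative env var
created = w.external_locations.create(
    name=f'sdk-{time.time_ns()}',
    credential_name=credential.name,
    url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}')  # illustrative env var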
diff --git a/examples/external_locations/update_external_locations_on_aws.py b/examples/external_locations/update_external_locations_on_aws.py
index 8d0e8e607..355ffc28e 100755
--- a/examples/external_locations/update_external_locations_on_aws.py
+++ b/examples/external_locations/update_external_locations_on_aws.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/git_credentials/create_git_credentials.py b/examples/git_credentials/create_git_credentials.py
index d505d22ac..235ac65af 100755
--- a/examples/git_credentials/create_git_credentials.py
+++ b/examples/git_credentials/create_git_credentials.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/git_credentials/get_git_credentials.py b/examples/git_credentials/get_git_credentials.py
index a42965b84..b97a58a4a 100755
--- a/examples/git_credentials/get_git_credentials.py
+++ b/examples/git_credentials/get_git_credentials.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/git_credentials/list_git_credentials.py b/examples/git_credentials/list_git_credentials.py
index a00e28b53..c6033d6e4 100755
--- a/examples/git_credentials/list_git_credentials.py
+++ b/examples/git_credentials/list_git_credentials.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/git_credentials/update_git_credentials.py b/examples/git_credentials/update_git_credentials.py
index c877fefcb..6c294bb49 100755
--- a/examples/git_credentials/update_git_credentials.py
+++ b/examples/git_credentials/update_git_credentials.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/global_init_scripts/create_global_init_scripts.py b/examples/global_init_scripts/create_global_init_scripts.py
index 6058bbf59..46915c517 100755
--- a/examples/global_init_scripts/create_global_init_scripts.py
+++ b/examples/global_init_scripts/create_global_init_scripts.py
@@ -1,7 +1,6 @@
-import base64
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/global_init_scripts/get_global_init_scripts.py b/examples/global_init_scripts/get_global_init_scripts.py
index fe12d41b8..2d1f924e0 100755
--- a/examples/global_init_scripts/get_global_init_scripts.py
+++ b/examples/global_init_scripts/get_global_init_scripts.py
@@ -1,7 +1,6 @@
-import base64
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/global_init_scripts/list_global_init_scripts.py b/examples/global_init_scripts/list_global_init_scripts.py
index 391abd8e4..c153bb10f 100755
--- a/examples/global_init_scripts/list_global_init_scripts.py
+++ b/examples/global_init_scripts/list_global_init_scripts.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/global_init_scripts/update_global_init_scripts.py b/examples/global_init_scripts/update_global_init_scripts.py
index a53e410a2..094c8b36f 100755
--- a/examples/global_init_scripts/update_global_init_scripts.py
+++ b/examples/global_init_scripts/update_global_init_scripts.py
@@ -1,7 +1,6 @@
-import base64
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
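The global-init-script examples are what keeps base64 in the shared import line: the REST API expects the script body base64-encoded. A sketch:

from databricks.sdk import WorkspaceClient
import time, base64, os

w = WorkspaceClient()

# the script payload must be base64-encoded text
created = w.global_init_scripts.create(name=f'sdk-{time.time_ns()}',
                                       script=base64.b64encode(b'echo 1').decode(),
                                       enabled=True,
                                       position=10)
w.global_init_scripts.delete(script_id=created.script_id)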
diff --git a/examples/grants/get_effective_tables.py b/examples/grants/get_effective_tables.py
index b7958d718..4c6dadd4a 100755
--- a/examples/grants/get_effective_tables.py
+++ b/examples/grants/get_effective_tables.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/grants/update_tables.py b/examples/grants/update_tables.py
index 3bba5dc63..7211fa737 100755
--- a/examples/grants/update_tables.py
+++ b/examples/grants/update_tables.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/groups/create_generic_permissions.py b/examples/groups/create_generic_permissions.py
index 1ba53cbb1..2a4bdd129 100755
--- a/examples/groups/create_generic_permissions.py
+++ b/examples/groups/create_generic_permissions.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/groups/create_groups.py b/examples/groups/create_groups.py
index 1ba53cbb1..2a4bdd129 100755
--- a/examples/groups/create_groups.py
+++ b/examples/groups/create_groups.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/groups/create_secrets.py b/examples/groups/create_secrets.py
index 1ba53cbb1..2a4bdd129 100755
--- a/examples/groups/create_secrets.py
+++ b/examples/groups/create_secrets.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/groups/delete_generic_permissions.py b/examples/groups/delete_generic_permissions.py
index 261d5c772..43acb248c 100755
--- a/examples/groups/delete_generic_permissions.py
+++ b/examples/groups/delete_generic_permissions.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/groups/delete_groups.py b/examples/groups/delete_groups.py
index 0033bdbb6..ccc55c057 100755
--- a/examples/groups/delete_groups.py
+++ b/examples/groups/delete_groups.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/groups/delete_secrets.py b/examples/groups/delete_secrets.py
index 261d5c772..43acb248c 100755
--- a/examples/groups/delete_secrets.py
+++ b/examples/groups/delete_secrets.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/groups/get_groups.py b/examples/groups/get_groups.py
index e6d176a8a..f0c75afa8 100755
--- a/examples/groups/get_groups.py
+++ b/examples/groups/get_groups.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/instance_pools/create_instance_pools.py b/examples/instance_pools/create_instance_pools.py
index de027a221..946313d64 100755
--- a/examples/instance_pools/create_instance_pools.py
+++ b/examples/instance_pools/create_instance_pools.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/instance_pools/edit_instance_pools.py b/examples/instance_pools/edit_instance_pools.py
index 69a8186e4..2047ce709 100755
--- a/examples/instance_pools/edit_instance_pools.py
+++ b/examples/instance_pools/edit_instance_pools.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/instance_pools/get_instance_pools.py b/examples/instance_pools/get_instance_pools.py
index 4eeb201b9..c7edb415e 100755
--- a/examples/instance_pools/get_instance_pools.py
+++ b/examples/instance_pools/get_instance_pools.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/instance_pools/list_instance_pools.py b/examples/instance_pools/list_instance_pools.py
index 285168325..7813286a7 100755
--- a/examples/instance_pools/list_instance_pools.py
+++ b/examples/instance_pools/list_instance_pools.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/instance_profiles/add_aws_instance_profiles.py b/examples/instance_profiles/add_aws_instance_profiles.py
index b090eb178..87dccb22c 100755
--- a/examples/instance_profiles/add_aws_instance_profiles.py
+++ b/examples/instance_profiles/add_aws_instance_profiles.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/instance_profiles/edit_aws_instance_profiles.py b/examples/instance_profiles/edit_aws_instance_profiles.py
index bc1e798b0..42b137174 100755
--- a/examples/instance_profiles/edit_aws_instance_profiles.py
+++ b/examples/instance_profiles/edit_aws_instance_profiles.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/instance_profiles/list_aws_instance_profiles.py b/examples/instance_profiles/list_aws_instance_profiles.py
index c9fe5261a..0a042536a 100755
--- a/examples/instance_profiles/list_aws_instance_profiles.py
+++ b/examples/instance_profiles/list_aws_instance_profiles.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/io/read_usage_download.py b/examples/io/read_usage_download.py
index d52b31b0b..19af1e8b3 100755
--- a/examples/io/read_usage_download.py
+++ b/examples/io/read_usage_download.py
@@ -1,4 +1,6 @@
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 a = AccountClient()
diff --git a/examples/ip_access_lists/create_ip_access_lists.py b/examples/ip_access_lists/create_ip_access_lists.py
index a52b80f92..bce47d446 100755
--- a/examples/ip_access_lists/create_ip_access_lists.py
+++ b/examples/ip_access_lists/create_ip_access_lists.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import settings
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/ip_access_lists/get_ip_access_lists.py b/examples/ip_access_lists/get_ip_access_lists.py
index 85d7929e3..73b91ac7d 100755
--- a/examples/ip_access_lists/get_ip_access_lists.py
+++ b/examples/ip_access_lists/get_ip_access_lists.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import settings
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
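The IP-access-list examples exercise the settings module. A sketch of the create call, with an illustrative CIDR block:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings
import time, base64, os

w = WorkspaceClient()

# block a range of addresses with a uniquely labelled list
created = w.ip_access_lists.create(label=f'sdk-{time.time_ns()}',
                                   ip_addresses=['1.0.0.0/16'],
                                   list_type=settings.ListType.BLOCK)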
diff --git a/examples/ip_access_lists/list_ip_access_lists.py b/examples/ip_access_lists/list_ip_access_lists.py
index f9fd78676..e45ece6c8 100755
--- a/examples/ip_access_lists/list_ip_access_lists.py
+++ b/examples/ip_access_lists/list_ip_access_lists.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/ip_access_lists/replace_ip_access_lists.py b/examples/ip_access_lists/replace_ip_access_lists.py
index a61f5f81b..c5aa21c54 100755
--- a/examples/ip_access_lists/replace_ip_access_lists.py
+++ b/examples/ip_access_lists/replace_ip_access_lists.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import settings
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/jobs/cancel_all_runs_jobs_api_full_integration.py b/examples/jobs/cancel_all_runs_jobs_api_full_integration.py
index 260f6f882..f08a19608 100755
--- a/examples/jobs/cancel_all_runs_jobs_api_full_integration.py
+++ b/examples/jobs/cancel_all_runs_jobs_api_full_integration.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import jobs
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/jobs/cancel_run_jobs_api_full_integration.py b/examples/jobs/cancel_run_jobs_api_full_integration.py
index 9cb4f75cd..7ac8b0499 100755
--- a/examples/jobs/cancel_run_jobs_api_full_integration.py
+++ b/examples/jobs/cancel_run_jobs_api_full_integration.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import jobs
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/jobs/create_jobs_api_full_integration.py b/examples/jobs/create_jobs_api_full_integration.py
index 1f5082886..c55b44bd9 100755
--- a/examples/jobs/create_jobs_api_full_integration.py
+++ b/examples/jobs/create_jobs_api_full_integration.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import jobs
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/jobs/export_run_jobs_api_full_integration.py b/examples/jobs/export_run_jobs_api_full_integration.py
index 613961199..e2f3c4d4d 100755
--- a/examples/jobs/export_run_jobs_api_full_integration.py
+++ b/examples/jobs/export_run_jobs_api_full_integration.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import jobs
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/jobs/get_jobs_api_full_integration.py b/examples/jobs/get_jobs_api_full_integration.py
index 3a30f1794..4008d3779 100755
--- a/examples/jobs/get_jobs_api_full_integration.py
+++ b/examples/jobs/get_jobs_api_full_integration.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import jobs
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/jobs/get_run_output_jobs_api_full_integration.py b/examples/jobs/get_run_output_jobs_api_full_integration.py
index 804577415..9602865f2 100755
--- a/examples/jobs/get_run_output_jobs_api_full_integration.py
+++ b/examples/jobs/get_run_output_jobs_api_full_integration.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import jobs
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/jobs/list_jobs_api_full_integration.py b/examples/jobs/list_jobs_api_full_integration.py
index 56f1cc7e6..7998ea178 100755
--- a/examples/jobs/list_jobs_api_full_integration.py
+++ b/examples/jobs/list_jobs_api_full_integration.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, iam, sql, serving, catalog, billing, sharing, compute, provisioning, settings, oauth2, files, ml, workspace, jobs, pipelines
+import time, base64, os

 w = WorkspaceClient()
catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/jobs/list_runs_jobs_api_full_integration.py b/examples/jobs/list_runs_jobs_api_full_integration.py index f863309d4..8615f208a 100755 --- a/examples/jobs/list_runs_jobs_api_full_integration.py +++ b/examples/jobs/list_runs_jobs_api_full_integration.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import jobs +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/jobs/repair_run_jobs_api_full_integration.py b/examples/jobs/repair_run_jobs_api_full_integration.py index b90cbc1d8..0cb12661a 100755 --- a/examples/jobs/repair_run_jobs_api_full_integration.py +++ b/examples/jobs/repair_run_jobs_api_full_integration.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import jobs +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/jobs/reset_jobs_api_full_integration.py b/examples/jobs/reset_jobs_api_full_integration.py index 3215fe6fd..926f6329d 100755 --- a/examples/jobs/reset_jobs_api_full_integration.py +++ b/examples/jobs/reset_jobs_api_full_integration.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import jobs +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, 
provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/jobs/run_now_jobs_api_full_integration.py b/examples/jobs/run_now_jobs_api_full_integration.py index 3896990b6..e9c6413c8 100755 --- a/examples/jobs/run_now_jobs_api_full_integration.py +++ b/examples/jobs/run_now_jobs_api_full_integration.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import jobs +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/jobs/submit_jobs_api_full_integration.py b/examples/jobs/submit_jobs_api_full_integration.py index 5769d7675..fddb28a1f 100755 --- a/examples/jobs/submit_jobs_api_full_integration.py +++ b/examples/jobs/submit_jobs_api_full_integration.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import jobs +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/jobs/update_jobs_api_full_integration.py b/examples/jobs/update_jobs_api_full_integration.py index 61194b8e8..c74b9d8da 100755 --- a/examples/jobs/update_jobs_api_full_integration.py +++ b/examples/jobs/update_jobs_api_full_integration.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import jobs +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, 
catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/libraries/update_libraries.py b/examples/libraries/update_libraries.py index d741ee054..1d00e2c1c 100755 --- a/examples/libraries/update_libraries.py +++ b/examples/libraries/update_libraries.py @@ -1,7 +1,6 @@ -import os - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import compute +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/log_delivery/create_log_delivery.py b/examples/log_delivery/create_log_delivery.py index 36edc03a1..cf29955f2 100755 --- a/examples/log_delivery/create_log_delivery.py +++ b/examples/log_delivery/create_log_delivery.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import AccountClient -from databricks.sdk.service import billing, provisioning +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/log_delivery/get_log_delivery.py b/examples/log_delivery/get_log_delivery.py index af3f61927..34cf0a02b 100755 --- a/examples/log_delivery/get_log_delivery.py +++ b/examples/log_delivery/get_log_delivery.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import AccountClient -from databricks.sdk.service import billing, provisioning +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, 
workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/log_delivery/list_log_delivery.py b/examples/log_delivery/list_log_delivery.py index b992f89d8..ac69cd5f9 100755 --- a/examples/log_delivery/list_log_delivery.py +++ b/examples/log_delivery/list_log_delivery.py @@ -1,5 +1,6 @@ from databricks.sdk import AccountClient -from databricks.sdk.service import billing +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/metastore_assignments/list_metastore_assignments.py b/examples/metastore_assignments/list_metastore_assignments.py index a1ce4418b..0e967ef91 100755 --- a/examples/metastore_assignments/list_metastore_assignments.py +++ b/examples/metastore_assignments/list_metastore_assignments.py @@ -1,6 +1,6 @@ -import os - from databricks.sdk import AccountClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/metastores/assign_metastores.py b/examples/metastores/assign_metastores.py index 5bdb740f7..9ee60274b 100755 --- a/examples/metastores/assign_metastores.py +++ b/examples/metastores/assign_metastores.py @@ -1,11 +1,10 @@ -import os -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() -workspace_id = os.environ["TEST_WORKSPACE_ID"] +workspace_id = 
os.environ["DUMMY_WORKSPACE_ID"] created = w.metastores.create(name=f'sdk-{time.time_ns()}', storage_root="s3://%s/%s" % diff --git a/examples/metastores/create_metastores.py b/examples/metastores/create_metastores.py index bce183b4b..18d81fac6 100755 --- a/examples/metastores/create_metastores.py +++ b/examples/metastores/create_metastores.py @@ -1,7 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/metastores/current_metastores.py b/examples/metastores/current_metastores.py index 3749e9174..b9470bfb0 100755 --- a/examples/metastores/current_metastores.py +++ b/examples/metastores/current_metastores.py @@ -1,4 +1,6 @@ from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/metastores/enable_optimization_metastores.py b/examples/metastores/enable_optimization_metastores.py index 8d3d4cd08..a367a55ff 100755 --- a/examples/metastores/enable_optimization_metastores.py +++ b/examples/metastores/enable_optimization_metastores.py @@ -1,7 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/metastores/get_metastores.py b/examples/metastores/get_metastores.py index 
38ce05c0a..a54b0aaa1 100755 --- a/examples/metastores/get_metastores.py +++ b/examples/metastores/get_metastores.py @@ -1,7 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/metastores/list_metastores.py b/examples/metastores/list_metastores.py index c62d21863..3498fded5 100755 --- a/examples/metastores/list_metastores.py +++ b/examples/metastores/list_metastores.py @@ -1,4 +1,6 @@ from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/metastores/summary_metastores.py b/examples/metastores/summary_metastores.py index a89ed46ac..4d8f73148 100755 --- a/examples/metastores/summary_metastores.py +++ b/examples/metastores/summary_metastores.py @@ -1,4 +1,6 @@ from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/metastores/unassign_metastores.py b/examples/metastores/unassign_metastores.py index 146bd75d2..1b9acbca4 100755 --- a/examples/metastores/unassign_metastores.py +++ b/examples/metastores/unassign_metastores.py @@ -1,11 +1,10 @@ -import os -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, 
billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() -workspace_id = os.environ["TEST_WORKSPACE_ID"] +workspace_id = os.environ["DUMMY_WORKSPACE_ID"] created = w.metastores.create(name=f'sdk-{time.time_ns()}', storage_root="s3://%s/%s" % diff --git a/examples/metastores/update_metastores.py b/examples/metastores/update_metastores.py index 8ac70371c..1d59dcc76 100755 --- a/examples/metastores/update_metastores.py +++ b/examples/metastores/update_metastores.py @@ -1,7 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/create_comment_model_version_comments.py b/examples/model_registry/create_comment_model_version_comments.py index 8bb5decd4..c3ad94827 100755 --- a/examples/model_registry/create_comment_model_version_comments.py +++ b/examples/model_registry/create_comment_model_version_comments.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/create_model_model_version_comments.py b/examples/model_registry/create_model_model_version_comments.py index f09c8691f..437e177fd 100755 --- a/examples/model_registry/create_model_model_version_comments.py +++ b/examples/model_registry/create_model_model_version_comments.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import 
WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/create_model_model_versions.py b/examples/model_registry/create_model_model_versions.py index f09c8691f..437e177fd 100755 --- a/examples/model_registry/create_model_model_versions.py +++ b/examples/model_registry/create_model_model_versions.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/create_model_models.py b/examples/model_registry/create_model_models.py index eb1316e0f..b779f02e0 100755 --- a/examples/model_registry/create_model_models.py +++ b/examples/model_registry/create_model_models.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/create_model_version_model_version_comments.py b/examples/model_registry/create_model_version_model_version_comments.py index e7cf59c58..25cb05ff4 100755 --- a/examples/model_registry/create_model_version_model_version_comments.py +++ b/examples/model_registry/create_model_version_model_version_comments.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, 
serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/create_model_version_model_versions.py b/examples/model_registry/create_model_version_model_versions.py index 83ae3ee5d..d0e2e53df 100755 --- a/examples/model_registry/create_model_version_model_versions.py +++ b/examples/model_registry/create_model_version_model_versions.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/create_webhook_registry_webhooks.py b/examples/model_registry/create_webhook_registry_webhooks.py index c38600b55..7a035dc0e 100755 --- a/examples/model_registry/create_webhook_registry_webhooks.py +++ b/examples/model_registry/create_webhook_registry_webhooks.py @@ -1,7 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import ml +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/get_model_models.py b/examples/model_registry/get_model_models.py index 0393ed51b..eeac4d307 100755 --- a/examples/model_registry/get_model_models.py +++ b/examples/model_registry/get_model_models.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, 
compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/list_models_models.py b/examples/model_registry/list_models_models.py index 0ddfd7bfa..2b0f8b9c7 100755 --- a/examples/model_registry/list_models_models.py +++ b/examples/model_registry/list_models_models.py @@ -1,5 +1,6 @@ from databricks.sdk import WorkspaceClient -from databricks.sdk.service import ml +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/list_webhooks_registry_webhooks.py b/examples/model_registry/list_webhooks_registry_webhooks.py index cc8559fc7..456b54022 100755 --- a/examples/model_registry/list_webhooks_registry_webhooks.py +++ b/examples/model_registry/list_webhooks_registry_webhooks.py @@ -1,5 +1,6 @@ from databricks.sdk import WorkspaceClient -from databricks.sdk.service import ml +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/update_comment_model_version_comments.py b/examples/model_registry/update_comment_model_version_comments.py index 6e10540e4..c2ff8e657 100755 --- a/examples/model_registry/update_comment_model_version_comments.py +++ b/examples/model_registry/update_comment_model_version_comments.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, 
settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/update_model_models.py b/examples/model_registry/update_model_models.py index 5a0d8618c..f01c5f07b 100755 --- a/examples/model_registry/update_model_models.py +++ b/examples/model_registry/update_model_models.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/update_model_version_model_versions.py b/examples/model_registry/update_model_version_model_versions.py index 18050c180..44c30400c 100755 --- a/examples/model_registry/update_model_version_model_versions.py +++ b/examples/model_registry/update_model_version_model_versions.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/model_registry/update_webhook_registry_webhooks.py b/examples/model_registry/update_webhook_registry_webhooks.py index 638582ef7..fcf0c80cb 100755 --- a/examples/model_registry/update_webhook_registry_webhooks.py +++ b/examples/model_registry/update_webhook_registry_webhooks.py @@ -1,7 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import ml +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, 
provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/networks/create_networks.py b/examples/networks/create_networks.py index e0f1a984a..df9d254ee 100755 --- a/examples/networks/create_networks.py +++ b/examples/networks/create_networks.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import AccountClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/networks/get_networks.py b/examples/networks/get_networks.py index 4ee37c814..da2d6c9f2 100755 --- a/examples/networks/get_networks.py +++ b/examples/networks/get_networks.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import AccountClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/networks/list_networks.py b/examples/networks/list_networks.py index b8705727f..6c3331c6a 100755 --- a/examples/networks/list_networks.py +++ b/examples/networks/list_networks.py @@ -1,4 +1,6 @@ from databricks.sdk import AccountClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, 
provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/permissions/get_generic_permissions.py b/examples/permissions/get_generic_permissions.py index c103bbdde..2c756241d 100755 --- a/examples/permissions/get_generic_permissions.py +++ b/examples/permissions/get_generic_permissions.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/permissions/get_permission_levels_generic_permissions.py b/examples/permissions/get_permission_levels_generic_permissions.py index 4dcd5bd25..39f127c9a 100755 --- a/examples/permissions/get_permission_levels_generic_permissions.py +++ b/examples/permissions/get_permission_levels_generic_permissions.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/permissions/set_generic_permissions.py b/examples/permissions/set_generic_permissions.py index 6e42b8183..0ef296e28 100755 --- a/examples/permissions/set_generic_permissions.py +++ b/examples/permissions/set_generic_permissions.py @@ -1,7 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import iam +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, 
provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/pipelines/create_pipelines.py b/examples/pipelines/create_pipelines.py index 5a2b3933e..0e21110f2 100755 --- a/examples/pipelines/create_pipelines.py +++ b/examples/pipelines/create_pipelines.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import pipelines +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/pipelines/get_pipelines.py b/examples/pipelines/get_pipelines.py index 6222b4c86..512e2f39a 100755 --- a/examples/pipelines/get_pipelines.py +++ b/examples/pipelines/get_pipelines.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import pipelines +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/pipelines/list_pipeline_events_pipelines.py b/examples/pipelines/list_pipeline_events_pipelines.py index c4150187c..97b7fa5c3 100755 --- a/examples/pipelines/list_pipeline_events_pipelines.py +++ b/examples/pipelines/list_pipeline_events_pipelines.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import pipelines +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w 
= WorkspaceClient() diff --git a/examples/pipelines/list_pipelines_pipelines.py b/examples/pipelines/list_pipelines_pipelines.py index 70d8c8b52..155a9f4fa 100755 --- a/examples/pipelines/list_pipelines_pipelines.py +++ b/examples/pipelines/list_pipelines_pipelines.py @@ -1,5 +1,6 @@ from databricks.sdk import WorkspaceClient -from databricks.sdk.service import pipelines +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/pipelines/update_pipelines.py b/examples/pipelines/update_pipelines.py index 656d90071..d9fc1d464 100755 --- a/examples/pipelines/update_pipelines.py +++ b/examples/pipelines/update_pipelines.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import pipelines +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/policy_families/get_cluster_policy_families.py b/examples/policy_families/get_cluster_policy_families.py index 6927b1358..b637d8286 100755 --- a/examples/policy_families/get_cluster_policy_families.py +++ b/examples/policy_families/get_cluster_policy_families.py @@ -1,5 +1,6 @@ from databricks.sdk import WorkspaceClient -from databricks.sdk.service import compute +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/policy_families/list_cluster_policy_families.py 
b/examples/policy_families/list_cluster_policy_families.py index caf9ff71f..22089e3eb 100755 --- a/examples/policy_families/list_cluster_policy_families.py +++ b/examples/policy_families/list_cluster_policy_families.py @@ -1,5 +1,6 @@ from databricks.sdk import WorkspaceClient -from databricks.sdk.service import compute +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/private_access/create_private_access.py b/examples/private_access/create_private_access.py index 5cf99643c..8418ebad2 100755 --- a/examples/private_access/create_private_access.py +++ b/examples/private_access/create_private_access.py @@ -1,7 +1,6 @@ -import os -import time - from databricks.sdk import AccountClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/private_access/get_private_access.py b/examples/private_access/get_private_access.py index 2c4eed208..353351458 100755 --- a/examples/private_access/get_private_access.py +++ b/examples/private_access/get_private_access.py @@ -1,7 +1,6 @@ -import os -import time - from databricks.sdk import AccountClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/private_access/list_private_access.py b/examples/private_access/list_private_access.py index 834257fe3..9f27aa332 100755 --- a/examples/private_access/list_private_access.py 
+++ b/examples/private_access/list_private_access.py @@ -1,4 +1,6 @@ from databricks.sdk import AccountClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/private_access/replace_private_access.py b/examples/private_access/replace_private_access.py index 8cb6f2faf..e65913703 100755 --- a/examples/private_access/replace_private_access.py +++ b/examples/private_access/replace_private_access.py @@ -1,7 +1,6 @@ -import os -import time - from databricks.sdk import AccountClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/providers/create_providers.py b/examples/providers/create_providers.py index cee06e5d2..b170b2111 100755 --- a/examples/providers/create_providers.py +++ b/examples/providers/create_providers.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/providers/get_providers.py b/examples/providers/get_providers.py index ba86bc5ea..7c5891ae6 100755 --- a/examples/providers/get_providers.py +++ b/examples/providers/get_providers.py @@ -1,6 +1,6 @@ -import time - from databricks.sdk import WorkspaceClient +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, 
diff --git a/examples/providers/list_providers.py b/examples/providers/list_providers.py
index 92be410ac..db867f873 100755
--- a/examples/providers/list_providers.py
+++ b/examples/providers/list_providers.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import sharing
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/providers/list_shares_providers.py b/examples/providers/list_shares_providers.py
index b2e885938..8bfb61c61 100755
--- a/examples/providers/list_shares_providers.py
+++ b/examples/providers/list_shares_providers.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/providers/update_providers.py b/examples/providers/update_providers.py
index 66b68c35c..eae7fdfbd 100755
--- a/examples/providers/update_providers.py
+++ b/examples/providers/update_providers.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
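The providers examples boil down to plain enumeration; a minimal sketch:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Enumerate Delta Sharing providers visible to the current user.
for p in w.providers.list():
    print(p.name)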
diff --git a/examples/queries/create_alerts.py b/examples/queries/create_alerts.py
index 37d71ac60..a5b86d8bf 100755
--- a/examples/queries/create_alerts.py
+++ b/examples/queries/create_alerts.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/queries/create_queries.py b/examples/queries/create_queries.py
index c8d5ac93d..1eaf70eba 100755
--- a/examples/queries/create_queries.py
+++ b/examples/queries/create_queries.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/queries/get_queries.py b/examples/queries/get_queries.py
index d29b75982..96a0bcb9d 100755
--- a/examples/queries/get_queries.py
+++ b/examples/queries/get_queries.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
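A compact sketch of what the queries examples exercise, assuming the workspace has at least one data source (SQL warehouse) to attach the query to:

import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Pick any existing data source and create a throwaway query against it.
srcs = w.data_sources.list()
query = w.queries.create(name=f'sdk-{time.time_ns()}',
                         data_source_id=srcs[0].id,
                         description="created via SDK",
                         query="SHOW TABLES")
w.queries.delete(query_id=query.id)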
diff --git a/examples/queries/update_queries.py b/examples/queries/update_queries.py
index 85a9609ad..5e2b8edeb 100755
--- a/examples/queries/update_queries.py
+++ b/examples/queries/update_queries.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/query_history/list_sql_query_history.py b/examples/query_history/list_sql_query_history.py
index 672b46ed5..9f017b23b 100755
--- a/examples/query_history/list_sql_query_history.py
+++ b/examples/query_history/list_sql_query_history.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import sql
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/recipients/create_recipients.py b/examples/recipients/create_recipients.py
index 4c01e2f5e..f73043158 100755
--- a/examples/recipients/create_recipients.py
+++ b/examples/recipients/create_recipients.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/recipients/get_recipients.py b/examples/recipients/get_recipients.py
index 4e8998e41..e4a095fd1 100755
--- a/examples/recipients/get_recipients.py
+++ b/examples/recipients/get_recipients.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
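The query history example filters by a start-time range; a sketch with illustrative epoch-millisecond bounds:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# List warehouse query history within an (illustrative) time window.
history = w.query_history.list(filter_by=sql.QueryFilter(
    query_start_time_range=sql.TimeRange(start_time_ms=1690243200000,
                                         end_time_ms=1690329600000)))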
diff --git a/examples/recipients/list_recipients.py b/examples/recipients/list_recipients.py
index 2a6500882..bfebd066f 100755
--- a/examples/recipients/list_recipients.py
+++ b/examples/recipients/list_recipients.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import sharing
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/recipients/rotate_token_recipients.py b/examples/recipients/rotate_token_recipients.py
index 5abc2af9e..998a75afb 100755
--- a/examples/recipients/rotate_token_recipients.py
+++ b/examples/recipients/rotate_token_recipients.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/recipients/share_permissions_recipients.py b/examples/recipients/share_permissions_recipients.py
index 1f04c0321..125d1087d 100755
--- a/examples/recipients/share_permissions_recipients.py
+++ b/examples/recipients/share_permissions_recipients.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
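The recipients round trip these examples exercise, sketched:

import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Create a Delta Sharing recipient, rotate its token immediately, then drop it.
created = w.recipients.create(name=f'sdk-{time.time_ns()}')
w.recipients.rotate_token(name=created.name, existing_token_expire_in_seconds=0)
w.recipients.delete(name=created.name)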
diff --git a/examples/recipients/update_recipients.py b/examples/recipients/update_recipients.py
index 959266dde..a7ea029d9 100755
--- a/examples/recipients/update_recipients.py
+++ b/examples/recipients/update_recipients.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/repos/create_repos.py b/examples/repos/create_repos.py
index b3a100949..022dd7965 100755
--- a/examples/repos/create_repos.py
+++ b/examples/repos/create_repos.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/repos/get_repos.py b/examples/repos/get_repos.py
index fcb6c69c6..7123be626 100755
--- a/examples/repos/get_repos.py
+++ b/examples/repos/get_repos.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
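The repos examples reduce to a sketch like this (the Git URL is a placeholder; any reachable repository works):

import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Clone a Git repo into the calling user's /Repos folder.
me = w.current_user.me()
repo = w.repos.create(path=f'/Repos/{me.user_name}/sdk-{time.time_ns()}',
                      provider="gitHub",
                      url="https://github.com/databricks/databricks-sdk-py.git")
w.repos.delete(repo_id=repo.id)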
diff --git a/examples/repos/list_repos.py b/examples/repos/list_repos.py
index 62354a560..fab4f59c5 100755
--- a/examples/repos/list_repos.py
+++ b/examples/repos/list_repos.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import workspace
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/repos/update_repos.py b/examples/repos/update_repos.py
index afb94375a..16ee494f2 100755
--- a/examples/repos/update_repos.py
+++ b/examples/repos/update_repos.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/schemas/create_schemas.py b/examples/schemas/create_schemas.py
index 7bc3bb19f..5ff9b4849 100755
--- a/examples/schemas/create_schemas.py
+++ b/examples/schemas/create_schemas.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/schemas/create_shares.py b/examples/schemas/create_shares.py
index 88dfa1bcc..2f4ca7e7d 100755
--- a/examples/schemas/create_shares.py
+++ b/examples/schemas/create_shares.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/schemas/create_tables.py b/examples/schemas/create_tables.py
index 88dfa1bcc..2f4ca7e7d 100755
--- a/examples/schemas/create_tables.py
+++ b/examples/schemas/create_tables.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/schemas/create_volumes.py b/examples/schemas/create_volumes.py
index 88dfa1bcc..2f4ca7e7d 100755
--- a/examples/schemas/create_volumes.py
+++ b/examples/schemas/create_volumes.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/schemas/get_schemas.py b/examples/schemas/get_schemas.py
index 2b9c0a582..2240e6b32 100755
--- a/examples/schemas/get_schemas.py
+++ b/examples/schemas/get_schemas.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
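A sketch of the catalog/schema lifecycle these examples share:

import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# A schema always lives inside a catalog, so create one of each.
created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}',
                                  catalog_name=created_catalog.name)
w.schemas.delete(full_name=created_schema.full_name)
w.catalogs.delete(name=created_catalog.name)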
diff --git a/examples/schemas/list_schemas.py b/examples/schemas/list_schemas.py
index 7939a4411..d21856426 100755
--- a/examples/schemas/list_schemas.py
+++ b/examples/schemas/list_schemas.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/schemas/update_schemas.py b/examples/schemas/update_schemas.py
index b95a7b1ce..e68809d4f 100755
--- a/examples/schemas/update_schemas.py
+++ b/examples/schemas/update_schemas.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/secrets/create_scope_secrets.py b/examples/secrets/create_scope_secrets.py
index f881d0111..20cd8cf65 100755
--- a/examples/secrets/create_scope_secrets.py
+++ b/examples/secrets/create_scope_secrets.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/secrets/list_acls_secrets.py b/examples/secrets/list_acls_secrets.py
index 97873f187..1d1fde7ec 100755
--- a/examples/secrets/list_acls_secrets.py
+++ b/examples/secrets/list_acls_secrets.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/secrets/list_scopes_secrets.py b/examples/secrets/list_scopes_secrets.py
index 2265ba610..b6e820caf 100755
--- a/examples/secrets/list_scopes_secrets.py
+++ b/examples/secrets/list_scopes_secrets.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/secrets/list_secrets_secrets.py b/examples/secrets/list_secrets_secrets.py
index 29acb4d1c..e515b4dfa 100755
--- a/examples/secrets/list_secrets_secrets.py
+++ b/examples/secrets/list_secrets_secrets.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/secrets/put_acl_secrets.py b/examples/secrets/put_acl_secrets.py
index ffc83fa57..ad9c69fd2 100755
--- a/examples/secrets/put_acl_secrets.py
+++ b/examples/secrets/put_acl_secrets.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import workspace
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
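The secrets examples are the one group where the newly added base64 import is plausibly useful; a sketch of scope, secret, and ACL handling:

import base64
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import workspace

w = WorkspaceClient()

scope_name = f'sdk-{time.time_ns()}'
w.secrets.create_scope(scope=scope_name)
# string_value stores plain text; the bytes_value parameter takes base64-encoded bytes instead.
w.secrets.put_secret(scope=scope_name, key="my-key", string_value="my-value")
w.secrets.put_acl(scope=scope_name, principal="users", permission=workspace.AclPermission.READ)
w.secrets.delete_scope(scope=scope_name)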
diff --git a/examples/secrets/put_secret_secrets.py b/examples/secrets/put_secret_secrets.py
index 233e31740..e7f3fbd34 100755
--- a/examples/secrets/put_secret_secrets.py
+++ b/examples/secrets/put_secret_secrets.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/service_principals/create_account_service_principal.py b/examples/service_principals/create_account_service_principal.py
index 55fe3ae47..8dd4a3a79 100755
--- a/examples/service_principals/create_account_service_principal.py
+++ b/examples/service_principals/create_account_service_principal.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/service_principals/create_create_obo_token_on_aws.py b/examples/service_principals/create_create_obo_token_on_aws.py
index 9a8ba9b5e..b32bca4ef 100755
--- a/examples/service_principals/create_create_obo_token_on_aws.py
+++ b/examples/service_principals/create_create_obo_token_on_aws.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/service_principals/create_service_principals_on_aws.py b/examples/service_principals/create_service_principals_on_aws.py
index 8d0923cfd..57b35d97f 100755
--- a/examples/service_principals/create_service_principals_on_aws.py
+++ b/examples/service_principals/create_service_principals_on_aws.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/service_principals/create_workspace_assignment_on_aws.py b/examples/service_principals/create_workspace_assignment_on_aws.py
index 86ffe951d..420213eb0 100755
--- a/examples/service_principals/create_workspace_assignment_on_aws.py
+++ b/examples/service_principals/create_workspace_assignment_on_aws.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/service_principals/get_account_service_principal.py b/examples/service_principals/get_account_service_principal.py
index 313b6ac8c..cc1799d81 100755
--- a/examples/service_principals/get_account_service_principal.py
+++ b/examples/service_principals/get_account_service_principal.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/service_principals/get_service_principals_on_aws.py b/examples/service_principals/get_service_principals_on_aws.py
index 28cd35a21..e6073680a 100755
--- a/examples/service_principals/get_service_principals_on_aws.py
+++ b/examples/service_principals/get_service_principals_on_aws.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/service_principals/list_account_service_principal.py b/examples/service_principals/list_account_service_principal.py
index 15c9b6ed7..f85675cbe 100755
--- a/examples/service_principals/list_account_service_principal.py
+++ b/examples/service_principals/list_account_service_principal.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/service_principals/list_service_principals_on_aws.py b/examples/service_principals/list_service_principals_on_aws.py
index 72d5bcbe7..a09f795b5 100755
--- a/examples/service_principals/list_service_principals_on_aws.py
+++ b/examples/service_principals/list_service_principals_on_aws.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
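A sketch of the workspace-level service principal lifecycle behind these examples (display names are throwaway):

import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()

# Create, patch (deactivate), and remove a service principal.
spn = w.service_principals.create(display_name=f'sdk-{time.time_ns()}')
w.service_principals.patch(id=spn.id,
                           operations=[iam.Patch(op=iam.PatchOp.REPLACE,
                                                 path="active",
                                                 value="false")],
                           schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])
w.service_principals.delete(id=spn.id)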
diff --git a/examples/service_principals/patch_account_service_principal.py b/examples/service_principals/patch_account_service_principal.py
index cb4a7c5fe..3b1918442 100755
--- a/examples/service_principals/patch_account_service_principal.py
+++ b/examples/service_principals/patch_account_service_principal.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/service_principals/patch_service_principals_on_aws.py b/examples/service_principals/patch_service_principals_on_aws.py
index 7f11cd222..6f8b3af56 100755
--- a/examples/service_principals/patch_service_principals_on_aws.py
+++ b/examples/service_principals/patch_service_principals_on_aws.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/service_principals/update_account_service_principal.py b/examples/service_principals/update_account_service_principal.py
index 116f31c4c..ae9721494 100755
--- a/examples/service_principals/update_account_service_principal.py
+++ b/examples/service_principals/update_account_service_principal.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/service_principals/update_service_principals_on_aws.py b/examples/service_principals/update_service_principals_on_aws.py
index 9c9c0a953..aec813af4 100755
--- a/examples/service_principals/update_service_principals_on_aws.py
+++ b/examples/service_principals/update_service_principals_on_aws.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/shares/create_shares.py b/examples/shares/create_shares.py
index ae491e561..342f2142e 100755
--- a/examples/shares/create_shares.py
+++ b/examples/shares/create_shares.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/shares/get_shares.py b/examples/shares/get_shares.py
index a010bc753..61901494c 100755
--- a/examples/shares/get_shares.py
+++ b/examples/shares/get_shares.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
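The shares lifecycle in these examples, sketched:

import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# A share starts empty; objects are attached later via shares.update.
created_share = w.shares.create(name=f'sdk-{time.time_ns()}')
loaded = w.shares.get(name=created_share.name)
w.shares.delete(name=created_share.name)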
diff --git a/examples/shares/list_shares.py b/examples/shares/list_shares.py
index b8668e7f7..acd226754 100755
--- a/examples/shares/list_shares.py
+++ b/examples/shares/list_shares.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/shares/update_shares.py b/examples/shares/update_shares.py
index ae01ddc1f..19645e9d8 100755
--- a/examples/shares/update_shares.py
+++ b/examples/shares/update_shares.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import sharing
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/statement_execution/execute_shares.py b/examples/statement_execution/execute_shares.py
index 96e8b8d2e..83ce7ed83 100755
--- a/examples/statement_execution/execute_shares.py
+++ b/examples/statement_execution/execute_shares.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
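The statement execution examples need only a warehouse ID; the environment variable name here is illustrative, not part of the examples:

import os

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Run a one-off SQL statement against an existing SQL warehouse.
resp = w.statement_execution.execute_statement(
    warehouse_id=os.environ["DATABRICKS_WAREHOUSE_ID"],
    statement="SELECT 1")
print(resp.status.state)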
diff --git a/examples/statement_execution/execute_tables.py b/examples/statement_execution/execute_tables.py
index 96e8b8d2e..83ce7ed83 100755
--- a/examples/statement_execution/execute_tables.py
+++ b/examples/statement_execution/execute_tables.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 w = WorkspaceClient()
diff --git a/examples/storage/create_log_delivery.py b/examples/storage/create_log_delivery.py
index 466b944ea..25b00fd4b 100755
--- a/examples/storage/create_log_delivery.py
+++ b/examples/storage/create_log_delivery.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/storage/create_storage.py b/examples/storage/create_storage.py
index c1e010868..fd9840915 100755
--- a/examples/storage/create_storage.py
+++ b/examples/storage/create_storage.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/storage/create_workspaces.py b/examples/storage/create_workspaces.py
index e8c3bb4e5..2bafe4379 100755
--- a/examples/storage/create_workspaces.py
+++ b/examples/storage/create_workspaces.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/storage/get_storage.py b/examples/storage/get_storage.py
index 47c521b76..32576fa65 100755
--- a/examples/storage/get_storage.py
+++ b/examples/storage/get_storage.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
diff --git a/examples/storage/list_storage.py b/examples/storage/list_storage.py
index b1980f064..a91c9496a 100755
--- a/examples/storage/list_storage.py
+++ b/examples/storage/list_storage.py
@@ -1,4 +1,6 @@
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os
 
 a = AccountClient()
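A sketch of the account-level storage configuration call these examples build on (the bucket name comes from an illustrative environment variable):

import os
import time

from databricks.sdk import AccountClient
from databricks.sdk.service import provisioning

a = AccountClient()

# Register an S3 bucket as a storage configuration for workspace creation.
storage = a.storage.create(
    storage_configuration_name=f'sdk-{time.time_ns()}',
    root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]))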
catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os a = AccountClient() diff --git a/examples/storage_credentials/create_external_locations_on_aws.py b/examples/storage_credentials/create_external_locations_on_aws.py index 7d2ba29e1..86e148cff 100755 --- a/examples/storage_credentials/create_external_locations_on_aws.py +++ b/examples/storage_credentials/create_external_locations_on_aws.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import catalog +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/storage_credentials/create_storage_credentials_on_aws.py b/examples/storage_credentials/create_storage_credentials_on_aws.py index b20d7dc9d..1590b965a 100755 --- a/examples/storage_credentials/create_storage_credentials_on_aws.py +++ b/examples/storage_credentials/create_storage_credentials_on_aws.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import catalog +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, workspace, oauth2, iam, serving, settings, sharing, sql, provisioning, catalog, catalog, catalog, catalog, settings, settings, iam, catalog, provisioning, sql, workspace, iam, catalog, settings, provisioning +import time, base64, os w = WorkspaceClient() diff --git a/examples/storage_credentials/create_volumes.py b/examples/storage_credentials/create_volumes.py index 4c9383bbf..ed63ce2aa 100755 --- a/examples/storage_credentials/create_volumes.py +++ b/examples/storage_credentials/create_volumes.py @@ -1,8 +1,6 @@ -import os -import time - from databricks.sdk import WorkspaceClient -from databricks.sdk.service import catalog +from databricks.sdk.service import _internal, iam, iam, sql, serving, catalog, billing, billing, catalog, sharing, compute, compute, compute, catalog, provisioning, settings, iam, oauth2, sql, sql, sql, files, sql, provisioning, ml, catalog, files, catalog, workspace, compute, catalog, iam, compute, compute, settings, jobs, compute, billing, catalog, catalog, ml, catalog, settings, settings, provisioning, oauth2, iam, pipelines, compute, provisioning, sharing, oauth2, sql, sql, sql, sharing, sharing, catalog, workspace, catalog, 
diff --git a/examples/storage_credentials/get_storage_credentials_on_aws.py b/examples/storage_credentials/get_storage_credentials_on_aws.py
index 58f3ff4a8..01aa71c29 100755
--- a/examples/storage_credentials/get_storage_credentials_on_aws.py
+++ b/examples/storage_credentials/get_storage_credentials_on_aws.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/storage_credentials/list_storage_credentials_on_aws.py b/examples/storage_credentials/list_storage_credentials_on_aws.py
index fad05f4af..1e39f4249 100755
--- a/examples/storage_credentials/list_storage_credentials_on_aws.py
+++ b/examples/storage_credentials/list_storage_credentials_on_aws.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/storage_credentials/update_storage_credentials_on_aws.py b/examples/storage_credentials/update_storage_credentials_on_aws.py
index 90ebf2531..316898785 100755
--- a/examples/storage_credentials/update_storage_credentials_on_aws.py
+++ b/examples/storage_credentials/update_storage_credentials_on_aws.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/tables/get_tables.py b/examples/tables/get_tables.py
index 7c81faf6d..b6db70a32 100755
--- a/examples/tables/get_tables.py
+++ b/examples/tables/get_tables.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/tables/list_summaries_tables.py b/examples/tables/list_summaries_tables.py
index d3e64fd0a..baee5e212 100755
--- a/examples/tables/list_summaries_tables.py
+++ b/examples/tables/list_summaries_tables.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/tables/list_tables.py b/examples/tables/list_tables.py
index 6c14faa12..5f30b790c 100755
--- a/examples/tables/list_tables.py
+++ b/examples/tables/list_tables.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/token_management/create_obo_token_create_obo_token_on_aws.py b/examples/token_management/create_obo_token_create_obo_token_on_aws.py
index 881827b8f..dafb9d92c 100755
--- a/examples/token_management/create_obo_token_create_obo_token_on_aws.py
+++ b/examples/token_management/create_obo_token_create_obo_token_on_aws.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/token_management/get_create_obo_token_on_aws.py b/examples/token_management/get_create_obo_token_on_aws.py
index d47d60a30..25098abaa 100755
--- a/examples/token_management/get_create_obo_token_on_aws.py
+++ b/examples/token_management/get_create_obo_token_on_aws.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/token_management/list_create_obo_token_on_aws.py b/examples/token_management/list_create_obo_token_on_aws.py
index 9634fc08d..32c2b2562 100755
--- a/examples/token_management/list_create_obo_token_on_aws.py
+++ b/examples/token_management/list_create_obo_token_on_aws.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import settings
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/tokens/create_tokens.py b/examples/tokens/create_tokens.py
index e7545d2ed..7a3670012 100755
--- a/examples/tokens/create_tokens.py
+++ b/examples/tokens/create_tokens.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/tokens/get_tokens.py b/examples/tokens/get_tokens.py
index fade24f21..a00aa00ce 100755
--- a/examples/tokens/get_tokens.py
+++ b/examples/tokens/get_tokens.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/tokens/list_tokens.py b/examples/tokens/list_tokens.py
index 9983610c8..7675075b9 100755
--- a/examples/tokens/list_tokens.py
+++ b/examples/tokens/list_tokens.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/users/create_account_users.py b/examples/users/create_account_users.py
index 064b6f0c8..8b59d6280 100755
--- a/examples/users/create_account_users.py
+++ b/examples/users/create_account_users.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

diff --git a/examples/users/create_clusters_api_integration.py b/examples/users/create_clusters_api_integration.py
index 020c18744..1887643df 100755
--- a/examples/users/create_clusters_api_integration.py
+++ b/examples/users/create_clusters_api_integration.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/users/create_workspace_users.py b/examples/users/create_workspace_users.py
index 4a348cf32..dfbc0a394 100755
--- a/examples/users/create_workspace_users.py
+++ b/examples/users/create_workspace_users.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/users/delete_account_users.py b/examples/users/delete_account_users.py
index 16eb8d757..656c20b3e 100755
--- a/examples/users/delete_account_users.py
+++ b/examples/users/delete_account_users.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

diff --git a/examples/users/delete_clusters_api_integration.py b/examples/users/delete_clusters_api_integration.py
index fecdb0364..fd275dcea 100755
--- a/examples/users/delete_clusters_api_integration.py
+++ b/examples/users/delete_clusters_api_integration.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/users/delete_workspace_users.py b/examples/users/delete_workspace_users.py
index 81004b1a1..29a3ab40e 100755
--- a/examples/users/delete_workspace_users.py
+++ b/examples/users/delete_workspace_users.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/users/get_account_users.py b/examples/users/get_account_users.py
index 58f2b7b4b..23d884090 100755
--- a/examples/users/get_account_users.py
+++ b/examples/users/get_account_users.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

diff --git a/examples/users/get_workspace_users.py b/examples/users/get_workspace_users.py
index 55919ff73..6c0b76a41 100755
--- a/examples/users/get_workspace_users.py
+++ b/examples/users/get_workspace_users.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/users/list_workspace_users.py b/examples/users/list_workspace_users.py
index fbc477cbd..ee20d099c 100755
--- a/examples/users/list_workspace_users.py
+++ b/examples/users/list_workspace_users.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/users/patch_account_users.py b/examples/users/patch_account_users.py
index b68c0a9e5..85be58d37 100755
--- a/examples/users/patch_account_users.py
+++ b/examples/users/patch_account_users.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()
diff --git a/examples/users/patch_workspace_users.py b/examples/users/patch_workspace_users.py
index b5618b2f5..621df61d7 100755
--- a/examples/users/patch_workspace_users.py
+++ b/examples/users/patch_workspace_users.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/users/update_workspace_users.py b/examples/users/update_workspace_users.py
index 843d3cbc1..6e6e34cdb 100755
--- a/examples/users/update_workspace_users.py
+++ b/examples/users/update_workspace_users.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/volumes/create_volumes.py b/examples/volumes/create_volumes.py
index 660451bf4..3b55a8f99 100755
--- a/examples/volumes/create_volumes.py
+++ b/examples/volumes/create_volumes.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/volumes/list_volumes.py b/examples/volumes/list_volumes.py
index 32e39912b..2c3bf2eb7 100755
--- a/examples/volumes/list_volumes.py
+++ b/examples/volumes/list_volumes.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/volumes/read_volumes.py b/examples/volumes/read_volumes.py
index 3051ddd6f..c3fdc5833 100755
--- a/examples/volumes/read_volumes.py
+++ b/examples/volumes/read_volumes.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/volumes/update_volumes.py b/examples/volumes/update_volumes.py
index 6becccb10..742be37b4 100755
--- a/examples/volumes/update_volumes.py
+++ b/examples/volumes/update_volumes.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/vpc_endpoints/create_vpc_endpoints.py b/examples/vpc_endpoints/create_vpc_endpoints.py
index 750331890..af8c314bf 100755
--- a/examples/vpc_endpoints/create_vpc_endpoints.py
+++ b/examples/vpc_endpoints/create_vpc_endpoints.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

diff --git a/examples/vpc_endpoints/get_vpc_endpoints.py b/examples/vpc_endpoints/get_vpc_endpoints.py
index d49d32683..ef7043178 100755
--- a/examples/vpc_endpoints/get_vpc_endpoints.py
+++ b/examples/vpc_endpoints/get_vpc_endpoints.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

diff --git a/examples/vpc_endpoints/list_vpc_endpoints.py b/examples/vpc_endpoints/list_vpc_endpoints.py
index fb741f2a1..d9076bb38 100755
--- a/examples/vpc_endpoints/list_vpc_endpoints.py
+++ b/examples/vpc_endpoints/list_vpc_endpoints.py
@@ -1,4 +1,6 @@
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

diff --git a/examples/warehouses/create_sql_warehouses.py b/examples/warehouses/create_sql_warehouses.py
index 15e8f474c..812e90226 100755
--- a/examples/warehouses/create_sql_warehouses.py
+++ b/examples/warehouses/create_sql_warehouses.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/warehouses/edit_sql_warehouses.py b/examples/warehouses/edit_sql_warehouses.py
index 0e3c8e8f0..e5f7b0b87 100755
--- a/examples/warehouses/edit_sql_warehouses.py
+++ b/examples/warehouses/edit_sql_warehouses.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/warehouses/get_sql_warehouses.py b/examples/warehouses/get_sql_warehouses.py
index 7b59844ca..85e43f8e3 100755
--- a/examples/warehouses/get_sql_warehouses.py
+++ b/examples/warehouses/get_sql_warehouses.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/warehouses/list_sql_warehouses.py b/examples/warehouses/list_sql_warehouses.py
index bd74313e6..6138078fa 100755
--- a/examples/warehouses/list_sql_warehouses.py
+++ b/examples/warehouses/list_sql_warehouses.py
@@ -1,5 +1,6 @@
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import sql
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()
diff --git a/examples/workspace/export_workspace_integration.py b/examples/workspace/export_workspace_integration.py
index efd497889..e1d3f37c9 100755
--- a/examples/workspace/export_workspace_integration.py
+++ b/examples/workspace/export_workspace_integration.py
@@ -1,7 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import workspace
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace/get_status_generic_permissions.py b/examples/workspace/get_status_generic_permissions.py
index 44723bcbe..b73155993 100755
--- a/examples/workspace/get_status_generic_permissions.py
+++ b/examples/workspace/get_status_generic_permissions.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace/get_status_workspace_integration.py b/examples/workspace/get_status_workspace_integration.py
index 3ccd249c7..cc45fbf43 100755
--- a/examples/workspace/get_status_workspace_integration.py
+++ b/examples/workspace/get_status_workspace_integration.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace/import_generic_permissions.py b/examples/workspace/import_generic_permissions.py
index 885f0f3b0..da4c44719 100755
--- a/examples/workspace/import_generic_permissions.py
+++ b/examples/workspace/import_generic_permissions.py
@@ -1,8 +1,6 @@
-import base64
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import workspace
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace/import_jobs_api_full_integration.py b/examples/workspace/import_jobs_api_full_integration.py
index c22159e23..01c2b1c6c 100755
--- a/examples/workspace/import_jobs_api_full_integration.py
+++ b/examples/workspace/import_jobs_api_full_integration.py
@@ -1,8 +1,6 @@
-import base64
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import workspace
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace/import_pipelines.py b/examples/workspace/import_pipelines.py
index e0e989b4a..b90bd5df4 100755
--- a/examples/workspace/import_pipelines.py
+++ b/examples/workspace/import_pipelines.py
@@ -1,8 +1,6 @@
-import base64
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import workspace
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace/import_workspace_integration.py b/examples/workspace/import_workspace_integration.py
index 81c7c41a8..d924c2cbc 100755
--- a/examples/workspace/import_workspace_integration.py
+++ b/examples/workspace/import_workspace_integration.py
@@ -1,8 +1,6 @@
-import base64
-import time
-
 from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import workspace
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace/list_workspace_integration.py b/examples/workspace/list_workspace_integration.py
index 1e664310f..8a6a2d2d8 100755
--- a/examples/workspace/list_workspace_integration.py
+++ b/examples/workspace/list_workspace_integration.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace_assignment/list_workspace_assignment_on_aws.py b/examples/workspace_assignment/list_workspace_assignment_on_aws.py
index 15a7aa954..eb81e7945 100755
--- a/examples/workspace_assignment/list_workspace_assignment_on_aws.py
+++ b/examples/workspace_assignment/list_workspace_assignment_on_aws.py
@@ -1,9 +1,9 @@
-import os
-
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

-workspace_id = os.environ["TEST_WORKSPACE_ID"]
+workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

 all = a.workspace_assignment.list(workspace_id=workspace_id)
diff --git a/examples/workspace_assignment/update_workspace_assignment_on_aws.py b/examples/workspace_assignment/update_workspace_assignment_on_aws.py
index c58164b48..ad2419fb3 100755
--- a/examples/workspace_assignment/update_workspace_assignment_on_aws.py
+++ b/examples/workspace_assignment/update_workspace_assignment_on_aws.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import iam
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()
@@ -10,7 +8,7 @@ spn_id = spn.id

-workspace_id = os.environ["TEST_WORKSPACE_ID"]
+workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

 a.workspace_assignment.update(workspace_id=workspace_id,
                               principal_id=spn_id,

diff --git a/examples/workspace_bindings/get_catalog_workspace_bindings.py b/examples/workspace_bindings/get_catalog_workspace_bindings.py
index 651ab75a7..074c02592 100755
--- a/examples/workspace_bindings/get_catalog_workspace_bindings.py
+++ b/examples/workspace_bindings/get_catalog_workspace_bindings.py
@@ -1,6 +1,6 @@
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace_bindings/update_catalog_workspace_bindings.py b/examples/workspace_bindings/update_catalog_workspace_bindings.py
index 2737cab31..d98757dc0 100755
--- a/examples/workspace_bindings/update_catalog_workspace_bindings.py
+++ b/examples/workspace_bindings/update_catalog_workspace_bindings.py
@@ -1,7 +1,6 @@
-import os
-import time
-
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspace_conf/get_status_repos.py b/examples/workspace_conf/get_status_repos.py
index 88a1c002b..fb07c778d 100755
--- a/examples/workspace_conf/get_status_repos.py
+++ b/examples/workspace_conf/get_status_repos.py
@@ -1,4 +1,6 @@
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 w = WorkspaceClient()

diff --git a/examples/workspaces/create_workspaces.py b/examples/workspaces/create_workspaces.py
index bf820677c..25bcffc90 100755
--- a/examples/workspaces/create_workspaces.py
+++ b/examples/workspaces/create_workspaces.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

diff --git a/examples/workspaces/get_workspaces.py b/examples/workspaces/get_workspaces.py
index 809a1f374..86821a934 100755
--- a/examples/workspaces/get_workspaces.py
+++ b/examples/workspaces/get_workspaces.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

diff --git a/examples/workspaces/list_workspaces.py b/examples/workspaces/list_workspaces.py
index af482e805..69a6af4b1 100755
--- a/examples/workspaces/list_workspaces.py
+++ b/examples/workspaces/list_workspaces.py
@@ -1,4 +1,6 @@
 from databricks.sdk import AccountClient
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()

diff --git a/examples/workspaces/update_workspaces.py b/examples/workspaces/update_workspaces.py
index f766e1b5a..00c3062db 100755
--- a/examples/workspaces/update_workspaces.py
+++ b/examples/workspaces/update_workspaces.py
@@ -1,8 +1,6 @@
-import os
-import time
-
 from databricks.sdk import AccountClient
-from databricks.sdk.service import provisioning
+from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
+import time, base64, os

 a = AccountClient()
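For reference, a minimal sketch of how one regenerated example reads after these hunks apply, using examples/workspaces/list_workspaces.py; only its import header appears in the diff above, so the listing call at the bottom is an assumption inferred from the example's name, not part of the patch:

# examples/workspaces/list_workspaces.py (sketch; imports as added by this change, body assumed)
from databricks.sdk import AccountClient
from databricks.sdk.service import _internal, billing, catalog, compute, files, iam, jobs, ml, oauth2, pipelines, provisioning, serving, settings, sharing, sql, workspace
import time, base64, os  # added by the generator even where a given example uses none of them

a = AccountClient()

# Assumed body: enumerate all workspaces in the account via the provisioning Workspaces API.
all = a.workspaces.list()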