diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 4343d612b..0d79c490d 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -22f09783eb8a84d52026f856be3b2068f9498db3 \ No newline at end of file +63caa3cb0c05045e81d3dcf2451fa990d8670f36 \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 7449056ba..3e415cc35 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,130 @@ # Version changelog +## 0.15.0 + +Bugfixes: + +* Fixed accidental rename ([#471](https://github.com/databricks/databricks-sdk-py/pull/471)). +* Fixed parsing of ISO date strings ([#473](https://github.com/databricks/databricks-sdk-py/pull/473)). + + +Other changes: + +* Updated GCP OAuth Readme ([#464](https://github.com/databricks/databricks-sdk-py/pull/464)). +* Reference Documentation Refactoring ([#467](https://github.com/databricks/databricks-sdk-py/pull/467)). +* Installed local library when generating docs ([#469](https://github.com/databricks/databricks-sdk-py/pull/469)). +* Fixed readme links in pypi ([#472](https://github.com/databricks/databricks-sdk-py/pull/472)). +* Updated a note for installing Python SDK on Databricks Runtime 13.1+ ([#474](https://github.com/databricks/databricks-sdk-py/pull/474)). +* Updated GCP auth readme ([#470](https://github.com/databricks/databricks-sdk-py/pull/470)). + + +API Changes: + + * Changed `update()` method for [w.connections](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/connections.html) workspace-level service with new required argument order. + * Added `cloudflare_api_token` field for `databricks.sdk.service.catalog.CreateStorageCredential`. + * Added `cloudflare_api_token` field for `databricks.sdk.service.catalog.StorageCredentialInfo`. + * Changed `name` field for `databricks.sdk.service.catalog.UpdateCatalog` to be required. + * Added `new_name` field for `databricks.sdk.service.catalog.UpdateCatalog`. + * Changed `name` field for `databricks.sdk.service.catalog.UpdateConnection` to no longer be required. + * Added `new_name` field for `databricks.sdk.service.catalog.UpdateConnection`. + * Changed `name` field for `databricks.sdk.service.catalog.UpdateExternalLocation` to be required. + * Added `new_name` field for `databricks.sdk.service.catalog.UpdateExternalLocation`. + * Added `new_name` field for `databricks.sdk.service.catalog.UpdateMetastore`. + * Added `new_name` field for `databricks.sdk.service.catalog.UpdateRegisteredModelRequest`. + * Added `new_name` field for `databricks.sdk.service.catalog.UpdateSchema`. + * Changed `name` field for `databricks.sdk.service.catalog.UpdateStorageCredential` to be required. + * Added `cloudflare_api_token` field for `databricks.sdk.service.catalog.UpdateStorageCredential`. + * Added `new_name` field for `databricks.sdk.service.catalog.UpdateStorageCredential`. + * Added `new_name` field for `databricks.sdk.service.catalog.UpdateVolumeRequestContent`. + * Added `cloudflare_api_token` field for `databricks.sdk.service.catalog.ValidateStorageCredential`. + * Added `databricks.sdk.service.catalog.CloudflareApiToken` dataclass. + * Removed `continuous` field for `databricks.sdk.service.jobs.BaseRun`. + * Removed `continuous` field for `databricks.sdk.service.jobs.Run`. + * Changed `job_parameters` field for `databricks.sdk.service.jobs.RunJobTask` to `databricks.sdk.service.jobs.ParamPairs` dataclass. + * Added `run_if` field for `databricks.sdk.service.jobs.SubmitTask`. + * Added `run_job_task` field for `databricks.sdk.service.jobs.SubmitTask`. 
+ * Changed `update_config()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service with new required argument order. + * Added `put()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service. + * Added `rate_limits` field for `databricks.sdk.service.serving.CreateServingEndpoint`. + * Changed `served_models` field for `databricks.sdk.service.serving.EndpointCoreConfigInput` to no longer be required. + * Added `auto_capture_config` field for `databricks.sdk.service.serving.EndpointCoreConfigInput`. + * Added `served_entities` field for `databricks.sdk.service.serving.EndpointCoreConfigInput`. + * Added `auto_capture_config` field for `databricks.sdk.service.serving.EndpointCoreConfigOutput`. + * Added `served_entities` field for `databricks.sdk.service.serving.EndpointCoreConfigOutput`. + * Added `served_entities` field for `databricks.sdk.service.serving.EndpointCoreConfigSummary`. + * Added `served_entities` field for `databricks.sdk.service.serving.EndpointPendingConfig`. + * Added `extra_params` field for `databricks.sdk.service.serving.QueryEndpointInput`. + * Added `input` field for `databricks.sdk.service.serving.QueryEndpointInput`. + * Added `max_tokens` field for `databricks.sdk.service.serving.QueryEndpointInput`. + * Added `messages` field for `databricks.sdk.service.serving.QueryEndpointInput`. + * Added `n` field for `databricks.sdk.service.serving.QueryEndpointInput`. + * Added `prompt` field for `databricks.sdk.service.serving.QueryEndpointInput`. + * Added `stop` field for `databricks.sdk.service.serving.QueryEndpointInput`. + * Added `stream` field for `databricks.sdk.service.serving.QueryEndpointInput`. + * Added `temperature` field for `databricks.sdk.service.serving.QueryEndpointInput`. + * Changed `predictions` field for `databricks.sdk.service.serving.QueryEndpointResponse` to no longer be required. + * Added `choices` field for `databricks.sdk.service.serving.QueryEndpointResponse`. + * Added `created` field for `databricks.sdk.service.serving.QueryEndpointResponse`. + * Added `data` field for `databricks.sdk.service.serving.QueryEndpointResponse`. + * Added `id` field for `databricks.sdk.service.serving.QueryEndpointResponse`. + * Added `model` field for `databricks.sdk.service.serving.QueryEndpointResponse`. + * Added `object` field for `databricks.sdk.service.serving.QueryEndpointResponse`. + * Added `usage` field for `databricks.sdk.service.serving.QueryEndpointResponse`. + * Changed `workload_size` field for `databricks.sdk.service.serving.ServedModelInput` to `databricks.sdk.service.serving.ServedModelInputWorkloadSize` dataclass. + * Changed `workload_type` field for `databricks.sdk.service.serving.ServedModelInput` to `databricks.sdk.service.serving.ServedModelInputWorkloadType` dataclass. + * Added `task` field for `databricks.sdk.service.serving.ServingEndpoint`. + * Added `task` field for `databricks.sdk.service.serving.ServingEndpointDetailed`. + * Added `databricks.sdk.service.serving.Ai21LabsConfig` dataclass. + * Added `databricks.sdk.service.serving.AnthropicConfig` dataclass. + * Added `databricks.sdk.service.serving.AutoCaptureConfigInput` dataclass. + * Added `databricks.sdk.service.serving.AutoCaptureConfigOutput` dataclass. + * Added `databricks.sdk.service.serving.AutoCaptureState` dataclass. + * Added `databricks.sdk.service.serving.AwsBedrockConfig` dataclass. 
+ * Added `databricks.sdk.service.serving.AwsBedrockConfigBedrockProvider` dataclass. + * Added `databricks.sdk.service.serving.ChatMessage` dataclass. + * Added `databricks.sdk.service.serving.ChatMessageRole` dataclass. + * Added `databricks.sdk.service.serving.CohereConfig` dataclass. + * Added `databricks.sdk.service.serving.DatabricksModelServingConfig` dataclass. + * Added `databricks.sdk.service.serving.EmbeddingsV1ResponseEmbeddingElement` dataclass. + * Added `databricks.sdk.service.serving.EmbeddingsV1ResponseEmbeddingElementObject` dataclass. + * Added `databricks.sdk.service.serving.ExternalModel` dataclass. + * Added `databricks.sdk.service.serving.ExternalModelConfig` dataclass. + * Added `databricks.sdk.service.serving.ExternalModelProvider` dataclass. + * Added `databricks.sdk.service.serving.ExternalModelUsageElement` dataclass. + * Added `databricks.sdk.service.serving.FoundationModel` dataclass. + * Added `databricks.sdk.service.serving.OpenAiConfig` dataclass. + * Added `databricks.sdk.service.serving.PaLmConfig` dataclass. + * Added `databricks.sdk.service.serving.PayloadTable` dataclass. + * Added `databricks.sdk.service.serving.PutRequest` dataclass. + * Added `databricks.sdk.service.serving.PutResponse` dataclass. + * Added `databricks.sdk.service.serving.QueryEndpointResponseObject` dataclass. + * Added `databricks.sdk.service.serving.RateLimit` dataclass. + * Added `databricks.sdk.service.serving.RateLimitKey` dataclass. + * Added `databricks.sdk.service.serving.RateLimitRenewalPeriod` dataclass. + * Added `databricks.sdk.service.serving.ServedEntityInput` dataclass. + * Added `databricks.sdk.service.serving.ServedEntityOutput` dataclass. + * Added `databricks.sdk.service.serving.ServedEntitySpec` dataclass. + * Added `databricks.sdk.service.serving.ServedModelInputWorkloadSize` dataclass. + * Added `databricks.sdk.service.serving.ServedModelInputWorkloadType` dataclass. + * Added `databricks.sdk.service.serving.V1ResponseChoiceElement` dataclass. + * Removed [a.account_network_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_network_policy.html) account-level service. + * Removed `databricks.sdk.service.settings.AccountNetworkPolicyMessage` dataclass. + * Removed `databricks.sdk.service.settings.DeleteAccountNetworkPolicyRequest` dataclass. + * Removed `databricks.sdk.service.settings.DeleteAccountNetworkPolicyResponse` dataclass. + * Removed `databricks.sdk.service.settings.ReadAccountNetworkPolicyRequest` dataclass. + * Removed `databricks.sdk.service.settings.UpdateAccountNetworkPolicyRequest` dataclass. + * Removed `name` field for `databricks.sdk.service.sharing.UpdateCleanRoom`. + * Changed `name` field for `databricks.sdk.service.sharing.UpdateProvider` to be required. + * Added `new_name` field for `databricks.sdk.service.sharing.UpdateProvider`. + * Changed `name` field for `databricks.sdk.service.sharing.UpdateRecipient` to be required. + * Added `new_name` field for `databricks.sdk.service.sharing.UpdateRecipient`. + * Changed `name` field for `databricks.sdk.service.sharing.UpdateShare` to be required. + * Added `new_name` field for `databricks.sdk.service.sharing.UpdateShare`. + * Added `statement_ids` field for `databricks.sdk.service.sql.QueryFilter`. + * Added `databricks.sdk.service.sql.StatementId` dataclass. 
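Two of the breaking patterns in the list above recur across many Unity Catalog services: renames now go through an explicit `new_name` field instead of overloading `name`, and storage credentials gain Cloudflare R2 support via `cloudflare_api_token`. A minimal sketch of both against the 0.15.0 signatures (the catalog name, credential name, and key values are placeholders, not real resources):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import CloudflareApiToken

w = WorkspaceClient()

# Renames use the new `new_name` field; `name` only identifies the object.
w.catalogs.update(name="main", new_name="main_renamed")

# Storage credentials can now be backed by a Cloudflare R2 API token.
w.storage_credentials.create(
    name="r2-credential",
    cloudflare_api_token=CloudflareApiToken(
        access_key_id="<r2-access-key-id>",
        secret_access_key="<r2-secret-access-key>",
        account_id="<cloudflare-account-id>",
    ),
    comment="R2 bucket credential",
)
```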
+ +OpenAPI SHA: 63caa3cb0c05045e81d3dcf2451fa990d8670f36, Date: 2023-12-12 + ## 0.14.0 Major changes: diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index d4ac587a4..9a3f81e2f 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -47,7 +47,6 @@ WorkspacesAPI) from databricks.sdk.service.serving import AppsAPI, ServingEndpointsAPI from databricks.sdk.service.settings import (AccountIpAccessListsAPI, - AccountNetworkPolicyAPI, AccountSettingsAPI, CredentialsManagerAPI, IpAccessListsAPI, @@ -667,10 +666,10 @@ def __init__(self, serverless compute. This means the endpoints and associated compute resources are fully managed by Databricks and will not appear in your cloud account. A serving endpoint can consist of one or more MLflow models from the Databricks Model Registry, called served - models. A serving endpoint can have at most ten served models. You can configure traffic - settings to define how requests should be routed to your served models behind an endpoint. + entities. A serving endpoint can have at most ten served entities. You can configure traffic + settings to define how requests should be routed to your served entities behind an endpoint. Additionally, you can configure the scale of resources that should be applied to each served - model.""" + entity.""" self.settings: SettingsAPI = SettingsAPI(self.api_client) """The default namespace setting API allows users to configure the default namespace for a @@ -1087,15 +1086,6 @@ def __init__(self, [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security""" - self.network_policy: AccountNetworkPolicyAPI = AccountNetworkPolicyAPI(self.api_client) - """Network policy is a set of rules that defines what can be accessed from your Databricks - network. E.g.: You can choose to block your SQL UDF to access internet from your Databricks - serverless clusters. - - There is only one instance of this setting per account. Since this setting has a default - value, this setting is present on all accounts even though it's never set on a given - account. Deletion reverts the value of the setting back to the default value.""" - self.networks: NetworksAPI = NetworksAPI(self.api_client) """These APIs manage network configurations for customer-managed VPCs (optional). 
Its ID is used when creating a new workspace if you use customer-managed VPCs.""" diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index b6bb3000b..6e51eb645 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -513,6 +513,33 @@ class CatalogType(Enum): SYSTEM_CATALOG = 'SYSTEM_CATALOG' +@dataclass +class CloudflareApiToken: + access_key_id: str + """The Cloudflare access key id of the token.""" + + secret_access_key: str + """The secret access token generated for the access key id""" + + account_id: str + """The account id associated with the API token.""" + + def as_dict(self) -> dict: + """Serializes the CloudflareApiToken into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.access_key_id is not None: body['access_key_id'] = self.access_key_id + if self.account_id is not None: body['account_id'] = self.account_id + if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CloudflareApiToken: + """Deserializes the CloudflareApiToken from a dictionary.""" + return cls(access_key_id=d.get('access_key_id', None), + account_id=d.get('account_id', None), + secret_access_key=d.get('secret_access_key', None)) + + @dataclass class ColumnInfo: comment: Optional[str] = None @@ -1223,6 +1250,9 @@ class CreateStorageCredential: azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + comment: Optional[str] = None """Comment associated with the credential.""" @@ -1242,6 +1272,7 @@ def as_dict(self) -> dict: if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict() if self.comment is not None: body['comment'] = self.comment if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account @@ -1256,6 +1287,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateStorageCredential: return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), comment=d.get('comment', None), databricks_gcp_service_account=d.get('databricks_gcp_service_account', None), name=d.get('name', None), @@ -3163,6 +3195,9 @@ class StorageCredentialInfo: azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + comment: Optional[str] = None """Comment associated with the credential.""" @@ -3206,6 +3241,7 @@ def as_dict(self) -> dict: if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict() if self.comment is 
not None: body['comment'] = self.comment if self.created_at is not None: body['created_at'] = self.created_at if self.created_by is not None: body['created_by'] = self.created_by @@ -3228,6 +3264,7 @@ def from_dict(cls, d: Dict[str, any]) -> StorageCredentialInfo: return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), @@ -3582,7 +3619,10 @@ class UpdateCatalog: """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" name: Optional[str] = None - """Name of catalog.""" + """The name of the catalog.""" + + new_name: Optional[str] = None + """New name for the catalog.""" owner: Optional[str] = None """Username of current owner of catalog.""" @@ -3598,6 +3638,7 @@ def as_dict(self) -> dict: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner if self.properties: body['properties'] = self.properties return body @@ -3610,21 +3651,25 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCatalog: EnablePredictiveOptimization), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None), properties=d.get('properties', None)) @dataclass class UpdateConnection: - name: str - """Name of the connection.""" - options: Dict[str, str] """A map of key-value properties attached to the securable.""" + name: Optional[str] = None + """Name of the connection.""" + name_arg: Optional[str] = None """Name of the connection.""" + new_name: Optional[str] = None + """New name for the connection.""" + owner: Optional[str] = None """Username of current owner of the connection.""" @@ -3633,6 +3678,7 @@ def as_dict(self) -> dict: body = {} if self.name is not None: body['name'] = self.name if self.name_arg is not None: body['name_arg'] = self.name_arg + if self.new_name is not None: body['new_name'] = self.new_name if self.options: body['options'] = self.options if self.owner is not None: body['owner'] = self.owner return body @@ -3642,6 +3688,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateConnection: """Deserializes the UpdateConnection from a dictionary.""" return cls(name=d.get('name', None), name_arg=d.get('name_arg', None), + new_name=d.get('new_name', None), options=d.get('options', None), owner=d.get('owner', None)) @@ -3666,6 +3713,9 @@ class UpdateExternalLocation: name: Optional[str] = None """Name of the external location.""" + new_name: Optional[str] = None + """New name for the external location.""" + owner: Optional[str] = None """The owner of the external location.""" @@ -3687,6 +3737,7 @@ def as_dict(self) -> dict: if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() if self.force is not None: body['force'] = self.force if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = 
self.owner if self.read_only is not None: body['read_only'] = self.read_only if self.skip_validation is not None: body['skip_validation'] = self.skip_validation @@ -3702,6 +3753,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateExternalLocation: encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), force=d.get('force', None), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None), @@ -3748,6 +3800,9 @@ class UpdateMetastore: name: Optional[str] = None """The user-specified name of the metastore.""" + new_name: Optional[str] = None + """New name for the metastore.""" + owner: Optional[str] = None """The owner of the metastore.""" @@ -3768,6 +3823,7 @@ def as_dict(self) -> dict: if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value if self.id is not None: body['id'] = self.id if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version @@ -3784,6 +3840,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateMetastore: delta_sharing_scope=_enum(d, 'delta_sharing_scope', UpdateMetastoreDeltaSharingScope), id=d.get('id', None), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None), privilege_model_version=d.get('privilege_model_version', None), storage_root_credential_id=d.get('storage_root_credential_id', None)) @@ -3888,6 +3945,9 @@ class UpdateRegisteredModelRequest: name: Optional[str] = None """The name of the registered model""" + new_name: Optional[str] = None + """New name for the registered model.""" + owner: Optional[str] = None """The identifier of the user who owns the registered model""" @@ -3897,6 +3957,7 @@ def as_dict(self) -> dict: if self.comment is not None: body['comment'] = self.comment if self.full_name is not None: body['full_name'] = self.full_name if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner return body @@ -3906,6 +3967,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRegisteredModelRequest: return cls(comment=d.get('comment', None), full_name=d.get('full_name', None), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None)) @@ -3923,6 +3985,9 @@ class UpdateSchema: name: Optional[str] = None """Name of schema, relative to parent catalog.""" + new_name: Optional[str] = None + """New name for the schema.""" + owner: Optional[str] = None """Username of current owner of schema.""" @@ -3937,6 +4002,7 @@ def as_dict(self) -> dict: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value if self.full_name is not None: body['full_name'] = self.full_name if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner if self.properties: body['properties'] = self.properties return body @@ -3949,6 +4015,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateSchema: EnablePredictiveOptimization), full_name=d.get('full_name', None), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None), properties=d.get('properties', None)) @@ -3964,6 
+4031,9 @@ class UpdateStorageCredential: azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + comment: Optional[str] = None """Comment associated with the credential.""" @@ -3974,7 +4044,10 @@ class UpdateStorageCredential: """Force update even if there are dependent external locations or external tables.""" name: Optional[str] = None - """The credential name. The name must be unique within the metastore.""" + """Name of the storage credential.""" + + new_name: Optional[str] = None + """New name for the storage credential.""" owner: Optional[str] = None """Username of current owner of credential.""" @@ -3992,11 +4065,13 @@ def as_dict(self) -> dict: if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict() if self.comment is not None: body['comment'] = self.comment if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account if self.force is not None: body['force'] = self.force if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner if self.read_only is not None: body['read_only'] = self.read_only if self.skip_validation is not None: body['skip_validation'] = self.skip_validation @@ -4008,10 +4083,12 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateStorageCredential: return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), comment=d.get('comment', None), databricks_gcp_service_account=d.get('databricks_gcp_service_account', None), force=d.get('force', None), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None)) @@ -4028,6 +4105,9 @@ class UpdateVolumeRequestContent: name: Optional[str] = None """The name of the volume""" + new_name: Optional[str] = None + """New name for the volume.""" + owner: Optional[str] = None """The identifier of the user who owns the volume""" @@ -4037,6 +4117,7 @@ def as_dict(self) -> dict: if self.comment is not None: body['comment'] = self.comment if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner return body @@ -4046,6 +4127,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateVolumeRequestContent: return cls(comment=d.get('comment', None), full_name_arg=d.get('full_name_arg', None), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None)) @@ -4119,6 +4201,9 @@ class ValidateStorageCredential: azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The 
Cloudflare API token configuration.""" + databricks_gcp_service_account: Optional[Any] = None """The Databricks created GCP service account configuration.""" @@ -4141,6 +4226,7 @@ def as_dict(self) -> dict: if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict() if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account if self.external_location_name is not None: @@ -4156,6 +4242,7 @@ def from_dict(cls, d: Dict[str, any]) -> ValidateStorageCredential: return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), databricks_gcp_service_account=d.get('databricks_gcp_service_account', None), external_location_name=d.get('external_location_name', None), read_only=d.get('read_only', None), @@ -4880,6 +4967,7 @@ def update(self, comment: Optional[str] = None, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, isolation_mode: Optional[IsolationMode] = None, + new_name: Optional[str] = None, owner: Optional[str] = None, properties: Optional[Dict[str, str]] = None) -> CatalogInfo: """Update a catalog. @@ -4888,13 +4976,15 @@ def update(self, catalog, or a metastore admin (when changing the owner field of the catalog). :param name: str - Name of catalog. + The name of the catalog. :param comment: str (optional) User-provided free-form text description. :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional) Whether predictive optimization should be enabled for this object and objects under it. :param isolation_mode: :class:`IsolationMode` (optional) Whether the current securable is accessible from all workspaces or a specific set of workspaces. + :param new_name: str (optional) + New name for the catalog. :param owner: str (optional) Username of current owner of catalog. :param properties: Dict[str,str] (optional) @@ -4907,6 +4997,7 @@ def update(self, if enable_predictive_optimization is not None: body['enable_predictive_optimization'] = enable_predictive_optimization.value if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner if properties is not None: body['properties'] = properties headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } @@ -5012,9 +5103,10 @@ def list(self) -> Iterator[ConnectionInfo]: def update(self, name_arg: str, - name: str, options: Dict[str, str], *, + name: Optional[str] = None, + new_name: Optional[str] = None, owner: Optional[str] = None) -> ConnectionInfo: """Update a connection. @@ -5022,10 +5114,12 @@ def update(self, :param name_arg: str Name of the connection. - :param name: str - Name of the connection. :param options: Dict[str,str] A map of key-value properties attached to the securable. + :param name: str (optional) + Name of the connection. + :param new_name: str (optional) + New name for the connection. :param owner: str (optional) Username of current owner of the connection. 
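Because `options` is now the second positional parameter of `connections.update()` and `name` moved into the keyword-only section, existing callers need adjusting. A hedged before/after sketch assuming the signature above (the connection name and options map are illustrative):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Pre-0.15.0 call shape (`name` was the second required positional argument):
#   w.connections.update("my-conn", "my-conn", {"host": "db.example.com"})

# 0.15.0 shape: `options` follows `name_arg`; renames go through `new_name`.
w.connections.update(
    "my-conn",                                   # name_arg identifies the connection
    {"host": "db.example.com", "port": "5432"},  # options map attached to the securable
    new_name="my-conn-renamed",
)
```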
@@ -5033,6 +5127,7 @@ def update(self, """ body = {} if name is not None: body['name'] = name + if new_name is not None: body['new_name'] = new_name if options is not None: body['options'] = options if owner is not None: body['owner'] = owner headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } @@ -5167,6 +5262,7 @@ def update(self, credential_name: Optional[str] = None, encryption_details: Optional[EncryptionDetails] = None, force: Optional[bool] = None, + new_name: Optional[str] = None, owner: Optional[str] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None, @@ -5189,6 +5285,8 @@ def update(self, Encryption options that apply to clients connecting to cloud storage. :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. + :param new_name: str (optional) + New name for the external location. :param owner: str (optional) The owner of the external location. :param read_only: bool (optional) @@ -5206,6 +5304,7 @@ def update(self, if credential_name is not None: body['credential_name'] = credential_name if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict() if force is not None: body['force'] = force + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner if read_only is not None: body['read_only'] = read_only if skip_validation is not None: body['skip_validation'] = skip_validation @@ -5613,6 +5712,7 @@ def update(self, delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None, delta_sharing_scope: Optional[UpdateMetastoreDeltaSharingScope] = None, name: Optional[str] = None, + new_name: Optional[str] = None, owner: Optional[str] = None, privilege_model_version: Optional[str] = None, storage_root_credential_id: Optional[str] = None) -> MetastoreInfo: @@ -5632,6 +5732,8 @@ def update(self, The scope of Delta Sharing enabled for the metastore. :param name: str (optional) The user-specified name of the metastore. + :param new_name: str (optional) + New name for the metastore. :param owner: str (optional) The owner of the metastore. :param privilege_model_version: str (optional) @@ -5649,6 +5751,7 @@ def update(self, 'delta_sharing_recipient_token_lifetime_in_seconds'] = delta_sharing_recipient_token_lifetime_in_seconds if delta_sharing_scope is not None: body['delta_sharing_scope'] = delta_sharing_scope.value if name is not None: body['name'] = name + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner if privilege_model_version is not None: body['privilege_model_version'] = privilege_model_version if storage_root_credential_id is not None: @@ -6056,6 +6159,7 @@ def update(self, *, comment: Optional[str] = None, name: Optional[str] = None, + new_name: Optional[str] = None, owner: Optional[str] = None) -> RegisteredModelInfo: """Update a Registered Model. @@ -6073,6 +6177,8 @@ def update(self, The comment attached to the registered model :param name: str (optional) The name of the registered model + :param new_name: str (optional) + New name for the registered model. 
:param owner: str (optional) The identifier of the user who owns the registered model @@ -6081,6 +6187,7 @@ def update(self, body = {} if comment is not None: body['comment'] = comment if name is not None: body['name'] = name + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PATCH', f'/api/2.1/unity-catalog/models/{full_name}', body=body, headers=headers) @@ -6189,6 +6296,7 @@ def update(self, comment: Optional[str] = None, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, name: Optional[str] = None, + new_name: Optional[str] = None, owner: Optional[str] = None, properties: Optional[Dict[str, str]] = None) -> SchemaInfo: """Update a schema. @@ -6206,6 +6314,8 @@ def update(self, Whether predictive optimization should be enabled for this object and objects under it. :param name: str (optional) Name of schema, relative to parent catalog. + :param new_name: str (optional) + New name for the schema. :param owner: str (optional) Username of current owner of schema. :param properties: Dict[str,str] (optional) @@ -6218,6 +6328,7 @@ def update(self, if enable_predictive_optimization is not None: body['enable_predictive_optimization'] = enable_predictive_optimization.value if name is not None: body['name'] = name + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner if properties is not None: body['properties'] = properties headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } @@ -6246,6 +6357,7 @@ def create(self, aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, + cloudflare_api_token: Optional[CloudflareApiToken] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[Any] = None, read_only: Optional[bool] = None, @@ -6262,6 +6374,8 @@ def create(self, The Azure managed identity configuration. :param azure_service_principal: :class:`AzureServicePrincipal` (optional) The Azure service principal configuration. + :param cloudflare_api_token: :class:`CloudflareApiToken` (optional) + The Cloudflare API token configuration. :param comment: str (optional) Comment associated with the credential. 
:param databricks_gcp_service_account: Any (optional) @@ -6279,6 +6393,7 @@ def create(self, body['azure_managed_identity'] = azure_managed_identity.as_dict() if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() + if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict() if comment is not None: body['comment'] = comment if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account @@ -6349,9 +6464,11 @@ def update(self, aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, + cloudflare_api_token: Optional[CloudflareApiToken] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[Any] = None, force: Optional[bool] = None, + new_name: Optional[str] = None, owner: Optional[str] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> StorageCredentialInfo: @@ -6360,19 +6477,23 @@ def update(self, Updates a storage credential on the metastore. :param name: str - The credential name. The name must be unique within the metastore. + Name of the storage credential. :param aws_iam_role: :class:`AwsIamRole` (optional) The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) The Azure managed identity configuration. :param azure_service_principal: :class:`AzureServicePrincipal` (optional) The Azure service principal configuration. + :param cloudflare_api_token: :class:`CloudflareApiToken` (optional) + The Cloudflare API token configuration. :param comment: str (optional) Comment associated with the credential. :param databricks_gcp_service_account: Any (optional) The managed GCP service account configuration. :param force: bool (optional) Force update even if there are dependent external locations or external tables. + :param new_name: str (optional) + New name for the storage credential. :param owner: str (optional) Username of current owner of credential. :param read_only: bool (optional) @@ -6388,10 +6509,12 @@ def update(self, body['azure_managed_identity'] = azure_managed_identity.as_dict() if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() + if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict() if comment is not None: body['comment'] = comment if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account if force is not None: body['force'] = force + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner if read_only is not None: body['read_only'] = read_only if skip_validation is not None: body['skip_validation'] = skip_validation @@ -6407,6 +6530,7 @@ def validate(self, aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, + cloudflare_api_token: Optional[CloudflareApiToken] = None, databricks_gcp_service_account: Optional[Any] = None, external_location_name: Optional[str] = None, read_only: Optional[bool] = None, @@ -6430,6 +6554,8 @@ def validate(self, The Azure managed identity configuration. :param azure_service_principal: :class:`AzureServicePrincipal` (optional) The Azure service principal configuration. 
+ :param cloudflare_api_token: :class:`CloudflareApiToken` (optional) + The Cloudflare API token configuration. :param databricks_gcp_service_account: Any (optional) The Databricks created GCP service account configuration. :param external_location_name: str (optional) @@ -6449,6 +6575,7 @@ def validate(self, body['azure_managed_identity'] = azure_managed_identity.as_dict() if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() + if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict() if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account if external_location_name is not None: body['external_location_name'] = external_location_name @@ -6911,6 +7038,7 @@ def update(self, *, comment: Optional[str] = None, name: Optional[str] = None, + new_name: Optional[str] = None, owner: Optional[str] = None) -> VolumeInfo: """Update a Volume. @@ -6928,6 +7056,8 @@ def update(self, The comment attached to the volume :param name: str (optional) The name of the volume + :param new_name: str (optional) + New name for the volume. :param owner: str (optional) The identifier of the user who owns the volume @@ -6936,6 +7066,7 @@ def update(self, body = {} if comment is not None: body['comment'] = comment if name is not None: body['name'] = name + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PATCH', diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 8e3d9ec6e..25cc245f2 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -8,7 +8,7 @@ from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Callable, Dict, Iterator, List, Optional from ..errors import OperationFailed from ._internal import Wait, _enum, _from_dict, _repeated_dict @@ -76,9 +76,6 @@ class BaseRun: cluster_spec: Optional[ClusterSpec] = None """A snapshot of the job’s cluster specification when this run was created.""" - continuous: Optional[Continuous] = None - """The continuous trigger that triggered this run.""" - creator_user_name: Optional[str] = None """The creator user name. 
This field won’t be included in the response if the user has already been deleted.""" @@ -186,7 +183,6 @@ def as_dict(self) -> dict: if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict() if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict() - if self.continuous: body['continuous'] = self.continuous.as_dict() if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name if self.end_time is not None: body['end_time'] = self.end_time if self.execution_duration is not None: body['execution_duration'] = self.execution_duration @@ -219,7 +215,6 @@ def from_dict(cls, d: Dict[str, any]) -> BaseRun: cleanup_duration=d.get('cleanup_duration', None), cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance), cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec), - continuous=_from_dict(d, 'continuous', Continuous), creator_user_name=d.get('creator_user_name', None), end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), @@ -2325,9 +2320,6 @@ class Run: cluster_spec: Optional[ClusterSpec] = None """A snapshot of the job’s cluster specification when this run was created.""" - continuous: Optional[Continuous] = None - """The continuous trigger that triggered this run.""" - creator_user_name: Optional[str] = None """The creator user name. This field won’t be included in the response if the user has already been deleted.""" @@ -2438,7 +2430,6 @@ def as_dict(self) -> dict: if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict() if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict() - if self.continuous: body['continuous'] = self.continuous.as_dict() if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name if self.end_time is not None: body['end_time'] = self.end_time if self.execution_duration is not None: body['execution_duration'] = self.execution_duration @@ -2472,7 +2463,6 @@ def from_dict(cls, d: Dict[str, any]) -> Run: cleanup_duration=d.get('cleanup_duration', None), cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance), cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec), - continuous=_from_dict(d, 'continuous', Continuous), creator_user_name=d.get('creator_user_name', None), end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), @@ -2582,7 +2572,7 @@ class RunJobTask: job_id: int """ID of the job to trigger.""" - job_parameters: Optional[Any] = None + job_parameters: Optional[Dict[str, str]] = None """Job-level parameters used to trigger the job.""" def as_dict(self) -> dict: @@ -3881,6 +3871,14 @@ class SubmitTask: python_wheel_task: Optional[PythonWheelTask] = None """If python_wheel_task, indicates that this job must execute a PythonWheel.""" + run_if: Optional[RunIf] = None + """An optional value indicating the condition that determines whether the task should be run once + its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. 
See + :method:jobs/create for a list of possible values.""" + + run_job_task: Optional[RunJobTask] = None + """If run_job_task, indicates that this job must execute another job.""" + spark_jar_task: Optional[SparkJarTask] = None """If spark_jar_task, indicates that this task must run a JAR.""" @@ -3929,6 +3927,8 @@ def as_dict(self) -> dict: if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict() if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict() + if self.run_if is not None: body['run_if'] = self.run_if.value + if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict() if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict() if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict() if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict() @@ -3952,6 +3952,8 @@ def from_dict(cls, d: Dict[str, any]) -> SubmitTask: notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings), pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask), python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask), + run_if=_enum(d, 'run_if', RunIf), + run_job_task=_from_dict(d, 'run_job_task', RunJobTask), spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask), spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask), spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask), diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 11e8102e2..f002a5257 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -343,27 +343,24 @@ class CreateWorkspaceRequest: characters. The key can be of maximum length of 127 characters, and cannot be empty.""" deployment_name: Optional[str] = None - """The deployment name defines part of the subdomain for the workspace. The workspace URL for web - application and REST APIs is `.cloud.databricks.com`. For example, if - the deployment name is `abcsales`, your workspace URL will be + """The deployment name defines part of the subdomain for the workspace. The workspace URL for the + web application and REST APIs is `.cloud.databricks.com`. For + example, if the deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the set of characters that are allowed in a subdomain. - If your account has a non-empty deployment name prefix at workspace creation time, the workspace - deployment name changes so that the beginning has the account prefix and a hyphen. For example, - if your account's deployment prefix is `acme` and the workspace deployment name is - `workspace-1`, the `deployment_name` field becomes `acme-workspace-1` and that is the value that - is returned in JSON responses for the `deployment_name` field. The workspace URL is - `acme-workspace-1.cloud.databricks.com`. + To set this value, you must have a deployment name prefix. Contact your Databricks account team + to add an account deployment name prefix to your account. - If your account has a non-empty deployment name prefix and you set `deployment_name` to the - reserved keyword `EMPTY`, `deployment_name` is just the account prefix only. 
For example, if - your account's deployment prefix is `acme` and the workspace deployment name is `EMPTY`, - `deployment_name` becomes `acme` only and the workspace URL is `acme.cloud.databricks.com`. + Workspace deployment names follow the account prefix and a hyphen. For example, if your + account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the + JSON response for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL + would be `acme-workspace-1.cloud.databricks.com`. - Contact your Databricks representatives to add an account deployment name prefix to your - account. If you do not have a deployment name prefix, the special deployment name value `EMPTY` - is invalid. + You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the + deployment name to only include the deployment prefix. For example, if your account's deployment + prefix is `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes + `acme` only and the workspace URL is `acme.cloud.databricks.com`. This value must be unique across all non-deleted deployments across all AWS regions. @@ -415,7 +412,7 @@ class CreateWorkspaceRequest: specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection), back-end (data plane to control plane connection), or both connection types. - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. + Before configuring PrivateLink, read the [Databricks article about PrivateLink].", [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html""" @@ -1426,7 +1423,7 @@ class Workspace: specify this ID if you are using [AWS PrivateLink] for either front-end (user-to-workspace connection), back-end (data plane to control plane connection), or both connection types. - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. + Before configuring PrivateLink, read the [Databricks article about PrivateLink].", [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html""" @@ -1925,7 +1922,7 @@ def delete(self, private_access_settings_id: str): Deletes a private access settings object, which determines how your workspace is accessed over [AWS PrivateLink]. - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. + Before configuring PrivateLink, read the [Databricks article about PrivateLink].", [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html @@ -1948,7 +1945,7 @@ def get(self, private_access_settings_id: str) -> PrivateAccessSettings: Gets a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. + Before configuring PrivateLink, read the [Databricks article about PrivateLink].", [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html @@ -2345,26 +2342,24 @@ def create(self, of utf-8 characters. 
The value can be an empty string, with maximum length of 255 characters. The key can be of maximum length of 127 characters, and cannot be empty. :param deployment_name: str (optional) - The deployment name defines part of the subdomain for the workspace. The workspace URL for web + The deployment name defines part of the subdomain for the workspace. The workspace URL for the web application and REST APIs is `.cloud.databricks.com`. For example, if the deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the set of characters that are allowed in a subdomain. - If your account has a non-empty deployment name prefix at workspace creation time, the workspace - deployment name changes so that the beginning has the account prefix and a hyphen. For example, if - your account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the - `deployment_name` field becomes `acme-workspace-1` and that is the value that is returned in JSON - responses for the `deployment_name` field. The workspace URL is - `acme-workspace-1.cloud.databricks.com`. + To set this value, you must have a deployment name prefix. Contact your Databricks account team to + add an account deployment name prefix to your account. - If your account has a non-empty deployment name prefix and you set `deployment_name` to the reserved - keyword `EMPTY`, `deployment_name` is just the account prefix only. For example, if your account's - deployment prefix is `acme` and the workspace deployment name is `EMPTY`, `deployment_name` becomes - `acme` only and the workspace URL is `acme.cloud.databricks.com`. + Workspace deployment names follow the account prefix and a hyphen. For example, if your account's + deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response + for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be + `acme-workspace-1.cloud.databricks.com`. - Contact your Databricks representatives to add an account deployment name prefix to your account. If - you do not have a deployment name prefix, the special deployment name value `EMPTY` is invalid. + You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment + name to only include the deployment prefix. For example, if your account's deployment prefix is + `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and + the workspace URL is `acme.cloud.databricks.com`. This value must be unique across all non-deleted deployments across all AWS regions. @@ -2407,7 +2402,7 @@ def create(self, specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection), back-end (data plane to control plane connection), or both connection types. - Before configuring PrivateLink, read the [Databricks article about PrivateLink]. 
+ Before configuring PrivateLink, read the [Databricks article about PrivateLink].", [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 1d1d47102..9a3347d77 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -18,6 +18,40 @@ # all definitions in this file are in alphabetical order +@dataclass +class Ai21LabsConfig: + ai21labs_api_key: str + """The Databricks secret key reference for an AI21Labs API key.""" + + def as_dict(self) -> dict: + """Serializes the Ai21LabsConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Ai21LabsConfig: + """Deserializes the Ai21LabsConfig from a dictionary.""" + return cls(ai21labs_api_key=d.get('ai21labs_api_key', None)) + + +@dataclass +class AnthropicConfig: + anthropic_api_key: str + """The Databricks secret key reference for an Anthropic API key.""" + + def as_dict(self) -> dict: + """Serializes the AnthropicConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig: + """Deserializes the AnthropicConfig from a dictionary.""" + return cls(anthropic_api_key=d.get('anthropic_api_key', None)) + + @dataclass class AppEvents: event_name: Optional[str] = None @@ -121,10 +155,143 @@ def from_dict(cls, d: Dict[str, any]) -> AppServiceStatus: template=d.get('template', None)) +@dataclass +class AutoCaptureConfigInput: + catalog_name: Optional[str] = None + """The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if + it was already set.""" + + enabled: Optional[bool] = None + """If inference tables are enabled or not. NOTE: If you have already disabled payload logging once, + you cannot enable again.""" + + schema_name: Optional[str] = None + """The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if + it was already set.""" + + table_name_prefix: Optional[str] = None + """The prefix of the table in Unity Catalog. 
NOTE: On update, you cannot change the prefix name if + it was already set.""" + + def as_dict(self) -> dict: + """Serializes the AutoCaptureConfigInput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigInput: + """Deserializes the AutoCaptureConfigInput from a dictionary.""" + return cls(catalog_name=d.get('catalog_name', None), + enabled=d.get('enabled', None), + schema_name=d.get('schema_name', None), + table_name_prefix=d.get('table_name_prefix', None)) + + +@dataclass +class AutoCaptureConfigOutput: + catalog_name: Optional[str] = None + """The name of the catalog in Unity Catalog.""" + + enabled: Optional[bool] = None + """If inference tables are enabled or not.""" + + schema_name: Optional[str] = None + """The name of the schema in Unity Catalog.""" + + state: Optional[AutoCaptureState] = None + + table_name_prefix: Optional[str] = None + """The prefix of the table in Unity Catalog.""" + + def as_dict(self) -> dict: + """Serializes the AutoCaptureConfigOutput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.state: body['state'] = self.state.as_dict() + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigOutput: + """Deserializes the AutoCaptureConfigOutput from a dictionary.""" + return cls(catalog_name=d.get('catalog_name', None), + enabled=d.get('enabled', None), + schema_name=d.get('schema_name', None), + state=_from_dict(d, 'state', AutoCaptureState), + table_name_prefix=d.get('table_name_prefix', None)) + + +@dataclass +class AutoCaptureState: + payload_table: Optional[PayloadTable] = None + + def as_dict(self) -> dict: + """Serializes the AutoCaptureState into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.payload_table: body['payload_table'] = self.payload_table.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AutoCaptureState: + """Deserializes the AutoCaptureState from a dictionary.""" + return cls(payload_table=_from_dict(d, 'payload_table', PayloadTable)) + + +@dataclass +class AwsBedrockConfig: + aws_region: str + """The AWS region to use. Bedrock has to be enabled there.""" + + aws_access_key_id: str + """The Databricks secret key reference for an AWS Access Key ID with permissions to interact with + Bedrock services.""" + + aws_secret_access_key: str + """The Databricks secret key reference for an AWS Secret Access Key paired with the access key ID, + with permissions to interact with Bedrock services.""" + + bedrock_provider: AwsBedrockConfigBedrockProvider + """The underlying provider in AWS Bedrock. 
Supported values (case insensitive) include: Anthropic, + Cohere, AI21Labs, Amazon.""" + + def as_dict(self) -> dict: + """Serializes the AwsBedrockConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key + if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AwsBedrockConfig: + """Deserializes the AwsBedrockConfig from a dictionary.""" + return cls(aws_access_key_id=d.get('aws_access_key_id', None), + aws_region=d.get('aws_region', None), + aws_secret_access_key=d.get('aws_secret_access_key', None), + bedrock_provider=_enum(d, 'bedrock_provider', AwsBedrockConfigBedrockProvider)) + + +class AwsBedrockConfigBedrockProvider(Enum): + """The underlying provider in AWS Bedrock. Supported values (case insensitive) include: Anthropic, + Cohere, AI21Labs, Amazon.""" + + AI21LABS = 'ai21labs' + AMAZON = 'amazon' + ANTHROPIC = 'anthropic' + COHERE = 'cohere' + + @dataclass class BuildLogsResponse: logs: str - """The logs associated with building the served model's environment.""" + """The logs associated with building the served entity's environment.""" def as_dict(self) -> dict: """Serializes the BuildLogsResponse into a dictionary suitable for use as a JSON request body.""" @@ -138,6 +305,52 @@ def from_dict(cls, d: Dict[str, any]) -> BuildLogsResponse: return cls(logs=d.get('logs', None)) +@dataclass +class ChatMessage: + content: Optional[str] = None + """The content of the message.""" + + role: Optional[ChatMessageRole] = None + """The role of the message. One of [system, user, assistant].""" + + def as_dict(self) -> dict: + """Serializes the ChatMessage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.role is not None: body['role'] = self.role.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ChatMessage: + """Deserializes the ChatMessage from a dictionary.""" + return cls(content=d.get('content', None), role=_enum(d, 'role', ChatMessageRole)) + + +class ChatMessageRole(Enum): + """The role of the message. One of [system, user, assistant].""" + + ASSISTANT = 'assistant' + SYSTEM = 'system' + USER = 'user' + + +@dataclass +class CohereConfig: + cohere_api_key: str + """The Databricks secret key reference for a Cohere API key.""" + + def as_dict(self) -> dict: + """Serializes the CohereConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CohereConfig: + """Deserializes the CohereConfig from a dictionary.""" + return cls(cohere_api_key=d.get('cohere_api_key', None)) + + @dataclass class CreateServingEndpoint: name: str @@ -147,6 +360,10 @@ class CreateServingEndpoint: config: EndpointCoreConfigInput """The core config of the serving endpoint.""" + rate_limits: Optional[List[RateLimit]] = None + """Rate limits to be applied to the serving endpoint. 
NOTE: only external and foundation model + endpoints are supported as of now.""" + tags: Optional[List[EndpointTag]] = None """Tags to be attached to the serving endpoint and automatically propagated to billing logs.""" @@ -155,6 +372,7 @@ def as_dict(self) -> dict: body = {} if self.config: body['config'] = self.config.as_dict() if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body @@ -163,9 +381,36 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint: """Deserializes the CreateServingEndpoint from a dictionary.""" return cls(config=_from_dict(d, 'config', EndpointCoreConfigInput), name=d.get('name', None), + rate_limits=_repeated_dict(d, 'rate_limits', RateLimit), tags=_repeated_dict(d, 'tags', EndpointTag)) +@dataclass +class DatabricksModelServingConfig: + databricks_api_token: str + """The Databricks secret key reference for a Databricks API token that corresponds to a user or + service principal with Can Query access to the model serving endpoint pointed to by this + external model.""" + + databricks_workspace_url: str + """The URL of the Databricks workspace containing the model serving endpoint pointed to by this + external model.""" + + def as_dict(self) -> dict: + """Serializes the DatabricksModelServingConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token + if self.databricks_workspace_url is not None: + body['databricks_workspace_url'] = self.databricks_workspace_url + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DatabricksModelServingConfig: + """Deserializes the DatabricksModelServingConfig from a dictionary.""" + return cls(databricks_api_token=d.get('databricks_api_token', None), + databricks_workspace_url=d.get('databricks_workspace_url', None)) + + @dataclass class DataframeSplitInput: columns: Optional[List[Any]] = None @@ -266,22 +511,64 @@ class DeploymentStatusState(Enum): SUCCESS = 'SUCCESS' +@dataclass +class EmbeddingsV1ResponseEmbeddingElement: + embedding: Optional[List[float]] = None + + index: Optional[int] = None + """The index of the embedding in the response.""" + + object: Optional[EmbeddingsV1ResponseEmbeddingElementObject] = None + """This will always be 'embedding'.""" + + def as_dict(self) -> dict: + """Serializes the EmbeddingsV1ResponseEmbeddingElement into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.embedding: body['embedding'] = [v for v in self.embedding] + if self.index is not None: body['index'] = self.index + if self.object is not None: body['object'] = self.object.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EmbeddingsV1ResponseEmbeddingElement: + """Deserializes the EmbeddingsV1ResponseEmbeddingElement from a dictionary.""" + return cls(embedding=d.get('embedding', None), + index=d.get('index', None), + object=_enum(d, 'object', EmbeddingsV1ResponseEmbeddingElementObject)) + + +class EmbeddingsV1ResponseEmbeddingElementObject(Enum): + """This will always be 'embedding'.""" + + EMBEDDING = 'embedding' + + @dataclass class EndpointCoreConfigInput: - served_models: List[ServedModelInput] - """A list of served models for the endpoint to serve. 
A serving endpoint can have up to 10 served - models.""" + served_entities: List[ServedEntityInput] + """A list of served entities for the endpoint to serve. A serving endpoint can have up to 10 served + entities.""" + + auto_capture_config: Optional[AutoCaptureConfigInput] = None + """Configuration for Inference Tables which automatically logs requests and responses to Unity + Catalog.""" name: Optional[str] = None """The name of the serving endpoint to update. This field is required.""" + served_models: Optional[List[ServedModelInput]] = None + """(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A + serving endpoint can have up to 10 served models.""" + traffic_config: Optional[TrafficConfig] = None """The traffic config defining how invocations to the serving endpoint should be routed.""" def as_dict(self) -> dict: """Serializes the EndpointCoreConfigInput into a dictionary suitable for use as a JSON request body.""" body = {} + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict() if self.name is not None: body['name'] = self.name + if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() return body @@ -289,18 +576,28 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigInput: """Deserializes the EndpointCoreConfigInput from a dictionary.""" - return cls(name=d.get('name', None), + return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigInput), + name=d.get('name', None), + served_entities=_repeated_dict(d, 'served_entities', ServedEntityInput), served_models=_repeated_dict(d, 'served_models', ServedModelInput), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) @dataclass class EndpointCoreConfigOutput: + auto_capture_config: Optional[AutoCaptureConfigOutput] = None + """Configuration for Inference Tables which automatically logs requests and responses to Unity + Catalog.""" + config_version: Optional[int] = None """The config version that the serving endpoint is currently serving.""" + served_entities: Optional[List[ServedEntityOutput]] = None + """The list of served entities under the serving endpoint config.""" + served_models: Optional[List[ServedModelOutput]] = None - """The list of served models under the serving endpoint config.""" + """(Deprecated, use served_entities instead) The list of served models under the serving endpoint + config.""" traffic_config: Optional[TrafficConfig] = None """The traffic configuration associated with the serving endpoint config.""" @@ -308,7 +605,9 @@ class EndpointCoreConfigOutput: def as_dict(self) -> dict: """Serializes the EndpointCoreConfigOutput into a dictionary suitable for use as a JSON request body.""" body = {} + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict() if self.config_version is not None: body['config_version'] = self.config_version + if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() return body @@ -316,26 +615,34 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> 
EndpointCoreConfigOutput: """Deserializes the EndpointCoreConfigOutput from a dictionary.""" - return cls(config_version=d.get('config_version', None), + return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigOutput), + config_version=d.get('config_version', None), + served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput), served_models=_repeated_dict(d, 'served_models', ServedModelOutput), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) @dataclass class EndpointCoreConfigSummary: + served_entities: Optional[List[ServedEntitySpec]] = None + """The list of served entities under the serving endpoint config.""" + served_models: Optional[List[ServedModelSpec]] = None - """The list of served models under the serving endpoint config.""" + """(Deprecated, use served_entities instead) The list of served models under the serving endpoint + config.""" def as_dict(self) -> dict: """Serializes the EndpointCoreConfigSummary into a dictionary suitable for use as a JSON request body.""" body = {} + if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigSummary: """Deserializes the EndpointCoreConfigSummary from a dictionary.""" - return cls(served_models=_repeated_dict(d, 'served_models', ServedModelSpec)) + return cls(served_entities=_repeated_dict(d, 'served_entities', ServedEntitySpec), + served_models=_repeated_dict(d, 'served_models', ServedModelSpec)) @dataclass @@ -343,8 +650,12 @@ class EndpointPendingConfig: config_version: Optional[int] = None """The config version that the serving endpoint is currently serving.""" + served_entities: Optional[List[ServedEntityOutput]] = None + """The list of served entities belonging to the last issued update to the serving endpoint.""" + served_models: Optional[List[ServedModelOutput]] = None - """The list of served models belonging to the last issued update to the serving endpoint.""" + """(Deprecated, use served_entities instead) The list of served models belonging to the last issued + update to the serving endpoint.""" start_time: Optional[int] = None """The timestamp when the update to the pending config started.""" @@ -356,6 +667,7 @@ def as_dict(self) -> dict: """Serializes the EndpointPendingConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.config_version is not None: body['config_version'] = self.config_version + if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] if self.start_time is not None: body['start_time'] = self.start_time if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() @@ -365,6 +677,7 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> EndpointPendingConfig: """Deserializes the EndpointPendingConfig from a dictionary.""" return cls(config_version=d.get('config_version', None), + served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput), served_models=_repeated_dict(d, 'served_models', ServedModelOutput), start_time=d.get('start_time', None), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) @@ -380,8 +693,8 @@ class EndpointState: ready: Optional[EndpointStateReady] = None """The state of an endpoint, indicating whether or not 
the endpoint is queryable. An endpoint is
-    READY if all of the served models in its active configuration are ready. If any of the actively
-    served models are in a non-ready state, the endpoint state will be NOT_READY."""
+    READY if all of the served entities in its active configuration are ready. If any of the
+    actively served entities are in a non-ready state, the endpoint state will be NOT_READY."""
 
     def as_dict(self) -> dict:
         """Serializes the EndpointState into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -410,8 +723,8 @@ class EndpointStateConfigUpdate(Enum):
 
 class EndpointStateReady(Enum):
     """The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is
-    READY if all of the served models in its active configuration are ready. If any of the actively
-    served models are in a non-ready state, the endpoint state will be NOT_READY."""
+    READY if all of the served entities in its active configuration are ready. If any of the
+    actively served entities are in a non-ready state, the endpoint state will be NOT_READY."""
 
     NOT_READY = 'NOT_READY'
     READY = 'READY'
@@ -438,6 +751,162 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointTag:
         return cls(key=d.get('key', None), value=d.get('value', None))
 
+@dataclass
+class ExternalModel:
+    provider: ExternalModelProvider
+    """The name of the provider for the external model. Currently, the supported providers are
+    'ai21labs', 'anthropic', 'aws-bedrock', 'cohere', 'databricks-model-serving', 'openai', and
+    'palm'."""
+
+    name: str
+    """The name of the external model."""
+
+    task: str
+    """The task type of the external model."""
+
+    config: ExternalModelConfig
+    """The config for the external model, which must match the provider."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalModel into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.config: body['config'] = self.config.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.provider is not None: body['provider'] = self.provider.value
+        if self.task is not None: body['task'] = self.task
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExternalModel:
+        """Deserializes the ExternalModel from a dictionary."""
+        return cls(config=_from_dict(d, 'config', ExternalModelConfig),
+                   name=d.get('name', None),
+                   provider=_enum(d, 'provider', ExternalModelProvider),
+                   task=d.get('task', None))
+
+
+@dataclass
+class ExternalModelConfig:
+    ai21labs_config: Optional[Ai21LabsConfig] = None
+    """AI21Labs Config"""
+
+    anthropic_config: Optional[AnthropicConfig] = None
+    """Anthropic Config"""
+
+    aws_bedrock_config: Optional[AwsBedrockConfig] = None
+    """AWS Bedrock Config"""
+
+    cohere_config: Optional[CohereConfig] = None
+    """Cohere Config"""
+
+    databricks_model_serving_config: Optional[DatabricksModelServingConfig] = None
+    """Databricks Model Serving Config"""
+
+    openai_config: Optional[OpenAiConfig] = None
+    """OpenAI Config"""
+
+    palm_config: Optional[PaLmConfig] = None
+    """PaLM Config"""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalModelConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config.as_dict()
+        if self.anthropic_config: body['anthropic_config'] = self.anthropic_config.as_dict()
+        if self.aws_bedrock_config: body['aws_bedrock_config'] = self.aws_bedrock_config.as_dict()
+        if self.cohere_config: body['cohere_config'] = self.cohere_config.as_dict()
+        if self.databricks_model_serving_config:
+            body['databricks_model_serving_config'] = self.databricks_model_serving_config.as_dict()
+        if self.openai_config: body['openai_config'] = self.openai_config.as_dict()
+        if self.palm_config: body['palm_config'] = self.palm_config.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExternalModelConfig:
+        """Deserializes the ExternalModelConfig from a dictionary."""
+        return cls(ai21labs_config=_from_dict(d, 'ai21labs_config', Ai21LabsConfig),
+                   anthropic_config=_from_dict(d, 'anthropic_config', AnthropicConfig),
+                   aws_bedrock_config=_from_dict(d, 'aws_bedrock_config', AwsBedrockConfig),
+                   cohere_config=_from_dict(d, 'cohere_config', CohereConfig),
+                   databricks_model_serving_config=_from_dict(d, 'databricks_model_serving_config',
+                                                              DatabricksModelServingConfig),
+                   openai_config=_from_dict(d, 'openai_config', OpenAiConfig),
+                   palm_config=_from_dict(d, 'palm_config', PaLmConfig))
+
+
+class ExternalModelProvider(Enum):
+    """The name of the provider for the external model. Currently, the supported providers are
+    'ai21labs', 'anthropic', 'aws-bedrock', 'cohere', 'databricks-model-serving', 'openai', and
+    'palm'."""
+
+    AI21LABS = 'ai21labs'
+    ANTHROPIC = 'anthropic'
+    AWS_BEDROCK = 'aws-bedrock'
+    COHERE = 'cohere'
+    DATABRICKS_MODEL_SERVING = 'databricks-model-serving'
+    OPENAI = 'openai'
+    PALM = 'palm'
+
+
+@dataclass
+class ExternalModelUsageElement:
+    completion_tokens: Optional[int] = None
+    """The number of tokens in the chat/completions response."""
+
+    prompt_tokens: Optional[int] = None
+    """The number of tokens in the prompt."""
+
+    total_tokens: Optional[int] = None
+    """The total number of tokens in the prompt and response."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalModelUsageElement into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens
+        if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens
+        if self.total_tokens is not None: body['total_tokens'] = self.total_tokens
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExternalModelUsageElement:
+        """Deserializes the ExternalModelUsageElement from a dictionary."""
+        return cls(completion_tokens=d.get('completion_tokens', None),
+                   prompt_tokens=d.get('prompt_tokens', None),
+                   total_tokens=d.get('total_tokens', None))
+
+
+@dataclass
+class FoundationModel:
+    description: Optional[str] = None
+    """The description of the foundation model."""
+
+    display_name: Optional[str] = None
+    """The display name of the foundation model."""
+
+    docs: Optional[str] = None
+    """The URL to the documentation of the foundation model."""
+
+    name: Optional[str] = None
+    """The name of the foundation model."""
+
+    def as_dict(self) -> dict:
+        """Serializes the FoundationModel into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.docs is not None: body['docs'] = self.docs
+        if self.name is not None: body['name'] = self.name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> FoundationModel:
+        """Deserializes the FoundationModel from a dictionary."""
+        return cls(description=d.get('description', None),
+                   display_name=d.get('display_name', None),
+                   docs=d.get('docs', None),
+                   name=d.get('name', None))
+
+
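# --- Editor's example (sketch; not part of the diff above) ---
# How the new external-model dataclasses compose: an ExternalModel wraps exactly one
# provider-specific config inside an ExternalModelConfig. The model name, task string,
# and secret path below are hypothetical placeholders.
from databricks.sdk.service.serving import (ExternalModel, ExternalModelConfig,
                                            ExternalModelProvider, OpenAiConfig)

chat_model = ExternalModel(
    provider=ExternalModelProvider.OPENAI,
    name='gpt-4',  # hypothetical external model name
    task='llm/v1/chat',  # assumed task type for a chat model
    config=ExternalModelConfig(
        openai_config=OpenAiConfig(openai_api_key='{{secrets/my_scope/openai_api_key}}')))

# as_dict() builds the JSON request body; only the provider config that is set is
# serialized, and the provider enum is emitted as its string value.
assert chat_model.as_dict()['provider'] == 'openai'
# --------------------------------------------------------------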
 @dataclass
 class GetAppResponse:
     current_services: Optional[List[AppServiceStatus]] = None
@@ -538,6 +1007,73 @@ def from_dict(cls, d: Dict[str, any]) -> ListEndpointsResponse:
         return cls(endpoints=_repeated_dict(d, 'endpoints', ServingEndpoint))
 
+@dataclass
+class OpenAiConfig:
+    openai_api_key: str
+    """The Databricks secret key reference for an OpenAI or Azure OpenAI API key."""
+
+    openai_api_base: Optional[str] = None
+    """This is the base URL for the OpenAI API (default: "https://api.openai.com/v1"). For Azure
+    OpenAI, this field is required, and is the base URL for the Azure OpenAI API service provided by
+    Azure."""
+
+    openai_api_type: Optional[str] = None
+    """This is an optional field to specify the type of OpenAI API to use. For Azure OpenAI, this field
+    is required; set it to the preferred security access validation protocol. For access token
+    validation, use `azure`. For authentication using Azure Active Directory (Azure AD), use
+    `azuread`."""
+
+    openai_api_version: Optional[str] = None
+    """This is an optional field to specify the OpenAI API version. For Azure OpenAI, this field is
+    required, and is the version of the Azure OpenAI service to utilize, specified by a date."""
+
+    openai_deployment_name: Optional[str] = None
+    """This field is only required for Azure OpenAI and is the name of the deployment resource for the
+    Azure OpenAI service."""
+
+    openai_organization: Optional[str] = None
+    """This is an optional field to specify the organization in OpenAI or Azure OpenAI."""
+
+    def as_dict(self) -> dict:
+        """Serializes the OpenAiConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base
+        if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key
+        if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type
+        if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version
+        if self.openai_deployment_name is not None:
+            body['openai_deployment_name'] = self.openai_deployment_name
+        if self.openai_organization is not None: body['openai_organization'] = self.openai_organization
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig:
+        """Deserializes the OpenAiConfig from a dictionary."""
+        return cls(openai_api_base=d.get('openai_api_base', None),
+                   openai_api_key=d.get('openai_api_key', None),
+                   openai_api_type=d.get('openai_api_type', None),
+                   openai_api_version=d.get('openai_api_version', None),
+                   openai_deployment_name=d.get('openai_deployment_name', None),
+                   openai_organization=d.get('openai_organization', None))
+
+
+@dataclass
+class PaLmConfig:
+    palm_api_key: str
+    """The Databricks secret key reference for a PaLM API key."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PaLmConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PaLmConfig:
+        """Deserializes the PaLmConfig from a dictionary."""
+        return cls(palm_api_key=d.get('palm_api_key', None))
+
+
 @dataclass
 class PatchServingEndpointTags:
     add_tags: Optional[List[EndpointTag]] = None
@@ -565,6 +1101,50 @@ def from_dict(cls, d: Dict[str, any]) -> PatchServingEndpointTags:
                    name=d.get('name', None))
 
+@dataclass
+class PayloadTable:
+    name: Optional[str] = None
+    """The name of the payload
table.""" + + status: Optional[str] = None + """The status of the payload table.""" + + status_message: Optional[str] = None + """The status message of the payload table.""" + + def as_dict(self) -> dict: + """Serializes the PayloadTable into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: body['name'] = self.name + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> PayloadTable: + """Deserializes the PayloadTable from a dictionary.""" + return cls(name=d.get('name', None), + status=d.get('status', None), + status_message=d.get('status_message', None)) + + +@dataclass +class PutResponse: + rate_limits: Optional[List[RateLimit]] = None + """The list of endpoint rate limits.""" + + def as_dict(self) -> dict: + """Serializes the PutResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> PutResponse: + """Deserializes the PutResponse from a dictionary.""" + return cls(rate_limits=_repeated_dict(d, 'rate_limits', RateLimit)) + + @dataclass class QueryEndpointInput: dataframe_records: Optional[List[Any]] = None @@ -573,23 +1153,75 @@ class QueryEndpointInput: dataframe_split: Optional[DataframeSplitInput] = None """Pandas Dataframe input in the split orientation.""" + extra_params: Optional[Dict[str, str]] = None + """The extra parameters field used ONLY for __completions, chat,__ and __embeddings external & + foundation model__ serving endpoints. This is a map of strings and should only be used with + other external/foundation model query fields.""" + + input: Optional[Any] = None + """The input string (or array of strings) field used ONLY for __embeddings external & foundation + model__ serving endpoints and is the only field (along with extra_params if needed) used by + embeddings queries.""" + inputs: Optional[Any] = None """Tensor-based input in columnar format.""" instances: Optional[List[Any]] = None """Tensor-based input in row format.""" + max_tokens: Optional[int] = None + """The max tokens field used ONLY for __completions__ and __chat external & foundation model__ + serving endpoints. This is an integer and should only be used with other chat/completions query + fields.""" + + messages: Optional[List[ChatMessage]] = None + """The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is + a map of strings and should only be used with other chat query fields.""" + + n: Optional[int] = None + """The n (number of candidates) field used ONLY for __completions__ and __chat external & + foundation model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and + should only be used with other chat/completions query fields.""" + name: Optional[str] = None """The name of the serving endpoint. This field is required.""" + prompt: Optional[Any] = None + """The prompt string (or array of strings) field used ONLY for __completions external & foundation + model__ serving endpoints and should only be used with other completions query fields.""" + + stop: Optional[List[str]] = None + """The stop sequences field used ONLY for __completions__ and __chat external & foundation model__ + serving endpoints. 
This is a list of strings and should only be used with other chat/completions + query fields.""" + + stream: Optional[bool] = None + """The stream field used ONLY for __completions__ and __chat external & foundation model__ serving + endpoints. This is a boolean defaulting to false and should only be used with other + chat/completions query fields.""" + + temperature: Optional[float] = None + """The temperature field used ONLY for __completions__ and __chat external & foundation model__ + serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be + used with other chat/completions query fields.""" + def as_dict(self) -> dict: """Serializes the QueryEndpointInput into a dictionary suitable for use as a JSON request body.""" body = {} if self.dataframe_records: body['dataframe_records'] = [v for v in self.dataframe_records] if self.dataframe_split: body['dataframe_split'] = self.dataframe_split.as_dict() + if self.extra_params: body['extra_params'] = self.extra_params + if self.input: body['input'] = self.input if self.inputs: body['inputs'] = self.inputs if self.instances: body['instances'] = [v for v in self.instances] + if self.max_tokens is not None: body['max_tokens'] = self.max_tokens + if self.messages: body['messages'] = [v.as_dict() for v in self.messages] + if self.n is not None: body['n'] = self.n if self.name is not None: body['name'] = self.name + if self.prompt: body['prompt'] = self.prompt + if self.stop: body['stop'] = [v for v in self.stop] + if self.stream is not None: body['stream'] = self.stream + if self.temperature is not None: body['temperature'] = self.temperature return body @classmethod @@ -597,26 +1229,128 @@ def from_dict(cls, d: Dict[str, any]) -> QueryEndpointInput: """Deserializes the QueryEndpointInput from a dictionary.""" return cls(dataframe_records=d.get('dataframe_records', None), dataframe_split=_from_dict(d, 'dataframe_split', DataframeSplitInput), + extra_params=d.get('extra_params', None), + input=d.get('input', None), inputs=d.get('inputs', None), instances=d.get('instances', None), - name=d.get('name', None)) + max_tokens=d.get('max_tokens', None), + messages=_repeated_dict(d, 'messages', ChatMessage), + n=d.get('n', None), + name=d.get('name', None), + prompt=d.get('prompt', None), + stop=d.get('stop', None), + stream=d.get('stream', None), + temperature=d.get('temperature', None)) @dataclass class QueryEndpointResponse: - predictions: List[Any] + choices: Optional[List[V1ResponseChoiceElement]] = None + """The list of choices returned by the __chat or completions external/foundation model__ serving + endpoint.""" + + created: Optional[int] = None + """The timestamp in seconds when the query was created in Unix time returned by a __completions or + chat external/foundation model__ serving endpoint.""" + + data: Optional[List[EmbeddingsV1ResponseEmbeddingElement]] = None + """The list of the embeddings returned by the __embeddings external/foundation model__ serving + endpoint.""" + + id: Optional[str] = None + """The ID of the query that may be returned by a __completions or chat external/foundation model__ + serving endpoint.""" + + model: Optional[str] = None + """The name of the __external/foundation model__ used for querying. 
This is the name of the model + that was specified in the endpoint config.""" + + object: Optional[QueryEndpointResponseObject] = None + """The type of object returned by the __external/foundation model__ serving endpoint, one of + [text_completion, chat.completion, list (of embeddings)].""" + + predictions: Optional[List[Any]] = None """The predictions returned by the serving endpoint.""" + usage: Optional[ExternalModelUsageElement] = None + """The usage object that may be returned by the __external/foundation model__ serving endpoint. + This contains information about the number of tokens used in the prompt and response.""" + def as_dict(self) -> dict: """Serializes the QueryEndpointResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.choices: body['choices'] = [v.as_dict() for v in self.choices] + if self.created is not None: body['created'] = self.created + if self.data: body['data'] = [v.as_dict() for v in self.data] + if self.id is not None: body['id'] = self.id + if self.model is not None: body['model'] = self.model + if self.object is not None: body['object'] = self.object.value if self.predictions: body['predictions'] = [v for v in self.predictions] + if self.usage: body['usage'] = self.usage.as_dict() return body @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryEndpointResponse: """Deserializes the QueryEndpointResponse from a dictionary.""" - return cls(predictions=d.get('predictions', None)) + return cls(choices=_repeated_dict(d, 'choices', V1ResponseChoiceElement), + created=d.get('created', None), + data=_repeated_dict(d, 'data', EmbeddingsV1ResponseEmbeddingElement), + id=d.get('id', None), + model=d.get('model', None), + object=_enum(d, 'object', QueryEndpointResponseObject), + predictions=d.get('predictions', None), + usage=_from_dict(d, 'usage', ExternalModelUsageElement)) + + +class QueryEndpointResponseObject(Enum): + """The type of object returned by the __external/foundation model__ serving endpoint, one of + [text_completion, chat.completion, list (of embeddings)].""" + + CHAT_COMPLETION = 'chat.completion' + LIST = 'list' + TEXT_COMPLETION = 'text_completion' + + +@dataclass +class RateLimit: + calls: int + """Used to specify how many calls are allowed for a key within the renewal_period.""" + + renewal_period: RateLimitRenewalPeriod + """Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.""" + + key: Optional[RateLimitKey] = None + """Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are + supported, with 'endpoint' being the default if not specified.""" + + def as_dict(self) -> dict: + """Serializes the RateLimit into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.calls is not None: body['calls'] = self.calls + if self.key is not None: body['key'] = self.key.value + if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> RateLimit: + """Deserializes the RateLimit from a dictionary.""" + return cls(calls=d.get('calls', None), + key=_enum(d, 'key', RateLimitKey), + renewal_period=_enum(d, 'renewal_period', RateLimitRenewalPeriod)) + + +class RateLimitKey(Enum): + """Key field for a serving endpoint rate limit. 
Currently, only 'user' and 'endpoint' are
+    supported, with 'endpoint' being the default if not specified."""
+
+    ENDPOINT = 'endpoint'
+    USER = 'user'
+
+
+class RateLimitRenewalPeriod(Enum):
+    """Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported."""
+
+    MINUTE = 'minute'
 
 
 @dataclass
@@ -642,6 +1376,230 @@ def from_dict(cls, d: Dict[str, any]) -> Route:
                    traffic_percentage=d.get('traffic_percentage', None))
 
+@dataclass
+class ServedEntityInput:
+    entity_name: Optional[str] = None
+    """The name of the entity to be served. The entity may be a model in the Databricks Model Registry,
+    a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
+    object, the full name of the object should be given in the form of
+    __catalog_name__.__schema_name__.__model_name__."""
+
+    entity_version: Optional[str] = None
+    """The version of the model in Databricks Model Registry to be served or empty if the entity is a
+    FEATURE_SPEC."""
+
+    environment_vars: Optional[Dict[str, str]] = None
+    """An object containing a set of optional, user-specified environment variable key-value pairs used
+    for serving this entity. Note: this is an experimental feature and subject to change. Example
+    entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
+    "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`"""
+
+    external_model: Optional[ExternalModel] = None
+    """The external model to be served. NOTE: Only one of external_model and (entity_name,
+    entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with
+    the latter set being used for custom model serving for a Databricks registered model. When an
+    external_model is present, the served entities list can only have one served_entity object. For
+    an existing endpoint with external_model, it cannot be updated to an endpoint without
+    external_model. If the endpoint is created without external_model, users cannot update it to add
+    external_model later."""
+
+    instance_profile_arn: Optional[str] = None
+    """ARN of the instance profile that the served entity uses to access AWS resources."""
+
+    name: Optional[str] = None
+    """The name of a served entity. It must be unique across an endpoint. A served entity name can
+    consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+    model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+    not specified for other entities, it defaults to <entity-name>-<entity-version>."""
+
+    scale_to_zero_enabled: Optional[bool] = None
+    """Whether the compute resources for the served entity should scale down to zero."""
+
+    workload_size: Optional[str] = None
+    """The workload size of the served entity. The workload size corresponds to a range of provisioned
+    concurrency that the compute autoscales between. A single unit of provisioned concurrency can
+    process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
+    "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
+    scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
+    is 0."""
+
+    workload_type: Optional[str] = None
+    """The workload type of the served entity. The workload type selects which type of compute to use
+    in the endpoint. The default value for this parameter is "CPU".
For deep learning workloads, GPU + acceleration is available by selecting workload types like GPU_SMALL and others. See the + available [GPU types]. + + [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" + + def as_dict(self) -> dict: + """Serializes the ServedEntityInput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.external_model: body['external_model'] = self.external_model.as_dict() + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.name is not None: body['name'] = self.name + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ServedEntityInput: + """Deserializes the ServedEntityInput from a dictionary.""" + return cls(entity_name=d.get('entity_name', None), + entity_version=d.get('entity_version', None), + environment_vars=d.get('environment_vars', None), + external_model=_from_dict(d, 'external_model', ExternalModel), + instance_profile_arn=d.get('instance_profile_arn', None), + name=d.get('name', None), + scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), + workload_size=d.get('workload_size', None), + workload_type=d.get('workload_type', None)) + + +@dataclass +class ServedEntityOutput: + creation_timestamp: Optional[int] = None + """The creation timestamp of the served entity in Unix time.""" + + creator: Optional[str] = None + """The email of the user who created the served entity.""" + + entity_name: Optional[str] = None + """The name of the entity served. The entity may be a model in the Databricks Model Registry, a + model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC + object, the full name of the object is given in the form of + __catalog_name__.__schema_name__.__model_name__.""" + + entity_version: Optional[str] = None + """The version of the served entity in Databricks Model Registry or empty if the entity is a + FEATURE_SPEC.""" + + environment_vars: Optional[Dict[str, str]] = None + """An object containing a set of optional, user-specified environment variable key-value pairs used + for serving this entity. Note: this is an experimental feature and subject to change. Example + entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": + "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" + + external_model: Optional[ExternalModel] = None + """The external model that is served. NOTE: Only one of external_model, foundation_model, and + (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is + returned based on the endpoint type.""" + + foundation_model: Optional[FoundationModel] = None + """The foundation model that is served. 
NOTE: Only one of foundation_model, external_model, and + (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is + returned based on the endpoint type.""" + + instance_profile_arn: Optional[str] = None + """ARN of the instance profile that the served entity uses to access AWS resources.""" + + name: Optional[str] = None + """The name of the served entity.""" + + scale_to_zero_enabled: Optional[bool] = None + """Whether the compute resources for the served entity should scale down to zero.""" + + state: Optional[ServedModelState] = None + """Information corresponding to the state of the served entity.""" + + workload_size: Optional[str] = None + """The workload size of the served entity. The workload size corresponds to a range of provisioned + concurrency that the compute autoscales between. A single unit of provisioned concurrency can + process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), + "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If + scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size + will be 0.""" + + workload_type: Optional[str] = None + """The workload type of the served entity. The workload type selects which type of compute to use + in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU + acceleration is available by selecting workload types like GPU_SMALL and others. See the + available [GPU types]. + + [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" + + def as_dict(self) -> dict: + """Serializes the ServedEntityOutput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.external_model: body['external_model'] = self.external_model.as_dict() + if self.foundation_model: body['foundation_model'] = self.foundation_model.as_dict() + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.name is not None: body['name'] = self.name + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.state: body['state'] = self.state.as_dict() + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ServedEntityOutput: + """Deserializes the ServedEntityOutput from a dictionary.""" + return cls(creation_timestamp=d.get('creation_timestamp', None), + creator=d.get('creator', None), + entity_name=d.get('entity_name', None), + entity_version=d.get('entity_version', None), + environment_vars=d.get('environment_vars', None), + external_model=_from_dict(d, 'external_model', ExternalModel), + foundation_model=_from_dict(d, 'foundation_model', FoundationModel), + instance_profile_arn=d.get('instance_profile_arn', None), + name=d.get('name', None), + scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), + 
state=_from_dict(d, 'state', ServedModelState),
+                   workload_size=d.get('workload_size', None),
+                   workload_type=d.get('workload_type', None))
+
+
+@dataclass
+class ServedEntitySpec:
+    entity_name: Optional[str] = None
+    """The name of the entity served. The entity may be a model in the Databricks Model Registry, a
+    model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
+    object, the full name of the object is given in the form of
+    __catalog_name__.__schema_name__.__model_name__."""
+
+    entity_version: Optional[str] = None
+    """The version of the served entity in Databricks Model Registry or empty if the entity is a
+    FEATURE_SPEC."""
+
+    external_model: Optional[ExternalModel] = None
+    """The external model that is served. NOTE: Only one of external_model, foundation_model, and
+    (entity_name, entity_version) is returned based on the endpoint type."""
+
+    foundation_model: Optional[FoundationModel] = None
+    """The foundation model that is served. NOTE: Only one of foundation_model, external_model, and
+    (entity_name, entity_version) is returned based on the endpoint type."""
+
+    name: Optional[str] = None
+    """The name of the served entity."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ServedEntitySpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.entity_name is not None: body['entity_name'] = self.entity_name
+        if self.entity_version is not None: body['entity_version'] = self.entity_version
+        if self.external_model: body['external_model'] = self.external_model.as_dict()
+        if self.foundation_model: body['foundation_model'] = self.foundation_model.as_dict()
+        if self.name is not None: body['name'] = self.name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ServedEntitySpec:
+        """Deserializes the ServedEntitySpec from a dictionary."""
+        return cls(entity_name=d.get('entity_name', None),
+                   entity_version=d.get('entity_version', None),
+                   external_model=_from_dict(d, 'external_model', ExternalModel),
+                   foundation_model=_from_dict(d, 'foundation_model', FoundationModel),
+                   name=d.get('name', None))
+
+
 @dataclass
 class ServedModelInput:
     model_name: str
@@ -651,7 +1609,7 @@ class ServedModelInput:
     model_version: str
     """The version of the model in Databricks Model Registry or Unity Catalog to be served."""
 
-    workload_size: str
+    workload_size: ServedModelInputWorkloadSize
     """The workload size of the served model. The workload size corresponds to a range of provisioned
     concurrency that the compute will autoscale between. A single unit of provisioned concurrency
     can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
@@ -676,11 +1634,13 @@ class ServedModelInput:
     will default to <model-name>-<model-version>. A served model name can consist of alphanumeric
     characters, dashes, and underscores."""
 
-    workload_type: Optional[str] = None
+    workload_type: Optional[ServedModelInputWorkloadType] = None
     """The workload type of the served model. The workload type selects which type of compute to use
     in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
-    acceleration is available by selecting workload types like GPU_SMALL and others. See
-    documentation for all options."""
+    acceleration is available by selecting workload types like GPU_SMALL and others. See the
+    available [GPU types].
+ + [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" def as_dict(self) -> dict: """Serializes the ServedModelInput into a dictionary suitable for use as a JSON request body.""" @@ -691,8 +1651,8 @@ def as_dict(self) -> dict: if self.model_version is not None: body['model_version'] = self.model_version if self.name is not None: body['name'] = self.name if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled - if self.workload_size is not None: body['workload_size'] = self.workload_size - if self.workload_type is not None: body['workload_type'] = self.workload_type + if self.workload_size is not None: body['workload_size'] = self.workload_size.value + if self.workload_type is not None: body['workload_type'] = self.workload_type.value return body @classmethod @@ -704,8 +1664,36 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelInput: model_version=d.get('model_version', None), name=d.get('name', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), - workload_size=d.get('workload_size', None), - workload_type=d.get('workload_type', None)) + workload_size=_enum(d, 'workload_size', ServedModelInputWorkloadSize), + workload_type=_enum(d, 'workload_type', ServedModelInputWorkloadType)) + + +class ServedModelInputWorkloadSize(Enum): + """The workload size of the served model. The workload size corresponds to a range of provisioned + concurrency that the compute will autoscale between. A single unit of provisioned concurrency + can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned + concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned + concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for + each workload size will be 0.""" + + LARGE = 'Large' + MEDIUM = 'Medium' + SMALL = 'Small' + + +class ServedModelInputWorkloadType(Enum): + """The workload type of the served model. The workload type selects which type of compute to use in + the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU + acceleration is available by selecting workload types like GPU_SMALL and others. See the + available [GPU types]. + + [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" + + CPU = 'CPU' + GPU_LARGE = 'GPU_LARGE' + GPU_MEDIUM = 'GPU_MEDIUM' + GPU_SMALL = 'GPU_SMALL' + MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM' @dataclass @@ -752,8 +1740,10 @@ class ServedModelOutput: workload_type: Optional[str] = None """The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU - acceleration is available by selecting workload types like GPU_SMALL and others. See - documentation for all options.""" + acceleration is available by selecting workload types like GPU_SMALL and others. See the + available [GPU types]. 
+ + [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" def as_dict(self) -> dict: """Serializes the ServedModelOutput into a dictionary suitable for use as a JSON request body.""" @@ -818,18 +1808,18 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelSpec: @dataclass class ServedModelState: deployment: Optional[ServedModelStateDeployment] = None - """The state of the served model deployment. DEPLOYMENT_CREATING indicates that the served model is - not ready yet because the deployment is still being created (i.e container image is building, + """The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity + is not ready yet because the deployment is still being created (i.e container image is building, model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the - served model was previously in a ready state but no longer is and is attempting to recover. - DEPLOYMENT_READY indicates that the served model is ready to receive traffic. DEPLOYMENT_FAILED - indicates that there was an error trying to bring up the served model (e.g container image build - failed, the model server failed to start due to a model loading error, etc.) DEPLOYMENT_ABORTED - indicates that the deployment was terminated likely due to a failure in bringing up another - served model under the same endpoint and config version.""" + served entity was previously in a ready state but no longer is and is attempting to recover. + DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED + indicates that there was an error trying to bring up the served entity (e.g container image + build failed, the model server failed to start due to a model loading error, etc.) + DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in + bringing up another served entity under the same endpoint and config version.""" deployment_state_message: Optional[str] = None - """More information about the state of the served model, if available.""" + """More information about the state of the served entity, if available.""" def as_dict(self) -> dict: """Serializes the ServedModelState into a dictionary suitable for use as a JSON request body.""" @@ -847,21 +1837,21 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelState: class ServedModelStateDeployment(Enum): - """The state of the served model deployment. DEPLOYMENT_CREATING indicates that the served model is - not ready yet because the deployment is still being created (i.e container image is building, + """The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity + is not ready yet because the deployment is still being created (i.e container image is building, model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the - served model was previously in a ready state but no longer is and is attempting to recover. - DEPLOYMENT_READY indicates that the served model is ready to receive traffic. DEPLOYMENT_FAILED - indicates that there was an error trying to bring up the served model (e.g container image build - failed, the model server failed to start due to a model loading error, etc.) 
DEPLOYMENT_ABORTED - indicates that the deployment was terminated likely due to a failure in bringing up another - served model under the same endpoint and config version.""" + served entity was previously in a ready state but no longer is and is attempting to recover. + DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED + indicates that there was an error trying to bring up the served entity (e.g container image + build failed, the model server failed to start due to a model loading error, etc.) + DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in + bringing up another served entity under the same endpoint and config version.""" - DEPLOYMENT_ABORTED = 'DEPLOYMENT_ABORTED' - DEPLOYMENT_CREATING = 'DEPLOYMENT_CREATING' - DEPLOYMENT_FAILED = 'DEPLOYMENT_FAILED' - DEPLOYMENT_READY = 'DEPLOYMENT_READY' - DEPLOYMENT_RECOVERING = 'DEPLOYMENT_RECOVERING' + ABORTED = 'DEPLOYMENT_ABORTED' + CREATING = 'DEPLOYMENT_CREATING' + FAILED = 'DEPLOYMENT_FAILED' + READY = 'DEPLOYMENT_READY' + RECOVERING = 'DEPLOYMENT_RECOVERING' @dataclass @@ -908,6 +1898,9 @@ class ServingEndpoint: tags: Optional[List[EndpointTag]] = None """Tags attached to the serving endpoint.""" + task: Optional[str] = None + """The task type of the serving endpoint.""" + def as_dict(self) -> dict: """Serializes the ServingEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} @@ -920,6 +1913,7 @@ def as_dict(self) -> dict: if self.name is not None: body['name'] = self.name if self.state: body['state'] = self.state.as_dict() if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.task is not None: body['task'] = self.task return body @classmethod @@ -932,7 +1926,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpoint: last_updated_timestamp=d.get('last_updated_timestamp', None), name=d.get('name', None), state=_from_dict(d, 'state', EndpointState), - tags=_repeated_dict(d, 'tags', EndpointTag)) + tags=_repeated_dict(d, 'tags', EndpointTag), + task=d.get('task', None)) @dataclass @@ -1040,6 +2035,9 @@ class ServingEndpointDetailed: tags: Optional[List[EndpointTag]] = None """Tags attached to the serving endpoint.""" + task: Optional[str] = None + """The task type of the serving endpoint.""" + def as_dict(self) -> dict: """Serializes the ServingEndpointDetailed into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1054,6 +2052,7 @@ def as_dict(self) -> dict: if self.permission_level is not None: body['permission_level'] = self.permission_level.value if self.state: body['state'] = self.state.as_dict() if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.task is not None: body['task'] = self.task return body @classmethod @@ -1068,7 +2067,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed: pending_config=_from_dict(d, 'pending_config', EndpointPendingConfig), permission_level=_enum(d, 'permission_level', ServingEndpointDetailedPermissionLevel), state=_from_dict(d, 'state', EndpointState), - tags=_repeated_dict(d, 'tags', EndpointTag)) + tags=_repeated_dict(d, 'tags', EndpointTag), + task=d.get('task', None)) class ServingEndpointDetailedPermissionLevel(Enum): @@ -1185,7 +2185,7 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest: @dataclass class TrafficConfig: routes: Optional[List[Route]] = None - """The list of routes that define traffic to each served model.""" + """The list of routes that define traffic to each served 
entity.""" def as_dict(self) -> dict: """Serializes the TrafficConfig into a dictionary suitable for use as a JSON request body.""" @@ -1199,6 +2199,43 @@ def from_dict(cls, d: Dict[str, any]) -> TrafficConfig: return cls(routes=_repeated_dict(d, 'routes', Route)) +@dataclass +class V1ResponseChoiceElement: + finish_reason: Optional[str] = None + """The finish reason returned by the endpoint.""" + + index: Optional[int] = None + """The index of the choice in the __chat or completions__ response.""" + + logprobs: Optional[int] = None + """The logprobs returned only by the __completions__ endpoint.""" + + message: Optional[ChatMessage] = None + """The message response from the __chat__ endpoint.""" + + text: Optional[str] = None + """The text response from the __completions__ endpoint.""" + + def as_dict(self) -> dict: + """Serializes the V1ResponseChoiceElement into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.finish_reason is not None: body['finishReason'] = self.finish_reason + if self.index is not None: body['index'] = self.index + if self.logprobs is not None: body['logprobs'] = self.logprobs + if self.message: body['message'] = self.message.as_dict() + if self.text is not None: body['text'] = self.text + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> V1ResponseChoiceElement: + """Deserializes the V1ResponseChoiceElement from a dictionary.""" + return cls(finish_reason=d.get('finishReason', None), + index=d.get('index', None), + logprobs=d.get('logprobs', None), + message=_from_dict(d, 'message', ChatMessage), + text=d.get('text', None)) + + class AppsAPI: """Lakehouse Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" @@ -1315,9 +2352,10 @@ class ServingEndpointsAPI: Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means the endpoints and associated compute resources are fully managed by Databricks and will not appear in your cloud account. A serving endpoint can consist of one or more MLflow models from the Databricks Model - Registry, called served models. A serving endpoint can have at most ten served models. You can configure - traffic settings to define how requests should be routed to your served models behind an endpoint. - Additionally, you can configure the scale of resources that should be applied to each served model.""" + Registry, called served entities. A serving endpoint can have at most ten served entities. You can + configure traffic settings to define how requests should be routed to your served entities behind an + endpoint. Additionally, you can configure the scale of resources that should be applied to each served + entity.""" def __init__(self, api_client): self._api = api_client @@ -1377,6 +2415,7 @@ def create(self, name: str, config: EndpointCoreConfigInput, *, + rate_limits: Optional[List[RateLimit]] = None, tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]: """Create a new serving endpoint. @@ -1385,6 +2424,9 @@ def create(self, workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. :param config: :class:`EndpointCoreConfigInput` The core config of the serving endpoint. + :param rate_limits: List[:class:`RateLimit`] (optional) + Rate limits to be applied to the serving endpoint. NOTE: only external and foundation model + endpoints are supported as of now. 
:param tags: List[:class:`EndpointTag`] (optional) Tags to be attached to the serving endpoint and automatically propagated to billing logs. @@ -1395,6 +2437,7 @@ def create(self, body = {} if config is not None: body['config'] = config.as_dict() if name is not None: body['name'] = name + if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits] if tags is not None: body['tags'] = [v.as_dict() for v in tags] headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } op_response = self._api.do('POST', '/api/2.0/serving-endpoints', body=body, headers=headers) @@ -1407,9 +2450,11 @@ def create_and_wait( name: str, config: EndpointCoreConfigInput, *, + rate_limits: Optional[List[RateLimit]] = None, tags: Optional[List[EndpointTag]] = None, timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: - return self.create(config=config, name=name, tags=tags).result(timeout=timeout) + return self.create(config=config, name=name, rate_limits=rate_limits, + tags=tags).result(timeout=timeout) def delete(self, name: str): """Delete a serving endpoint. @@ -1543,13 +2588,44 @@ def patch(self, res = self._api.do('PATCH', f'/api/2.0/serving-endpoints/{name}/tags', body=body, headers=headers) return [EndpointTag.from_dict(v) for v in res] + def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse: + """Update the rate limits of a serving endpoint. + + Used to update the rate limits of a serving endpoint. NOTE: only external and foundation model + endpoints are supported as of now. + + :param name: str + The name of the serving endpoint whose rate limits are being updated. This field is required. + :param rate_limits: List[:class:`RateLimit`] (optional) + The list of endpoint rate limits. + + :returns: :class:`PutResponse` + """ + body = {} + if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits] + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + res = self._api.do('PUT', + f'/api/2.0/serving-endpoints/{name}/rate-limits', + body=body, + headers=headers) + return PutResponse.from_dict(res) + def query(self, name: str, *, dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, + extra_params: Optional[Dict[str, str]] = None, + input: Optional[Any] = None, inputs: Optional[Any] = None, - instances: Optional[List[Any]] = None) -> QueryEndpointResponse: + instances: Optional[List[Any]] = None, + max_tokens: Optional[int] = None, + messages: Optional[List[ChatMessage]] = None, + n: Optional[int] = None, + prompt: Optional[Any] = None, + stop: Optional[List[str]] = None, + stream: Optional[bool] = None, + temperature: Optional[float] = None) -> QueryEndpointResponse: """Query a serving endpoint with provided model input. :param name: str @@ -1558,18 +2634,60 @@ def query(self, Pandas Dataframe input in the records orientation. :param dataframe_split: :class:`DataframeSplitInput` (optional) Pandas Dataframe input in the split orientation. + :param extra_params: Dict[str,str] (optional) + The extra parameters field used ONLY for __completions, chat,__ and __embeddings external & + foundation model__ serving endpoints. This is a map of strings and should only be used with other + external/foundation model query fields. 
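The new `put()` method replaces an endpoint's rate limits wholesale rather than patching them. A sketch under stated assumptions: the `RateLimit` dataclass is not shown in this hunk, so its `calls`, `key`, and `renewal_period` fields and the `RateLimitKey`/`RateLimitRenewalPeriod` enum members below are assumptions, as is `PutResponse` echoing the applied limits:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (RateLimit, RateLimitKey,
                                            RateLimitRenewalPeriod)

w = WorkspaceClient()

# Placeholder name; per the docstring above, only external and
# foundation model endpoints accept rate limits today.
resp = w.serving_endpoints.put(
    name='my-external-endpoint',
    rate_limits=[
        RateLimit(calls=100,                            # budget per window
                  key=RateLimitKey.USER,                # assumed enum member
                  renewal_period=RateLimitRenewalPeriod.MINUTE)
    ])
print(resp.rate_limits)  # assumed to echo the limits now in effect
```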
+        :param input: Any (optional)
+          The input string (or array of strings) field used ONLY for __embeddings external & foundation
+          model__ serving endpoints and is the only field (along with extra_params if needed) used by
+          embeddings queries.
         :param inputs: Any (optional)
           Tensor-based input in columnar format.
         :param instances: List[Any] (optional)
           Tensor-based input in row format.
+        :param max_tokens: int (optional)
+          The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is an integer and should only be used with other chat/completions query fields.
+        :param messages: List[:class:`ChatMessage`] (optional)
+          The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
+          list of chat messages and should only be used with other chat query fields.
+        :param n: int (optional)
+          The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
+          model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
+          used with other chat/completions query fields.
+        :param prompt: Any (optional)
+          The prompt string (or array of strings) field used ONLY for __completions external & foundation
+          model__ serving endpoints and should only be used with other completions query fields.
+        :param stop: List[str] (optional)
+          The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
+          serving endpoints. This is a list of strings and should only be used with other chat/completions
+          query fields.
+        :param stream: bool (optional)
+          The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
+          query fields.
+        :param temperature: float (optional)
+          The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
+          other chat/completions query fields.
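With the chat and completions fields above, `query()` can talk to a conversational endpoint directly; chat responses then surface in the OpenAI-style `choices` list of `V1ResponseChoiceElement` objects. A sketch (the endpoint name is a placeholder, and `ChatMessageRole` is assumed to expose the usual `SYSTEM` and `USER` members):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

w = WorkspaceClient()

# Placeholder for a chat-capable external or foundation-model endpoint.
response = w.serving_endpoints.query(
    name='databricks-chat',
    messages=[
        ChatMessage(role=ChatMessageRole.SYSTEM, content='You are concise.'),
        ChatMessage(role=ChatMessageRole.USER, content='What is MLflow?'),
    ],
    max_tokens=128,
    temperature=0.2)

# Each choice carries the assistant reply in its `message` field.
print(response.choices[0].message.content)
```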
:returns: :class:`QueryEndpointResponse` """ body = {} if dataframe_records is not None: body['dataframe_records'] = [v for v in dataframe_records] if dataframe_split is not None: body['dataframe_split'] = dataframe_split.as_dict() + if extra_params is not None: body['extra_params'] = extra_params + if input is not None: body['input'] = input if inputs is not None: body['inputs'] = inputs if instances is not None: body['instances'] = [v for v in instances] + if max_tokens is not None: body['max_tokens'] = max_tokens + if messages is not None: body['messages'] = [v.as_dict() for v in messages] + if n is not None: body['n'] = n + if prompt is not None: body['prompt'] = prompt + if stop is not None: body['stop'] = [v for v in stop] + if stream is not None: body['stream'] = stream + if temperature is not None: body['temperature'] = temperature headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', f'/serving-endpoints/{name}/invocations', body=body, headers=headers) return QueryEndpointResponse.from_dict(res) @@ -1603,20 +2721,27 @@ def set_permissions( def update_config(self, name: str, - served_models: List[ServedModelInput], + served_entities: List[ServedEntityInput], *, + auto_capture_config: Optional[AutoCaptureConfigInput] = None, + served_models: Optional[List[ServedModelInput]] = None, traffic_config: Optional[TrafficConfig] = None) -> Wait[ServingEndpointDetailed]: """Update a serving endpoint with a new config. - Updates any combination of the serving endpoint's served models, the compute configuration of those - served models, and the endpoint's traffic config. An endpoint that already has an update in progress + Updates any combination of the serving endpoint's served entities, the compute configuration of those + served entities, and the endpoint's traffic config. An endpoint that already has an update in progress can not be updated until the current update completes or fails. :param name: str The name of the serving endpoint to update. This field is required. - :param served_models: List[:class:`ServedModelInput`] - A list of served models for the endpoint to serve. A serving endpoint can have up to 10 served - models. + :param served_entities: List[:class:`ServedEntityInput`] + A list of served entities for the endpoint to serve. A serving endpoint can have up to 10 served + entities. + :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional) + Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog. + :param served_models: List[:class:`ServedModelInput`] (optional) + (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A + serving endpoint can have up to 10 served models. :param traffic_config: :class:`TrafficConfig` (optional) The traffic config defining how invocations to the serving endpoint should be routed. @@ -1625,6 +2750,8 @@ def update_config(self, See :method:wait_get_serving_endpoint_not_updating for more details. 
""" body = {} + if auto_capture_config is not None: body['auto_capture_config'] = auto_capture_config.as_dict() + if served_entities is not None: body['served_entities'] = [v.as_dict() for v in served_entities] if served_models is not None: body['served_models'] = [v.as_dict() for v in served_models] if traffic_config is not None: body['traffic_config'] = traffic_config.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } @@ -1639,11 +2766,16 @@ def update_config(self, def update_config_and_wait( self, name: str, - served_models: List[ServedModelInput], + served_entities: List[ServedEntityInput], *, + auto_capture_config: Optional[AutoCaptureConfigInput] = None, + served_models: Optional[List[ServedModelInput]] = None, traffic_config: Optional[TrafficConfig] = None, timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: - return self.update_config(name=name, served_models=served_models, + return self.update_config(auto_capture_config=auto_capture_config, + name=name, + served_entities=served_entities, + served_models=served_models, traffic_config=traffic_config).result(timeout=timeout) def update_permissions( diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 34ade7a09..c626f0ae7 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -14,26 +14,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class AccountNetworkPolicyMessage: - serverless_internet_access_enabled: Optional[bool] = None - """Whether or not serverless UDF can access the internet. When false, access to the internet will - be blocked from serverless clusters. Trusted traffic required by clusters for basic - functionality will not be affected.""" - - def as_dict(self) -> dict: - """Serializes the AccountNetworkPolicyMessage into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.serverless_internet_access_enabled is not None: - body['serverless_internet_access_enabled'] = self.serverless_internet_access_enabled - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AccountNetworkPolicyMessage: - """Deserializes the AccountNetworkPolicyMessage from a dictionary.""" - return cls(serverless_internet_access_enabled=d.get('serverless_internet_access_enabled', None)) - - @dataclass class CreateIpAccessList: label: str @@ -277,28 +257,6 @@ def from_dict(cls, d: Dict[str, any]) -> DefaultNamespaceSetting: setting_name=d.get('setting_name', None)) -@dataclass -class DeleteAccountNetworkPolicyResponse: - etag: str - """etag used for versioning. The response is at least as fresh as the eTag provided. This is used - for optimistic concurrency control as a way to help prevent simultaneous writes of a setting - overwriting each other. It is strongly suggested that systems make use of the etag in the read - -> update pattern to perform setting updates in order to avoid race conditions. 
That is, get an - etag from a GET request, and pass it with the PATCH request to identify the setting version you - are updating.""" - - def as_dict(self) -> dict: - """Serializes the DeleteAccountNetworkPolicyResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.etag is not None: body['etag'] = self.etag - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> DeleteAccountNetworkPolicyResponse: - """Deserializes the DeleteAccountNetworkPolicyResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - @dataclass class DeleteDefaultWorkspaceNamespaceResponse: etag: str @@ -1572,94 +1530,6 @@ def update(self, headers=headers) -class AccountNetworkPolicyAPI: - """Network policy is a set of rules that defines what can be accessed from your Databricks network. E.g.: You - can choose to block your SQL UDF to access internet from your Databricks serverless clusters. - - There is only one instance of this setting per account. Since this setting has a default value, this - setting is present on all accounts even though it's never set on a given account. Deletion reverts the - value of the setting back to the default value.""" - - def __init__(self, api_client): - self._api = api_client - - def delete_account_network_policy(self, etag: str) -> DeleteAccountNetworkPolicyResponse: - """Delete Account Network Policy. - - Reverts back all the account network policies back to default. - - :param etag: str - etag used for versioning. The response is at least as fresh as the eTag provided. This is used for - optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting - each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern - to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET - request, and pass it with the DELETE request to identify the rule set version you are deleting. - - :returns: :class:`DeleteAccountNetworkPolicyResponse` - """ - - query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json', } - res = self._api.do( - 'DELETE', - f'/api/2.0/accounts/{self._api.account_id}/settings/types/network_policy/names/default', - query=query, - headers=headers) - return DeleteAccountNetworkPolicyResponse.from_dict(res) - - def read_account_network_policy(self, etag: str) -> AccountNetworkPolicyMessage: - """Get Account Network Policy. - - Gets the value of Account level Network Policy. - - :param etag: str - etag used for versioning. The response is at least as fresh as the eTag provided. This is used for - optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting - each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern - to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET - request, and pass it with the DELETE request to identify the rule set version you are deleting. 
- - :returns: :class:`AccountNetworkPolicyMessage` - """ - - query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json', } - res = self._api.do( - 'GET', - f'/api/2.0/accounts/{self._api.account_id}/settings/types/network_policy/names/default', - query=query, - headers=headers) - return AccountNetworkPolicyMessage.from_dict(res) - - def update_account_network_policy( - self, - *, - allow_missing: Optional[bool] = None, - setting: Optional[AccountNetworkPolicyMessage] = None) -> AccountNetworkPolicyMessage: - """Update Account Network Policy. - - Updates the policy content of Account level Network Policy. - - :param allow_missing: bool (optional) - This should always be set to true for Settings RPCs. Added for AIP compliance. - :param setting: :class:`AccountNetworkPolicyMessage` (optional) - - :returns: :class:`AccountNetworkPolicyMessage` - """ - body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - res = self._api.do( - 'PATCH', - f'/api/2.0/accounts/{self._api.account_id}/settings/types/network_policy/names/default', - body=body, - headers=headers) - return AccountNetworkPolicyMessage.from_dict(res) - - class AccountSettingsAPI: """The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index fb53a510f..bcc21cf5b 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -1379,9 +1379,6 @@ class UpdateCleanRoom: comment: Optional[str] = None """User-provided free-form text description.""" - name: Optional[str] = None - """Name of the clean room.""" - name_arg: Optional[str] = None """The name of the clean room.""" @@ -1393,7 +1390,6 @@ def as_dict(self) -> dict: body = {} if self.catalog_updates: body['catalog_updates'] = [v.as_dict() for v in self.catalog_updates] if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name if self.name_arg is not None: body['name_arg'] = self.name_arg if self.owner is not None: body['owner'] = self.owner return body @@ -1403,7 +1399,6 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCleanRoom: """Deserializes the UpdateCleanRoom from a dictionary.""" return cls(catalog_updates=_repeated_dict(d, 'catalog_updates', CleanRoomCatalogUpdate), comment=d.get('comment', None), - name=d.get('name', None), name_arg=d.get('name_arg', None), owner=d.get('owner', None)) @@ -1414,7 +1409,10 @@ class UpdateProvider: """Description about the provider.""" name: Optional[str] = None - """The name of the Provider.""" + """Name of the provider.""" + + new_name: Optional[str] = None + """New name for the provider.""" owner: Optional[str] = None """Username of Provider owner.""" @@ -1427,6 +1425,7 @@ def as_dict(self) -> dict: body = {} if self.comment is not None: body['comment'] = self.comment if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str return body @@ -1436,6 +1435,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateProvider: 
"""Deserializes the UpdateProvider from a dictionary.""" return cls(comment=d.get('comment', None), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None), recipient_profile_str=d.get('recipient_profile_str', None)) @@ -1449,7 +1449,10 @@ class UpdateRecipient: """IP Access List""" name: Optional[str] = None - """Name of Recipient.""" + """Name of the recipient.""" + + new_name: Optional[str] = None + """New name for the recipient.""" owner: Optional[str] = None """Username of the recipient owner.""" @@ -1465,6 +1468,7 @@ def as_dict(self) -> dict: if self.comment is not None: body['comment'] = self.comment if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict() return body @@ -1475,6 +1479,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient: return cls(comment=d.get('comment', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None), properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs)) @@ -1485,7 +1490,10 @@ class UpdateShare: """User-provided free-form text description.""" name: Optional[str] = None - """Name of the share.""" + """The name of the share.""" + + new_name: Optional[str] = None + """New name for the share.""" owner: Optional[str] = None """Username of current owner of share.""" @@ -1498,6 +1506,7 @@ def as_dict(self) -> dict: body = {} if self.comment is not None: body['comment'] = self.comment if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name if self.owner is not None: body['owner'] = self.owner if self.updates: body['updates'] = [v.as_dict() for v in self.updates] return body @@ -1507,6 +1516,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateShare: """Deserializes the UpdateShare from a dictionary.""" return cls(comment=d.get('comment', None), name=d.get('name', None), + new_name=d.get('new_name', None), owner=d.get('owner', None), updates=_repeated_dict(d, 'updates', SharedDataObjectUpdate)) @@ -1643,7 +1653,6 @@ def update(self, *, catalog_updates: Optional[List[CleanRoomCatalogUpdate]] = None, comment: Optional[str] = None, - name: Optional[str] = None, owner: Optional[str] = None) -> CleanRoomInfo: """Update a clean room. @@ -1667,8 +1676,6 @@ def update(self, Array of shared data object updates. :param comment: str (optional) User-provided free-form text description. - :param name: str (optional) - Name of the clean room. :param owner: str (optional) Username of current owner of clean room. @@ -1677,7 +1684,6 @@ def update(self, body = {} if catalog_updates is not None: body['catalog_updates'] = [v.as_dict() for v in catalog_updates] if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name if owner is not None: body['owner'] = owner headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PATCH', @@ -1800,6 +1806,7 @@ def update(self, name: str, *, comment: Optional[str] = None, + new_name: Optional[str] = None, owner: Optional[str] = None, recipient_profile_str: Optional[str] = None) -> ProviderInfo: """Update a provider. 
@@ -1809,9 +1816,11 @@ def update(self, admin and the owner of the provider. :param name: str - The name of the Provider. + Name of the provider. :param comment: str (optional) Description about the provider. + :param new_name: str (optional) + New name for the provider. :param owner: str (optional) Username of Provider owner. :param recipient_profile_str: str (optional) @@ -1821,6 +1830,7 @@ def update(self, """ body = {} if comment is not None: body['comment'] = comment + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner if recipient_profile_str is not None: body['recipient_profile_str'] = recipient_profile_str headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } @@ -2045,6 +2055,7 @@ def update(self, *, comment: Optional[str] = None, ip_access_list: Optional[IpAccessList] = None, + new_name: Optional[str] = None, owner: Optional[str] = None, properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None): """Update a share recipient. @@ -2054,11 +2065,13 @@ def update(self, owner of the recipient. :param name: str - Name of Recipient. + Name of the recipient. :param comment: str (optional) Description about the recipient. :param ip_access_list: :class:`IpAccessList` (optional) IP Access List + :param new_name: str (optional) + New name for the recipient. :param owner: str (optional) Username of the recipient owner. :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional) @@ -2071,6 +2084,7 @@ def update(self, body = {} if comment is not None: body['comment'] = comment if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict() + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } @@ -2174,6 +2188,7 @@ def update(self, name: str, *, comment: Optional[str] = None, + new_name: Optional[str] = None, owner: Optional[str] = None, updates: Optional[List[SharedDataObjectUpdate]] = None) -> ShareInfo: """Update a share. @@ -2193,9 +2208,11 @@ def update(self, Table removals through **update** do not require additional privileges. :param name: str - Name of the share. + The name of the share. :param comment: str (optional) User-provided free-form text description. + :param new_name: str (optional) + New name for the share. :param owner: str (optional) Username of current owner of share. :param updates: List[:class:`SharedDataObjectUpdate`] (optional) @@ -2205,6 +2222,7 @@ def update(self, """ body = {} if comment is not None: body['comment'] = comment + if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner if updates is not None: body['updates'] = [v.as_dict() for v in updates] headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 5b4cb740e..2b275a33d 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -270,7 +270,8 @@ class BaseChunkInfo: within a manifest, and when fetching individual chunk data or links.""" byte_count: Optional[int] = None - """The number of bytes in the result chunk.""" + """The number of bytes in the result chunk. 
This field is not available when using `INLINE` + disposition.""" chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" @@ -1466,7 +1467,8 @@ def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementResponse: @dataclass class ExternalLink: byte_count: Optional[int] = None - """The number of bytes in the result chunk.""" + """The number of bytes in the result chunk. This field is not available when using `INLINE` + disposition.""" chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" @@ -2218,6 +2220,9 @@ class QueryFilter: query_start_time_range: Optional[TimeRange] = None + statement_ids: Optional[List[str]] = None + """A list of statement IDs.""" + statuses: Optional[List[QueryStatus]] = None user_ids: Optional[List[int]] = None @@ -2230,6 +2235,7 @@ def as_dict(self) -> dict: """Serializes the QueryFilter into a dictionary suitable for use as a JSON request body.""" body = {} if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict() + if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids] if self.statuses: body['statuses'] = [v.value for v in self.statuses] if self.user_ids: body['user_ids'] = [v for v in self.user_ids] if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids] @@ -2239,6 +2245,7 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> QueryFilter: """Deserializes the QueryFilter from a dictionary.""" return cls(query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange), + statement_ids=d.get('statement_ids', None), statuses=_repeated_enum(d, 'statuses', QueryStatus), user_ids=d.get('user_ids', None), warehouse_ids=d.get('warehouse_ids', None)) @@ -2698,7 +2705,8 @@ class ResultData: only a single link is returned.)""" byte_count: Optional[int] = None - """The number of bytes in the result chunk.""" + """The number of bytes in the result chunk. This field is not available when using `INLINE` + disposition.""" chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py index ef9199407..a842d05a7 100644 --- a/databricks/sdk/version.py +++ b/databricks/sdk/version.py @@ -1 +1 @@ -__version__ = '0.14.0' +__version__ = '0.15.0'
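The `new_name` fields added throughout sharing turn renames into ordinary `update()` calls; the same pattern holds for providers and recipients. A minimal sketch with placeholder names:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Renames the share in place; all other fields are left untouched.
share = w.shares.update(name='sales', new_name='sales_renamed')
print(share.name)  # expected to reflect the new name
```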