From fa46e712de5eafa75a0357cb669dc29e8f5fca31 Mon Sep 17 00:00:00 2001 From: Renaud Hartert Date: Wed, 6 Nov 2024 12:29:27 +0100 Subject: [PATCH 1/2] [Release] Release v0.35.0 ### New Features and Improvements * DatabricksConfig: Add clone() support ([#376](https://github.com/databricks/databricks-sdk-java/pull/376)). ### Internal Changes * Add test instructions for external contributors ([#370](https://github.com/databricks/databricks-sdk-java/pull/370)). * Always write message for manual test integration ([#374](https://github.com/databricks/databricks-sdk-java/pull/374)). * Automatically trigger integration tests on PR ([#369](https://github.com/databricks/databricks-sdk-java/pull/369)). * Move templates in the code generator ([#373](https://github.com/databricks/databricks-sdk-java/pull/373)). ### API Changes: * Added `workspaceClient.aibiDashboardEmbeddingAccessPolicy()` service and `workspaceClient.aibiDashboardEmbeddingApprovedDomains()` service. * Added `workspaceClient.credentials()` service. * Added `appDeployment` field for `com.databricks.sdk.service.apps.CreateAppDeploymentRequest`. * Added `app` field for `com.databricks.sdk.service.apps.CreateAppRequest`. * Added `app` field for `com.databricks.sdk.service.apps.UpdateAppRequest`. * Added `table` field for `com.databricks.sdk.service.catalog.CreateOnlineTableRequest`. * Added `azureAad` field for `com.databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`. * Added `fullName` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. * Added `dashboard` field for `com.databricks.sdk.service.dashboards.CreateDashboardRequest`. * Added `schedule` field for `com.databricks.sdk.service.dashboards.CreateScheduleRequest`. * Added `subscription` field for `com.databricks.sdk.service.dashboards.CreateSubscriptionRequest`. * Added `warehouseId` field for `com.databricks.sdk.service.dashboards.Schedule`. 
* Added `dashboard` field for `com.databricks.sdk.service.dashboards.UpdateDashboardRequest`. * Added `schedule` field for `com.databricks.sdk.service.dashboards.UpdateScheduleRequest`. * Added `only` field for `com.databricks.sdk.service.jobs.RunNow`. * Added `pageToken` field for `com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest`. * Added `nextPageToken` field for `com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsResponse`. * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.CreatePipeline`. * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.EditPipeline`. * Added `connectionName` field for `com.databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`. * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.PipelineSpec`. * Added `isNoPublicIpEnabled` field for `com.databricks.sdk.service.provisioning.CreateWorkspaceRequest`. * Added `privateAccessSettingsId` field for `com.databricks.sdk.service.provisioning.UpdateWorkspaceRequest`. * Added `externalCustomerInfo` and `isNoPublicIpEnabled` fields for `com.databricks.sdk.service.provisioning.Workspace`. * Added `lastUsedDay` field for `com.databricks.sdk.service.settings.TokenInfo`. * Changed `create()` method for `workspaceClient.apps()` service with new required argument order. * Changed `executeMessageQuery()` method for `workspaceClient.genie()` service. The new request type is the `com.databricks.sdk.service.dashboards.GenieExecuteMessageQueryRequest` class. * Changed `create()`, `createSchedule()`, `createSubscription()` and `updateSchedule()` methods for `workspaceClient.lakeview()` service with new required argument order. * Removed `workspaceClient.cleanRooms()` service. 
* Removed `deploymentId`, `mode` and `sourceCodePath` fields for `com.databricks.sdk.service.apps.CreateAppDeploymentRequest`. * Removed `description`, `name` and `resources` fields for `com.databricks.sdk.service.apps.CreateAppRequest`. * Removed `description` and `resources` fields for `com.databricks.sdk.service.apps.UpdateAppRequest`. * Removed `name` and `spec` fields for `com.databricks.sdk.service.catalog.CreateOnlineTableRequest`. * Removed `displayName`, `parentPath`, `serializedDashboard` and `warehouseId` fields for `com.databricks.sdk.service.dashboards.CreateDashboardRequest`. * Removed `cronSchedule`, `displayName` and `pauseStatus` fields for `com.databricks.sdk.service.dashboards.CreateScheduleRequest`. * Removed `subscriber` field for `com.databricks.sdk.service.dashboards.CreateSubscriptionRequest`. * Removed `displayName`, `etag`, `serializedDashboard` and `warehouseId` fields for `com.databricks.sdk.service.dashboards.UpdateDashboardRequest`. * Removed `cronSchedule`, `displayName`, `etag` and `pauseStatus` fields for `com.databricks.sdk.service.dashboards.UpdateScheduleRequest`. * Removed `prevPageToken` field for `com.databricks.sdk.service.jobs.Run`. 
OpenAPI SHA: d15ea353ad7a0279b741428a8231b84f5eb28c94, Date: 2024-11-06 --- .codegen/_openapi_sha | 2 +- .gitattributes | 62 +++-- CHANGELOG.md | 61 +++++ databricks-sdk-java/pom.xml | 2 +- .../com/databricks/sdk/WorkspaceClient.java | 65 ++--- .../databricks/sdk/core/DatabricksConfig.java | 8 +- .../com/databricks/sdk/core/UserAgent.java | 2 +- .../databricks/sdk/service/apps/AppsAPI.java | 60 ++-- .../databricks/sdk/service/apps/AppsImpl.java | 6 +- .../sdk/service/apps/AppsService.java | 4 +- .../apps/CreateAppDeploymentRequest.java | 67 ++--- .../sdk/service/apps/CreateAppRequest.java | 57 +--- .../sdk/service/apps/UpdateAppRequest.java | 50 +--- .../sdk/service/catalog/AwsIamRole.java | 78 ++++++ .../catalog/AzureActiveDirectoryToken.java | 50 ++++ .../service/catalog/AzureManagedIdentity.java | 88 ++++++ .../sdk/service/catalog/ColumnTypeName.java | 1 + .../catalog/CreateCredentialRequest.java | 125 +++++++++ .../catalog/CreateFunctionSecurityType.java | 2 +- .../catalog/CreateOnlineTableRequest.java | 38 +-- .../sdk/service/catalog/CredentialInfo.java | 259 ++++++++++++++++++ .../service/catalog/CredentialPurpose.java | 10 + .../catalog/CredentialValidationResult.java | 58 ++++ .../sdk/service/catalog/CredentialsAPI.java | 138 ++++++++++ .../sdk/service/catalog/CredentialsImpl.java | 78 ++++++ .../service/catalog/CredentialsService.java | 85 ++++++ .../catalog/DeleteCredentialRequest.java | 60 ++++ .../catalog/DeleteCredentialResponse.java | 28 ++ .../service/catalog/ExternalLocationInfo.java | 5 +- .../catalog/FunctionInfoSecurityType.java | 2 +- ...emporaryServiceCredentialAzureOptions.java | 51 ++++ ...rateTemporaryServiceCredentialRequest.java | 60 ++++ ...erateTemporaryTableCredentialResponse.java | 20 ++ .../catalog/GetBindingsSecurableType.java | 3 + .../service/catalog/GetCredentialRequest.java | 42 +++ .../sdk/service/catalog/IsolationMode.java | 3 - .../catalog/ListCredentialsRequest.java | 84 ++++++ .../ListCredentialsResponse.java} | 30 +- 
.../sdk/service/catalog/OnlineTablesAPI.java | 59 +++- .../sdk/service/catalog/OnlineTablesImpl.java | 2 +- .../sdk/service/catalog/SecurableType.java | 3 + .../catalog/StorageCredentialInfo.java | 21 +- .../service/catalog/TemporaryCredentials.java | 84 ++++++ .../catalog/UpdateBindingsSecurableType.java | 3 + .../catalog/UpdateCredentialRequest.java | 177 ++++++++++++ .../catalog/UpdateExternalLocation.java | 5 +- .../catalog/UpdateStorageCredential.java | 5 +- .../catalog/ValidateCredentialRequest.java | 90 ++++++ .../catalog/ValidateCredentialResponse.java | 43 +++ .../catalog/ValidateCredentialResult.java | 13 + .../service/compute/ClusterPoliciesAPI.java | 5 +- .../compute/ClusterPoliciesService.java | 5 +- .../sdk/service/compute/ClustersAPI.java | 4 +- .../sdk/service/compute/ClustersService.java | 4 +- .../service/compute/CommandExecutionAPI.java | 72 ++--- .../sdk/service/compute/EditCluster.java | 2 +- .../sdk/service/compute/InstancePoolsAPI.java | 3 +- .../service/compute/InstancePoolsService.java | 3 +- .../dashboards/CreateDashboardRequest.java | 78 +----- .../dashboards/CreateScheduleRequest.java | 52 +--- .../dashboards/CreateSubscriptionRequest.java | 21 +- .../sdk/service/dashboards/GenieAPI.java | 4 +- ...a => GenieExecuteMessageQueryRequest.java} | 12 +- .../sdk/service/dashboards/GenieImpl.java | 2 +- .../sdk/service/dashboards/GenieService.java | 2 +- .../sdk/service/dashboards/LakeviewAPI.java | 24 +- .../sdk/service/dashboards/LakeviewImpl.java | 10 +- .../service/dashboards/MessageErrorType.java | 1 + .../sdk/service/dashboards/Schedule.java | 20 +- .../dashboards/UpdateDashboardRequest.java | 86 ++---- .../dashboards/UpdateScheduleRequest.java | 72 +---- .../sdk/service/iam/PermissionsAPI.java | 5 +- .../sdk/service/iam/PermissionsService.java | 5 +- .../databricks/sdk/service/iam/UsersAPI.java | 4 +- .../sdk/service/iam/UsersService.java | 4 +- .../sdk/service/jobs/GetRunRequest.java | 4 +- .../databricks/sdk/service/jobs/JobsAPI.java | 
4 +- .../sdk/service/jobs/JobsService.java | 4 +- .../sdk/service/jobs/RepairRun.java | 5 +- .../com/databricks/sdk/service/jobs/Run.java | 16 -- .../sdk/service/jobs/RunJobTask.java | 5 +- .../databricks/sdk/service/jobs/RunNow.java | 24 +- .../sdk/service/jobs/RunParameters.java | 5 +- .../databricks/sdk/service/jobs/RunTask.java | 42 +-- .../sdk/service/jobs/SubmitTask.java | 42 +-- .../com/databricks/sdk/service/jobs/Task.java | 42 +-- .../sdk/service/marketplace/AssetType.java | 1 + .../sdk/service/ml/ExperimentsAPI.java | 3 +- .../sdk/service/ml/ExperimentsService.java | 3 +- .../sdk/service/ml/ModelRegistryAPI.java | 5 +- .../sdk/service/ml/ModelRegistryService.java | 5 +- .../ListServicePrincipalSecretsRequest.java | 29 +- .../ListServicePrincipalSecretsResponse.java | 19 +- .../oauth2/ServicePrincipalSecretsAPI.java | 11 +- .../sdk/service/pipelines/CreatePipeline.java | 18 +- .../sdk/service/pipelines/EditPipeline.java | 18 +- .../service/pipelines/IngestionConfig.java | 6 +- .../IngestionGatewayPipelineDefinition.java | 28 +- .../IngestionPipelineDefinition.java | 8 +- .../sdk/service/pipelines/PipelineSpec.java | 18 +- .../sdk/service/pipelines/PipelinesAPI.java | 24 +- .../service/pipelines/PipelinesService.java | 4 +- .../sdk/service/pipelines/RestartWindow.java | 84 ++++++ .../pipelines/RestartWindowDaysOfWeek.java | 20 ++ .../provisioning/CreateWorkspaceRequest.java | 16 ++ .../provisioning/ExternalCustomerInfo.java | 74 +++++ .../provisioning/UpdateWorkspaceRequest.java | 19 ++ .../sdk/service/provisioning/Workspace.java | 35 +++ .../service/serving/ServingEndpointsAPI.java | 5 +- .../serving/ServingEndpointsService.java | 5 +- .../AibiDashboardEmbeddingAccessPolicy.java | 45 +++ ...AibiDashboardEmbeddingAccessPolicyAPI.java | 64 +++++ ...EmbeddingAccessPolicyAccessPolicyType.java | 12 + ...ibiDashboardEmbeddingAccessPolicyImpl.java | 36 +++ ...DashboardEmbeddingAccessPolicyService.java | 35 +++ ...DashboardEmbeddingAccessPolicySetting.java | 88 
++++++ ...AibiDashboardEmbeddingApprovedDomains.java | 46 ++++ ...iDashboardEmbeddingApprovedDomainsAPI.java | 66 +++++ ...DashboardEmbeddingApprovedDomainsImpl.java | 39 +++ ...hboardEmbeddingApprovedDomainsService.java | 34 +++ ...hboardEmbeddingApprovedDomainsSetting.java | 89 ++++++ ...rdEmbeddingAccessPolicySettingRequest.java | 55 ++++ ...mbeddingApprovedDomainsSettingRequest.java | 55 ++++ .../sdk/service/settings/SettingsAPI.java | 22 ++ .../sdk/service/settings/TokenInfo.java | 16 ++ .../service/settings/TokenManagementAPI.java | 4 +- .../settings/TokenManagementService.java | 4 +- ...rdEmbeddingAccessPolicySettingRequest.java | 82 ++++++ ...mbeddingApprovedDomainsSettingRequest.java | 82 ++++++ .../service/sharing/CentralCleanRoomInfo.java | 109 -------- .../service/sharing/CleanRoomAssetInfo.java | 104 ------- .../sdk/service/sharing/CleanRoomCatalog.java | 75 ----- .../sharing/CleanRoomCatalogUpdate.java | 58 ---- .../sharing/CleanRoomCollaboratorInfo.java | 65 ----- .../sdk/service/sharing/CleanRoomInfo.java | 174 ------------ .../sharing/CleanRoomNotebookInfo.java | 59 ---- .../service/sharing/CleanRoomTableInfo.java | 105 ------- .../sdk/service/sharing/CleanRoomsAPI.java | 125 --------- .../sdk/service/sharing/CleanRoomsImpl.java | 59 ---- .../service/sharing/CleanRoomsService.java | 71 ----- .../sdk/service/sharing/ColumnInfo.java | 221 --------------- .../sdk/service/sharing/ColumnMask.java | 64 ----- .../sdk/service/sharing/ColumnTypeName.java | 31 --- .../sdk/service/sharing/CreateCleanRoom.java | 74 ----- .../sharing/DeleteCleanRoomRequest.java | 42 --- .../service/sharing/GetCleanRoomRequest.java | 61 ----- .../sharing/ListCleanRoomsRequest.java | 68 ----- .../sdk/service/sharing/UpdateCleanRoom.java | 90 ------ .../sdk/service/sql/ChannelName.java | 2 +- .../service/sql/StatementExecutionAPI.java | 10 +- .../sql/StatementExecutionService.java | 10 +- .../sdk/service/sql/WarehousesAPI.java | 3 +- .../sdk/service/sql/WarehousesService.java | 3 
+- .../sdk/service/workspace/ReposAPI.java | 4 +- .../sdk/service/workspace/ReposService.java | 4 +- .../sdk/service/workspace/WorkspaceAPI.java | 5 +- .../service/workspace/WorkspaceService.java | 5 +- examples/docs/pom.xml | 2 +- examples/spring-boot-oauth-u2m-demo/pom.xml | 2 +- pom.xml | 2 +- shaded/pom.xml | 2 +- 161 files changed, 3655 insertions(+), 2428 deletions(-) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java create mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java rename databricks-sdk-java/src/main/java/com/databricks/sdk/service/{sharing/ListCleanRoomsResponse.java => catalog/ListCredentialsResponse.java} (58%) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java rename databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/{ExecuteMessageQueryRequest.java => GenieExecuteMessageQueryRequest.java} (78%) create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java 
create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java delete mode 100755 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java delete mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 2d9cb6d86..fed5b26b3 100644 --- a/.codegen/_openapi_sha +++ 
b/.codegen/_openapi_sha @@ -1 +1 @@ -cf9c61453990df0f9453670f2fe68e1b128647a2 \ No newline at end of file +d15ea353ad7a0279b741428a8231b84f5eb28c94 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index ce3319564..a1050873d 100755 --- a/.gitattributes +++ b/.gitattributes @@ -152,8 +152,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMat databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java linguist-generated=true @@ -180,6 +183,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Connections databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java linguist-generated=true @@ -197,7 +201,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchem databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CurrentWorkspaceBindings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java linguist-generated=true @@ -209,6 +219,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAlias databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteAliasResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true @@ -257,6 +269,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAP databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java linguist-generated=true @@ -268,6 +282,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindings databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetByAliasRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true @@ -298,6 +313,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalog databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true @@ -419,6 +436,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableType.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java linguist-generated=true @@ -429,6 +447,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAssig databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java linguist-generated=true @@ -445,6 +464,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateTable databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateWorkspaceBindingsParameters.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredentialResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResult.java linguist-generated=true @@ -710,11 +732,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSc 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java linguist-generated=true @@ -1528,6 +1550,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Pipelines databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java linguist-generated=true @@ -1578,6 +1602,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Encryp databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EndpointUseCase.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ErrorType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpKeyInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpManagedNetworkConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/GcpNetworkInfo.java linguist-generated=true @@ -1729,6 +1754,17 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpA databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateService.java linguist-generated=true @@ -1821,6 +1857,8 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTo databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomaticClusterUpdateSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java linguist-generated=true @@ -1915,6 +1953,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType. 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAutomaticClusterUpdateSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java linguist-generated=true @@ -1933,39 +1973,20 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceC databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java 
linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateRecipient.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateShare.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRecipientRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetActivationUrlInfoResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetProviderRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetRecipientSharePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetShareRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/IpAccessList.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderSharesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProvidersResponse.java linguist-generated=true @@ -2008,7 +2029,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataO databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdatePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 2af228aae..07c88e430 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,66 @@ # Version changelog +## [Release] Release v0.35.0 + +### New Features and Improvements + + * DatabricksConfig: Add clone() support ([#376](https://github.com/databricks/databricks-sdk-java/pull/376)). + + +### Internal Changes + + * Add test instructions for external contributors ([#370](https://github.com/databricks/databricks-sdk-java/pull/370)). + * Always write message for manual test integration ([#374](https://github.com/databricks/databricks-sdk-java/pull/374)). + * Automatically trigger integration tests on PR ([#369](https://github.com/databricks/databricks-sdk-java/pull/369)). + * Move templates in the code generator ([#373](https://github.com/databricks/databricks-sdk-java/pull/373)). + + +### API Changes: + + * Added `workspaceClient.aibiDashboardEmbeddingAccessPolicy()` service and `workspaceClient.aibiDashboardEmbeddingApprovedDomains()` service. + * Added `workspaceClient.credentials()` service. + * Added `appDeployment` field for `com.databricks.sdk.service.apps.CreateAppDeploymentRequest`. + * Added `app` field for `com.databricks.sdk.service.apps.CreateAppRequest`. 
+ * Added `app` field for `com.databricks.sdk.service.apps.UpdateAppRequest`. + * Added `table` field for `com.databricks.sdk.service.catalog.CreateOnlineTableRequest`. + * Added `azureAad` field for `com.databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`. + * Added `fullName` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo`. + * Added `dashboard` field for `com.databricks.sdk.service.dashboards.CreateDashboardRequest`. + * Added `schedule` field for `com.databricks.sdk.service.dashboards.CreateScheduleRequest`. + * Added `subscription` field for `com.databricks.sdk.service.dashboards.CreateSubscriptionRequest`. + * Added `warehouseId` field for `com.databricks.sdk.service.dashboards.Schedule`. + * Added `dashboard` field for `com.databricks.sdk.service.dashboards.UpdateDashboardRequest`. + * Added `schedule` field for `com.databricks.sdk.service.dashboards.UpdateScheduleRequest`. + * Added `only` field for `com.databricks.sdk.service.jobs.RunNow`. + * Added `pageToken` field for `com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest`. + * Added `nextPageToken` field for `com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsResponse`. + * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.CreatePipeline`. + * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.EditPipeline`. + * Added `connectionName` field for `com.databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`. + * Added `restartWindow` field for `com.databricks.sdk.service.pipelines.PipelineSpec`. + * Added `isNoPublicIpEnabled` field for `com.databricks.sdk.service.provisioning.CreateWorkspaceRequest`. + * Added `privateAccessSettingsId` field for `com.databricks.sdk.service.provisioning.UpdateWorkspaceRequest`. + * Added `externalCustomerInfo` and `isNoPublicIpEnabled` fields for `com.databricks.sdk.service.provisioning.Workspace`. 
+ * Added `lastUsedDay` field for `com.databricks.sdk.service.settings.TokenInfo`. + * Changed `create()` method for `workspaceClient.apps()` service with new required argument order. + * Changed the request type of the `executeMessageQuery()` method for `workspaceClient.genie()` service. + * Changed `executeMessageQuery()` method for `workspaceClient.genie()` service. New request type is `com.databricks.sdk.service.dashboards.GenieExecuteMessageQueryRequest` class. + * Changed `create()`, `createSchedule()`, `createSubscription()` and `updateSchedule()` methods for `workspaceClient.lakeview()` service with new required argument order. + * Removed `workspaceClient.cleanRooms()` service. + * Removed `deploymentId`, `mode` and `sourceCodePath` fields for `com.databricks.sdk.service.apps.CreateAppDeploymentRequest`. + * Removed `description`, `name` and `resources` fields for `com.databricks.sdk.service.apps.CreateAppRequest`. + * Removed `description` and `resources` fields for `com.databricks.sdk.service.apps.UpdateAppRequest`. + * Removed `name` and `spec` fields for `com.databricks.sdk.service.catalog.CreateOnlineTableRequest`. + * Removed `displayName`, `parentPath`, `serializedDashboard` and `warehouseId` fields for `com.databricks.sdk.service.dashboards.CreateDashboardRequest`. + * Removed `cronSchedule`, `displayName` and `pauseStatus` fields for `com.databricks.sdk.service.dashboards.CreateScheduleRequest`. + * Removed `subscriber` field for `com.databricks.sdk.service.dashboards.CreateSubscriptionRequest`. + * Removed `displayName`, `etag`, `serializedDashboard` and `warehouseId` fields for `com.databricks.sdk.service.dashboards.UpdateDashboardRequest`. + * Removed `cronSchedule`, `displayName`, `etag` and `pauseStatus` fields for `com.databricks.sdk.service.dashboards.UpdateScheduleRequest`. + * Removed `prevPageToken` field for `com.databricks.sdk.service.jobs.Run`.
+ +OpenAPI SHA: d15ea353ad7a0279b741428a8231b84f5eb28c94, Date: 2024-11-06 + + ## [Release] Release v0.34.0 ### New Features and Improvements diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index cba1f7855..8105d59ad 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -5,7 +5,7 @@ com.databricks databricks-sdk-parent - 0.34.0 + 0.35.0 databricks-sdk-java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index c9a11f3f7..655080d97 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -16,6 +16,8 @@ import com.databricks.sdk.service.catalog.CatalogsService; import com.databricks.sdk.service.catalog.ConnectionsAPI; import com.databricks.sdk.service.catalog.ConnectionsService; +import com.databricks.sdk.service.catalog.CredentialsAPI; +import com.databricks.sdk.service.catalog.CredentialsService; import com.databricks.sdk.service.catalog.ExternalLocationsAPI; import com.databricks.sdk.service.catalog.ExternalLocationsService; import com.databricks.sdk.service.catalog.FunctionsAPI; @@ -138,8 +140,6 @@ import com.databricks.sdk.service.settings.TokensService; import com.databricks.sdk.service.settings.WorkspaceConfAPI; import com.databricks.sdk.service.settings.WorkspaceConfService; -import com.databricks.sdk.service.sharing.CleanRoomsAPI; -import com.databricks.sdk.service.sharing.CleanRoomsService; import com.databricks.sdk.service.sharing.ProvidersAPI; import com.databricks.sdk.service.sharing.ProvidersService; import com.databricks.sdk.service.sharing.RecipientActivationAPI; @@ -199,7 +199,6 @@ public class WorkspaceClient { private AppsAPI appsAPI; private ArtifactAllowlistsAPI artifactAllowlistsAPI; private CatalogsAPI catalogsAPI; - private CleanRoomsAPI cleanRoomsAPI; private 
ClusterPoliciesAPI clusterPoliciesAPI; private ClustersExt clustersAPI; private CommandExecutionAPI commandExecutionAPI; @@ -209,6 +208,7 @@ public class WorkspaceClient { private ConsumerListingsAPI consumerListingsAPI; private ConsumerPersonalizationRequestsAPI consumerPersonalizationRequestsAPI; private ConsumerProvidersAPI consumerProvidersAPI; + private CredentialsAPI credentialsAPI; private CredentialsManagerAPI credentialsManagerAPI; private CurrentUserAPI currentUserAPI; private DashboardWidgetsAPI dashboardWidgetsAPI; @@ -298,7 +298,6 @@ public WorkspaceClient(DatabricksConfig config) { appsAPI = new AppsAPI(apiClient); artifactAllowlistsAPI = new ArtifactAllowlistsAPI(apiClient); catalogsAPI = new CatalogsAPI(apiClient); - cleanRoomsAPI = new CleanRoomsAPI(apiClient); clusterPoliciesAPI = new ClusterPoliciesAPI(apiClient); clustersAPI = new ClustersExt(apiClient); commandExecutionAPI = new CommandExecutionAPI(apiClient); @@ -308,6 +307,7 @@ public WorkspaceClient(DatabricksConfig config) { consumerListingsAPI = new ConsumerListingsAPI(apiClient); consumerPersonalizationRequestsAPI = new ConsumerPersonalizationRequestsAPI(apiClient); consumerProvidersAPI = new ConsumerProvidersAPI(apiClient); + credentialsAPI = new CredentialsAPI(apiClient); credentialsManagerAPI = new CredentialsManagerAPI(apiClient); currentUserAPI = new CurrentUserAPI(apiClient); dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient); @@ -459,18 +459,6 @@ public CatalogsAPI catalogs() { return catalogsAPI; } - /** - * A clean room is a secure, privacy-protecting environment where two or more parties can share - * sensitive enterprise data, including customer data, for measurements, insights, activation and - * other use cases. - * - *

To create clean rooms, you must be a metastore admin or a user with the - * **CREATE_CLEAN_ROOM** privilege. - */ - public CleanRoomsAPI cleanRooms() { - return cleanRoomsAPI; - } - /** * You can use cluster policies to control users' ability to configure clusters based on a set of * rules. These rules specify which attributes or attribute values can be used during cluster @@ -580,6 +568,19 @@ public ConsumerProvidersAPI consumerProviders() { return consumerProvidersAPI; } + /** + * A credential represents an authentication and authorization mechanism for accessing services on + * your cloud tenant. Each credential is subject to Unity Catalog access-control policies that + * control which users and groups can access the credential. + * + *

To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE + * CREDENTIAL privilege. The user who creates the credential can delegate ownership to another + * user or group to manage permissions on it + */ + public CredentialsAPI credentials() { + return credentialsAPI; + } + /** * Credentials manager interacts with with Identity Providers to to perform token exchanges using * stored credentials and refresh tokens. @@ -1453,11 +1454,11 @@ public SharesAPI shares() { * might have already completed execution when the cancel request arrives. Polling for status * until a terminal state is reached is a reliable way to determine the final state. - Wait * timeouts are approximate, occur server-side, and cannot account for things such as caller - * delays and network latency from caller to service. - The system will auto-close a statement - * after one hour if the client stops polling and thus you must poll at least once an hour. - The - * results are only available for one hour after success; polling does not extend this. - The SQL - * Execution API must be used for the entire lifecycle of the statement. For example, you cannot - * use the Jobs API to execute the command, and then the SQL Execution API to cancel it. + * delays and network latency from caller to service. - To guarantee that the statement is kept + * alive, you must poll at least once every 15 minutes. - The results are only available for one + * hour after success; polling does not extend this. - The SQL Execution API must be used for the + * entire lifecycle of the statement. For example, you cannot use the Jobs API to execute the + * command, and then the SQL Execution API to cancel it. * *

[Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement * Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html @@ -1721,17 +1722,6 @@ public WorkspaceClient withCatalogsAPI(CatalogsAPI catalogs) { return this; } - /** Replace the default CleanRoomsService with a custom implementation. */ - public WorkspaceClient withCleanRoomsImpl(CleanRoomsService cleanRooms) { - return this.withCleanRoomsAPI(new CleanRoomsAPI(cleanRooms)); - } - - /** Replace the default CleanRoomsAPI with a custom implementation. */ - public WorkspaceClient withCleanRoomsAPI(CleanRoomsAPI cleanRooms) { - this.cleanRoomsAPI = cleanRooms; - return this; - } - /** Replace the default ClusterPoliciesService with a custom implementation. */ public WorkspaceClient withClusterPoliciesImpl(ClusterPoliciesService clusterPolicies) { return this.withClusterPoliciesAPI(new ClusterPoliciesAPI(clusterPolicies)); @@ -1837,6 +1827,17 @@ public WorkspaceClient withConsumerProvidersAPI(ConsumerProvidersAPI consumerPro return this; } + /** Replace the default CredentialsService with a custom implementation. */ + public WorkspaceClient withCredentialsImpl(CredentialsService credentials) { + return this.withCredentialsAPI(new CredentialsAPI(credentials)); + } + + /** Replace the default CredentialsAPI with a custom implementation. */ + public WorkspaceClient withCredentialsAPI(CredentialsAPI credentials) { + this.credentialsAPI = credentials; + return this; + } + /** Replace the default CredentialsManagerService with a custom implementation. 
*/ public WorkspaceClient withCredentialsManagerImpl(CredentialsManagerService credentialsManager) { return this.withCredentialsManagerAPI(new CredentialsManagerAPI(credentialsManager)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index 20e7f883e..533cdd43b 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -374,13 +374,17 @@ public DatabricksConfig setAzureUseMsi(boolean azureUseMsi) { return this; } - /** @deprecated Use {@link #getAzureUseMsi()} instead. */ + /** + * @deprecated Use {@link #getAzureUseMsi()} instead. + */ @Deprecated() public boolean getAzureUseMSI() { return azureUseMsi; } - /** @deprecated Use {@link #setAzureUseMsi(boolean)} instead. */ + /** + * @deprecated Use {@link #setAzureUseMsi(boolean)} instead. + */ @Deprecated public DatabricksConfig setAzureUseMSI(boolean azureUseMsi) { this.azureUseMsi = azureUseMsi; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java index 56c817d73..4d9698f8f 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java @@ -32,7 +32,7 @@ public String getValue() { // TODO: check if reading from // /META-INF/maven/com.databricks/databrics-sdk-java/pom.properties // or getClass().getPackage().getImplementationVersion() is enough. 
- private static final String version = "0.34.0"; + private static final String version = "0.35.0"; public static void withProduct(String product, String productVersion) { UserAgent.product = product; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java index 35d1b609a..4b611f216 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java @@ -81,23 +81,27 @@ public App waitGetAppActive(String name, Duration timeout, Consumer callbac throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public App waitGetAppStopped(String name) throws TimeoutException { - return waitGetAppStopped(name, Duration.ofMinutes(20), null); + public AppDeployment waitGetDeploymentAppSucceeded(String appName, String deploymentId) + throws TimeoutException { + return waitGetDeploymentAppSucceeded(appName, deploymentId, Duration.ofMinutes(20), null); } - public App waitGetAppStopped(String name, Duration timeout, Consumer callback) + public AppDeployment waitGetDeploymentAppSucceeded( + String appName, String deploymentId, Duration timeout, Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = Arrays.asList(ComputeState.STOPPED); - java.util.List failureStates = Arrays.asList(ComputeState.ERROR); + java.util.List targetStates = Arrays.asList(AppDeploymentState.SUCCEEDED); + java.util.List failureStates = Arrays.asList(AppDeploymentState.FAILED); String statusMessage = "polling..."; int attempt = 1; while (System.currentTimeMillis() < deadline) { - App poll = get(new GetAppRequest().setName(name)); - ComputeState status = poll.getComputeStatus().getState(); + AppDeployment poll = + getDeployment( + new 
GetAppDeploymentRequest().setAppName(appName).setDeploymentId(deploymentId)); + AppDeploymentState status = poll.getStatus().getState(); statusMessage = String.format("current status: %s", status); - if (poll.getComputeStatus() != null) { - statusMessage = poll.getComputeStatus().getMessage(); + if (poll.getStatus() != null) { + statusMessage = poll.getStatus().getMessage(); } if (targetStates.contains(status)) { return poll; @@ -106,11 +110,11 @@ public App waitGetAppStopped(String name, Duration timeout, Consumer callba callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach STOPPED, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach SUCCEEDED, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } - String prefix = String.format("name=%s", name); + String prefix = String.format("appName=%s, deploymentId=%s", appName, deploymentId); int sleep = attempt; if (sleep > 10) { // sleep 10s max per attempt @@ -128,27 +132,23 @@ public App waitGetAppStopped(String name, Duration timeout, Consumer callba throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public AppDeployment waitGetDeploymentAppSucceeded(String appName, String deploymentId) - throws TimeoutException { - return waitGetDeploymentAppSucceeded(appName, deploymentId, Duration.ofMinutes(20), null); + public App waitGetAppStopped(String name) throws TimeoutException { + return waitGetAppStopped(name, Duration.ofMinutes(20), null); } - public AppDeployment waitGetDeploymentAppSucceeded( - String appName, String deploymentId, Duration timeout, Consumer callback) + public App waitGetAppStopped(String name, Duration timeout, Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = Arrays.asList(AppDeploymentState.SUCCEEDED); - java.util.List failureStates = 
Arrays.asList(AppDeploymentState.FAILED); + java.util.List targetStates = Arrays.asList(ComputeState.STOPPED); + java.util.List failureStates = Arrays.asList(ComputeState.ERROR); String statusMessage = "polling..."; int attempt = 1; while (System.currentTimeMillis() < deadline) { - AppDeployment poll = - getDeployment( - new GetAppDeploymentRequest().setAppName(appName).setDeploymentId(deploymentId)); - AppDeploymentState status = poll.getStatus().getState(); + App poll = get(new GetAppRequest().setName(name)); + ComputeState status = poll.getComputeStatus().getState(); statusMessage = String.format("current status: %s", status); - if (poll.getStatus() != null) { - statusMessage = poll.getStatus().getMessage(); + if (poll.getComputeStatus() != null) { + statusMessage = poll.getComputeStatus().getMessage(); } if (targetStates.contains(status)) { return poll; @@ -157,11 +157,11 @@ public AppDeployment waitGetDeploymentAppSucceeded( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach SUCCEEDED, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach STOPPED, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } - String prefix = String.format("appName=%s, deploymentId=%s", appName, deploymentId); + String prefix = String.format("name=%s", name); int sleep = attempt; if (sleep > 10) { // sleep 10s max per attempt @@ -179,10 +179,6 @@ public AppDeployment waitGetDeploymentAppSucceeded( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public Wait create(String name) { - return create(new CreateAppRequest().setName(name)); - } - /** * Create an app. * @@ -327,7 +323,9 @@ public AppPermissions setPermissions(String appName) { /** * Set app permissions. * - *

Sets permissions on an app. Apps can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public AppPermissions setPermissions(AppPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java index f6936a132..e45306647 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java @@ -21,7 +21,7 @@ public App create(CreateAppRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, App.class, headers); + return apiClient.POST(path, request.getApp(), App.class, headers); } @Override @@ -38,7 +38,7 @@ public AppDeployment deploy(CreateAppDeploymentRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, AppDeployment.class, headers); + return apiClient.POST(path, request.getAppDeployment(), AppDeployment.class, headers); } @Override @@ -125,7 +125,7 @@ public App update(UpdateAppRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, App.class, headers); + return apiClient.PATCH(path, request.getApp(), App.class, headers); } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java index 26e0310e1..d5909455c 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java @@ -80,7 +80,9 @@ GetAppPermissionLevelsResponse getPermissionLevels( /** * Set app permissions. * - *

Sets permissions on an app. Apps can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ AppPermissions setPermissions(AppPermissionsRequest appPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java index 3952d58b3..1d0425673 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java @@ -8,28 +8,24 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Create an app deployment */ @Generated public class CreateAppDeploymentRequest { + /** */ + @JsonProperty("app_deployment") + private AppDeployment appDeployment; + /** The name of the app. */ @JsonIgnore private String appName; - /** The unique id of the deployment. */ - @JsonProperty("deployment_id") - private String deploymentId; - - /** The mode of which the deployment will manage the source code. */ - @JsonProperty("mode") - private AppDeploymentMode mode; + public CreateAppDeploymentRequest setAppDeployment(AppDeployment appDeployment) { + this.appDeployment = appDeployment; + return this; + } - /** - * The workspace file system path of the source code used to create the app deployment. This is - * different from `deployment_artifacts.source_code_path`, which is the path used by the deployed - * app. The former refers to the original source code location of the app in the workspace during - * deployment creation, whereas the latter provides a system generated stable snapshotted source - * code path used by the deployment. 
- */ - @JsonProperty("source_code_path") - private String sourceCodePath; + public AppDeployment getAppDeployment() { + return appDeployment; + } public CreateAppDeploymentRequest setAppName(String appName) { this.appName = appName; @@ -40,56 +36,25 @@ public String getAppName() { return appName; } - public CreateAppDeploymentRequest setDeploymentId(String deploymentId) { - this.deploymentId = deploymentId; - return this; - } - - public String getDeploymentId() { - return deploymentId; - } - - public CreateAppDeploymentRequest setMode(AppDeploymentMode mode) { - this.mode = mode; - return this; - } - - public AppDeploymentMode getMode() { - return mode; - } - - public CreateAppDeploymentRequest setSourceCodePath(String sourceCodePath) { - this.sourceCodePath = sourceCodePath; - return this; - } - - public String getSourceCodePath() { - return sourceCodePath; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateAppDeploymentRequest that = (CreateAppDeploymentRequest) o; - return Objects.equals(appName, that.appName) - && Objects.equals(deploymentId, that.deploymentId) - && Objects.equals(mode, that.mode) - && Objects.equals(sourceCodePath, that.sourceCodePath); + return Objects.equals(appDeployment, that.appDeployment) + && Objects.equals(appName, that.appName); } @Override public int hashCode() { - return Objects.hash(appName, deploymentId, mode, sourceCodePath); + return Objects.hash(appDeployment, appName); } @Override public String toString() { return new ToStringer(CreateAppDeploymentRequest.class) + .add("appDeployment", appDeployment) .add("appName", appName) - .add("deploymentId", deploymentId) - .add("mode", mode) - .add("sourceCodePath", sourceCodePath) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java index 
e835442a9..7d1076bb7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java @@ -5,51 +5,22 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Objects; +/** Create an app */ @Generated public class CreateAppRequest { - /** The description of the app. */ - @JsonProperty("description") - private String description; + /** */ + @JsonProperty("app") + private App app; - /** - * The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. - * It must be unique within the workspace. - */ - @JsonProperty("name") - private String name; - - /** Resources for the app. */ - @JsonProperty("resources") - private Collection resources; - - public CreateAppRequest setDescription(String description) { - this.description = description; - return this; - } - - public String getDescription() { - return description; - } - - public CreateAppRequest setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CreateAppRequest setResources(Collection resources) { - this.resources = resources; + public CreateAppRequest setApp(App app) { + this.app = app; return this; } - public Collection getResources() { - return resources; + public App getApp() { + return app; } @Override @@ -57,22 +28,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateAppRequest that = (CreateAppRequest) o; - return Objects.equals(description, that.description) - && Objects.equals(name, that.name) - && Objects.equals(resources, that.resources); + return Objects.equals(app, that.app); } @Override public int hashCode() { - return Objects.hash(description, name, resources); 
+ return Objects.hash(app); } @Override public String toString() { - return new ToStringer(CreateAppRequest.class) - .add("description", description) - .add("name", name) - .add("resources", resources) - .toString(); + return new ToStringer(CreateAppRequest.class).add("app", app).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java index da8b8c4ca..4727ea97d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java @@ -4,34 +4,27 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; import java.util.Objects; +/** Update an app */ @Generated public class UpdateAppRequest { - /** The description of the app. */ - @JsonProperty("description") - private String description; + /** */ + @JsonProperty("app") + private App app; - /** - * The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. - * It must be unique within the workspace. - */ - @JsonProperty("name") - private String name; + /** The name of the app. */ + @JsonIgnore private String name; - /** Resources for the app. 
*/ - @JsonProperty("resources") - private Collection resources; - - public UpdateAppRequest setDescription(String description) { - this.description = description; + public UpdateAppRequest setApp(App app) { + this.app = app; return this; } - public String getDescription() { - return description; + public App getApp() { + return app; } public UpdateAppRequest setName(String name) { @@ -43,36 +36,21 @@ public String getName() { return name; } - public UpdateAppRequest setResources(Collection resources) { - this.resources = resources; - return this; - } - - public Collection getResources() { - return resources; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateAppRequest that = (UpdateAppRequest) o; - return Objects.equals(description, that.description) - && Objects.equals(name, that.name) - && Objects.equals(resources, that.resources); + return Objects.equals(app, that.app) && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(description, name, resources); + return Objects.hash(app, name); } @Override public String toString() { - return new ToStringer(UpdateAppRequest.class) - .add("description", description) - .add("name", name) - .add("resources", resources) - .toString(); + return new ToStringer(UpdateAppRequest.class).add("app", app).add("name", name).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java new file mode 100755 index 000000000..628bed840 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The AWS IAM role configuration */ +@Generated +public class AwsIamRole { + /** The external ID used in role assumption to prevent the confused deputy problem. */ + @JsonProperty("external_id") + private String externalId; + + /** The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials. */ + @JsonProperty("role_arn") + private String roleArn; + + /** + * The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity + * that is going to assume the AWS IAM role. + */ + @JsonProperty("unity_catalog_iam_arn") + private String unityCatalogIamArn; + + public AwsIamRole setExternalId(String externalId) { + this.externalId = externalId; + return this; + } + + public String getExternalId() { + return externalId; + } + + public AwsIamRole setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + + public AwsIamRole setUnityCatalogIamArn(String unityCatalogIamArn) { + this.unityCatalogIamArn = unityCatalogIamArn; + return this; + } + + public String getUnityCatalogIamArn() { + return unityCatalogIamArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsIamRole that = (AwsIamRole) o; + return Objects.equals(externalId, that.externalId) + && Objects.equals(roleArn, that.roleArn) + && Objects.equals(unityCatalogIamArn, that.unityCatalogIamArn); + } + + @Override + public int hashCode() { + return Objects.hash(externalId, roleArn, unityCatalogIamArn); + } + + @Override + public String toString() { + return new ToStringer(AwsIamRole.class) + .add("externalId", externalId) + .add("roleArn", roleArn) + 
.add("unityCatalogIamArn", unityCatalogIamArn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java new file mode 100755 index 000000000..b545ea991 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed + * Identity. Read more at + * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token + */ +@Generated +public class AzureActiveDirectoryToken { + /** + * Opaque token that contains claims that you can use in Azure Active Directory to access cloud + * services. 
+ */ + @JsonProperty("aad_token") + private String aadToken; + + public AzureActiveDirectoryToken setAadToken(String aadToken) { + this.aadToken = aadToken; + return this; + } + + public String getAadToken() { + return aadToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureActiveDirectoryToken that = (AzureActiveDirectoryToken) o; + return Objects.equals(aadToken, that.aadToken); + } + + @Override + public int hashCode() { + return Objects.hash(aadToken); + } + + @Override + public String toString() { + return new ToStringer(AzureActiveDirectoryToken.class).add("aadToken", aadToken).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java new file mode 100755 index 000000000..be3997823 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The Azure managed identity configuration. */ +@Generated +public class AzureManagedIdentity { + /** + * The Azure resource ID of the Azure Databricks Access Connector. Use the format + * `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`. + */ + @JsonProperty("access_connector_id") + private String accessConnectorId; + + /** + * The Databricks internal ID that represents this managed identity. 
This field is only used to + * persist the credential_id once it is fetched from the credentials manager - as we only use the + * protobuf serializer to store credentials, this ID gets persisted to the database. . + */ + @JsonProperty("credential_id") + private String credentialId; + + /** + * The Azure resource ID of the managed identity. Use the format, + * `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}` + * This is only available for user-assgined identities. For system-assigned identities, the + * access_connector_id is used to identify the identity. If this field is not provided, then we + * assume the AzureManagedIdentity is using the system-assigned identity. + */ + @JsonProperty("managed_identity_id") + private String managedIdentityId; + + public AzureManagedIdentity setAccessConnectorId(String accessConnectorId) { + this.accessConnectorId = accessConnectorId; + return this; + } + + public String getAccessConnectorId() { + return accessConnectorId; + } + + public AzureManagedIdentity setCredentialId(String credentialId) { + this.credentialId = credentialId; + return this; + } + + public String getCredentialId() { + return credentialId; + } + + public AzureManagedIdentity setManagedIdentityId(String managedIdentityId) { + this.managedIdentityId = managedIdentityId; + return this; + } + + public String getManagedIdentityId() { + return managedIdentityId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureManagedIdentity that = (AzureManagedIdentity) o; + return Objects.equals(accessConnectorId, that.accessConnectorId) + && Objects.equals(credentialId, that.credentialId) + && Objects.equals(managedIdentityId, that.managedIdentityId); + } + + @Override + public int hashCode() { + return Objects.hash(accessConnectorId, credentialId, managedIdentityId); + } + + @Override + public String 
toString() { + return new ToStringer(AzureManagedIdentity.class) + .add("accessConnectorId", accessConnectorId) + .add("credentialId", credentialId) + .add("managedIdentityId", managedIdentityId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java index 629166833..ff2e8f111 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ColumnTypeName.java @@ -28,4 +28,5 @@ public enum ColumnTypeName { TIMESTAMP, TIMESTAMP_NTZ, USER_DEFINED_TYPE, + VARIANT, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java new file mode 100755 index 000000000..b4a9bbe23 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java @@ -0,0 +1,125 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateCredentialRequest { + /** The AWS IAM role configuration */ + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + /** Comment associated with the credential. */ + @JsonProperty("comment") + private String comment; + + /** + * The credential name. The name must be unique among storage and service credentials within the + * metastore. 
+ */ + @JsonProperty("name") + private String name; + + /** Indicates the purpose of the credential. */ + @JsonProperty("purpose") + private CredentialPurpose purpose; + + /** + * Optional. Supplying true to this argument skips validation of the created set of credentials. + */ + @JsonProperty("skip_validation") + private Boolean skipValidation; + + public CreateCredentialRequest setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public CreateCredentialRequest setAzureManagedIdentity( + AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public CreateCredentialRequest setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CreateCredentialRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CreateCredentialRequest setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + public CreateCredentialRequest setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateCredentialRequest that = (CreateCredentialRequest) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(comment, that.comment) + && Objects.equals(name, that.name) + && Objects.equals(purpose, that.purpose) + && Objects.equals(skipValidation, 
that.skipValidation); + } + + @Override + public int hashCode() { + return Objects.hash(awsIamRole, azureManagedIdentity, comment, name, purpose, skipValidation); + } + + @Override + public String toString() { + return new ToStringer(CreateCredentialRequest.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("comment", comment) + .add("name", name) + .add("purpose", purpose) + .add("skipValidation", skipValidation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java index ef46d6a4a..a0b13a4ee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSecurityType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Function security type. */ +/** The security type of the function. */ @Generated public enum CreateFunctionSecurityType { DEFINER, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java index 4b77e9b13..7f3a0730c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java @@ -7,33 +7,20 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Online Table information. */ +/** Create an Online Table */ @Generated public class CreateOnlineTableRequest { - /** Full three-part (catalog, schema, table) name of the table. */ - @JsonProperty("name") - private String name; + /** Online Table information. 
*/ + @JsonProperty("table") + private OnlineTable table; - /** Specification of the online table. */ - @JsonProperty("spec") - private OnlineTableSpec spec; - - public CreateOnlineTableRequest setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CreateOnlineTableRequest setSpec(OnlineTableSpec spec) { - this.spec = spec; + public CreateOnlineTableRequest setTable(OnlineTable table) { + this.table = table; return this; } - public OnlineTableSpec getSpec() { - return spec; + public OnlineTable getTable() { + return table; } @Override @@ -41,19 +28,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateOnlineTableRequest that = (CreateOnlineTableRequest) o; - return Objects.equals(name, that.name) && Objects.equals(spec, that.spec); + return Objects.equals(table, that.table); } @Override public int hashCode() { - return Objects.hash(name, spec); + return Objects.hash(table); } @Override public String toString() { - return new ToStringer(CreateOnlineTableRequest.class) - .add("name", name) - .add("spec", spec) - .toString(); + return new ToStringer(CreateOnlineTableRequest.class).add("table", table).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java new file mode 100755 index 000000000..8945b6c14 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java @@ -0,0 +1,259 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CredentialInfo { + /** The AWS IAM role configuration */ + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + /** Comment associated with the credential. */ + @JsonProperty("comment") + private String comment; + + /** Time at which this credential was created, in epoch milliseconds. */ + @JsonProperty("created_at") + private Long createdAt; + + /** Username of credential creator. */ + @JsonProperty("created_by") + private String createdBy; + + /** The full name of the credential. */ + @JsonProperty("full_name") + private String fullName; + + /** The unique identifier of the credential. */ + @JsonProperty("id") + private String id; + + /** + * Whether the current securable is accessible from all workspaces or a specific set of + * workspaces. + */ + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + /** Unique identifier of the parent metastore. */ + @JsonProperty("metastore_id") + private String metastoreId; + + /** + * The credential name. The name must be unique among storage and service credentials within the + * metastore. + */ + @JsonProperty("name") + private String name; + + /** Username of current owner of credential. */ + @JsonProperty("owner") + private String owner; + + /** Indicates the purpose of the credential. */ + @JsonProperty("purpose") + private CredentialPurpose purpose; + + /** Time at which this credential was last modified, in epoch milliseconds. */ + @JsonProperty("updated_at") + private Long updatedAt; + + /** Username of user who last modified the credential. 
*/ + @JsonProperty("updated_by") + private String updatedBy; + + public CredentialInfo setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public CredentialInfo setAzureManagedIdentity(AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public CredentialInfo setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public CredentialInfo setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public CredentialInfo setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public CredentialInfo setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public CredentialInfo setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public CredentialInfo setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public CredentialInfo setMetastoreId(String metastoreId) { + this.metastoreId = metastoreId; + return this; + } + + public String getMetastoreId() { + return metastoreId; + } + + public CredentialInfo setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CredentialInfo setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public CredentialInfo setPurpose(CredentialPurpose 
purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + public CredentialInfo setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public CredentialInfo setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CredentialInfo that = (CredentialInfo) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(fullName, that.fullName) + && Objects.equals(id, that.id) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(metastoreId, that.metastoreId) + && Objects.equals(name, that.name) + && Objects.equals(owner, that.owner) + && Objects.equals(purpose, that.purpose) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + comment, + createdAt, + createdBy, + fullName, + id, + isolationMode, + metastoreId, + name, + owner, + purpose, + updatedAt, + updatedBy); + } + + @Override + public String toString() { + return new ToStringer(CredentialInfo.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("fullName", fullName) + .add("id", id) + .add("isolationMode", isolationMode) + .add("metastoreId", metastoreId) + .add("name", name) + .add("owner", owner) + .add("purpose", purpose) 
+ .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java new file mode 100755 index 000000000..ec09daad8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialPurpose.java @@ -0,0 +1,10 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum CredentialPurpose { + SERVICE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java new file mode 100755 index 000000000..a823a5348 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialValidationResult.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CredentialValidationResult { + /** Error message would exist when the result does not equal to **PASS**. */ + @JsonProperty("message") + private String message; + + /** The results of the tested operation. 
*/ + @JsonProperty("result") + private ValidateCredentialResult result; + + public CredentialValidationResult setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public CredentialValidationResult setResult(ValidateCredentialResult result) { + this.result = result; + return this; + } + + public ValidateCredentialResult getResult() { + return result; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CredentialValidationResult that = (CredentialValidationResult) o; + return Objects.equals(message, that.message) && Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(message, result); + } + + @Override + public String toString() { + return new ToStringer(CredentialValidationResult.class) + .add("message", message) + .add("result", result) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java new file mode 100755 index 000000000..659f078a4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsAPI.java @@ -0,0 +1,138 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A credential represents an authentication and authorization mechanism for accessing services on + * your cloud tenant. Each credential is subject to Unity Catalog access-control policies that + * control which users and groups can access the credential. + * + *

To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE + * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user + * or group to manage permissions on it. + */ +@Generated +public class CredentialsAPI { + private static final Logger LOG = LoggerFactory.getLogger(CredentialsAPI.class); + + private final CredentialsService impl; + + /** Regular-use constructor */ + public CredentialsAPI(ApiClient apiClient) { + impl = new CredentialsImpl(apiClient); + } + + /** Constructor for mocks */ + public CredentialsAPI(CredentialsService mock) { + impl = mock; + } + + /** + * Create a credential. + * + *

Creates a new credential. + */ + public CredentialInfo createCredential(CreateCredentialRequest request) { + return impl.createCredential(request); + } + + public void deleteCredential(String nameArg) { + deleteCredential(new DeleteCredentialRequest().setNameArg(nameArg)); + } + + /** + * Delete a credential. + * + *

Deletes a credential from the metastore. The caller must be an owner of the credential. + */ + public void deleteCredential(DeleteCredentialRequest request) { + impl.deleteCredential(request); + } + + /** + * Generate a temporary service credential. + * + *

Returns a set of temporary credentials generated using the specified service credential. The + * caller must be a metastore admin or have the metastore privilege **ACCESS** on the service + * credential. + */ + public TemporaryCredentials generateTemporaryServiceCredential( + GenerateTemporaryServiceCredentialRequest request) { + return impl.generateTemporaryServiceCredential(request); + } + + public CredentialInfo getCredential(String nameArg) { + return getCredential(new GetCredentialRequest().setNameArg(nameArg)); + } + + /** + * Get a credential. + * + *

Gets a credential from the metastore. The caller must be a metastore admin, the owner of the + * credential, or have any permission on the credential. + */ + public CredentialInfo getCredential(GetCredentialRequest request) { + return impl.getCredential(request); + } + + /** + * List credentials. + * + *

Gets an array of credentials (as __CredentialInfo__ objects). + * + *

The array is limited to only the credentials that the caller has permission to access. If + * the caller is a metastore admin, retrieval of credentials is unrestricted. There is no + * guarantee of a specific ordering of the elements in the array. + */ + public Iterable listCredentials(ListCredentialsRequest request) { + return new Paginator<>( + request, + impl::listCredentials, + ListCredentialsResponse::getCredentials, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public CredentialInfo updateCredential(String nameArg) { + return updateCredential(new UpdateCredentialRequest().setNameArg(nameArg)); + } + + /** + * Update a credential. + * + *

Updates a credential on the metastore. + * + *

The caller must be the owner of the credential or a metastore admin or have the `MANAGE` + * permission. If the caller is a metastore admin, only the __owner__ field can be changed. + */ + public CredentialInfo updateCredential(UpdateCredentialRequest request) { + return impl.updateCredential(request); + } + + /** + * Validate a credential. + * + *

Validates a credential. + * + *

Either the __credential_name__ or the cloud-specific credential must be provided. + * + *

The caller must be a metastore admin or the credential owner. + */ + public ValidateCredentialResponse validateCredential(ValidateCredentialRequest request) { + return impl.validateCredential(request); + } + + public CredentialsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java new file mode 100755 index 000000000..b2aad2644 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of Credentials */ +@Generated +class CredentialsImpl implements CredentialsService { + private final ApiClient apiClient; + + public CredentialsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CredentialInfo createCredential(CreateCredentialRequest request) { + String path = "/api/2.1/unity-catalog/credentials"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, CredentialInfo.class, headers); + } + + @Override + public void deleteCredential(DeleteCredentialRequest request) { + String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteCredentialResponse.class, headers); + } + + @Override + public TemporaryCredentials generateTemporaryServiceCredential( + GenerateTemporaryServiceCredentialRequest request) { + String path = 
"/api/2.1/unity-catalog/temporary-service-credentials"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, TemporaryCredentials.class, headers); + } + + @Override + public CredentialInfo getCredential(GetCredentialRequest request) { + String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, CredentialInfo.class, headers); + } + + @Override + public ListCredentialsResponse listCredentials(ListCredentialsRequest request) { + String path = "/api/2.1/unity-catalog/credentials"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListCredentialsResponse.class, headers); + } + + @Override + public CredentialInfo updateCredential(UpdateCredentialRequest request) { + String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, CredentialInfo.class, headers); + } + + @Override + public ValidateCredentialResponse validateCredential(ValidateCredentialRequest request) { + String path = "/api/2.1/unity-catalog/validate-credentials"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, ValidateCredentialResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java new file mode 100755 index 000000000..2317a7075 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * A credential represents an authentication and authorization mechanism for accessing services on + * your cloud tenant. Each credential is subject to Unity Catalog access-control policies that + * control which users and groups can access the credential. + * + *

To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE + * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user + * or group to manage permissions on it. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface CredentialsService { + /** + * Create a credential. + * + *

Creates a new credential. + */ + CredentialInfo createCredential(CreateCredentialRequest createCredentialRequest); + + /** + * Delete a credential. + * + *

Deletes a credential from the metastore. The caller must be an owner of the credential. + */ + void deleteCredential(DeleteCredentialRequest deleteCredentialRequest); + + /** + * Generate a temporary service credential. + * + *

Returns a set of temporary credentials generated using the specified service credential. The + * caller must be a metastore admin or have the metastore privilege **ACCESS** on the service + * credential. + */ + TemporaryCredentials generateTemporaryServiceCredential( + GenerateTemporaryServiceCredentialRequest generateTemporaryServiceCredentialRequest); + + /** + * Get a credential. + * + *

Gets a credential from the metastore. The caller must be a metastore admin, the owner of the + * credential, or have any permission on the credential. + */ + CredentialInfo getCredential(GetCredentialRequest getCredentialRequest); + + /** + * List credentials. + * + *

Gets an array of credentials (as __CredentialInfo__ objects). + * + *

The array is limited to only the credentials that the caller has permission to access. If + * the caller is a metastore admin, retrieval of credentials is unrestricted. There is no + * guarantee of a specific ordering of the elements in the array. + */ + ListCredentialsResponse listCredentials(ListCredentialsRequest listCredentialsRequest); + + /** + * Update a credential. + * + *

Updates a credential on the metastore. + * + *

The caller must be the owner of the credential or a metastore admin or have the `MANAGE` + * permission. If the caller is a metastore admin, only the __owner__ field can be changed. + */ + CredentialInfo updateCredential(UpdateCredentialRequest updateCredentialRequest); + + /** + * Validate a credential. + * + *

Validates a credential. + * + *

Either the __credential_name__ or the cloud-specific credential must be provided. + * + *

The caller must be a metastore admin or the credential owner. + */ + ValidateCredentialResponse validateCredential( + ValidateCredentialRequest validateCredentialRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java new file mode 100755 index 000000000..a3549cb5b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a credential */ +@Generated +public class DeleteCredentialRequest { + /** Force deletion even if there are dependent services. */ + @JsonIgnore + @QueryParam("force") + private Boolean force; + + /** Name of the credential. 
*/ + @JsonIgnore private String nameArg; + + public DeleteCredentialRequest setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public DeleteCredentialRequest setNameArg(String nameArg) { + this.nameArg = nameArg; + return this; + } + + public String getNameArg() { + return nameArg; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteCredentialRequest that = (DeleteCredentialRequest) o; + return Objects.equals(force, that.force) && Objects.equals(nameArg, that.nameArg); + } + + @Override + public int hashCode() { + return Objects.hash(force, nameArg); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialRequest.class) + .add("force", force) + .add("nameArg", nameArg) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java new file mode 100755 index 000000000..1ad278759 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteCredentialResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteCredentialResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java index 051a796ae..3fe7a3650 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java @@ -52,10 +52,7 @@ public class ExternalLocationInfo { @JsonProperty("fallback") private Boolean fallback; - /** - * Whether the current securable is accessible from all workspaces or a specific set of - * workspaces. - */ + /** */ @JsonProperty("isolation_mode") private IsolationMode isolationMode; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java index d4ca791fc..5b45675b4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Function security type. */ +/** The security type of the function. 
*/ @Generated public enum FunctionInfoSecurityType { DEFINER, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java new file mode 100755 index 000000000..31dca4b95 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Options to customize the requested temporary credential */ +@Generated +public class GenerateTemporaryServiceCredentialAzureOptions { + /** + * The resources to which the temporary Azure credential should apply. 
These resources are the + * scopes that are passed to the token provider (see + * https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python) + */ + @JsonProperty("resources") + private Collection resources; + + public GenerateTemporaryServiceCredentialAzureOptions setResources(Collection resources) { + this.resources = resources; + return this; + } + + public Collection getResources() { + return resources; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateTemporaryServiceCredentialAzureOptions that = + (GenerateTemporaryServiceCredentialAzureOptions) o; + return Objects.equals(resources, that.resources); + } + + @Override + public int hashCode() { + return Objects.hash(resources); + } + + @Override + public String toString() { + return new ToStringer(GenerateTemporaryServiceCredentialAzureOptions.class) + .add("resources", resources) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java new file mode 100755 index 000000000..3e1d8fc44 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenerateTemporaryServiceCredentialRequest { + /** Options to customize the requested temporary credential */ + @JsonProperty("azure_options") + private GenerateTemporaryServiceCredentialAzureOptions azureOptions; + + /** The name of the service credential used to generate a temporary credential */ + @JsonProperty("credential_name") + private String credentialName; + + public GenerateTemporaryServiceCredentialRequest setAzureOptions( + GenerateTemporaryServiceCredentialAzureOptions azureOptions) { + this.azureOptions = azureOptions; + return this; + } + + public GenerateTemporaryServiceCredentialAzureOptions getAzureOptions() { + return azureOptions; + } + + public GenerateTemporaryServiceCredentialRequest setCredentialName(String credentialName) { + this.credentialName = credentialName; + return this; + } + + public String getCredentialName() { + return credentialName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateTemporaryServiceCredentialRequest that = (GenerateTemporaryServiceCredentialRequest) o; + return Objects.equals(azureOptions, that.azureOptions) + && Objects.equals(credentialName, that.credentialName); + } + + @Override + public int hashCode() { + return Objects.hash(azureOptions, credentialName); + } + + @Override + public String toString() { + return new ToStringer(GenerateTemporaryServiceCredentialRequest.class) + .add("azureOptions", azureOptions) + .add("credentialName", credentialName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java index a18c68dbc..be752eec7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java @@ -16,6 +16,14 @@ public class GenerateTemporaryTableCredentialResponse { @JsonProperty("aws_temp_credentials") private AwsCredentials awsTempCredentials; + /** + * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or + * Managed Identity. Read more at + * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token + */ + @JsonProperty("azure_aad") + private AzureActiveDirectoryToken azureAad; + /** * Azure temporary credentials for API authentication. Read more at * https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas @@ -58,6 +66,15 @@ public AwsCredentials getAwsTempCredentials() { return awsTempCredentials; } + public GenerateTemporaryTableCredentialResponse setAzureAad(AzureActiveDirectoryToken azureAad) { + this.azureAad = azureAad; + return this; + } + + public AzureActiveDirectoryToken getAzureAad() { + return azureAad; + } + public GenerateTemporaryTableCredentialResponse setAzureUserDelegationSas( AzureUserDelegationSas azureUserDelegationSas) { this.azureUserDelegationSas = azureUserDelegationSas; @@ -111,6 +128,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; GenerateTemporaryTableCredentialResponse that = (GenerateTemporaryTableCredentialResponse) o; return Objects.equals(awsTempCredentials, that.awsTempCredentials) + && Objects.equals(azureAad, that.azureAad) && Objects.equals(azureUserDelegationSas, that.azureUserDelegationSas) && Objects.equals(expirationTime, that.expirationTime) && 
Objects.equals(gcpOauthToken, that.gcpOauthToken) @@ -122,6 +140,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( awsTempCredentials, + azureAad, azureUserDelegationSas, expirationTime, gcpOauthToken, @@ -133,6 +152,7 @@ public int hashCode() { public String toString() { return new ToStringer(GenerateTemporaryTableCredentialResponse.class) .add("awsTempCredentials", awsTempCredentials) + .add("azureAad", azureAad) .add("azureUserDelegationSas", azureUserDelegationSas) .add("expirationTime", expirationTime) .add("gcpOauthToken", gcpOauthToken) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java index 59200e276..a90291dd0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetBindingsSecurableType.java @@ -13,6 +13,9 @@ public enum GetBindingsSecurableType { @JsonProperty("external_location") EXTERNAL_LOCATION, + @JsonProperty("service_credential") + SERVICE_CREDENTIAL, + @JsonProperty("storage_credential") STORAGE_CREDENTIAL, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java new file mode 100755 index 000000000..cfb1de4fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a credential */ +@Generated +public class GetCredentialRequest { + /** Name of the credential. */ + @JsonIgnore private String nameArg; + + public GetCredentialRequest setNameArg(String nameArg) { + this.nameArg = nameArg; + return this; + } + + public String getNameArg() { + return nameArg; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCredentialRequest that = (GetCredentialRequest) o; + return Objects.equals(nameArg, that.nameArg); + } + + @Override + public int hashCode() { + return Objects.hash(nameArg); + } + + @Override + public String toString() { + return new ToStringer(GetCredentialRequest.class).add("nameArg", nameArg).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java index 1c6e3168f..db13d61b0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java @@ -4,9 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Whether the current securable is accessible from all workspaces or a specific set of workspaces. 
- */ @Generated public enum IsolationMode { ISOLATION_MODE_ISOLATED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java new file mode 100755 index 000000000..775a697e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List credentials */ +@Generated +public class ListCredentialsRequest { + /** + * Maximum number of credentials to return. - If not set, the default max page size is used. - + * When set to a value greater than 0, the page length is the minimum of this value and a + * server-configured value. - When set to 0, the page length is set to a server-configured value + * (recommended). - When set to a value less than 0, an invalid parameter error is returned. + */ + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + /** Opaque token to retrieve the next page of results. */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** Return only credentials for the specified purpose. 
*/ + @JsonIgnore + @QueryParam("purpose") + private CredentialPurpose purpose; + + public ListCredentialsRequest setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListCredentialsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListCredentialsRequest setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCredentialsRequest that = (ListCredentialsRequest) o; + return Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(purpose, that.purpose); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken, purpose); + } + + @Override + public String toString() { + return new ToStringer(ListCredentialsRequest.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .add("purpose", purpose) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java similarity index 58% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java index cadcaa6fe..79ebd1904 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java @@ -1,6 +1,6 @@ // Code generated 
from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.sharing; +package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -9,10 +9,10 @@ import java.util.Objects; @Generated -public class ListCleanRoomsResponse { - /** An array of clean rooms. Remote details (central) are not included. */ - @JsonProperty("clean_rooms") - private Collection cleanRooms; +public class ListCredentialsResponse { + /** */ + @JsonProperty("credentials") + private Collection credentials; /** * Opaque token to retrieve the next page of results. Absent if there are no more pages. @@ -21,16 +21,16 @@ public class ListCleanRoomsResponse { @JsonProperty("next_page_token") private String nextPageToken; - public ListCleanRoomsResponse setCleanRooms(Collection cleanRooms) { - this.cleanRooms = cleanRooms; + public ListCredentialsResponse setCredentials(Collection credentials) { + this.credentials = credentials; return this; } - public Collection getCleanRooms() { - return cleanRooms; + public Collection getCredentials() { + return credentials; } - public ListCleanRoomsResponse setNextPageToken(String nextPageToken) { + public ListCredentialsResponse setNextPageToken(String nextPageToken) { this.nextPageToken = nextPageToken; return this; } @@ -43,20 +43,20 @@ public String getNextPageToken() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ListCleanRoomsResponse that = (ListCleanRoomsResponse) o; - return Objects.equals(cleanRooms, that.cleanRooms) + ListCredentialsResponse that = (ListCredentialsResponse) o; + return Objects.equals(credentials, that.credentials) && Objects.equals(nextPageToken, that.nextPageToken); } @Override public int hashCode() { - return Objects.hash(cleanRooms, nextPageToken); + return Objects.hash(credentials, nextPageToken); } @Override public String toString() { - return new 
ToStringer(ListCleanRoomsResponse.class) - .add("cleanRooms", cleanRooms) + return new ToStringer(ListCredentialsResponse.class) + .add("credentials", credentials) .add("nextPageToken", nextPageToken) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java index 32f99a526..0a3f68864 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java @@ -2,7 +2,13 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Wait; +import java.time.Duration; +import java.util.Arrays; +import java.util.concurrent.TimeoutException; +import java.util.function.Consumer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -23,13 +29,62 @@ public OnlineTablesAPI(OnlineTablesService mock) { impl = mock; } + public OnlineTable waitGetOnlineTableActive(String name) throws TimeoutException { + return waitGetOnlineTableActive(name, Duration.ofMinutes(20), null); + } + + public OnlineTable waitGetOnlineTableActive( + String name, Duration timeout, Consumer callback) throws TimeoutException { + long deadline = System.currentTimeMillis() + timeout.toMillis(); + java.util.List targetStates = + Arrays.asList(ProvisioningInfoState.ACTIVE); + java.util.List failureStates = + Arrays.asList(ProvisioningInfoState.FAILED); + String statusMessage = "polling..."; + int attempt = 1; + while (System.currentTimeMillis() < deadline) { + OnlineTable poll = get(new GetOnlineTableRequest().setName(name)); + ProvisioningInfoState status = poll.getUnityCatalogProvisioningState(); + statusMessage = String.format("current status: %s", status); + if 
(targetStates.contains(status)) { + return poll; + } + if (callback != null) { + callback.accept(poll); + } + if (failureStates.contains(status)) { + String msg = String.format("failed to reach ACTIVE, got %s: %s", status, statusMessage); + throw new IllegalStateException(msg); + } + + String prefix = String.format("name=%s", name); + int sleep = attempt; + if (sleep > 10) { + // sleep 10s max per attempt + sleep = 10; + } + LOG.info("{}: ({}) {} (sleeping ~{}s)", prefix, status, statusMessage, sleep); + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); + } + /** * Create an Online Table. * *

Create a new Online Table. */ - public OnlineTable create(CreateOnlineTableRequest request) { - return impl.create(request); + public Wait create(CreateOnlineTableRequest request) { + OnlineTable response = impl.create(request); + return new Wait<>( + (timeout, callback) -> waitGetOnlineTableActive(response.getName(), timeout, callback), + response); } public void delete(String name) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java index 3b29957f1..a1d482fa6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java @@ -21,7 +21,7 @@ public OnlineTable create(CreateOnlineTableRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, OnlineTable.class, headers); + return apiClient.POST(path, request.getTable(), OnlineTable.class, headers); } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java index ccbf21e31..76c85e2cd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java @@ -14,6 +14,9 @@ public enum SecurableType { @JsonProperty("connection") CONNECTION, + @JsonProperty("credential") + CREDENTIAL, + @JsonProperty("external_location") EXTERNAL_LOCATION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java index 
12b687e66..b3d32add0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java @@ -41,14 +41,15 @@ public class StorageCredentialInfo { @JsonProperty("databricks_gcp_service_account") private DatabricksGcpServiceAccountResponse databricksGcpServiceAccount; + /** The full name of the credential. */ + @JsonProperty("full_name") + private String fullName; + /** The unique identifier of the credential. */ @JsonProperty("id") private String id; - /** - * Whether the current securable is accessible from all workspaces or a specific set of - * workspaces. - */ + /** */ @JsonProperty("isolation_mode") private IsolationMode isolationMode; @@ -155,6 +156,15 @@ public DatabricksGcpServiceAccountResponse getDatabricksGcpServiceAccount() { return databricksGcpServiceAccount; } + public StorageCredentialInfo setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + public StorageCredentialInfo setId(String id) { this.id = id; return this; @@ -249,6 +259,7 @@ public boolean equals(Object o) { && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) && Objects.equals(databricksGcpServiceAccount, that.databricksGcpServiceAccount) + && Objects.equals(fullName, that.fullName) && Objects.equals(id, that.id) && Objects.equals(isolationMode, that.isolationMode) && Objects.equals(metastoreId, that.metastoreId) @@ -271,6 +282,7 @@ public int hashCode() { createdAt, createdBy, databricksGcpServiceAccount, + fullName, id, isolationMode, metastoreId, @@ -293,6 +305,7 @@ public String toString() { .add("createdAt", createdAt) .add("createdBy", createdBy) .add("databricksGcpServiceAccount", databricksGcpServiceAccount) + .add("fullName", fullName) .add("id", id) .add("isolationMode", isolationMode) .add("metastoreId", metastoreId) 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java new file mode 100755 index 000000000..a42b25727 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryCredentials.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class TemporaryCredentials { + /** + * AWS temporary credentials for API authentication. Read more at + * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html. + */ + @JsonProperty("aws_temp_credentials") + private AwsCredentials awsTempCredentials; + + /** + * Azure Active Directory token, essentially the Oauth token for Azure Service Principal or + * Managed Identity. Read more at + * https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token + */ + @JsonProperty("azure_aad") + private AzureActiveDirectoryToken azureAad; + + /** + * Server time when the credential will expire, in epoch milliseconds. The API client is advised + * to cache the credential given this expiration time. 
+ */ + @JsonProperty("expiration_time") + private Long expirationTime; + + public TemporaryCredentials setAwsTempCredentials(AwsCredentials awsTempCredentials) { + this.awsTempCredentials = awsTempCredentials; + return this; + } + + public AwsCredentials getAwsTempCredentials() { + return awsTempCredentials; + } + + public TemporaryCredentials setAzureAad(AzureActiveDirectoryToken azureAad) { + this.azureAad = azureAad; + return this; + } + + public AzureActiveDirectoryToken getAzureAad() { + return azureAad; + } + + public TemporaryCredentials setExpirationTime(Long expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + public Long getExpirationTime() { + return expirationTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TemporaryCredentials that = (TemporaryCredentials) o; + return Objects.equals(awsTempCredentials, that.awsTempCredentials) + && Objects.equals(azureAad, that.azureAad) + && Objects.equals(expirationTime, that.expirationTime); + } + + @Override + public int hashCode() { + return Objects.hash(awsTempCredentials, azureAad, expirationTime); + } + + @Override + public String toString() { + return new ToStringer(TemporaryCredentials.class) + .add("awsTempCredentials", awsTempCredentials) + .add("azureAad", azureAad) + .add("expirationTime", expirationTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java index 1b462a9b0..ab1a503f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateBindingsSecurableType.java @@ -13,6 +13,9 @@ public enum UpdateBindingsSecurableType { 
@JsonProperty("external_location") EXTERNAL_LOCATION, + @JsonProperty("service_credential") + SERVICE_CREDENTIAL, + @JsonProperty("storage_credential") STORAGE_CREDENTIAL, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java new file mode 100755 index 000000000..fdaf16438 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java @@ -0,0 +1,177 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateCredentialRequest { + /** The AWS IAM role configuration */ + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + /** Comment associated with the credential. */ + @JsonProperty("comment") + private String comment; + + /** Force update even if there are dependent services. */ + @JsonProperty("force") + private Boolean force; + + /** + * Whether the current securable is accessible from all workspaces or a specific set of + * workspaces. + */ + @JsonProperty("isolation_mode") + private IsolationMode isolationMode; + + /** Name of the credential. */ + @JsonIgnore private String nameArg; + + /** New name of credential. */ + @JsonProperty("new_name") + private String newName; + + /** Username of current owner of credential. */ + @JsonProperty("owner") + private String owner; + + /** Supply true to this argument to skip validation of the updated credential. 
*/ + @JsonProperty("skip_validation") + private Boolean skipValidation; + + public UpdateCredentialRequest setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public UpdateCredentialRequest setAzureManagedIdentity( + AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public UpdateCredentialRequest setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public UpdateCredentialRequest setForce(Boolean force) { + this.force = force; + return this; + } + + public Boolean getForce() { + return force; + } + + public UpdateCredentialRequest setIsolationMode(IsolationMode isolationMode) { + this.isolationMode = isolationMode; + return this; + } + + public IsolationMode getIsolationMode() { + return isolationMode; + } + + public UpdateCredentialRequest setNameArg(String nameArg) { + this.nameArg = nameArg; + return this; + } + + public String getNameArg() { + return nameArg; + } + + public UpdateCredentialRequest setNewName(String newName) { + this.newName = newName; + return this; + } + + public String getNewName() { + return newName; + } + + public UpdateCredentialRequest setOwner(String owner) { + this.owner = owner; + return this; + } + + public String getOwner() { + return owner; + } + + public UpdateCredentialRequest setSkipValidation(Boolean skipValidation) { + this.skipValidation = skipValidation; + return this; + } + + public Boolean getSkipValidation() { + return skipValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCredentialRequest that = (UpdateCredentialRequest) o; + return Objects.equals(awsIamRole, 
that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(comment, that.comment) + && Objects.equals(force, that.force) + && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(nameArg, that.nameArg) + && Objects.equals(newName, that.newName) + && Objects.equals(owner, that.owner) + && Objects.equals(skipValidation, that.skipValidation); + } + + @Override + public int hashCode() { + return Objects.hash( + awsIamRole, + azureManagedIdentity, + comment, + force, + isolationMode, + nameArg, + newName, + owner, + skipValidation); + } + + @Override + public String toString() { + return new ToStringer(UpdateCredentialRequest.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("comment", comment) + .add("force", force) + .add("isolationMode", isolationMode) + .add("nameArg", nameArg) + .add("newName", newName) + .add("owner", owner) + .add("skipValidation", skipValidation) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java index 3c749ac0f..c4017fb49 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java @@ -38,10 +38,7 @@ public class UpdateExternalLocation { @JsonProperty("force") private Boolean force; - /** - * Whether the current securable is accessible from all workspaces or a specific set of - * workspaces. 
- */ + /** */ @JsonProperty("isolation_mode") private IsolationMode isolationMode; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java index 14f92001a..504151504 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java @@ -38,10 +38,7 @@ public class UpdateStorageCredential { @JsonProperty("force") private Boolean force; - /** - * Whether the current securable is accessible from all workspaces or a specific set of - * workspaces. - */ + /** */ @JsonProperty("isolation_mode") private IsolationMode isolationMode; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java new file mode 100755 index 000000000..8bb5ff664 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ValidateCredentialRequest { + /** The AWS IAM role configuration */ + @JsonProperty("aws_iam_role") + private AwsIamRole awsIamRole; + + /** The Azure managed identity configuration. */ + @JsonProperty("azure_managed_identity") + private AzureManagedIdentity azureManagedIdentity; + + /** Required. The name of an existing credential or long-lived cloud credential to validate. 
*/ + @JsonProperty("credential_name") + private String credentialName; + + /** The purpose of the credential. This should only be used when the credential is specified. */ + @JsonProperty("purpose") + private CredentialPurpose purpose; + + public ValidateCredentialRequest setAwsIamRole(AwsIamRole awsIamRole) { + this.awsIamRole = awsIamRole; + return this; + } + + public AwsIamRole getAwsIamRole() { + return awsIamRole; + } + + public ValidateCredentialRequest setAzureManagedIdentity( + AzureManagedIdentity azureManagedIdentity) { + this.azureManagedIdentity = azureManagedIdentity; + return this; + } + + public AzureManagedIdentity getAzureManagedIdentity() { + return azureManagedIdentity; + } + + public ValidateCredentialRequest setCredentialName(String credentialName) { + this.credentialName = credentialName; + return this; + } + + public String getCredentialName() { + return credentialName; + } + + public ValidateCredentialRequest setPurpose(CredentialPurpose purpose) { + this.purpose = purpose; + return this; + } + + public CredentialPurpose getPurpose() { + return purpose; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValidateCredentialRequest that = (ValidateCredentialRequest) o; + return Objects.equals(awsIamRole, that.awsIamRole) + && Objects.equals(azureManagedIdentity, that.azureManagedIdentity) + && Objects.equals(credentialName, that.credentialName) + && Objects.equals(purpose, that.purpose); + } + + @Override + public int hashCode() { + return Objects.hash(awsIamRole, azureManagedIdentity, credentialName, purpose); + } + + @Override + public String toString() { + return new ToStringer(ValidateCredentialRequest.class) + .add("awsIamRole", awsIamRole) + .add("azureManagedIdentity", azureManagedIdentity) + .add("credentialName", credentialName) + .add("purpose", purpose) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java new file mode 100755 index 000000000..ef23a12c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResponse.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ValidateCredentialResponse { + /** The results of the validation check. */ + @JsonProperty("results") + private Collection results; + + public ValidateCredentialResponse setResults(Collection results) { + this.results = results; + return this; + } + + public Collection getResults() { + return results; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ValidateCredentialResponse that = (ValidateCredentialResponse) o; + return Objects.equals(results, that.results); + } + + @Override + public int hashCode() { + return Objects.hash(results); + } + + @Override + public String toString() { + return new ToStringer(ValidateCredentialResponse.class).add("results", results).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java new file mode 100755 index 000000000..7fa55d344 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialResult.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK 
Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** An enum representing the result of a validation check */ +@Generated +public enum ValidateCredentialResult { + FAIL, + PASS, + SKIP, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java index 936bbaf05..f42ce8e40 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java @@ -142,8 +142,9 @@ public ClusterPolicyPermissions setPermissions(String clusterPolicyId) { /** * Set cluster policy permissions. * - *

Sets permissions on a cluster policy. Cluster policies can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public ClusterPolicyPermissions setPermissions(ClusterPolicyPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java index 10654b89f..64f2a13f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java @@ -87,8 +87,9 @@ ClusterPolicyPermissions getPermissions( /** * Set cluster policy permissions. * - *

Sets permissions on a cluster policy. Cluster policies can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ ClusterPolicyPermissions setPermissions( ClusterPolicyPermissionsRequest clusterPolicyPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java index 2fe2801c5..285d93495 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java @@ -394,7 +394,9 @@ public ClusterPermissions setPermissions(String clusterId) { /** * Set cluster permissions. * - *

Sets permissions on a cluster. Clusters can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public ClusterPermissions setPermissions(ClusterPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java index f257b5f61..b85b439b9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java @@ -180,7 +180,9 @@ GetClusterPermissionLevelsResponse getPermissionLevels( /** * Set cluster permissions. * - *

Sets permissions on a cluster. Clusters can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ ClusterPermissions setPermissions(ClusterPermissionsRequest clusterPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java index ff2644f73..886970e07 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java @@ -93,34 +93,27 @@ public CommandStatusResponse waitCommandStatusCommandExecutionCancelled( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( - String clusterId, String commandId, String contextId) throws TimeoutException { - return waitCommandStatusCommandExecutionFinishedOrError( - clusterId, commandId, contextId, Duration.ofMinutes(20), null); + public ContextStatusResponse waitContextStatusCommandExecutionRunning( + String clusterId, String contextId) throws TimeoutException { + return waitContextStatusCommandExecutionRunning( + clusterId, contextId, Duration.ofMinutes(20), null); } - public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( + public ContextStatusResponse waitContextStatusCommandExecutionRunning( String clusterId, - String commandId, String contextId, Duration timeout, - Consumer callback) + Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = - Arrays.asList(CommandStatus.FINISHED, CommandStatus.ERROR); - java.util.List failureStates = - Arrays.asList(CommandStatus.CANCELLED, CommandStatus.CANCELLING); 
+ java.util.List targetStates = Arrays.asList(ContextStatus.RUNNING); + java.util.List failureStates = Arrays.asList(ContextStatus.ERROR); String statusMessage = "polling..."; int attempt = 1; while (System.currentTimeMillis() < deadline) { - CommandStatusResponse poll = - commandStatus( - new CommandStatusRequest() - .setClusterId(clusterId) - .setCommandId(commandId) - .setContextId(contextId)); - CommandStatus status = poll.getStatus(); + ContextStatusResponse poll = + contextStatus(new ContextStatusRequest().setClusterId(clusterId).setContextId(contextId)); + ContextStatus status = poll.getStatus(); statusMessage = String.format("current status: %s", status); if (targetStates.contains(status)) { return poll; @@ -129,14 +122,11 @@ public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = - String.format("failed to reach FINISHED or ERROR, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach RUNNING, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } - String prefix = - String.format( - "clusterId=%s, commandId=%s, contextId=%s", clusterId, commandId, contextId); + String prefix = String.format("clusterId=%s, contextId=%s", clusterId, contextId); int sleep = attempt; if (sleep > 10) { // sleep 10s max per attempt @@ -154,27 +144,34 @@ public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public ContextStatusResponse waitContextStatusCommandExecutionRunning( - String clusterId, String contextId) throws TimeoutException { - return waitContextStatusCommandExecutionRunning( - clusterId, contextId, Duration.ofMinutes(20), null); + public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( + String clusterId, String commandId, String contextId) throws TimeoutException { + 
return waitCommandStatusCommandExecutionFinishedOrError( + clusterId, commandId, contextId, Duration.ofMinutes(20), null); } - public ContextStatusResponse waitContextStatusCommandExecutionRunning( + public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError( String clusterId, + String commandId, String contextId, Duration timeout, - Consumer callback) + Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = Arrays.asList(ContextStatus.RUNNING); - java.util.List failureStates = Arrays.asList(ContextStatus.ERROR); + java.util.List targetStates = + Arrays.asList(CommandStatus.FINISHED, CommandStatus.ERROR); + java.util.List failureStates = + Arrays.asList(CommandStatus.CANCELLED, CommandStatus.CANCELLING); String statusMessage = "polling..."; int attempt = 1; while (System.currentTimeMillis() < deadline) { - ContextStatusResponse poll = - contextStatus(new ContextStatusRequest().setClusterId(clusterId).setContextId(contextId)); - ContextStatus status = poll.getStatus(); + CommandStatusResponse poll = + commandStatus( + new CommandStatusRequest() + .setClusterId(clusterId) + .setCommandId(commandId) + .setContextId(contextId)); + CommandStatus status = poll.getStatus(); statusMessage = String.format("current status: %s", status); if (targetStates.contains(status)) { return poll; @@ -183,11 +180,14 @@ public ContextStatusResponse waitContextStatusCommandExecutionRunning( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach RUNNING, got %s: %s", status, statusMessage); + String msg = + String.format("failed to reach FINISHED or ERROR, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } - String prefix = String.format("clusterId=%s, contextId=%s", clusterId, contextId); + String prefix = + String.format( + "clusterId=%s, commandId=%s, contextId=%s", clusterId, commandId, contextId); int 
sleep = attempt; if (sleep > 10) { // sleep 10s max per attempt diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index 8fb986e43..794e1f595 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -48,7 +48,7 @@ public class EditCluster { @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; - /** ID of the cluser */ + /** ID of the cluster */ @JsonProperty("cluster_id") private String clusterId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java index 11431a3c3..435127f2c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java @@ -145,7 +145,8 @@ public InstancePoolPermissions setPermissions(String instancePoolId) { /** * Set instance pool permissions. * - *

Sets permissions on an instance pool. Instance pools can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ public InstancePoolPermissions setPermissions(InstancePoolPermissionsRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java index ae5e55252..0a7d03ead 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java @@ -82,7 +82,8 @@ InstancePoolPermissions getPermissions( /** * Set instance pool permissions. * - *

Sets permissions on an instance pool. Instance pools can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ InstancePoolPermissions setPermissions( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java index 62d74c45d..9c6ddecd1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java @@ -7,68 +7,20 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Create dashboard */ @Generated public class CreateDashboardRequest { - /** The display name of the dashboard. */ - @JsonProperty("display_name") - private String displayName; + /** */ + @JsonProperty("dashboard") + private Dashboard dashboard; - /** - * The workspace path of the folder containing the dashboard. Includes leading slash and no - * trailing slash. This field is excluded in List Dashboards responses. - */ - @JsonProperty("parent_path") - private String parentPath; - - /** - * The contents of the dashboard in serialized string form. This field is excluded in List - * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which - * includes the `serialized_dashboard` field. This field provides the structure of the JSON string - * that represents the dashboard's layout and components. - * - *

[get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get - */ - @JsonProperty("serialized_dashboard") - private String serializedDashboard; - - /** The warehouse ID used to run the dashboard. */ - @JsonProperty("warehouse_id") - private String warehouseId; - - public CreateDashboardRequest setDisplayName(String displayName) { - this.displayName = displayName; - return this; - } - - public String getDisplayName() { - return displayName; - } - - public CreateDashboardRequest setParentPath(String parentPath) { - this.parentPath = parentPath; - return this; - } - - public String getParentPath() { - return parentPath; - } - - public CreateDashboardRequest setSerializedDashboard(String serializedDashboard) { - this.serializedDashboard = serializedDashboard; - return this; - } - - public String getSerializedDashboard() { - return serializedDashboard; - } - - public CreateDashboardRequest setWarehouseId(String warehouseId) { - this.warehouseId = warehouseId; + public CreateDashboardRequest setDashboard(Dashboard dashboard) { + this.dashboard = dashboard; return this; } - public String getWarehouseId() { - return warehouseId; + public Dashboard getDashboard() { + return dashboard; } @Override @@ -76,24 +28,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateDashboardRequest that = (CreateDashboardRequest) o; - return Objects.equals(displayName, that.displayName) - && Objects.equals(parentPath, that.parentPath) - && Objects.equals(serializedDashboard, that.serializedDashboard) - && Objects.equals(warehouseId, that.warehouseId); + return Objects.equals(dashboard, that.dashboard); } @Override public int hashCode() { - return Objects.hash(displayName, parentPath, serializedDashboard, warehouseId); + return Objects.hash(dashboard); } @Override public String toString() { - return new ToStringer(CreateDashboardRequest.class) - .add("displayName", displayName) - .add("parentPath", 
parentPath) - .add("serializedDashboard", serializedDashboard) - .add("warehouseId", warehouseId) - .toString(); + return new ToStringer(CreateDashboardRequest.class).add("dashboard", dashboard).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java index 8e1d57167..1c364865f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java @@ -8,31 +8,15 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Create dashboard schedule */ @Generated public class CreateScheduleRequest { - /** The cron expression describing the frequency of the periodic refresh for this schedule. */ - @JsonProperty("cron_schedule") - private CronSchedule cronSchedule; - /** UUID identifying the dashboard to which the schedule belongs. */ @JsonIgnore private String dashboardId; - /** The display name for schedule. */ - @JsonProperty("display_name") - private String displayName; - - /** The status indicates whether this schedule is paused or not. 
*/ - @JsonProperty("pause_status") - private SchedulePauseStatus pauseStatus; - - public CreateScheduleRequest setCronSchedule(CronSchedule cronSchedule) { - this.cronSchedule = cronSchedule; - return this; - } - - public CronSchedule getCronSchedule() { - return cronSchedule; - } + /** */ + @JsonProperty("schedule") + private Schedule schedule; public CreateScheduleRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -43,22 +27,13 @@ public String getDashboardId() { return dashboardId; } - public CreateScheduleRequest setDisplayName(String displayName) { - this.displayName = displayName; - return this; - } - - public String getDisplayName() { - return displayName; - } - - public CreateScheduleRequest setPauseStatus(SchedulePauseStatus pauseStatus) { - this.pauseStatus = pauseStatus; + public CreateScheduleRequest setSchedule(Schedule schedule) { + this.schedule = schedule; return this; } - public SchedulePauseStatus getPauseStatus() { - return pauseStatus; + public Schedule getSchedule() { + return schedule; } @Override @@ -66,24 +41,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateScheduleRequest that = (CreateScheduleRequest) o; - return Objects.equals(cronSchedule, that.cronSchedule) - && Objects.equals(dashboardId, that.dashboardId) - && Objects.equals(displayName, that.displayName) - && Objects.equals(pauseStatus, that.pauseStatus); + return Objects.equals(dashboardId, that.dashboardId) && Objects.equals(schedule, that.schedule); } @Override public int hashCode() { - return Objects.hash(cronSchedule, dashboardId, displayName, pauseStatus); + return Objects.hash(dashboardId, schedule); } @Override public String toString() { return new ToStringer(CreateScheduleRequest.class) - .add("cronSchedule", cronSchedule) .add("dashboardId", dashboardId) - .add("displayName", displayName) - .add("pauseStatus", pauseStatus) + .add("schedule", schedule) 
.toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java index 9ece761be..66ce04221 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java @@ -8,6 +8,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Create schedule subscription */ @Generated public class CreateSubscriptionRequest { /** UUID identifying the dashboard to which the subscription belongs. */ @@ -16,9 +17,9 @@ public class CreateSubscriptionRequest { /** UUID identifying the schedule to which the subscription belongs. */ @JsonIgnore private String scheduleId; - /** Subscriber details for users and destinations to be added as subscribers to the schedule. 
*/ - @JsonProperty("subscriber") - private Subscriber subscriber; + /** */ + @JsonProperty("subscription") + private Subscription subscription; public CreateSubscriptionRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -38,13 +39,13 @@ public String getScheduleId() { return scheduleId; } - public CreateSubscriptionRequest setSubscriber(Subscriber subscriber) { - this.subscriber = subscriber; + public CreateSubscriptionRequest setSubscription(Subscription subscription) { + this.subscription = subscription; return this; } - public Subscriber getSubscriber() { - return subscriber; + public Subscription getSubscription() { + return subscription; } @Override @@ -54,12 +55,12 @@ public boolean equals(Object o) { CreateSubscriptionRequest that = (CreateSubscriptionRequest) o; return Objects.equals(dashboardId, that.dashboardId) && Objects.equals(scheduleId, that.scheduleId) - && Objects.equals(subscriber, that.subscriber); + && Objects.equals(subscription, that.subscription); } @Override public int hashCode() { - return Objects.hash(dashboardId, scheduleId, subscriber); + return Objects.hash(dashboardId, scheduleId, subscription); } @Override @@ -67,7 +68,7 @@ public String toString() { return new ToStringer(CreateSubscriptionRequest.class) .add("dashboardId", dashboardId) .add("scheduleId", scheduleId) - .add("subscriber", subscriber) + .add("subscription", subscription) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index c81ff268b..4b8dd23c6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -124,7 +124,7 @@ public Wait createMessage( public GenieGetMessageQueryResultResponse executeMessageQuery( String spaceId, String conversationId, String 
messageId) { return executeMessageQuery( - new ExecuteMessageQueryRequest() + new GenieExecuteMessageQueryRequest() .setSpaceId(spaceId) .setConversationId(conversationId) .setMessageId(messageId)); @@ -136,7 +136,7 @@ public GenieGetMessageQueryResultResponse executeMessageQuery( *

Execute the SQL query in the message. */ public GenieGetMessageQueryResultResponse executeMessageQuery( - ExecuteMessageQueryRequest request) { + GenieExecuteMessageQueryRequest request) { return impl.executeMessageQuery(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java similarity index 78% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java index 9c47c9e63..4ad41a28b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java @@ -9,7 +9,7 @@ /** Execute SQL query in a conversation message */ @Generated -public class ExecuteMessageQueryRequest { +public class GenieExecuteMessageQueryRequest { /** Conversation ID */ @JsonIgnore private String conversationId; @@ -19,7 +19,7 @@ public class ExecuteMessageQueryRequest { /** Genie space ID */ @JsonIgnore private String spaceId; - public ExecuteMessageQueryRequest setConversationId(String conversationId) { + public GenieExecuteMessageQueryRequest setConversationId(String conversationId) { this.conversationId = conversationId; return this; } @@ -28,7 +28,7 @@ public String getConversationId() { return conversationId; } - public ExecuteMessageQueryRequest setMessageId(String messageId) { + public GenieExecuteMessageQueryRequest setMessageId(String messageId) { this.messageId = messageId; return this; } @@ -37,7 +37,7 @@ public String getMessageId() { return messageId; } - public ExecuteMessageQueryRequest setSpaceId(String spaceId) { + public GenieExecuteMessageQueryRequest 
setSpaceId(String spaceId) { this.spaceId = spaceId; return this; } @@ -50,7 +50,7 @@ public String getSpaceId() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ExecuteMessageQueryRequest that = (ExecuteMessageQueryRequest) o; + GenieExecuteMessageQueryRequest that = (GenieExecuteMessageQueryRequest) o; return Objects.equals(conversationId, that.conversationId) && Objects.equals(messageId, that.messageId) && Objects.equals(spaceId, that.spaceId); @@ -63,7 +63,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ExecuteMessageQueryRequest.class) + return new ToStringer(GenieExecuteMessageQueryRequest.class) .add("conversationId", conversationId) .add("messageId", messageId) .add("spaceId", spaceId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index 7a282ccd9..fc0c9236f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -29,7 +29,7 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request) @Override public GenieGetMessageQueryResultResponse executeMessageQuery( - ExecuteMessageQueryRequest request) { + GenieExecuteMessageQueryRequest request) { String path = String.format( "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/execute-query", diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index 325c0df27..5dad69dc0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -30,7 +30,7 @@ GenieMessage createMessage( *

Execute the SQL query in the message. */ GenieGetMessageQueryResultResponse executeMessageQuery( - ExecuteMessageQueryRequest executeMessageQueryRequest); + GenieExecuteMessageQueryRequest genieExecuteMessageQueryRequest); /** * Get conversation message. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java index 8f7f66050..6f4978b04 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java @@ -27,10 +27,6 @@ public LakeviewAPI(LakeviewService mock) { impl = mock; } - public Dashboard create(String displayName) { - return create(new CreateDashboardRequest().setDisplayName(displayName)); - } - /** * Create dashboard. * @@ -40,9 +36,8 @@ public Dashboard create(CreateDashboardRequest request) { return impl.create(request); } - public Schedule createSchedule(String dashboardId, CronSchedule cronSchedule) { - return createSchedule( - new CreateScheduleRequest().setDashboardId(dashboardId).setCronSchedule(cronSchedule)); + public Schedule createSchedule(String dashboardId) { + return createSchedule(new CreateScheduleRequest().setDashboardId(dashboardId)); } /** Create dashboard schedule. */ @@ -50,13 +45,9 @@ public Schedule createSchedule(CreateScheduleRequest request) { return impl.createSchedule(request); } - public Subscription createSubscription( - String dashboardId, String scheduleId, Subscriber subscriber) { + public Subscription createSubscription(String dashboardId, String scheduleId) { return createSubscription( - new CreateSubscriptionRequest() - .setDashboardId(dashboardId) - .setScheduleId(scheduleId) - .setSubscriber(subscriber)); + new CreateSubscriptionRequest().setDashboardId(dashboardId).setScheduleId(scheduleId)); } /** Create schedule subscription. 
*/ @@ -256,12 +247,9 @@ public Dashboard update(UpdateDashboardRequest request) { return impl.update(request); } - public Schedule updateSchedule(String dashboardId, String scheduleId, CronSchedule cronSchedule) { + public Schedule updateSchedule(String dashboardId, String scheduleId) { return updateSchedule( - new UpdateScheduleRequest() - .setDashboardId(dashboardId) - .setScheduleId(scheduleId) - .setCronSchedule(cronSchedule)); + new UpdateScheduleRequest().setDashboardId(dashboardId).setScheduleId(scheduleId)); } /** Update dashboard schedule. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java index f6b468526..3d9689b2d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java @@ -21,7 +21,7 @@ public Dashboard create(CreateDashboardRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Dashboard.class, headers); + return apiClient.POST(path, request.getDashboard(), Dashboard.class, headers); } @Override @@ -31,7 +31,7 @@ public Schedule createSchedule(CreateScheduleRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Schedule.class, headers); + return apiClient.POST(path, request.getSchedule(), Schedule.class, headers); } @Override @@ -43,7 +43,7 @@ public Subscription createSubscription(CreateSubscriptionRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Subscription.class, headers); + return 
apiClient.POST(path, request.getSubscription(), Subscription.class, headers); } @Override @@ -177,7 +177,7 @@ public Dashboard update(UpdateDashboardRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, Dashboard.class, headers); + return apiClient.PATCH(path, request.getDashboard(), Dashboard.class, headers); } @Override @@ -189,6 +189,6 @@ public Schedule updateSchedule(UpdateScheduleRequest request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, Schedule.class, headers); + return apiClient.PUT(path, request.getSchedule(), Schedule.class, headers); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java index 5e1c94580..12c0af08b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java @@ -31,6 +31,7 @@ public enum MessageErrorType { LOCAL_CONTEXT_EXCEEDED_EXCEPTION, MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION, MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION, + NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE, NO_QUERY_TO_VISUALIZE_EXCEPTION, NO_TABLES_TO_QUERY_EXCEPTION, RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java index 8897fe162..0cf82ee0d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java @@ -45,6 +45,10 @@ public class Schedule 
{ @JsonProperty("update_time") private String updateTime; + /** The warehouse id to run the dashboard with for the schedule. */ + @JsonProperty("warehouse_id") + private String warehouseId; + public Schedule setCreateTime(String createTime) { this.createTime = createTime; return this; @@ -117,6 +121,15 @@ public String getUpdateTime() { return updateTime; } + public Schedule setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -129,7 +142,8 @@ public boolean equals(Object o) { && Objects.equals(etag, that.etag) && Objects.equals(pauseStatus, that.pauseStatus) && Objects.equals(scheduleId, that.scheduleId) - && Objects.equals(updateTime, that.updateTime); + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(warehouseId, that.warehouseId); } @Override @@ -142,7 +156,8 @@ public int hashCode() { etag, pauseStatus, scheduleId, - updateTime); + updateTime, + warehouseId); } @Override @@ -156,6 +171,7 @@ public String toString() { .add("pauseStatus", pauseStatus) .add("scheduleId", scheduleId) .add("updateTime", updateTime) + .add("warehouseId", warehouseId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java index f9821b02a..84298ffd8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java @@ -8,36 +8,24 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Update dashboard */ @Generated public class UpdateDashboardRequest { + /** */ + @JsonProperty("dashboard") + private Dashboard dashboard; + /** UUID 
identifying the dashboard. */ @JsonIgnore private String dashboardId; - /** The display name of the dashboard. */ - @JsonProperty("display_name") - private String displayName; - - /** - * The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard - * has not been modified since the last read. This field is excluded in List Dashboards responses. - */ - @JsonProperty("etag") - private String etag; - - /** - * The contents of the dashboard in serialized string form. This field is excluded in List - * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which - * includes the `serialized_dashboard` field. This field provides the structure of the JSON string - * that represents the dashboard's layout and components. - * - *

[get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get - */ - @JsonProperty("serialized_dashboard") - private String serializedDashboard; + public UpdateDashboardRequest setDashboard(Dashboard dashboard) { + this.dashboard = dashboard; + return this; + } - /** The warehouse ID used to run the dashboard. */ - @JsonProperty("warehouse_id") - private String warehouseId; + public Dashboard getDashboard() { + return dashboard; + } public UpdateDashboardRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; @@ -48,67 +36,25 @@ public String getDashboardId() { return dashboardId; } - public UpdateDashboardRequest setDisplayName(String displayName) { - this.displayName = displayName; - return this; - } - - public String getDisplayName() { - return displayName; - } - - public UpdateDashboardRequest setEtag(String etag) { - this.etag = etag; - return this; - } - - public String getEtag() { - return etag; - } - - public UpdateDashboardRequest setSerializedDashboard(String serializedDashboard) { - this.serializedDashboard = serializedDashboard; - return this; - } - - public String getSerializedDashboard() { - return serializedDashboard; - } - - public UpdateDashboardRequest setWarehouseId(String warehouseId) { - this.warehouseId = warehouseId; - return this; - } - - public String getWarehouseId() { - return warehouseId; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateDashboardRequest that = (UpdateDashboardRequest) o; - return Objects.equals(dashboardId, that.dashboardId) - && Objects.equals(displayName, that.displayName) - && Objects.equals(etag, that.etag) - && Objects.equals(serializedDashboard, that.serializedDashboard) - && Objects.equals(warehouseId, that.warehouseId); + return Objects.equals(dashboard, that.dashboard) + && Objects.equals(dashboardId, that.dashboardId); } @Override public int hashCode() { - return 
Objects.hash(dashboardId, displayName, etag, serializedDashboard, warehouseId); + return Objects.hash(dashboard, dashboardId); } @Override public String toString() { return new ToStringer(UpdateDashboardRequest.class) + .add("dashboard", dashboard) .add("dashboardId", dashboardId) - .add("displayName", displayName) - .add("etag", etag) - .add("serializedDashboard", serializedDashboard) - .add("warehouseId", warehouseId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java index cfc48f115..cffdc6370 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java @@ -8,43 +8,19 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Update dashboard schedule */ @Generated public class UpdateScheduleRequest { - /** The cron expression describing the frequency of the periodic refresh for this schedule. */ - @JsonProperty("cron_schedule") - private CronSchedule cronSchedule; - /** UUID identifying the dashboard to which the schedule belongs. */ @JsonIgnore private String dashboardId; - /** The display name for schedule. */ - @JsonProperty("display_name") - private String displayName; - - /** - * The etag for the schedule. Must be left empty on create, must be provided on updates to ensure - * that the schedule has not been modified since the last read, and can be optionally provided on - * delete. - */ - @JsonProperty("etag") - private String etag; - - /** The status indicates whether this schedule is paused or not. */ - @JsonProperty("pause_status") - private SchedulePauseStatus pauseStatus; + /** */ + @JsonProperty("schedule") + private Schedule schedule; /** UUID identifying the schedule. 
*/ @JsonIgnore private String scheduleId; - public UpdateScheduleRequest setCronSchedule(CronSchedule cronSchedule) { - this.cronSchedule = cronSchedule; - return this; - } - - public CronSchedule getCronSchedule() { - return cronSchedule; - } - public UpdateScheduleRequest setDashboardId(String dashboardId) { this.dashboardId = dashboardId; return this; @@ -54,31 +30,13 @@ public String getDashboardId() { return dashboardId; } - public UpdateScheduleRequest setDisplayName(String displayName) { - this.displayName = displayName; - return this; - } - - public String getDisplayName() { - return displayName; - } - - public UpdateScheduleRequest setEtag(String etag) { - this.etag = etag; - return this; - } - - public String getEtag() { - return etag; - } - - public UpdateScheduleRequest setPauseStatus(SchedulePauseStatus pauseStatus) { - this.pauseStatus = pauseStatus; + public UpdateScheduleRequest setSchedule(Schedule schedule) { + this.schedule = schedule; return this; } - public SchedulePauseStatus getPauseStatus() { - return pauseStatus; + public Schedule getSchedule() { + return schedule; } public UpdateScheduleRequest setScheduleId(String scheduleId) { @@ -95,27 +53,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdateScheduleRequest that = (UpdateScheduleRequest) o; - return Objects.equals(cronSchedule, that.cronSchedule) - && Objects.equals(dashboardId, that.dashboardId) - && Objects.equals(displayName, that.displayName) - && Objects.equals(etag, that.etag) - && Objects.equals(pauseStatus, that.pauseStatus) + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(schedule, that.schedule) && Objects.equals(scheduleId, that.scheduleId); } @Override public int hashCode() { - return Objects.hash(cronSchedule, dashboardId, displayName, etag, pauseStatus, scheduleId); + return Objects.hash(dashboardId, schedule, scheduleId); } @Override public String toString() { return 
new ToStringer(UpdateScheduleRequest.class) - .add("cronSchedule", cronSchedule) .add("dashboardId", dashboardId) - .add("displayName", displayName) - .add("etag", etag) - .add("pauseStatus", pauseStatus) + .add("schedule", schedule) .add("scheduleId", scheduleId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java index 2eb2601d9..521824587 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java @@ -118,8 +118,9 @@ public ObjectPermissions set(String requestObjectType, String requestObjectId) { /** * Set object permissions. * - *

Sets permissions on an object. Objects can inherit permissions from their parent objects or - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their parent + * objects or root object. */ public ObjectPermissions set(PermissionsRequest request) { return impl.set(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java index e641809ea..03ccea94e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java @@ -80,8 +80,9 @@ GetPermissionLevelsResponse getPermissionLevels( /** * Set object permissions. * - *

Sets permissions on an object. Objects can inherit permissions from their parent objects or - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their parent + * objects or root object. */ ObjectPermissions set(PermissionsRequest permissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java index e2764fd02..d079aba02 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java @@ -132,7 +132,9 @@ public void patch(PartialUpdate request) { /** * Set password permissions. * - *

Sets permissions on all passwords. Passwords can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public PasswordPermissions setPermissions(PasswordPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java index f922d8310..83d9421eb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java @@ -76,7 +76,9 @@ public interface UsersService { /** * Set password permissions. * - *

Sets permissions on all passwords. Passwords can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ PasswordPermissions setPermissions(PasswordPermissionsRequest passwordPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java index 88691b4d7..bf870d3f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java @@ -22,8 +22,8 @@ public class GetRunRequest { private Boolean includeResolvedValues; /** - * To list the next page or the previous page of job tasks, set this field to the value of the - * `next_page_token` or `prev_page_token` returned in the GetJob response. + * To list the next page of job tasks, set this field to the value of the `next_page_token` + * returned in the GetJob response. */ @JsonIgnore @QueryParam("page_token") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java index a5ec8c8b3..5b0ce638a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java @@ -341,7 +341,9 @@ public JobPermissions setPermissions(String jobId) { /** * Set job permissions. * - *

Sets permissions on a job. Jobs can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public JobPermissions setPermissions(JobPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java index 57433b9a8..46696459b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java @@ -156,7 +156,9 @@ GetJobPermissionLevelsResponse getPermissionLevels( /** * Set job permissions. * - *

Sets permissions on a job. Jobs can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ JobPermissions setPermissions(JobPermissionsRequest jobPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java index d79a76992..7fb9ace32 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java @@ -25,8 +25,9 @@ public class RepairRun { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters - * containing information about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("jar_params") private Collection jarParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java index 20ace3a6b..f076ba72e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java @@ -130,10 +130,6 @@ public class Run { @JsonProperty("overriding_parameters") private RunParameters overridingParameters; - /** A token that can be used to list the previous page of sub-resources. */ - @JsonProperty("prev_page_token") - private String prevPageToken; - /** The time in milliseconds that the run has spent in the queue. */ @JsonProperty("queue_duration") private Long queueDuration; @@ -386,15 +382,6 @@ public RunParameters getOverridingParameters() { return overridingParameters; } - public Run setPrevPageToken(String prevPageToken) { - this.prevPageToken = prevPageToken; - return this; - } - - public String getPrevPageToken() { - return prevPageToken; - } - public Run setQueueDuration(Long queueDuration) { this.queueDuration = queueDuration; return this; @@ -553,7 +540,6 @@ public boolean equals(Object o) { && Objects.equals(numberInJob, that.numberInJob) && Objects.equals(originalAttemptRunId, that.originalAttemptRunId) && Objects.equals(overridingParameters, that.overridingParameters) - && Objects.equals(prevPageToken, that.prevPageToken) && Objects.equals(queueDuration, that.queueDuration) && Objects.equals(repairHistory, that.repairHistory) && Objects.equals(runDuration, that.runDuration) @@ -592,7 +578,6 @@ public int hashCode() { numberInJob, originalAttemptRunId, overridingParameters, - prevPageToken, queueDuration, repairHistory, runDuration, @@ -631,7 +616,6 @@ public String toString() { .add("numberInJob", numberInJob) 
.add("originalAttemptRunId", originalAttemptRunId) .add("overridingParameters", overridingParameters) - .add("prevPageToken", prevPageToken) .add("queueDuration", queueDuration) .add("repairHistory", repairHistory) .add("runDuration", runDuration) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java index eb1f27f01..593a3dc97 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java @@ -25,8 +25,9 @@ public class RunJobTask { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters - * containing information about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("jar_params") private Collection jarParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java index fce617b43..d99cb5c75 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java @@ -42,8 +42,9 @@ public class RunNow { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters - * containing information about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("jar_params") private Collection jarParams; @@ -76,6 +77,13 @@ public class RunNow { @JsonProperty("notebook_params") private Map notebookParams; + /** + * A list of task keys to run inside of the job. If this field is not provided, all tasks in the + * job will be run. + */ + @JsonProperty("only") + private Collection only; + /** Controls whether the pipeline should perform a full refresh */ @JsonProperty("pipeline_params") private PipelineParams pipelineParams; @@ -188,6 +196,15 @@ public Map getNotebookParams() { return notebookParams; } + public RunNow setOnly(Collection only) { + this.only = only; + return this; + } + + public Collection getOnly() { + return only; + } + public RunNow setPipelineParams(PipelineParams pipelineParams) { this.pipelineParams = pipelineParams; return this; @@ -253,6 +270,7 @@ public boolean equals(Object o) { && Objects.equals(jobId, that.jobId) && Objects.equals(jobParameters, that.jobParameters) && Objects.equals(notebookParams, that.notebookParams) + && Objects.equals(only, that.only) && Objects.equals(pipelineParams, that.pipelineParams) && Objects.equals(pythonNamedParams, that.pythonNamedParams) && Objects.equals(pythonParams, that.pythonParams) @@ -270,6 +288,7 @@ public int hashCode() { jobId, jobParameters, notebookParams, + only, pipelineParams, pythonNamedParams, pythonParams, @@ -287,6 +306,7 @@ public String toString() { .add("jobId", jobId) .add("jobParameters", jobParameters) .add("notebookParams", notebookParams) + .add("only", only) .add("pipelineParams", pipelineParams) .add("pythonNamedParams", pythonNamedParams) .add("pythonParams", pythonParams) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java index b76cd8392..be39a1c4e 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java @@ -25,8 +25,9 @@ public class RunParameters { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters - * containing information about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("jar_params") private Collection jarParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index 4a4b436a0..07c7e410a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -38,16 +38,16 @@ public class RunTask { private ClusterInstance clusterInstance; /** - * If condition_task, specifies a condition with an outcome that can be used to control the - * execution of other tasks. Does not require a cluster to execute and does not support retries or - * notifications. + * The task evaluates a condition that can be used to control the execution of other tasks when + * the `condition_task` field is present. The condition task does not require a cluster to execute + * and does not support retries or notifications. */ @JsonProperty("condition_task") private RunConditionTask conditionTask; /** - * If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and - * the ability to use a serverless or a pro SQL warehouse. + * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task + * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. */ @JsonProperty("dbt_task") private DbtTask dbtTask; @@ -103,7 +103,10 @@ public class RunTask { @JsonProperty("existing_cluster_id") private String existingClusterId; - /** If for_each_task, indicates that this task must execute the nested task within it. */ + /** + * The task executes a nested task for every input provided when the `for_each_task` field is + * present. 
+ */ @JsonProperty("for_each_task") private RunForEachTask forEachTask; @@ -136,10 +139,7 @@ public class RunTask { @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; - /** - * If notebook_task, indicates that this task must run a notebook. This field may not be specified - * in conjunction with spark_jar_task. - */ + /** The task runs a notebook when the `notebook_task` field is present. */ @JsonProperty("notebook_task") private NotebookTask notebookTask; @@ -150,11 +150,14 @@ public class RunTask { @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; - /** If pipeline_task, indicates that this task must execute a Pipeline. */ + /** + * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines + * configured to use triggered more are supported. + */ @JsonProperty("pipeline_task") private PipelineTask pipelineTask; - /** If python_wheel_task, indicates that this job must execute a PythonWheel. */ + /** The task runs a Python wheel when the `python_wheel_task` field is present. */ @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; @@ -182,7 +185,7 @@ public class RunTask { @JsonProperty("run_if") private RunIf runIf; - /** If run_job_task, indicates that this task must execute another job. */ + /** The task triggers another job when the `run_job_task` field is present. */ @JsonProperty("run_job_task") private RunJobTask runJobTask; @@ -200,17 +203,17 @@ public class RunTask { @JsonProperty("setup_duration") private Long setupDuration; - /** If spark_jar_task, indicates that this task must run a JAR. */ + /** The task runs a JAR when the `spark_jar_task` field is present. */ @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; - /** If spark_python_task, indicates that this task must run a Python file. */ + /** The task runs a Python file when the `spark_python_task` field is present. 
*/ @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; /** - * If `spark_submit_task`, indicates that this task must be launched by the spark submit script. - * This task can run only on new clusters. + * (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + * This task can run only on new clusters and is not compatible with serverless compute. * *

In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, * use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark @@ -228,7 +231,10 @@ public class RunTask { @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; - /** If sql_task, indicates that this job must execute a SQL task. */ + /** + * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when + * the `sql_task` field is present. + */ @JsonProperty("sql_task") private SqlTask sqlTask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 6f8a12c10..be1e79187 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -11,16 +11,16 @@ @Generated public class SubmitTask { /** - * If condition_task, specifies a condition with an outcome that can be used to control the - * execution of other tasks. Does not require a cluster to execute and does not support retries or - * notifications. + * The task evaluates a condition that can be used to control the execution of other tasks when + * the `condition_task` field is present. The condition task does not require a cluster to execute + * and does not support retries or notifications. */ @JsonProperty("condition_task") private ConditionTask conditionTask; /** - * If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and - * the ability to use a serverless or a pro SQL warehouse. + * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task + * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. 
*/ @JsonProperty("dbt_task") private DbtTask dbtTask; @@ -59,7 +59,10 @@ public class SubmitTask { @JsonProperty("existing_cluster_id") private String existingClusterId; - /** If for_each_task, indicates that this task must execute the nested task within it. */ + /** + * The task executes a nested task for every input provided when the `for_each_task` field is + * present. + */ @JsonProperty("for_each_task") private ForEachTask forEachTask; @@ -78,10 +81,7 @@ public class SubmitTask { @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; - /** - * If notebook_task, indicates that this task must run a notebook. This field may not be specified - * in conjunction with spark_jar_task. - */ + /** The task runs a notebook when the `notebook_task` field is present. */ @JsonProperty("notebook_task") private NotebookTask notebookTask; @@ -92,11 +92,14 @@ public class SubmitTask { @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; - /** If pipeline_task, indicates that this task must execute a Pipeline. */ + /** + * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines + * configured to use triggered more are supported. + */ @JsonProperty("pipeline_task") private PipelineTask pipelineTask; - /** If python_wheel_task, indicates that this job must execute a PythonWheel. */ + /** The task runs a Python wheel when the `python_wheel_task` field is present. */ @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; @@ -108,21 +111,21 @@ public class SubmitTask { @JsonProperty("run_if") private RunIf runIf; - /** If run_job_task, indicates that this task must execute another job. */ + /** The task triggers another job when the `run_job_task` field is present. */ @JsonProperty("run_job_task") private RunJobTask runJobTask; - /** If spark_jar_task, indicates that this task must run a JAR. 
*/ + /** The task runs a JAR when the `spark_jar_task` field is present. */ @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; - /** If spark_python_task, indicates that this task must run a Python file. */ + /** The task runs a Python file when the `spark_python_task` field is present. */ @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; /** - * If `spark_submit_task`, indicates that this task must be launched by the spark submit script. - * This task can run only on new clusters. + * (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + * This task can run only on new clusters and is not compatible with serverless compute. * *

In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, * use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark @@ -140,7 +143,10 @@ public class SubmitTask { @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; - /** If sql_task, indicates that this job must execute a SQL task. */ + /** + * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when + * the `sql_task` field is present. + */ @JsonProperty("sql_task") private SqlTask sqlTask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 0f9026396..011b3ee30 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -11,16 +11,16 @@ @Generated public class Task { /** - * If condition_task, specifies a condition with an outcome that can be used to control the - * execution of other tasks. Does not require a cluster to execute and does not support retries or - * notifications. + * The task evaluates a condition that can be used to control the execution of other tasks when + * the `condition_task` field is present. The condition task does not require a cluster to execute + * and does not support retries or notifications. */ @JsonProperty("condition_task") private ConditionTask conditionTask; /** - * If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and - * the ability to use a serverless or a pro SQL warehouse. + * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task + * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. 
*/ @JsonProperty("dbt_task") private DbtTask dbtTask; @@ -64,7 +64,10 @@ public class Task { @JsonProperty("existing_cluster_id") private String existingClusterId; - /** If for_each_task, indicates that this task must execute the nested task within it. */ + /** + * The task executes a nested task for every input provided when the `for_each_task` field is + * present. + */ @JsonProperty("for_each_task") private ForEachTask forEachTask; @@ -106,10 +109,7 @@ public class Task { @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; - /** - * If notebook_task, indicates that this task must run a notebook. This field may not be specified - * in conjunction with spark_jar_task. - */ + /** The task runs a notebook when the `notebook_task` field is present. */ @JsonProperty("notebook_task") private NotebookTask notebookTask; @@ -120,11 +120,14 @@ public class Task { @JsonProperty("notification_settings") private TaskNotificationSettings notificationSettings; - /** If pipeline_task, indicates that this task must execute a Pipeline. */ + /** + * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines + * configured to use triggered more are supported. + */ @JsonProperty("pipeline_task") private PipelineTask pipelineTask; - /** If python_wheel_task, indicates that this job must execute a PythonWheel. */ + /** The task runs a Python wheel when the `python_wheel_task` field is present. */ @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; @@ -148,21 +151,21 @@ public class Task { @JsonProperty("run_if") private RunIf runIf; - /** If run_job_task, indicates that this task must execute another job. */ + /** The task triggers another job when the `run_job_task` field is present. */ @JsonProperty("run_job_task") private RunJobTask runJobTask; - /** If spark_jar_task, indicates that this task must run a JAR. */ + /** The task runs a JAR when the `spark_jar_task` field is present. 
*/ @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; - /** If spark_python_task, indicates that this task must run a Python file. */ + /** The task runs a Python file when the `spark_python_task` field is present. */ @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; /** - * If `spark_submit_task`, indicates that this task must be launched by the spark submit script. - * This task can run only on new clusters. + * (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. + * This task can run only on new clusters and is not compatible with serverless compute. * *

In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, * use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark @@ -180,7 +183,10 @@ public class Task { @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; - /** If sql_task, indicates that this job must execute a SQL task. */ + /** + * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when + * the `sql_task` field is present. + */ @JsonProperty("sql_task") private SqlTask sqlTask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java index e37118d9e..699094949 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java @@ -11,4 +11,5 @@ public enum AssetType { ASSET_TYPE_MEDIA, ASSET_TYPE_MODEL, ASSET_TYPE_NOTEBOOK, + ASSET_TYPE_PARTNER_INTEGRATION, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index 7f3923351..8c4c27e8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -463,7 +463,8 @@ public ExperimentPermissions setPermissions(String experimentId) { /** * Set experiment permissions. * - *

Sets permissions on an experiment. Experiments can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ public ExperimentPermissions setPermissions(ExperimentPermissionsRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index c4a641627..323c848c4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -274,7 +274,8 @@ ExperimentPermissions getPermissions( /** * Set experiment permissions. * - *

Sets permissions on an experiment. Experiments can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ ExperimentPermissions setPermissions(ExperimentPermissionsRequest experimentPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java index e70d42047..9c4325eb9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java @@ -478,8 +478,9 @@ public RegisteredModelPermissions setPermissions(String registeredModelId) { /** * Set registered model permissions. * - *

Sets permissions on a registered model. Registered models can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public RegisteredModelPermissions setPermissions(RegisteredModelPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java index bf3535616..3f9dfae1a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java @@ -241,8 +241,9 @@ SearchModelVersionsResponse searchModelVersions( /** * Set registered model permissions. * - *

Sets permissions on a registered model. Registered models can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ RegisteredModelPermissions setPermissions( RegisteredModelPermissionsRequest registeredModelPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java index 3ac7226b8..816fb09bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java @@ -3,6 +3,7 @@ package com.databricks.sdk.service.oauth2; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; @@ -10,9 +11,31 @@ /** List service principal secrets */ @Generated public class ListServicePrincipalSecretsRequest { + /** + * An opaque page token which was the `next_page_token` in the response of the previous request to + * list the secrets for this service principal. Provide this token to retrieve the next page of + * secret entries. When providing a `page_token`, all other parameters provided to the request + * must match the previous request. To list all of the secrets for a service principal, it is + * necessary to continue requesting pages of entries until the response contains no + * `next_page_token`. Note that the number of entries returned must not be used to determine when + * the listing is complete. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + /** The service principal ID. 
*/ @JsonIgnore private Long servicePrincipalId; + public ListServicePrincipalSecretsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + public ListServicePrincipalSecretsRequest setServicePrincipalId(Long servicePrincipalId) { this.servicePrincipalId = servicePrincipalId; return this; @@ -27,17 +50,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListServicePrincipalSecretsRequest that = (ListServicePrincipalSecretsRequest) o; - return Objects.equals(servicePrincipalId, that.servicePrincipalId); + return Objects.equals(pageToken, that.pageToken) + && Objects.equals(servicePrincipalId, that.servicePrincipalId); } @Override public int hashCode() { - return Objects.hash(servicePrincipalId); + return Objects.hash(pageToken, servicePrincipalId); } @Override public String toString() { return new ToStringer(ListServicePrincipalSecretsRequest.class) + .add("pageToken", pageToken) .add("servicePrincipalId", servicePrincipalId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java index 1b0d38c8c..dd971e938 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java @@ -10,10 +10,23 @@ @Generated public class ListServicePrincipalSecretsResponse { + /** A token, which can be sent as `page_token` to retrieve the next page. 
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + /** List of the secrets */ @JsonProperty("secrets") private Collection secrets; + public ListServicePrincipalSecretsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + public ListServicePrincipalSecretsResponse setSecrets(Collection secrets) { this.secrets = secrets; return this; @@ -28,17 +41,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListServicePrincipalSecretsResponse that = (ListServicePrincipalSecretsResponse) o; - return Objects.equals(secrets, that.secrets); + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(secrets, that.secrets); } @Override public int hashCode() { - return Objects.hash(secrets); + return Objects.hash(nextPageToken, secrets); } @Override public String toString() { return new ToStringer(ListServicePrincipalSecretsResponse.class) + .add("nextPageToken", nextPageToken) .add("secrets", secrets) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java index bbadb41e0..534617edb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java @@ -80,7 +80,16 @@ public Iterable list(long servicePrincipalId) { */ public Iterable list(ListServicePrincipalSecretsRequest request) { return new Paginator<>( - request, impl::list, ListServicePrincipalSecretsResponse::getSecrets, response -> null); + request, + impl::list, + ListServicePrincipalSecretsResponse::getSecrets, + response -> { + String token = 
response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); } public ServicePrincipalSecretsService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index e51d9ff19..6a35d6632 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -64,7 +64,7 @@ public class CreatePipeline { @JsonProperty("filters") private Filters filters; - /** The definition of a gateway pipeline to support CDC. */ + /** The definition of a gateway pipeline to support change data capture. */ @JsonProperty("gateway_definition") private IngestionGatewayPipelineDefinition gatewayDefinition; @@ -95,6 +95,10 @@ public class CreatePipeline { @JsonProperty("photon") private Boolean photon; + /** Restart window of this pipeline. */ + @JsonProperty("restart_window") + private RestartWindow restartWindow; + /** * The default schema (database) where tables are read from or published to. The presence of this * field implies that the pipeline is in direct publishing mode. 
@@ -293,6 +297,15 @@ public Boolean getPhoton() { return photon; } + public CreatePipeline setRestartWindow(RestartWindow restartWindow) { + this.restartWindow = restartWindow; + return this; + } + + public RestartWindow getRestartWindow() { + return restartWindow; + } + public CreatePipeline setSchema(String schema) { this.schema = schema; return this; @@ -362,6 +375,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) + && Objects.equals(restartWindow, that.restartWindow) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) @@ -391,6 +405,7 @@ public int hashCode() { name, notifications, photon, + restartWindow, schema, serverless, storage, @@ -420,6 +435,7 @@ public String toString() { .add("name", name) .add("notifications", notifications) .add("photon", photon) + .add("restartWindow", restartWindow) .add("schema", schema) .add("serverless", serverless) .add("storage", storage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 52cbcee7c..878e76bb3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -69,7 +69,7 @@ public class EditPipeline { @JsonProperty("filters") private Filters filters; - /** The definition of a gateway pipeline to support CDC. */ + /** The definition of a gateway pipeline to support change data capture. */ @JsonProperty("gateway_definition") private IngestionGatewayPipelineDefinition gatewayDefinition; @@ -104,6 +104,10 @@ public class EditPipeline { @JsonProperty("pipeline_id") private String pipelineId; + /** Restart window of this pipeline. 
*/ + @JsonProperty("restart_window") + private RestartWindow restartWindow; + /** * The default schema (database) where tables are read from or published to. The presence of this * field implies that the pipeline is in direct publishing mode. @@ -311,6 +315,15 @@ public String getPipelineId() { return pipelineId; } + public EditPipeline setRestartWindow(RestartWindow restartWindow) { + this.restartWindow = restartWindow; + return this; + } + + public RestartWindow getRestartWindow() { + return restartWindow; + } + public EditPipeline setSchema(String schema) { this.schema = schema; return this; @@ -381,6 +394,7 @@ public boolean equals(Object o) { && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) && Objects.equals(pipelineId, that.pipelineId) + && Objects.equals(restartWindow, that.restartWindow) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) @@ -411,6 +425,7 @@ public int hashCode() { notifications, photon, pipelineId, + restartWindow, schema, serverless, storage, @@ -441,6 +456,7 @@ public String toString() { .add("notifications", notifications) .add("photon", photon) .add("pipelineId", pipelineId) + .add("restartWindow", restartWindow) .add("schema", schema) .add("serverless", serverless) .add("storage", storage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java index 92f853aed..c1ed47fe4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java @@ -9,15 +9,15 @@ @Generated public class IngestionConfig { - /** Select tables from a specific source report. */ + /** Select a specific source report. 
*/ @JsonProperty("report") private ReportSpec report; - /** Select tables from a specific source schema. */ + /** Select all tables from a specific source schema. */ @JsonProperty("schema") private SchemaSpec schema; - /** Select tables from a specific source table. */ + /** Select a specific source table. */ @JsonProperty("table") private TableSpec table; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java index e989241e4..58142fafd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java @@ -10,12 +10,19 @@ @Generated public class IngestionGatewayPipelineDefinition { /** - * Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the - * source. + * [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this + * gateway pipeline uses to communicate with the source. */ @JsonProperty("connection_id") private String connectionId; + /** + * Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the + * source. + */ + @JsonProperty("connection_name") + private String connectionName; + /** Required, Immutable. The name of the catalog for the gateway pipeline's storage location. 
*/ @JsonProperty("gateway_storage_catalog") private String gatewayStorageCatalog; @@ -41,6 +48,15 @@ public String getConnectionId() { return connectionId; } + public IngestionGatewayPipelineDefinition setConnectionName(String connectionName) { + this.connectionName = connectionName; + return this; + } + + public String getConnectionName() { + return connectionName; + } + public IngestionGatewayPipelineDefinition setGatewayStorageCatalog(String gatewayStorageCatalog) { this.gatewayStorageCatalog = gatewayStorageCatalog; return this; @@ -74,6 +90,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; IngestionGatewayPipelineDefinition that = (IngestionGatewayPipelineDefinition) o; return Objects.equals(connectionId, that.connectionId) + && Objects.equals(connectionName, that.connectionName) && Objects.equals(gatewayStorageCatalog, that.gatewayStorageCatalog) && Objects.equals(gatewayStorageName, that.gatewayStorageName) && Objects.equals(gatewayStorageSchema, that.gatewayStorageSchema); @@ -82,13 +99,18 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - connectionId, gatewayStorageCatalog, gatewayStorageName, gatewayStorageSchema); + connectionId, + connectionName, + gatewayStorageCatalog, + gatewayStorageName, + gatewayStorageSchema); } @Override public String toString() { return new ToStringer(IngestionGatewayPipelineDefinition.class) .add("connectionId", connectionId) + .add("connectionName", connectionName) .add("gatewayStorageCatalog", gatewayStorageCatalog) .add("gatewayStorageName", gatewayStorageName) .add("gatewayStorageSchema", gatewayStorageSchema) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java index 7fa3f4d9e..8cd10563f 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java @@ -11,15 +11,15 @@ @Generated public class IngestionPipelineDefinition { /** - * Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the - * source. Specify either ingestion_gateway_id or connection_name. + * Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with + * the source. This is used with connectors for applications like Salesforce, Workday, and so on. */ @JsonProperty("connection_name") private String connectionName; /** - * Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate - * with the source. Specify either ingestion_gateway_id or connection_name. + * Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate + * with the source database. This is used with connectors to databases like SQL Server. */ @JsonProperty("ingestion_gateway_id") private String ingestionGatewayId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java index c880ee65e..26aecddf5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java @@ -56,7 +56,7 @@ public class PipelineSpec { @JsonProperty("filters") private Filters filters; - /** The definition of a gateway pipeline to support CDC. */ + /** The definition of a gateway pipeline to support change data capture. 
*/ @JsonProperty("gateway_definition") private IngestionGatewayPipelineDefinition gatewayDefinition; @@ -87,6 +87,10 @@ public class PipelineSpec { @JsonProperty("photon") private Boolean photon; + /** Restart window of this pipeline. */ + @JsonProperty("restart_window") + private RestartWindow restartWindow; + /** * The default schema (database) where tables are read from or published to. The presence of this * field implies that the pipeline is in direct publishing mode. @@ -267,6 +271,15 @@ public Boolean getPhoton() { return photon; } + public PipelineSpec setRestartWindow(RestartWindow restartWindow) { + this.restartWindow = restartWindow; + return this; + } + + public RestartWindow getRestartWindow() { + return restartWindow; + } + public PipelineSpec setSchema(String schema) { this.schema = schema; return this; @@ -334,6 +347,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(notifications, that.notifications) && Objects.equals(photon, that.photon) + && Objects.equals(restartWindow, that.restartWindow) && Objects.equals(schema, that.schema) && Objects.equals(serverless, that.serverless) && Objects.equals(storage, that.storage) @@ -361,6 +375,7 @@ public int hashCode() { name, notifications, photon, + restartWindow, schema, serverless, storage, @@ -388,6 +403,7 @@ public String toString() { .add("name", name) .add("notifications", notifications) .add("photon", photon) + .add("restartWindow", restartWindow) .add("schema", schema) .add("serverless", serverless) .add("storage", storage) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java index 9e00ea3cd..92117d392 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java @@ -44,15 +44,15 @@ public 
PipelinesAPI(PipelinesService mock) { impl = mock; } - public GetPipelineResponse waitGetPipelineIdle(String pipelineId) throws TimeoutException { - return waitGetPipelineIdle(pipelineId, Duration.ofMinutes(20), null); + public GetPipelineResponse waitGetPipelineRunning(String pipelineId) throws TimeoutException { + return waitGetPipelineRunning(pipelineId, Duration.ofMinutes(20), null); } - public GetPipelineResponse waitGetPipelineIdle( + public GetPipelineResponse waitGetPipelineRunning( String pipelineId, Duration timeout, Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + timeout.toMillis(); - java.util.List targetStates = Arrays.asList(PipelineState.IDLE); + java.util.List targetStates = Arrays.asList(PipelineState.RUNNING); java.util.List failureStates = Arrays.asList(PipelineState.FAILED); String statusMessage = "polling..."; int attempt = 1; @@ -67,7 +67,7 @@ public GetPipelineResponse waitGetPipelineIdle( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach IDLE, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach RUNNING, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } @@ -89,15 +89,15 @@ public GetPipelineResponse waitGetPipelineIdle( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } - public GetPipelineResponse waitGetPipelineRunning(String pipelineId) throws TimeoutException { - return waitGetPipelineRunning(pipelineId, Duration.ofMinutes(20), null); + public GetPipelineResponse waitGetPipelineIdle(String pipelineId) throws TimeoutException { + return waitGetPipelineIdle(pipelineId, Duration.ofMinutes(20), null); } - public GetPipelineResponse waitGetPipelineRunning( + public GetPipelineResponse waitGetPipelineIdle( String pipelineId, Duration timeout, Consumer callback) throws TimeoutException { long deadline = System.currentTimeMillis() + 
timeout.toMillis(); - java.util.List targetStates = Arrays.asList(PipelineState.RUNNING); + java.util.List targetStates = Arrays.asList(PipelineState.IDLE); java.util.List failureStates = Arrays.asList(PipelineState.FAILED); String statusMessage = "polling..."; int attempt = 1; @@ -112,7 +112,7 @@ public GetPipelineResponse waitGetPipelineRunning( callback.accept(poll); } if (failureStates.contains(status)) { - String msg = String.format("failed to reach RUNNING, got %s: %s", status, statusMessage); + String msg = String.format("failed to reach IDLE, got %s: %s", status, statusMessage); throw new IllegalStateException(msg); } @@ -269,7 +269,9 @@ public PipelinePermissions setPermissions(String pipelineId) { /** * Set pipeline permissions. * - *

Sets permissions on a pipeline. Pipelines can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public PipelinePermissions setPermissions(PipelinePermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java index 127bbb06d..332eabdcf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java @@ -90,7 +90,9 @@ ListPipelineEventsResponse listPipelineEvents( /** * Set pipeline permissions. * - *

Sets permissions on a pipeline. Pipelines can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ PipelinePermissions setPermissions(PipelinePermissionsRequest pipelinePermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java new file mode 100755 index 000000000..3156277a6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class RestartWindow { + /** + * Days of week in which the restart is allowed to happen (within a five-hour window starting at + * start_hour). If not specified all days of the week will be used. + */ + @JsonProperty("days_of_week") + private RestartWindowDaysOfWeek daysOfWeek; + + /** + * An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day. + * Continuous pipeline restart is triggered only within a five-hour window starting at this hour. + */ + @JsonProperty("start_hour") + private Long startHour; + + /** + * Time zone id of restart window. See + * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html + * for details. If not specified, UTC will be used. 
+ */ + @JsonProperty("time_zone_id") + private String timeZoneId; + + public RestartWindow setDaysOfWeek(RestartWindowDaysOfWeek daysOfWeek) { + this.daysOfWeek = daysOfWeek; + return this; + } + + public RestartWindowDaysOfWeek getDaysOfWeek() { + return daysOfWeek; + } + + public RestartWindow setStartHour(Long startHour) { + this.startHour = startHour; + return this; + } + + public Long getStartHour() { + return startHour; + } + + public RestartWindow setTimeZoneId(String timeZoneId) { + this.timeZoneId = timeZoneId; + return this; + } + + public String getTimeZoneId() { + return timeZoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestartWindow that = (RestartWindow) o; + return Objects.equals(daysOfWeek, that.daysOfWeek) + && Objects.equals(startHour, that.startHour) + && Objects.equals(timeZoneId, that.timeZoneId); + } + + @Override + public int hashCode() { + return Objects.hash(daysOfWeek, startHour, timeZoneId); + } + + @Override + public String toString() { + return new ToStringer(RestartWindow.class) + .add("daysOfWeek", daysOfWeek) + .add("startHour", startHour) + .add("timeZoneId", timeZoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java new file mode 100755 index 000000000..37bf738a0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +/** + * Days of week in which the restart is allowed to happen (within a five-hour window starting at + * start_hour). 
If not specified all days of the week will be used. + */ +@Generated +public enum RestartWindowDaysOfWeek { + FRIDAY, + MONDAY, + SATURDAY, + SUNDAY, + THURSDAY, + TUESDAY, + WEDNESDAY, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java index 1d928eaad..2cc6ec80a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java @@ -93,6 +93,10 @@ public class CreateWorkspaceRequest { @JsonProperty("gke_config") private GkeConfig gkeConfig; + /** Whether no public IP is enabled for the workspace. */ + @JsonProperty("is_no_public_ip_enabled") + private Boolean isNoPublicIpEnabled; + /** * The Google Cloud region of the workspace data plane in your Google account. For example, * `us-east4`. 
@@ -225,6 +229,15 @@ public GkeConfig getGkeConfig() { return gkeConfig; } + public CreateWorkspaceRequest setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) { + this.isNoPublicIpEnabled = isNoPublicIpEnabled; + return this; + } + + public Boolean getIsNoPublicIpEnabled() { + return isNoPublicIpEnabled; + } + public CreateWorkspaceRequest setLocation(String location) { this.location = location; return this; @@ -311,6 +324,7 @@ public boolean equals(Object o) { && Objects.equals(deploymentName, that.deploymentName) && Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig) && Objects.equals(gkeConfig, that.gkeConfig) + && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled) && Objects.equals(location, that.location) && Objects.equals( managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) @@ -333,6 +347,7 @@ public int hashCode() { deploymentName, gcpManagedNetworkConfig, gkeConfig, + isNoPublicIpEnabled, location, managedServicesCustomerManagedKeyId, networkId, @@ -354,6 +369,7 @@ public String toString() { .add("deploymentName", deploymentName) .add("gcpManagedNetworkConfig", gcpManagedNetworkConfig) .add("gkeConfig", gkeConfig) + .add("isNoPublicIpEnabled", isNoPublicIpEnabled) .add("location", location) .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) .add("networkId", networkId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java new file mode 100755 index 000000000..7654c68e7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.provisioning; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ExternalCustomerInfo { + /** Email of the authoritative user. */ + @JsonProperty("authoritative_user_email") + private String authoritativeUserEmail; + + /** The authoritative user full name. */ + @JsonProperty("authoritative_user_full_name") + private String authoritativeUserFullName; + + /** The legal entity name for the external workspace */ + @JsonProperty("customer_name") + private String customerName; + + public ExternalCustomerInfo setAuthoritativeUserEmail(String authoritativeUserEmail) { + this.authoritativeUserEmail = authoritativeUserEmail; + return this; + } + + public String getAuthoritativeUserEmail() { + return authoritativeUserEmail; + } + + public ExternalCustomerInfo setAuthoritativeUserFullName(String authoritativeUserFullName) { + this.authoritativeUserFullName = authoritativeUserFullName; + return this; + } + + public String getAuthoritativeUserFullName() { + return authoritativeUserFullName; + } + + public ExternalCustomerInfo setCustomerName(String customerName) { + this.customerName = customerName; + return this; + } + + public String getCustomerName() { + return customerName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExternalCustomerInfo that = (ExternalCustomerInfo) o; + return Objects.equals(authoritativeUserEmail, that.authoritativeUserEmail) + && Objects.equals(authoritativeUserFullName, that.authoritativeUserFullName) + && Objects.equals(customerName, that.customerName); + } + + @Override + public int hashCode() { + return Objects.hash(authoritativeUserEmail, authoritativeUserFullName, customerName); + } + + @Override + public String toString() { + return new 
ToStringer(ExternalCustomerInfo.class) + .add("authoritativeUserEmail", authoritativeUserEmail) + .add("authoritativeUserFullName", authoritativeUserFullName) + .add("customerName", customerName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java index cfb8817af..a690adac9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java @@ -52,6 +52,13 @@ public class UpdateWorkspaceRequest { @JsonProperty("network_id") private String networkId; + /** + * The ID of the workspace's private access settings configuration object. This parameter is + * available only for updating failed workspaces. + */ + @JsonProperty("private_access_settings_id") + private String privateAccessSettingsId; + /** * The ID of the workspace's storage configuration object. This parameter is available only for * updating failed workspaces. 
@@ -124,6 +131,15 @@ public String getNetworkId() { return networkId; } + public UpdateWorkspaceRequest setPrivateAccessSettingsId(String privateAccessSettingsId) { + this.privateAccessSettingsId = privateAccessSettingsId; + return this; + } + + public String getPrivateAccessSettingsId() { + return privateAccessSettingsId; + } + public UpdateWorkspaceRequest setStorageConfigurationId(String storageConfigurationId) { this.storageConfigurationId = storageConfigurationId; return this; @@ -163,6 +179,7 @@ public boolean equals(Object o) { managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) && Objects.equals(networkId, that.networkId) + && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId) && Objects.equals(storageConfigurationId, that.storageConfigurationId) && Objects.equals(storageCustomerManagedKeyId, that.storageCustomerManagedKeyId) && Objects.equals(workspaceId, that.workspaceId); @@ -177,6 +194,7 @@ public int hashCode() { managedServicesCustomerManagedKeyId, networkConnectivityConfigId, networkId, + privateAccessSettingsId, storageConfigurationId, storageCustomerManagedKeyId, workspaceId); @@ -191,6 +209,7 @@ public String toString() { .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) .add("networkConnectivityConfigId", networkConnectivityConfigId) .add("networkId", networkId) + .add("privateAccessSettingsId", privateAccessSettingsId) .add("storageConfigurationId", storageConfigurationId) .add("storageCustomerManagedKeyId", storageCustomerManagedKeyId) .add("workspaceId", workspaceId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java index b6abc5a88..4d6b61c9d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java @@ -55,6 +55,13 @@ public class Workspace { @JsonProperty("deployment_name") private String deploymentName; + /** + * If this workspace is for a external customer, then external_customer_info is populated. If this + * workspace is not for a external customer, then external_customer_info is empty. + */ + @JsonProperty("external_customer_info") + private ExternalCustomerInfo externalCustomerInfo; + /** * The network settings for the workspace. The configurations are only for Databricks-managed * VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the @@ -83,6 +90,10 @@ public class Workspace { @JsonProperty("gke_config") private GkeConfig gkeConfig; + /** Whether no public IP is enabled for the workspace. */ + @JsonProperty("is_no_public_ip_enabled") + private Boolean isNoPublicIpEnabled; + /** * The Google Cloud region of the workspace data plane in your Google account (for example, * `us-east4`). 
@@ -231,6 +242,15 @@ public String getDeploymentName() { return deploymentName; } + public Workspace setExternalCustomerInfo(ExternalCustomerInfo externalCustomerInfo) { + this.externalCustomerInfo = externalCustomerInfo; + return this; + } + + public ExternalCustomerInfo getExternalCustomerInfo() { + return externalCustomerInfo; + } + public Workspace setGcpManagedNetworkConfig(GcpManagedNetworkConfig gcpManagedNetworkConfig) { this.gcpManagedNetworkConfig = gcpManagedNetworkConfig; return this; @@ -249,6 +269,15 @@ public GkeConfig getGkeConfig() { return gkeConfig; } + public Workspace setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) { + this.isNoPublicIpEnabled = isNoPublicIpEnabled; + return this; + } + + public Boolean getIsNoPublicIpEnabled() { + return isNoPublicIpEnabled; + } + public Workspace setLocation(String location) { this.location = location; return this; @@ -363,8 +392,10 @@ public boolean equals(Object o) { && Objects.equals(credentialsId, that.credentialsId) && Objects.equals(customTags, that.customTags) && Objects.equals(deploymentName, that.deploymentName) + && Objects.equals(externalCustomerInfo, that.externalCustomerInfo) && Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig) && Objects.equals(gkeConfig, that.gkeConfig) + && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled) && Objects.equals(location, that.location) && Objects.equals( managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId) @@ -391,8 +422,10 @@ public int hashCode() { credentialsId, customTags, deploymentName, + externalCustomerInfo, gcpManagedNetworkConfig, gkeConfig, + isNoPublicIpEnabled, location, managedServicesCustomerManagedKeyId, networkId, @@ -418,8 +451,10 @@ public String toString() { .add("credentialsId", credentialsId) .add("customTags", customTags) .add("deploymentName", deploymentName) + .add("externalCustomerInfo", externalCustomerInfo) .add("gcpManagedNetworkConfig", gcpManagedNetworkConfig) 
.add("gkeConfig", gkeConfig) + .add("isNoPublicIpEnabled", isNoPublicIpEnabled) .add("location", location) .add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId) .add("networkId", networkId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java index 5ef50b903..37c8b27b2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java @@ -276,8 +276,9 @@ public ServingEndpointPermissions setPermissions(String servingEndpointId) { /** * Set serving endpoint permissions. * - *

Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public ServingEndpointPermissions setPermissions(ServingEndpointPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java index f35cb2a7c..5a42d11ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java @@ -114,8 +114,9 @@ ServingEndpointPermissions getPermissions( /** * Set serving endpoint permissions. * - *

Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their - * root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ ServingEndpointPermissions setPermissions( ServingEndpointPermissionsRequest servingEndpointPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java new file mode 100755 index 000000000..698c78634 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AibiDashboardEmbeddingAccessPolicy { + /** */ + @JsonProperty("access_policy_type") + private AibiDashboardEmbeddingAccessPolicyAccessPolicyType accessPolicyType; + + public AibiDashboardEmbeddingAccessPolicy setAccessPolicyType( + AibiDashboardEmbeddingAccessPolicyAccessPolicyType accessPolicyType) { + this.accessPolicyType = accessPolicyType; + return this; + } + + public AibiDashboardEmbeddingAccessPolicyAccessPolicyType getAccessPolicyType() { + return accessPolicyType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingAccessPolicy that = (AibiDashboardEmbeddingAccessPolicy) o; + return Objects.equals(accessPolicyType, that.accessPolicyType); + } + + @Override + public int hashCode() { + return Objects.hash(accessPolicyType); + } + + @Override + public String toString() { + return new 
ToStringer(AibiDashboardEmbeddingAccessPolicy.class) + .add("accessPolicyType", accessPolicyType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java new file mode 100755 index 000000000..35af0c8fa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or + * disabled at the workspace level. By default, this setting is conditionally enabled + * (ALLOW_APPROVED_DOMAINS). + */ +@Generated +public class AibiDashboardEmbeddingAccessPolicyAPI { + private static final Logger LOG = + LoggerFactory.getLogger(AibiDashboardEmbeddingAccessPolicyAPI.class); + + private final AibiDashboardEmbeddingAccessPolicyService impl; + + /** Regular-use constructor */ + public AibiDashboardEmbeddingAccessPolicyAPI(ApiClient apiClient) { + impl = new AibiDashboardEmbeddingAccessPolicyImpl(apiClient); + } + + /** Constructor for mocks */ + public AibiDashboardEmbeddingAccessPolicyAPI(AibiDashboardEmbeddingAccessPolicyService mock) { + impl = mock; + } + + /** + * Retrieve the AI/BI dashboard embedding access policy. + * + *

Retrieves the AI/BI dashboard embedding access policy. The default setting is + * ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains. + */ + public AibiDashboardEmbeddingAccessPolicySetting get( + GetAibiDashboardEmbeddingAccessPolicySettingRequest request) { + return impl.get(request); + } + + public AibiDashboardEmbeddingAccessPolicySetting update( + boolean allowMissing, AibiDashboardEmbeddingAccessPolicySetting setting, String fieldMask) { + return update( + new UpdateAibiDashboardEmbeddingAccessPolicySettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the AI/BI dashboard embedding access policy. + * + *

Updates the AI/BI dashboard embedding access policy at the workspace level. + */ + public AibiDashboardEmbeddingAccessPolicySetting update( + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest request) { + return impl.update(request); + } + + public AibiDashboardEmbeddingAccessPolicyService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java new file mode 100755 index 000000000..7fc964b78 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AibiDashboardEmbeddingAccessPolicyAccessPolicyType { + ALLOW_ALL_DOMAINS, + ALLOW_APPROVED_DOMAINS, + DENY_ALL_DOMAINS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java new file mode 100755 index 000000000..b27367992 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java @@ -0,0 +1,36 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of AibiDashboardEmbeddingAccessPolicy */ +@Generated +class AibiDashboardEmbeddingAccessPolicyImpl implements AibiDashboardEmbeddingAccessPolicyService { + private final ApiClient apiClient; + + public AibiDashboardEmbeddingAccessPolicyImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public AibiDashboardEmbeddingAccessPolicySetting get( + GetAibiDashboardEmbeddingAccessPolicySettingRequest request) { + String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, AibiDashboardEmbeddingAccessPolicySetting.class, headers); + } + + @Override + public AibiDashboardEmbeddingAccessPolicySetting update( + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest request) { + String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, AibiDashboardEmbeddingAccessPolicySetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java new file mode 100755 index 000000000..cf7a24f62 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyService.java @@ -0,0 +1,35 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or + * disabled at the workspace level. By default, this setting is conditionally enabled + * (ALLOW_APPROVED_DOMAINS). + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface AibiDashboardEmbeddingAccessPolicyService { + /** + * Retrieve the AI/BI dashboard embedding access policy. + * + *

Retrieves the AI/BI dashboard embedding access policy. The default setting is + * ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains. + */ + AibiDashboardEmbeddingAccessPolicySetting get( + GetAibiDashboardEmbeddingAccessPolicySettingRequest + getAibiDashboardEmbeddingAccessPolicySettingRequest); + + /** + * Update the AI/BI dashboard embedding access policy. + * + *

Updates the AI/BI dashboard embedding access policy at the workspace level. + */ + AibiDashboardEmbeddingAccessPolicySetting update( + UpdateAibiDashboardEmbeddingAccessPolicySettingRequest + updateAibiDashboardEmbeddingAccessPolicySettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java new file mode 100755 index 000000000..ead0e1b14 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java @@ -0,0 +1,88 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AibiDashboardEmbeddingAccessPolicySetting { + /** */ + @JsonProperty("aibi_dashboard_embedding_access_policy") + private AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy; + + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + * etag from a GET request, and pass it with the PATCH request to identify the setting version you + * are updating. + */ + @JsonProperty("etag") + private String etag; + + /** + * Name of the corresponding setting. This field is populated in the response, but it will not be + * respected even if it's set in the request body. 
The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. + */ + @JsonProperty("setting_name") + private String settingName; + + public AibiDashboardEmbeddingAccessPolicySetting setAibiDashboardEmbeddingAccessPolicy( + AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy) { + this.aibiDashboardEmbeddingAccessPolicy = aibiDashboardEmbeddingAccessPolicy; + return this; + } + + public AibiDashboardEmbeddingAccessPolicy getAibiDashboardEmbeddingAccessPolicy() { + return aibiDashboardEmbeddingAccessPolicy; + } + + public AibiDashboardEmbeddingAccessPolicySetting setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public AibiDashboardEmbeddingAccessPolicySetting setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingAccessPolicySetting that = (AibiDashboardEmbeddingAccessPolicySetting) o; + return Objects.equals( + aibiDashboardEmbeddingAccessPolicy, that.aibiDashboardEmbeddingAccessPolicy) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(aibiDashboardEmbeddingAccessPolicy, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingAccessPolicySetting.class) + .add("aibiDashboardEmbeddingAccessPolicy", aibiDashboardEmbeddingAccessPolicy) + .add("etag", etag) + .add("settingName", settingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java new file mode 100755 index 000000000..ecfa50971 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class AibiDashboardEmbeddingApprovedDomains { + /** */ + @JsonProperty("approved_domains") + private Collection approvedDomains; + + public AibiDashboardEmbeddingApprovedDomains setApprovedDomains( + Collection approvedDomains) { + this.approvedDomains = approvedDomains; + return this; + } + + public Collection getApprovedDomains() { + return approvedDomains; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingApprovedDomains that = (AibiDashboardEmbeddingApprovedDomains) o; + return Objects.equals(approvedDomains, that.approvedDomains); + } + + @Override + public int hashCode() { + return Objects.hash(approvedDomains); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingApprovedDomains.class) + .add("approvedDomains", approvedDomains) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java new file mode 100755 index 000000000..6a096aca6 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsAPI.java @@ -0,0 +1,66 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains + * list can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS. + */ +@Generated +public class AibiDashboardEmbeddingApprovedDomainsAPI { + private static final Logger LOG = + LoggerFactory.getLogger(AibiDashboardEmbeddingApprovedDomainsAPI.class); + + private final AibiDashboardEmbeddingApprovedDomainsService impl; + + /** Regular-use constructor */ + public AibiDashboardEmbeddingApprovedDomainsAPI(ApiClient apiClient) { + impl = new AibiDashboardEmbeddingApprovedDomainsImpl(apiClient); + } + + /** Constructor for mocks */ + public AibiDashboardEmbeddingApprovedDomainsAPI( + AibiDashboardEmbeddingApprovedDomainsService mock) { + impl = mock; + } + + /** + * Retrieve the list of domains approved to host embedded AI/BI dashboards. + * + *

Retrieves the list of domains approved to host embedded AI/BI dashboards. + */ + public AibiDashboardEmbeddingApprovedDomainsSetting get( + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { + return impl.get(request); + } + + public AibiDashboardEmbeddingApprovedDomainsSetting update( + boolean allowMissing, + AibiDashboardEmbeddingApprovedDomainsSetting setting, + String fieldMask) { + return update( + new UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the list of domains approved to host embedded AI/BI dashboards. + * + *

Updates the list of domains approved to host embedded AI/BI dashboards. This update will + * fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. + */ + public AibiDashboardEmbeddingApprovedDomainsSetting update( + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { + return impl.update(request); + } + + public AibiDashboardEmbeddingApprovedDomainsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java new file mode 100755 index 000000000..e026484e4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java @@ -0,0 +1,39 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of AibiDashboardEmbeddingApprovedDomains */ +@Generated +class AibiDashboardEmbeddingApprovedDomainsImpl + implements AibiDashboardEmbeddingApprovedDomainsService { + private final ApiClient apiClient; + + public AibiDashboardEmbeddingApprovedDomainsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public AibiDashboardEmbeddingApprovedDomainsSetting get( + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { + String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET( + path, request, AibiDashboardEmbeddingApprovedDomainsSetting.class, headers); + } + + @Override + public AibiDashboardEmbeddingApprovedDomainsSetting update( + 
UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { + String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH( + path, request, AibiDashboardEmbeddingApprovedDomainsSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java new file mode 100755 index 000000000..ec55a9a7c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsService.java @@ -0,0 +1,34 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains + * list can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface AibiDashboardEmbeddingApprovedDomainsService { + /** + * Retrieve the list of domains approved to host embedded AI/BI dashboards. + * + *

Retrieves the list of domains approved to host embedded AI/BI dashboards. + */ + AibiDashboardEmbeddingApprovedDomainsSetting get( + GetAibiDashboardEmbeddingApprovedDomainsSettingRequest + getAibiDashboardEmbeddingApprovedDomainsSettingRequest); + + /** + * Update the list of domains approved to host embedded AI/BI dashboards. + * + *

Updates the list of domains approved to host embedded AI/BI dashboards. This update will + * fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. + */ + AibiDashboardEmbeddingApprovedDomainsSetting update( + UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest + updateAibiDashboardEmbeddingApprovedDomainsSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java new file mode 100755 index 000000000..14c060819 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AibiDashboardEmbeddingApprovedDomainsSetting { + /** */ + @JsonProperty("aibi_dashboard_embedding_approved_domains") + private AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains; + + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + * etag from a GET request, and pass it with the PATCH request to identify the setting version you + * are updating. + */ + @JsonProperty("etag") + private String etag; + + /** + * Name of the corresponding setting. 
This field is populated in the response, but it will not be + * respected even if it's set in the request body. The setting name in the path parameter will be + * respected instead. Setting name is required to be 'default' if the setting only has one + * instance per workspace. + */ + @JsonProperty("setting_name") + private String settingName; + + public AibiDashboardEmbeddingApprovedDomainsSetting setAibiDashboardEmbeddingApprovedDomains( + AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains) { + this.aibiDashboardEmbeddingApprovedDomains = aibiDashboardEmbeddingApprovedDomains; + return this; + } + + public AibiDashboardEmbeddingApprovedDomains getAibiDashboardEmbeddingApprovedDomains() { + return aibiDashboardEmbeddingApprovedDomains; + } + + public AibiDashboardEmbeddingApprovedDomainsSetting setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public AibiDashboardEmbeddingApprovedDomainsSetting setSettingName(String settingName) { + this.settingName = settingName; + return this; + } + + public String getSettingName() { + return settingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AibiDashboardEmbeddingApprovedDomainsSetting that = + (AibiDashboardEmbeddingApprovedDomainsSetting) o; + return Objects.equals( + aibiDashboardEmbeddingApprovedDomains, that.aibiDashboardEmbeddingApprovedDomains) + && Objects.equals(etag, that.etag) + && Objects.equals(settingName, that.settingName); + } + + @Override + public int hashCode() { + return Objects.hash(aibiDashboardEmbeddingApprovedDomains, etag, settingName); + } + + @Override + public String toString() { + return new ToStringer(AibiDashboardEmbeddingApprovedDomainsSetting.class) + .add("aibiDashboardEmbeddingApprovedDomains", aibiDashboardEmbeddingApprovedDomains) + .add("etag", etag) + .add("settingName", settingName) + 
.toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java new file mode 100755 index 000000000..48a8c3910 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Retrieve the AI/BI dashboard embedding access policy */ +@Generated +public class GetAibiDashboardEmbeddingAccessPolicySettingRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. This is used + * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + * overwriting each other. It is strongly suggested that systems make use of the etag in the read + * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + * an etag from a GET request, and pass it with the DELETE request to identify the rule set + * version you are deleting. 
+ */ + @JsonIgnore + @QueryParam("etag") + private String etag; + + public GetAibiDashboardEmbeddingAccessPolicySettingRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetAibiDashboardEmbeddingAccessPolicySettingRequest that = + (GetAibiDashboardEmbeddingAccessPolicySettingRequest) o; + return Objects.equals(etag, that.etag); + } + + @Override + public int hashCode() { + return Objects.hash(etag); + } + + @Override + public String toString() { + return new ToStringer(GetAibiDashboardEmbeddingAccessPolicySettingRequest.class) + .add("etag", etag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java new file mode 100755 index 000000000..a9db24cfa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Retrieve the list of domains approved to host embedded AI/BI dashboards */ +@Generated +public class GetAibiDashboardEmbeddingApprovedDomainsSettingRequest { + /** + * etag used for versioning. The response is at least as fresh as the eTag provided. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.settings;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;

/** Retrieve the list of domains approved to host embedded AI/BI dashboards */
@Generated
public class GetAibiDashboardEmbeddingApprovedDomainsSettingRequest {
  /**
   * etag used for versioning. The response is at least as fresh as the eTag provided. The etag
   * supports optimistic concurrency control, preventing simultaneous writes of a setting from
   * overwriting each other. It is strongly suggested that systems follow the read -> delete
   * pattern when deleting settings: obtain an etag from a GET request and pass it with the DELETE
   * request to identify the rule set version being deleted, avoiding race conditions.
   */
  @JsonIgnore
  @QueryParam("etag")
  private String etag;

  /** Sets the etag and returns this request to allow call chaining. */
  public GetAibiDashboardEmbeddingApprovedDomainsSettingRequest setEtag(String etag) {
    this.etag = etag;
    return this;
  }

  /** Returns the etag, or {@code null} if none was set. */
  public String getEtag() {
    return etag;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    GetAibiDashboardEmbeddingApprovedDomainsSettingRequest other =
        (GetAibiDashboardEmbeddingApprovedDomainsSettingRequest) o;
    return Objects.equals(etag, other.etag);
  }

  @Override
  public int hashCode() {
    return Objects.hash(etag);
  }

  @Override
  public String toString() {
    return new ToStringer(GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.class)
        .add("etag", etag)
        .toString();
  }
}
ComplianceSecurityProfileAPI complianceSecurityProfileAPI; @@ -31,6 +35,11 @@ public class SettingsAPI { public SettingsAPI(ApiClient apiClient) { impl = new SettingsImpl(apiClient); + aibiDashboardEmbeddingAccessPolicyAPI = new AibiDashboardEmbeddingAccessPolicyAPI(apiClient); + + aibiDashboardEmbeddingApprovedDomainsAPI = + new AibiDashboardEmbeddingApprovedDomainsAPI(apiClient); + automaticClusterUpdateAPI = new AutomaticClusterUpdateAPI(apiClient); complianceSecurityProfileAPI = new ComplianceSecurityProfileAPI(apiClient); @@ -51,6 +60,19 @@ public SettingsAPI(SettingsService mock) { impl = mock; } + /** + * Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or + * disabled at the workspace level. + */ + public AibiDashboardEmbeddingAccessPolicyAPI AibiDashboardEmbeddingAccessPolicy() { + return aibiDashboardEmbeddingAccessPolicyAPI; + } + + /** Controls the list of domains approved to host the embedded AI/BI dashboards. */ + public AibiDashboardEmbeddingApprovedDomainsAPI AibiDashboardEmbeddingApprovedDomains() { + return aibiDashboardEmbeddingApprovedDomainsAPI; + } + /** Controls whether automatic cluster update is enabled for the current workspace. */ public AutomaticClusterUpdateAPI AutomaticClusterUpdate() { return automaticClusterUpdateAPI; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java index 20c4a6bfa..08e227383 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java @@ -29,6 +29,10 @@ public class TokenInfo { @JsonProperty("expiry_time") private Long expiryTime; + /** Approximate timestamp for the day the token was last used. Accurate up to 1 day. 
*/ + @JsonProperty("last_used_day") + private Long lastUsedDay; + /** User ID of the user that owns the token. */ @JsonProperty("owner_id") private Long ownerId; @@ -86,6 +90,15 @@ public Long getExpiryTime() { return expiryTime; } + public TokenInfo setLastUsedDay(Long lastUsedDay) { + this.lastUsedDay = lastUsedDay; + return this; + } + + public Long getLastUsedDay() { + return lastUsedDay; + } + public TokenInfo setOwnerId(Long ownerId) { this.ownerId = ownerId; return this; @@ -123,6 +136,7 @@ public boolean equals(Object o) { && Objects.equals(createdByUsername, that.createdByUsername) && Objects.equals(creationTime, that.creationTime) && Objects.equals(expiryTime, that.expiryTime) + && Objects.equals(lastUsedDay, that.lastUsedDay) && Objects.equals(ownerId, that.ownerId) && Objects.equals(tokenId, that.tokenId) && Objects.equals(workspaceId, that.workspaceId); @@ -136,6 +150,7 @@ public int hashCode() { createdByUsername, creationTime, expiryTime, + lastUsedDay, ownerId, tokenId, workspaceId); @@ -149,6 +164,7 @@ public String toString() { .add("createdByUsername", createdByUsername) .add("creationTime", creationTime) .add("expiryTime", expiryTime) + .add("lastUsedDay", lastUsedDay) .add("ownerId", ownerId) .add("tokenId", tokenId) .add("workspaceId", workspaceId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java index f105eef54..5e5311539 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java @@ -97,7 +97,9 @@ public Iterable list(ListTokenManagementRequest request) { /** * Set token permissions. * - *

Sets permissions on all tokens. Tokens can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public TokenPermissions setPermissions(TokenPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java index ab02b56ae..ad46b6abf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java @@ -58,7 +58,9 @@ public interface TokenManagementService { /** * Set token permissions. * - *

Sets permissions on all tokens. Tokens can inherit permissions from their root object. + *

// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.settings;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Details required to update a setting. */
@Generated
public class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest {
  /** This should always be set to true for Settings API. Added for AIP compliance. */
  @JsonProperty("allow_missing")
  private Boolean allowMissing;

  /**
   * Field mask required by the PATCH request; it selects which fields of the setting payload are
   * updated. The mask must be supplied as a single string, with multiple field names separated by
   * commas and no spaces.
   */
  @JsonProperty("field_mask")
  private String fieldMask;

  /** The setting payload to apply. */
  @JsonProperty("setting")
  private AibiDashboardEmbeddingAccessPolicySetting setting;

  /** Sets the allow-missing flag and returns this request to allow call chaining. */
  public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setAllowMissing(
      Boolean allowMissing) {
    this.allowMissing = allowMissing;
    return this;
  }

  public Boolean getAllowMissing() {
    return allowMissing;
  }

  /** Sets the field mask and returns this request to allow call chaining. */
  public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setFieldMask(String fieldMask) {
    this.fieldMask = fieldMask;
    return this;
  }

  public String getFieldMask() {
    return fieldMask;
  }

  /** Sets the setting payload and returns this request to allow call chaining. */
  public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setSetting(
      AibiDashboardEmbeddingAccessPolicySetting setting) {
    this.setting = setting;
    return this;
  }

  public AibiDashboardEmbeddingAccessPolicySetting getSetting() {
    return setting;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateAibiDashboardEmbeddingAccessPolicySettingRequest other =
        (UpdateAibiDashboardEmbeddingAccessPolicySettingRequest) o;
    return Objects.equals(allowMissing, other.allowMissing)
        && Objects.equals(fieldMask, other.fieldMask)
        && Objects.equals(setting, other.setting);
  }

  @Override
  public int hashCode() {
    return Objects.hash(allowMissing, fieldMask, setting);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.class)
        .add("allowMissing", allowMissing)
        .add("fieldMask", fieldMask)
        .add("setting", setting)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.settings;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Details required to update a setting. */
@Generated
public class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest {
  /** This should always be set to true for Settings API. Added for AIP compliance. */
  @JsonProperty("allow_missing")
  private Boolean allowMissing;

  /**
   * Field mask required by the PATCH request; it selects which fields of the setting payload are
   * updated. The mask must be supplied as a single string, with multiple field names separated by
   * commas and no spaces.
   */
  @JsonProperty("field_mask")
  private String fieldMask;

  /** The setting payload to apply. */
  @JsonProperty("setting")
  private AibiDashboardEmbeddingApprovedDomainsSetting setting;

  /** Sets the allow-missing flag and returns this request to allow call chaining. */
  public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setAllowMissing(
      Boolean allowMissing) {
    this.allowMissing = allowMissing;
    return this;
  }

  public Boolean getAllowMissing() {
    return allowMissing;
  }

  /** Sets the field mask and returns this request to allow call chaining. */
  public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setFieldMask(String fieldMask) {
    this.fieldMask = fieldMask;
    return this;
  }

  public String getFieldMask() {
    return fieldMask;
  }

  /** Sets the setting payload and returns this request to allow call chaining. */
  public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setSetting(
      AibiDashboardEmbeddingApprovedDomainsSetting setting) {
    this.setting = setting;
    return this;
  }

  public AibiDashboardEmbeddingApprovedDomainsSetting getSetting() {
    return setting;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest other =
        (UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest) o;
    return Objects.equals(allowMissing, other.allowMissing)
        && Objects.equals(fieldMask, other.fieldMask)
        && Objects.equals(setting, other.setting);
  }

  @Override
  public int hashCode() {
    return Objects.hash(allowMissing, fieldMask, setting);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.class)
        .add("allowMissing", allowMissing)
        .add("fieldMask", fieldMask)
        .add("setting", setting)
        .toString();
  }
}
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java +++ /dev/null @@ -1,109 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class CentralCleanRoomInfo { - /** - * All assets from all collaborators that are available in the clean room. Only one of table_info - * or notebook_info will be filled in. - */ - @JsonProperty("clean_room_assets") - private Collection cleanRoomAssets; - - /** All collaborators who are in the clean room. */ - @JsonProperty("collaborators") - private Collection collaborators; - - /** The collaborator who created the clean room. */ - @JsonProperty("creator") - private CleanRoomCollaboratorInfo creator; - - /** The cloud where clean room tasks will be run. */ - @JsonProperty("station_cloud") - private String stationCloud; - - /** The region where clean room tasks will be run. 
*/ - @JsonProperty("station_region") - private String stationRegion; - - public CentralCleanRoomInfo setCleanRoomAssets(Collection cleanRoomAssets) { - this.cleanRoomAssets = cleanRoomAssets; - return this; - } - - public Collection getCleanRoomAssets() { - return cleanRoomAssets; - } - - public CentralCleanRoomInfo setCollaborators( - Collection collaborators) { - this.collaborators = collaborators; - return this; - } - - public Collection getCollaborators() { - return collaborators; - } - - public CentralCleanRoomInfo setCreator(CleanRoomCollaboratorInfo creator) { - this.creator = creator; - return this; - } - - public CleanRoomCollaboratorInfo getCreator() { - return creator; - } - - public CentralCleanRoomInfo setStationCloud(String stationCloud) { - this.stationCloud = stationCloud; - return this; - } - - public String getStationCloud() { - return stationCloud; - } - - public CentralCleanRoomInfo setStationRegion(String stationRegion) { - this.stationRegion = stationRegion; - return this; - } - - public String getStationRegion() { - return stationRegion; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CentralCleanRoomInfo that = (CentralCleanRoomInfo) o; - return Objects.equals(cleanRoomAssets, that.cleanRoomAssets) - && Objects.equals(collaborators, that.collaborators) - && Objects.equals(creator, that.creator) - && Objects.equals(stationCloud, that.stationCloud) - && Objects.equals(stationRegion, that.stationRegion); - } - - @Override - public int hashCode() { - return Objects.hash(cleanRoomAssets, collaborators, creator, stationCloud, stationRegion); - } - - @Override - public String toString() { - return new ToStringer(CentralCleanRoomInfo.class) - .add("cleanRoomAssets", cleanRoomAssets) - .add("collaborators", collaborators) - .add("creator", creator) - .add("stationCloud", stationCloud) - .add("stationRegion", stationRegion) - .toString(); - } -} diff 
--git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java deleted file mode 100755 index bd09e02c3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomAssetInfo.java +++ /dev/null @@ -1,104 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CleanRoomAssetInfo { - /** Time at which this asset was added, in epoch milliseconds. */ - @JsonProperty("added_at") - private Long addedAt; - - /** Details about the notebook asset. */ - @JsonProperty("notebook_info") - private CleanRoomNotebookInfo notebookInfo; - - /** The collaborator who owns the asset. */ - @JsonProperty("owner") - private CleanRoomCollaboratorInfo owner; - - /** Details about the table asset. */ - @JsonProperty("table_info") - private CleanRoomTableInfo tableInfo; - - /** Time at which this asset was updated, in epoch milliseconds. 
*/ - @JsonProperty("updated_at") - private Long updatedAt; - - public CleanRoomAssetInfo setAddedAt(Long addedAt) { - this.addedAt = addedAt; - return this; - } - - public Long getAddedAt() { - return addedAt; - } - - public CleanRoomAssetInfo setNotebookInfo(CleanRoomNotebookInfo notebookInfo) { - this.notebookInfo = notebookInfo; - return this; - } - - public CleanRoomNotebookInfo getNotebookInfo() { - return notebookInfo; - } - - public CleanRoomAssetInfo setOwner(CleanRoomCollaboratorInfo owner) { - this.owner = owner; - return this; - } - - public CleanRoomCollaboratorInfo getOwner() { - return owner; - } - - public CleanRoomAssetInfo setTableInfo(CleanRoomTableInfo tableInfo) { - this.tableInfo = tableInfo; - return this; - } - - public CleanRoomTableInfo getTableInfo() { - return tableInfo; - } - - public CleanRoomAssetInfo setUpdatedAt(Long updatedAt) { - this.updatedAt = updatedAt; - return this; - } - - public Long getUpdatedAt() { - return updatedAt; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomAssetInfo that = (CleanRoomAssetInfo) o; - return Objects.equals(addedAt, that.addedAt) - && Objects.equals(notebookInfo, that.notebookInfo) - && Objects.equals(owner, that.owner) - && Objects.equals(tableInfo, that.tableInfo) - && Objects.equals(updatedAt, that.updatedAt); - } - - @Override - public int hashCode() { - return Objects.hash(addedAt, notebookInfo, owner, tableInfo, updatedAt); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomAssetInfo.class) - .add("addedAt", addedAt) - .add("notebookInfo", notebookInfo) - .add("owner", owner) - .add("tableInfo", tableInfo) - .add("updatedAt", updatedAt) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java deleted 
file mode 100755 index a0f1a3d67..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalog.java +++ /dev/null @@ -1,75 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class CleanRoomCatalog { - /** Name of the catalog in the clean room station. Empty for notebooks. */ - @JsonProperty("catalog_name") - private String catalogName; - - /** The details of the shared notebook files. */ - @JsonProperty("notebook_files") - private Collection notebookFiles; - - /** The details of the shared tables. */ - @JsonProperty("tables") - private Collection tables; - - public CleanRoomCatalog setCatalogName(String catalogName) { - this.catalogName = catalogName; - return this; - } - - public String getCatalogName() { - return catalogName; - } - - public CleanRoomCatalog setNotebookFiles(Collection notebookFiles) { - this.notebookFiles = notebookFiles; - return this; - } - - public Collection getNotebookFiles() { - return notebookFiles; - } - - public CleanRoomCatalog setTables(Collection tables) { - this.tables = tables; - return this; - } - - public Collection getTables() { - return tables; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomCatalog that = (CleanRoomCatalog) o; - return Objects.equals(catalogName, that.catalogName) - && Objects.equals(notebookFiles, that.notebookFiles) - && Objects.equals(tables, that.tables); - } - - @Override - public int hashCode() { - return Objects.hash(catalogName, notebookFiles, tables); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomCatalog.class) - 
.add("catalogName", catalogName) - .add("notebookFiles", notebookFiles) - .add("tables", tables) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java deleted file mode 100755 index 1a72b29c9..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCatalogUpdate.java +++ /dev/null @@ -1,58 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CleanRoomCatalogUpdate { - /** The name of the catalog to update assets. */ - @JsonProperty("catalog_name") - private String catalogName; - - /** The updates to the assets in the catalog. 
*/ - @JsonProperty("updates") - private SharedDataObjectUpdate updates; - - public CleanRoomCatalogUpdate setCatalogName(String catalogName) { - this.catalogName = catalogName; - return this; - } - - public String getCatalogName() { - return catalogName; - } - - public CleanRoomCatalogUpdate setUpdates(SharedDataObjectUpdate updates) { - this.updates = updates; - return this; - } - - public SharedDataObjectUpdate getUpdates() { - return updates; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomCatalogUpdate that = (CleanRoomCatalogUpdate) o; - return Objects.equals(catalogName, that.catalogName) && Objects.equals(updates, that.updates); - } - - @Override - public int hashCode() { - return Objects.hash(catalogName, updates); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomCatalogUpdate.class) - .add("catalogName", catalogName) - .add("updates", updates) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java deleted file mode 100755 index b484fd762..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomCollaboratorInfo.java +++ /dev/null @@ -1,65 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CleanRoomCollaboratorInfo { - /** - * The global Unity Catalog metastore id of the collaborator. Also known as the sharing - * identifier. The identifier is of format __cloud__:__region__:__metastore-uuid__. 
- */ - @JsonProperty("global_metastore_id") - private String globalMetastoreId; - - /** - * The organization name of the collaborator. This is configured in the metastore for Delta - * Sharing and is used to identify the organization to other collaborators. - */ - @JsonProperty("organization_name") - private String organizationName; - - public CleanRoomCollaboratorInfo setGlobalMetastoreId(String globalMetastoreId) { - this.globalMetastoreId = globalMetastoreId; - return this; - } - - public String getGlobalMetastoreId() { - return globalMetastoreId; - } - - public CleanRoomCollaboratorInfo setOrganizationName(String organizationName) { - this.organizationName = organizationName; - return this; - } - - public String getOrganizationName() { - return organizationName; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomCollaboratorInfo that = (CleanRoomCollaboratorInfo) o; - return Objects.equals(globalMetastoreId, that.globalMetastoreId) - && Objects.equals(organizationName, that.organizationName); - } - - @Override - public int hashCode() { - return Objects.hash(globalMetastoreId, organizationName); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomCollaboratorInfo.class) - .add("globalMetastoreId", globalMetastoreId) - .add("organizationName", organizationName) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java deleted file mode 100755 index 1f0c8628a..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomInfo.java +++ /dev/null @@ -1,174 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class CleanRoomInfo { - /** User-provided free-form text description. */ - @JsonProperty("comment") - private String comment; - - /** Time at which this clean room was created, in epoch milliseconds. */ - @JsonProperty("created_at") - private Long createdAt; - - /** Username of clean room creator. */ - @JsonProperty("created_by") - private String createdBy; - - /** Catalog aliases shared by the current collaborator with asset details. */ - @JsonProperty("local_catalogs") - private Collection localCatalogs; - - /** Name of the clean room. */ - @JsonProperty("name") - private String name; - - /** Username of current owner of clean room. */ - @JsonProperty("owner") - private String owner; - - /** Central clean room details. */ - @JsonProperty("remote_detailed_info") - private CentralCleanRoomInfo remoteDetailedInfo; - - /** Time at which this clean room was updated, in epoch milliseconds. */ - @JsonProperty("updated_at") - private Long updatedAt; - - /** Username of clean room updater. 
*/ - @JsonProperty("updated_by") - private String updatedBy; - - public CleanRoomInfo setComment(String comment) { - this.comment = comment; - return this; - } - - public String getComment() { - return comment; - } - - public CleanRoomInfo setCreatedAt(Long createdAt) { - this.createdAt = createdAt; - return this; - } - - public Long getCreatedAt() { - return createdAt; - } - - public CleanRoomInfo setCreatedBy(String createdBy) { - this.createdBy = createdBy; - return this; - } - - public String getCreatedBy() { - return createdBy; - } - - public CleanRoomInfo setLocalCatalogs(Collection localCatalogs) { - this.localCatalogs = localCatalogs; - return this; - } - - public Collection getLocalCatalogs() { - return localCatalogs; - } - - public CleanRoomInfo setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CleanRoomInfo setOwner(String owner) { - this.owner = owner; - return this; - } - - public String getOwner() { - return owner; - } - - public CleanRoomInfo setRemoteDetailedInfo(CentralCleanRoomInfo remoteDetailedInfo) { - this.remoteDetailedInfo = remoteDetailedInfo; - return this; - } - - public CentralCleanRoomInfo getRemoteDetailedInfo() { - return remoteDetailedInfo; - } - - public CleanRoomInfo setUpdatedAt(Long updatedAt) { - this.updatedAt = updatedAt; - return this; - } - - public Long getUpdatedAt() { - return updatedAt; - } - - public CleanRoomInfo setUpdatedBy(String updatedBy) { - this.updatedBy = updatedBy; - return this; - } - - public String getUpdatedBy() { - return updatedBy; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomInfo that = (CleanRoomInfo) o; - return Objects.equals(comment, that.comment) - && Objects.equals(createdAt, that.createdAt) - && Objects.equals(createdBy, that.createdBy) - && Objects.equals(localCatalogs, that.localCatalogs) - && Objects.equals(name, 
that.name) - && Objects.equals(owner, that.owner) - && Objects.equals(remoteDetailedInfo, that.remoteDetailedInfo) - && Objects.equals(updatedAt, that.updatedAt) - && Objects.equals(updatedBy, that.updatedBy); - } - - @Override - public int hashCode() { - return Objects.hash( - comment, - createdAt, - createdBy, - localCatalogs, - name, - owner, - remoteDetailedInfo, - updatedAt, - updatedBy); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomInfo.class) - .add("comment", comment) - .add("createdAt", createdAt) - .add("createdBy", createdBy) - .add("localCatalogs", localCatalogs) - .add("name", name) - .add("owner", owner) - .add("remoteDetailedInfo", remoteDetailedInfo) - .add("updatedAt", updatedAt) - .add("updatedBy", updatedBy) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java deleted file mode 100755 index eaf19d6a6..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomNotebookInfo.java +++ /dev/null @@ -1,59 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CleanRoomNotebookInfo { - /** The base64 representation of the notebook content in HTML. */ - @JsonProperty("notebook_content") - private String notebookContent; - - /** The name of the notebook. 
*/ - @JsonProperty("notebook_name") - private String notebookName; - - public CleanRoomNotebookInfo setNotebookContent(String notebookContent) { - this.notebookContent = notebookContent; - return this; - } - - public String getNotebookContent() { - return notebookContent; - } - - public CleanRoomNotebookInfo setNotebookName(String notebookName) { - this.notebookName = notebookName; - return this; - } - - public String getNotebookName() { - return notebookName; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomNotebookInfo that = (CleanRoomNotebookInfo) o; - return Objects.equals(notebookContent, that.notebookContent) - && Objects.equals(notebookName, that.notebookName); - } - - @Override - public int hashCode() { - return Objects.hash(notebookContent, notebookName); - } - - @Override - public String toString() { - return new ToStringer(CleanRoomNotebookInfo.class) - .add("notebookContent", notebookContent) - .add("notebookName", notebookName) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java deleted file mode 100755 index 6581fdd55..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomTableInfo.java +++ /dev/null @@ -1,105 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class CleanRoomTableInfo { - /** Name of parent catalog. 
*/ - @JsonProperty("catalog_name") - private String catalogName; - - /** The array of __ColumnInfo__ definitions of the table's columns. */ - @JsonProperty("columns") - private Collection columns; - - /** Full name of table, in form of __catalog_name__.__schema_name__.__table_name__ */ - @JsonProperty("full_name") - private String fullName; - - /** Name of table, relative to parent schema. */ - @JsonProperty("name") - private String name; - - /** Name of parent schema relative to its parent catalog. */ - @JsonProperty("schema_name") - private String schemaName; - - public CleanRoomTableInfo setCatalogName(String catalogName) { - this.catalogName = catalogName; - return this; - } - - public String getCatalogName() { - return catalogName; - } - - public CleanRoomTableInfo setColumns(Collection columns) { - this.columns = columns; - return this; - } - - public Collection getColumns() { - return columns; - } - - public CleanRoomTableInfo setFullName(String fullName) { - this.fullName = fullName; - return this; - } - - public String getFullName() { - return fullName; - } - - public CleanRoomTableInfo setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CleanRoomTableInfo setSchemaName(String schemaName) { - this.schemaName = schemaName; - return this; - } - - public String getSchemaName() { - return schemaName; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CleanRoomTableInfo that = (CleanRoomTableInfo) o; - return Objects.equals(catalogName, that.catalogName) - && Objects.equals(columns, that.columns) - && Objects.equals(fullName, that.fullName) - && Objects.equals(name, that.name) - && Objects.equals(schemaName, that.schemaName); - } - - @Override - public int hashCode() { - return Objects.hash(catalogName, columns, fullName, name, schemaName); - } - - @Override - public String toString() { - return 
new ToStringer(CleanRoomTableInfo.class) - .add("catalogName", catalogName) - .add("columns", columns) - .add("fullName", fullName) - .add("name", name) - .add("schemaName", schemaName) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java deleted file mode 100755 index 09b1944a0..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java +++ /dev/null @@ -1,125 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.Paginator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A clean room is a secure, privacy-protecting environment where two or more parties can share - * sensitive enterprise data, including customer data, for measurements, insights, activation and - * other use cases. - * - *

To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM** - * privilege. - */ -@Generated -public class CleanRoomsAPI { - private static final Logger LOG = LoggerFactory.getLogger(CleanRoomsAPI.class); - - private final CleanRoomsService impl; - - /** Regular-use constructor */ - public CleanRoomsAPI(ApiClient apiClient) { - impl = new CleanRoomsImpl(apiClient); - } - - /** Constructor for mocks */ - public CleanRoomsAPI(CleanRoomsService mock) { - impl = mock; - } - - public CleanRoomInfo create(String name, CentralCleanRoomInfo remoteDetailedInfo) { - return create(new CreateCleanRoom().setName(name).setRemoteDetailedInfo(remoteDetailedInfo)); - } - - /** - * Create a clean room. - * - *

Creates a new clean room with specified colaborators. The caller must be a metastore admin - * or have the **CREATE_CLEAN_ROOM** privilege on the metastore. - */ - public CleanRoomInfo create(CreateCleanRoom request) { - return impl.create(request); - } - - public void delete(String name) { - delete(new DeleteCleanRoomRequest().setName(name)); - } - - /** - * Delete a clean room. - * - *

Deletes a data object clean room from the metastore. The caller must be an owner of the - * clean room. - */ - public void delete(DeleteCleanRoomRequest request) { - impl.delete(request); - } - - public CleanRoomInfo get(String name) { - return get(new GetCleanRoomRequest().setName(name)); - } - - /** - * Get a clean room. - * - *

Gets a data object clean room from the metastore. The caller must be a metastore admin or - * the owner of the clean room. - */ - public CleanRoomInfo get(GetCleanRoomRequest request) { - return impl.get(request); - } - - /** - * List clean rooms. - * - *

Gets an array of data object clean rooms from the metastore. The caller must be a metastore - * admin or the owner of the clean room. There is no guarantee of a specific ordering of the - * elements in the array. - */ - public Iterable list(ListCleanRoomsRequest request) { - return new Paginator<>( - request, - impl::list, - ListCleanRoomsResponse::getCleanRooms, - response -> { - String token = response.getNextPageToken(); - if (token == null || token.isEmpty()) { - return null; - } - return request.setPageToken(token); - }); - } - - public CleanRoomInfo update(String name) { - return update(new UpdateCleanRoom().setName(name)); - } - - /** - * Update a clean room. - * - *

Updates the clean room with the changes and data objects in the request. The caller must be - * the owner of the clean room or a metastore admin. - * - *

When the caller is a metastore admin, only the __owner__ field can be updated. - * - *

In the case that the clean room name is changed **updateCleanRoom** requires that the caller - * is both the clean room owner and a metastore admin. - * - *

For each table that is added through this method, the clean room owner must also have - * **SELECT** privilege on the table. The privilege must be maintained indefinitely for recipients - * to be able to access the table. Typically, you should use a group as the clean room owner. - * - *

Table removals through **update** do not require additional privileges. - */ - public CleanRoomInfo update(UpdateCleanRoom request) { - return impl.update(request); - } - - public CleanRoomsService impl() { - return impl; - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java deleted file mode 100755 index 8e0e85e68..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java +++ /dev/null @@ -1,59 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; - -/** Package-local implementation of CleanRooms */ -@Generated -class CleanRoomsImpl implements CleanRoomsService { - private final ApiClient apiClient; - - public CleanRoomsImpl(ApiClient apiClient) { - this.apiClient = apiClient; - } - - @Override - public CleanRoomInfo create(CreateCleanRoom request) { - String path = "/api/2.1/unity-catalog/clean-rooms"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CleanRoomInfo.class, headers); - } - - @Override - public void delete(DeleteCleanRoomRequest request) { - String path = String.format("/api/2.1/unity-catalog/clean-rooms/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); - } - - @Override - public CleanRoomInfo get(GetCleanRoomRequest request) { - String path = String.format("/api/2.1/unity-catalog/clean-rooms/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return 
apiClient.GET(path, request, CleanRoomInfo.class, headers); - } - - @Override - public ListCleanRoomsResponse list(ListCleanRoomsRequest request) { - String path = "/api/2.1/unity-catalog/clean-rooms"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListCleanRoomsResponse.class, headers); - } - - @Override - public CleanRoomInfo update(UpdateCleanRoom request) { - String path = String.format("/api/2.1/unity-catalog/clean-rooms/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, CleanRoomInfo.class, headers); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java deleted file mode 100755 index fd3ef23a9..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java +++ /dev/null @@ -1,71 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; - -/** - * A clean room is a secure, privacy-protecting environment where two or more parties can share - * sensitive enterprise data, including customer data, for measurements, insights, activation and - * other use cases. - * - *

To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM** - * privilege. - * - *

This is the high-level interface, that contains generated methods. - * - *

Evolving: this interface is under development. Method signatures may change. - */ -@Generated -public interface CleanRoomsService { - /** - * Create a clean room. - * - *

Creates a new clean room with specified colaborators. The caller must be a metastore admin - * or have the **CREATE_CLEAN_ROOM** privilege on the metastore. - */ - CleanRoomInfo create(CreateCleanRoom createCleanRoom); - - /** - * Delete a clean room. - * - *

Deletes a data object clean room from the metastore. The caller must be an owner of the - * clean room. - */ - void delete(DeleteCleanRoomRequest deleteCleanRoomRequest); - - /** - * Get a clean room. - * - *

Gets a data object clean room from the metastore. The caller must be a metastore admin or - * the owner of the clean room. - */ - CleanRoomInfo get(GetCleanRoomRequest getCleanRoomRequest); - - /** - * List clean rooms. - * - *

Gets an array of data object clean rooms from the metastore. The caller must be a metastore - * admin or the owner of the clean room. There is no guarantee of a specific ordering of the - * elements in the array. - */ - ListCleanRoomsResponse list(ListCleanRoomsRequest listCleanRoomsRequest); - - /** - * Update a clean room. - * - *

Updates the clean room with the changes and data objects in the request. The caller must be - * the owner of the clean room or a metastore admin. - * - *

When the caller is a metastore admin, only the __owner__ field can be updated. - * - *

In the case that the clean room name is changed **updateCleanRoom** requires that the caller - * is both the clean room owner and a metastore admin. - * - *

For each table that is added through this method, the clean room owner must also have - * **SELECT** privilege on the table. The privilege must be maintained indefinitely for recipients - * to be able to access the table. Typically, you should use a group as the clean room owner. - * - *

Table removals through **update** do not require additional privileges. - */ - CleanRoomInfo update(UpdateCleanRoom updateCleanRoom); -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java deleted file mode 100755 index 40abbd429..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java +++ /dev/null @@ -1,221 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class ColumnInfo { - /** User-provided free-form text description. */ - @JsonProperty("comment") - private String comment; - - /** */ - @JsonProperty("mask") - private ColumnMask mask; - - /** Name of Column. */ - @JsonProperty("name") - private String name; - - /** Whether field may be Null (default: true). */ - @JsonProperty("nullable") - private Boolean nullable; - - /** Partition index for column. */ - @JsonProperty("partition_index") - private Long partitionIndex; - - /** Ordinal position of column (starting at position 0). */ - @JsonProperty("position") - private Long position; - - /** Format of IntervalType. */ - @JsonProperty("type_interval_type") - private String typeIntervalType; - - /** Full data type specification, JSON-serialized. */ - @JsonProperty("type_json") - private String typeJson; - - /** Name of type (INT, STRUCT, MAP, etc.). */ - @JsonProperty("type_name") - private ColumnTypeName typeName; - - /** Digits of precision; required for DecimalTypes. */ - @JsonProperty("type_precision") - private Long typePrecision; - - /** Digits to right of decimal; Required for DecimalTypes. 
*/ - @JsonProperty("type_scale") - private Long typeScale; - - /** Full data type specification as SQL/catalogString text. */ - @JsonProperty("type_text") - private String typeText; - - public ColumnInfo setComment(String comment) { - this.comment = comment; - return this; - } - - public String getComment() { - return comment; - } - - public ColumnInfo setMask(ColumnMask mask) { - this.mask = mask; - return this; - } - - public ColumnMask getMask() { - return mask; - } - - public ColumnInfo setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public ColumnInfo setNullable(Boolean nullable) { - this.nullable = nullable; - return this; - } - - public Boolean getNullable() { - return nullable; - } - - public ColumnInfo setPartitionIndex(Long partitionIndex) { - this.partitionIndex = partitionIndex; - return this; - } - - public Long getPartitionIndex() { - return partitionIndex; - } - - public ColumnInfo setPosition(Long position) { - this.position = position; - return this; - } - - public Long getPosition() { - return position; - } - - public ColumnInfo setTypeIntervalType(String typeIntervalType) { - this.typeIntervalType = typeIntervalType; - return this; - } - - public String getTypeIntervalType() { - return typeIntervalType; - } - - public ColumnInfo setTypeJson(String typeJson) { - this.typeJson = typeJson; - return this; - } - - public String getTypeJson() { - return typeJson; - } - - public ColumnInfo setTypeName(ColumnTypeName typeName) { - this.typeName = typeName; - return this; - } - - public ColumnTypeName getTypeName() { - return typeName; - } - - public ColumnInfo setTypePrecision(Long typePrecision) { - this.typePrecision = typePrecision; - return this; - } - - public Long getTypePrecision() { - return typePrecision; - } - - public ColumnInfo setTypeScale(Long typeScale) { - this.typeScale = typeScale; - return this; - } - - public Long getTypeScale() { - return typeScale; - } - - public 
ColumnInfo setTypeText(String typeText) { - this.typeText = typeText; - return this; - } - - public String getTypeText() { - return typeText; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ColumnInfo that = (ColumnInfo) o; - return Objects.equals(comment, that.comment) - && Objects.equals(mask, that.mask) - && Objects.equals(name, that.name) - && Objects.equals(nullable, that.nullable) - && Objects.equals(partitionIndex, that.partitionIndex) - && Objects.equals(position, that.position) - && Objects.equals(typeIntervalType, that.typeIntervalType) - && Objects.equals(typeJson, that.typeJson) - && Objects.equals(typeName, that.typeName) - && Objects.equals(typePrecision, that.typePrecision) - && Objects.equals(typeScale, that.typeScale) - && Objects.equals(typeText, that.typeText); - } - - @Override - public int hashCode() { - return Objects.hash( - comment, - mask, - name, - nullable, - partitionIndex, - position, - typeIntervalType, - typeJson, - typeName, - typePrecision, - typeScale, - typeText); - } - - @Override - public String toString() { - return new ToStringer(ColumnInfo.class) - .add("comment", comment) - .add("mask", mask) - .add("name", name) - .add("nullable", nullable) - .add("partitionIndex", partitionIndex) - .add("position", position) - .add("typeIntervalType", typeIntervalType) - .add("typeJson", typeJson) - .add("typeName", typeName) - .add("typePrecision", typePrecision) - .add("typeScale", typeScale) - .add("typeText", typeText) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java deleted file mode 100755 index 9c0e3e84b..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java +++ /dev/null @@ -1,64 +0,0 @@ -// Code generated from OpenAPI specs by 
Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class ColumnMask { - /** The full name of the column mask SQL UDF. */ - @JsonProperty("function_name") - private String functionName; - - /** - * The list of additional table columns to be passed as input to the column mask function. The - * first arg of the mask function should be of the type of the column being masked and the types - * of the rest of the args should match the types of columns in 'using_column_names'. - */ - @JsonProperty("using_column_names") - private Collection usingColumnNames; - - public ColumnMask setFunctionName(String functionName) { - this.functionName = functionName; - return this; - } - - public String getFunctionName() { - return functionName; - } - - public ColumnMask setUsingColumnNames(Collection usingColumnNames) { - this.usingColumnNames = usingColumnNames; - return this; - } - - public Collection getUsingColumnNames() { - return usingColumnNames; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ColumnMask that = (ColumnMask) o; - return Objects.equals(functionName, that.functionName) - && Objects.equals(usingColumnNames, that.usingColumnNames); - } - - @Override - public int hashCode() { - return Objects.hash(functionName, usingColumnNames); - } - - @Override - public String toString() { - return new ToStringer(ColumnMask.class) - .add("functionName", functionName) - .add("usingColumnNames", usingColumnNames) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java 
deleted file mode 100755 index 7586ecb3a..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; - -/** Name of type (INT, STRUCT, MAP, etc.). */ -@Generated -public enum ColumnTypeName { - ARRAY, - BINARY, - BOOLEAN, - BYTE, - CHAR, - DATE, - DECIMAL, - DOUBLE, - FLOAT, - INT, - INTERVAL, - LONG, - MAP, - NULL, - SHORT, - STRING, - STRUCT, - TABLE_TYPE, - TIMESTAMP, - TIMESTAMP_NTZ, - USER_DEFINED_TYPE, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java deleted file mode 100755 index 3b769a756..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateCleanRoom.java +++ /dev/null @@ -1,74 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CreateCleanRoom { - /** User-provided free-form text description. */ - @JsonProperty("comment") - private String comment; - - /** Name of the clean room. */ - @JsonProperty("name") - private String name; - - /** Central clean room details. 
*/ - @JsonProperty("remote_detailed_info") - private CentralCleanRoomInfo remoteDetailedInfo; - - public CreateCleanRoom setComment(String comment) { - this.comment = comment; - return this; - } - - public String getComment() { - return comment; - } - - public CreateCleanRoom setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public CreateCleanRoom setRemoteDetailedInfo(CentralCleanRoomInfo remoteDetailedInfo) { - this.remoteDetailedInfo = remoteDetailedInfo; - return this; - } - - public CentralCleanRoomInfo getRemoteDetailedInfo() { - return remoteDetailedInfo; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CreateCleanRoom that = (CreateCleanRoom) o; - return Objects.equals(comment, that.comment) - && Objects.equals(name, that.name) - && Objects.equals(remoteDetailedInfo, that.remoteDetailedInfo); - } - - @Override - public int hashCode() { - return Objects.hash(comment, name, remoteDetailedInfo); - } - - @Override - public String toString() { - return new ToStringer(CreateCleanRoom.class) - .add("comment", comment) - .add("name", name) - .add("remoteDetailedInfo", remoteDetailedInfo) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java deleted file mode 100755 index 1fd0c92e4..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteCleanRoomRequest.java +++ /dev/null @@ -1,42 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Delete a clean room */ -@Generated -public class DeleteCleanRoomRequest { - /** The name of the clean room. */ - @JsonIgnore private String name; - - public DeleteCleanRoomRequest setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteCleanRoomRequest that = (DeleteCleanRoomRequest) o; - return Objects.equals(name, that.name); - } - - @Override - public int hashCode() { - return Objects.hash(name); - } - - @Override - public String toString() { - return new ToStringer(DeleteCleanRoomRequest.class).add("name", name).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java deleted file mode 100755 index 982732d12..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/GetCleanRoomRequest.java +++ /dev/null @@ -1,61 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Get a clean room */ -@Generated -public class GetCleanRoomRequest { - /** Whether to include remote details (central) on the clean room. */ - @JsonIgnore - @QueryParam("include_remote_details") - private Boolean includeRemoteDetails; - - /** The name of the clean room. 
*/ - @JsonIgnore private String name; - - public GetCleanRoomRequest setIncludeRemoteDetails(Boolean includeRemoteDetails) { - this.includeRemoteDetails = includeRemoteDetails; - return this; - } - - public Boolean getIncludeRemoteDetails() { - return includeRemoteDetails; - } - - public GetCleanRoomRequest setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCleanRoomRequest that = (GetCleanRoomRequest) o; - return Objects.equals(includeRemoteDetails, that.includeRemoteDetails) - && Objects.equals(name, that.name); - } - - @Override - public int hashCode() { - return Objects.hash(includeRemoteDetails, name); - } - - @Override - public String toString() { - return new ToStringer(GetCleanRoomRequest.class) - .add("includeRemoteDetails", includeRemoteDetails) - .add("name", name) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java deleted file mode 100755 index c58abe94d..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListCleanRoomsRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** List clean rooms */ -@Generated -public class ListCleanRoomsRequest { - /** - * Maximum number of clean rooms to return. If not set, all the clean rooms are returned (not - * recommended). 
- when set to a value greater than 0, the page length is the minimum of this - * value and a server configured value; - when set to 0, the page length is set to a server - * configured value (recommended); - when set to a value less than 0, an invalid parameter error - * is returned; - */ - @JsonIgnore - @QueryParam("max_results") - private Long maxResults; - - /** Opaque pagination token to go to next page based on previous query. */ - @JsonIgnore - @QueryParam("page_token") - private String pageToken; - - public ListCleanRoomsRequest setMaxResults(Long maxResults) { - this.maxResults = maxResults; - return this; - } - - public Long getMaxResults() { - return maxResults; - } - - public ListCleanRoomsRequest setPageToken(String pageToken) { - this.pageToken = pageToken; - return this; - } - - public String getPageToken() { - return pageToken; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ListCleanRoomsRequest that = (ListCleanRoomsRequest) o; - return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); - } - - @Override - public int hashCode() { - return Objects.hash(maxResults, pageToken); - } - - @Override - public String toString() { - return new ToStringer(ListCleanRoomsRequest.class) - .add("maxResults", maxResults) - .add("pageToken", pageToken) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java deleted file mode 100755 index 033dc5192..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateCleanRoom.java +++ /dev/null @@ -1,90 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.sharing; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class UpdateCleanRoom { - /** Array of shared data object updates. */ - @JsonProperty("catalog_updates") - private Collection catalogUpdates; - - /** User-provided free-form text description. */ - @JsonProperty("comment") - private String comment; - - /** The name of the clean room. */ - @JsonIgnore private String name; - - /** Username of current owner of clean room. */ - @JsonProperty("owner") - private String owner; - - public UpdateCleanRoom setCatalogUpdates(Collection catalogUpdates) { - this.catalogUpdates = catalogUpdates; - return this; - } - - public Collection getCatalogUpdates() { - return catalogUpdates; - } - - public UpdateCleanRoom setComment(String comment) { - this.comment = comment; - return this; - } - - public String getComment() { - return comment; - } - - public UpdateCleanRoom setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public UpdateCleanRoom setOwner(String owner) { - this.owner = owner; - return this; - } - - public String getOwner() { - return owner; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - UpdateCleanRoom that = (UpdateCleanRoom) o; - return Objects.equals(catalogUpdates, that.catalogUpdates) - && Objects.equals(comment, that.comment) - && Objects.equals(name, that.name) - && Objects.equals(owner, that.owner); - } - - @Override - public int hashCode() { - return Objects.hash(catalogUpdates, comment, name, owner); - } - - @Override - public String toString() { - return new ToStringer(UpdateCleanRoom.class) - .add("catalogUpdates", 
catalogUpdates) - .add("comment", comment) - .add("name", name) - .add("owner", owner) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java index 82eb48e5c..a1900b66f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java @@ -9,5 +9,5 @@ public enum ChannelName { CHANNEL_NAME_CURRENT, CHANNEL_NAME_CUSTOM, CHANNEL_NAME_PREVIEW, - CHANNEL_NAME_UNSPECIFIED, + CHANNEL_NAME_PREVIOUS, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java index 8e579b470..8d25fa978 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java @@ -87,11 +87,11 @@ * completed execution when the cancel request arrives. Polling for status until a terminal state is * reached is a reliable way to determine the final state. - Wait timeouts are approximate, occur * server-side, and cannot account for things such as caller delays and network latency from caller - * to service. - The system will auto-close a statement after one hour if the client stops polling - * and thus you must poll at least once an hour. - The results are only available for one hour after - * success; polling does not extend this. - The SQL Execution API must be used for the entire - * lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and - * then the SQL Execution API to cancel it. + * to service. - To guarantee that the statement is kept alive, you must poll at least once every 15 + * minutes. 
- The results are only available for one hour after success; polling does not extend + * this. - The SQL Execution API must be used for the entire lifecycle of the statement. For + * example, you cannot use the Jobs API to execute the command, and then the SQL Execution API to + * cancel it. * *

[Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement * Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java index b22e9dbe0..5132b0354 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java @@ -84,11 +84,11 @@ * completed execution when the cancel request arrives. Polling for status until a terminal state is * reached is a reliable way to determine the final state. - Wait timeouts are approximate, occur * server-side, and cannot account for things such as caller delays and network latency from caller - * to service. - The system will auto-close a statement after one hour if the client stops polling - * and thus you must poll at least once an hour. - The results are only available for one hour after - * success; polling does not extend this. - The SQL Execution API must be used for the entire - * lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and - * then the SQL Execution API to cancel it. + * to service. - To guarantee that the statement is kept alive, you must poll at least once every 15 + * minutes. - The results are only available for one hour after success; polling does not extend + * this. - The SQL Execution API must be used for the entire lifecycle of the statement. For + * example, you cannot use the Jobs API to execute the command, and then the SQL Execution API to + * cancel it. * *

[Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement * Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java index 9564b525a..00b0c3941 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java @@ -233,7 +233,8 @@ public WarehousePermissions setPermissions(String warehouseId) { /** * Set SQL warehouse permissions. * - *

Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ public WarehousePermissions setPermissions(WarehousePermissionsRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java index c1da3aedc..8b18fcca2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java @@ -76,7 +76,8 @@ WarehousePermissions getPermissions( /** * Set SQL warehouse permissions. * - *

Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root * object. */ WarehousePermissions setPermissions(WarehousePermissionsRequest warehousePermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java index dab9e912e..8a51b47a3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java @@ -130,7 +130,9 @@ public RepoPermissions setPermissions(String repoId) { /** * Set repo permissions. * - *

Sets permissions on a repo. Repos can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ public RepoPermissions setPermissions(RepoPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java index 313477542..188c2d30d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java @@ -70,7 +70,9 @@ GetRepoPermissionLevelsResponse getPermissionLevels( /** * Set repo permissions. * - *

Sets permissions on a repo. Repos can inherit permissions from their root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their root + * object. */ RepoPermissions setPermissions(RepoPermissionsRequest repoPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java index 486826182..3933eeff3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java @@ -175,8 +175,9 @@ public WorkspaceObjectPermissions setPermissions( /** * Set workspace object permissions. * - *

Sets permissions on a workspace object. Workspace objects can inherit permissions from their - * parent objects or root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their parent + * objects or root object. */ public WorkspaceObjectPermissions setPermissions(WorkspaceObjectPermissionsRequest request) { return impl.setPermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java index e6e7be354..93dc98423 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java @@ -98,8 +98,9 @@ WorkspaceObjectPermissions getPermissions( /** * Set workspace object permissions. * - *

Sets permissions on a workspace object. Workspace objects can inherit permissions from their - * parent objects or root object. + *

Sets permissions on an object, replacing existing permissions if they exist. Deletes all + * direct permissions if none are specified. Objects can inherit permissions from their parent + * objects or root object. */ WorkspaceObjectPermissions setPermissions( WorkspaceObjectPermissionsRequest workspaceObjectPermissionsRequest); diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml index 32731d879..89e86caf3 100644 --- a/examples/docs/pom.xml +++ b/examples/docs/pom.xml @@ -24,7 +24,7 @@ com.databricks databricks-sdk-java - 0.34.0 + 0.35.0 diff --git a/examples/spring-boot-oauth-u2m-demo/pom.xml b/examples/spring-boot-oauth-u2m-demo/pom.xml index 4a89d1033..b8a37bffd 100644 --- a/examples/spring-boot-oauth-u2m-demo/pom.xml +++ b/examples/spring-boot-oauth-u2m-demo/pom.xml @@ -37,7 +37,7 @@ com.databricks databricks-sdk-java - 0.34.0 + 0.35.0 com.fasterxml.jackson.datatype diff --git a/pom.xml b/pom.xml index 789662edd..6b6116a71 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 com.databricks databricks-sdk-parent - 0.34.0 + 0.35.0 pom Databricks SDK for Java The Databricks SDK for Java includes functionality to accelerate development with Java for diff --git a/shaded/pom.xml b/shaded/pom.xml index 21713f8e5..8c3d0966b 100644 --- a/shaded/pom.xml +++ b/shaded/pom.xml @@ -4,7 +4,7 @@ 4.0.0 - 0.34.0 + 0.35.0 com.databricks From 56d095b8aa0ecf0215d6949d62dbcc53ae34e6ec Mon Sep 17 00:00:00 2001 From: Renaud Hartert Date: Wed, 6 Nov 2024 12:30:35 +0100 Subject: [PATCH 2/2] Fix formatting --- .../java/com/databricks/sdk/core/DatabricksConfig.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index 533cdd43b..20e7f883e 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -374,17 +374,13 @@ public DatabricksConfig setAzureUseMsi(boolean azureUseMsi) { return this; } - /** - * @deprecated Use {@link #getAzureUseMsi()} instead. - */ + /** @deprecated Use {@link #getAzureUseMsi()} instead. */ @Deprecated() public boolean getAzureUseMSI() { return azureUseMsi; } - /** - * @deprecated Use {@link #setAzureUseMsi(boolean)} instead. - */ + /** @deprecated Use {@link #setAzureUseMsi(boolean)} instead. */ @Deprecated public DatabricksConfig setAzureUseMSI(boolean azureUseMsi) { this.azureUseMsi = azureUseMsi;