To create clean rooms, you must be a metastore admin or a user with the
- * **CREATE_CLEAN_ROOM** privilege.
- */
- public CleanRoomsAPI cleanRooms() {
- return cleanRoomsAPI;
- }
-
 /**
 * You can use cluster policies to control users' ability to configure clusters based on a set of
 * rules. These rules specify which attributes or attribute values can be used during cluster
@@ -580,6 +568,19 @@ public ConsumerProvidersAPI consumerProviders() {
 return consumerProvidersAPI;
 }
+ /**
+ * A credential represents an authentication and authorization mechanism for accessing services on
+ * your cloud tenant. Each credential is subject to Unity Catalog access-control policies that
+ * control which users and groups can access the credential.
+ *
+ * <p>To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE
+ * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another
+ * user or group to manage permissions on it.
+ */
+ public CredentialsAPI credentials() {
+ return credentialsAPI;
+ }
+
 /**
 * Credentials manager interacts with Identity Providers to perform token exchanges using
 * stored credentials and refresh tokens.
@@ -1453,11 +1454,11 @@ public SharesAPI shares() {
 * might have already completed execution when the cancel request arrives. Polling for status
 * until a terminal state is reached is a reliable way to determine the final state. - Wait
 * timeouts are approximate, occur server-side, and cannot account for things such as caller
- * delays and network latency from caller to service. - The system will auto-close a statement
- * after one hour if the client stops polling and thus you must poll at least once an hour. - The
- * results are only available for one hour after success; polling does not extend this. - The SQL
- * Execution API must be used for the entire lifecycle of the statement. For example, you cannot
- * use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
+ * delays and network latency from caller to service. - To guarantee that the statement is kept
+ * alive, you must poll at least once every 15 minutes. - The results are only available for one
+ * hour after success; polling does not extend this. - The SQL Execution API must be used for the
+ * entire lifecycle of the statement. For example, you cannot use the Jobs API to execute the
+ * command, and then the SQL Execution API to cancel it.
 *
 * <p>[Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement
* Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
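
To make the revised polling contract concrete (a statement stays alive only while the caller keeps polling, at least once every 15 minutes, and results expire one hour after success), here is a minimal client-side sketch. The fetchState supplier is a hypothetical stand-in for whatever status call the SQL Statement Execution client exposes; it is not part of this diff.

import java.time.Duration;
import java.time.Instant;
import java.util.function.Supplier;

final class StatementPoller {
  // Poll well inside the 15-minute keep-alive window until a terminal state is reported.
  static String pollUntilTerminal(Supplier<String> fetchState, Duration timeout)
      throws InterruptedException {
    Instant deadline = Instant.now().plus(timeout);
    while (Instant.now().isBefore(deadline)) {
      String state = fetchState.get(); // hypothetical status lookup via the SQL Execution API
      if ("SUCCEEDED".equals(state) || "FAILED".equals(state) || "CANCELED".equals(state)) {
        return state;
      }
      Thread.sleep(Duration.ofMinutes(5).toMillis());
    }
    throw new IllegalStateException("no terminal state within " + timeout);
  }
}
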
@@ -1721,17 +1722,6 @@ public WorkspaceClient withCatalogsAPI(CatalogsAPI catalogs) {
return this;
}
- /** Replace the default CleanRoomsService with a custom implementation. */
- public WorkspaceClient withCleanRoomsImpl(CleanRoomsService cleanRooms) {
- return this.withCleanRoomsAPI(new CleanRoomsAPI(cleanRooms));
- }
-
- /** Replace the default CleanRoomsAPI with a custom implementation. */
- public WorkspaceClient withCleanRoomsAPI(CleanRoomsAPI cleanRooms) {
- this.cleanRoomsAPI = cleanRooms;
- return this;
- }
-
/** Replace the default ClusterPoliciesService with a custom implementation. */
public WorkspaceClient withClusterPoliciesImpl(ClusterPoliciesService clusterPolicies) {
return this.withClusterPoliciesAPI(new ClusterPoliciesAPI(clusterPolicies));
@@ -1837,6 +1827,17 @@ public WorkspaceClient withConsumerProvidersAPI(ConsumerProvidersAPI consumerPro
return this;
}
+ /** Replace the default CredentialsService with a custom implementation. */
+ public WorkspaceClient withCredentialsImpl(CredentialsService credentials) {
+ return this.withCredentialsAPI(new CredentialsAPI(credentials));
+ }
+
+ /** Replace the default CredentialsAPI with a custom implementation. */
+ public WorkspaceClient withCredentialsAPI(CredentialsAPI credentials) {
+ this.credentialsAPI = credentials;
+ return this;
+ }
+
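
A sketch of how this new hook might be used in tests, assuming Mockito is on the classpath and that the generated CredentialInfo model exposes a setName setter; the credential name is illustrative:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CredentialInfo;
import com.databricks.sdk.service.catalog.CredentialsService;
import org.mockito.Mockito;

class CredentialsTestSupport {
  // Route the credentials() accessor to a stub so tests never hit the real API.
  static WorkspaceClient withStubbedCredentials(WorkspaceClient w) {
    CredentialsService stub = Mockito.mock(CredentialsService.class);
    Mockito.when(stub.getCredential(Mockito.any()))
        .thenReturn(new CredentialInfo().setName("ci-service-credential")); // illustrative value
    return w.withCredentialsImpl(stub);
  }
}
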
/** Replace the default CredentialsManagerService with a custom implementation. */
public WorkspaceClient withCredentialsManagerImpl(CredentialsManagerService credentialsManager) {
return this.withCredentialsManagerAPI(new CredentialsManagerAPI(credentialsManager));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
index 20e7f883e..533cdd43b 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java
@@ -374,13 +374,17 @@ public DatabricksConfig setAzureUseMsi(boolean azureUseMsi) {
return this;
}
- /** @deprecated Use {@link #getAzureUseMsi()} instead. */
+ /**
+ * @deprecated Use {@link #getAzureUseMsi()} instead.
+ */
@Deprecated()
public boolean getAzureUseMSI() {
return azureUseMsi;
}
- /** @deprecated Use {@link #setAzureUseMsi(boolean)} instead. */
+ /**
+ * @deprecated Use {@link #setAzureUseMsi(boolean)} instead.
+ */
@Deprecated
public DatabricksConfig setAzureUseMSI(boolean azureUseMsi) {
this.azureUseMsi = azureUseMsi;
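
Migration is mechanical; callers simply switch to the canonical casing, for example:

import com.databricks.sdk.core.DatabricksConfig;

class AzureMsiConfigExample {
  static DatabricksConfig configure() {
    // Preferred casing; the *MSI variants remain only as deprecated aliases.
    DatabricksConfig config = new DatabricksConfig().setAzureUseMsi(true);
    boolean useMsi = config.getAzureUseMsi(); // reads the same flag
    return config;
  }
}
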
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java
index 56c817d73..4d9698f8f 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java
@@ -32,7 +32,7 @@ public String getValue() {
// TODO: check if reading from
// /META-INF/maven/com.databricks/databrics-sdk-java/pom.properties
// or getClass().getPackage().getImplementationVersion() is enough.
- private static final String version = "0.34.0";
+ private static final String version = "0.35.0";
public static void withProduct(String product, String productVersion) {
UserAgent.product = product;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
index 35d1b609a..4b611f216 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
@@ -81,23 +81,27 @@ public App waitGetAppActive(String name, Duration timeout, Consumer Sets permissions on an app. Apps can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public AppPermissions setPermissions(AppPermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java
index f6936a132..e45306647 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java
@@ -21,7 +21,7 @@ public App create(CreateAppRequest request) {
Map Sets permissions on an app. Apps can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
AppPermissions setPermissions(AppPermissionsRequest appPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java
index 3952d58b3..1d0425673 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java
@@ -8,28 +8,24 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Create an app deployment */
@Generated
public class CreateAppDeploymentRequest {
+ /** */
+ @JsonProperty("app_deployment")
+ private AppDeployment appDeployment;
+
/** The name of the app. */
@JsonIgnore private String appName;
- /** The unique id of the deployment. */
- @JsonProperty("deployment_id")
- private String deploymentId;
-
- /** The mode of which the deployment will manage the source code. */
- @JsonProperty("mode")
- private AppDeploymentMode mode;
+ public CreateAppDeploymentRequest setAppDeployment(AppDeployment appDeployment) {
+ this.appDeployment = appDeployment;
+ return this;
+ }
- /**
- * The workspace file system path of the source code used to create the app deployment. This is
- * different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
- * app. The former refers to the original source code location of the app in the workspace during
- * deployment creation, whereas the latter provides a system generated stable snapshotted source
- * code path used by the deployment.
- */
- @JsonProperty("source_code_path")
- private String sourceCodePath;
+ public AppDeployment getAppDeployment() {
+ return appDeployment;
+ }
public CreateAppDeploymentRequest setAppName(String appName) {
this.appName = appName;
@@ -40,56 +36,25 @@ public String getAppName() {
return appName;
}
- public CreateAppDeploymentRequest setDeploymentId(String deploymentId) {
- this.deploymentId = deploymentId;
- return this;
- }
-
- public String getDeploymentId() {
- return deploymentId;
- }
-
- public CreateAppDeploymentRequest setMode(AppDeploymentMode mode) {
- this.mode = mode;
- return this;
- }
-
- public AppDeploymentMode getMode() {
- return mode;
- }
-
- public CreateAppDeploymentRequest setSourceCodePath(String sourceCodePath) {
- this.sourceCodePath = sourceCodePath;
- return this;
- }
-
- public String getSourceCodePath() {
- return sourceCodePath;
- }
-
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateAppDeploymentRequest that = (CreateAppDeploymentRequest) o;
- return Objects.equals(appName, that.appName)
- && Objects.equals(deploymentId, that.deploymentId)
- && Objects.equals(mode, that.mode)
- && Objects.equals(sourceCodePath, that.sourceCodePath);
+ return Objects.equals(appDeployment, that.appDeployment)
+ && Objects.equals(appName, that.appName);
}
@Override
public int hashCode() {
- return Objects.hash(appName, deploymentId, mode, sourceCodePath);
+ return Objects.hash(appDeployment, appName);
}
@Override
public String toString() {
return new ToStringer(CreateAppDeploymentRequest.class)
+ .add("appDeployment", appDeployment)
.add("appName", appName)
- .add("deploymentId", deploymentId)
- .add("mode", mode)
- .add("sourceCodePath", sourceCodePath)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java
index e835442a9..7d1076bb7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java
@@ -5,51 +5,22 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Collection;
import java.util.Objects;
+/** Create an app */
@Generated
public class CreateAppRequest {
- /** The description of the app. */
- @JsonProperty("description")
- private String description;
+ /** */
+ @JsonProperty("app")
+ private App app;
- /**
- * The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
- * It must be unique within the workspace.
- */
- @JsonProperty("name")
- private String name;
-
- /** Resources for the app. */
- @JsonProperty("resources")
- private Collection To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE
+ * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user
+ * or group to manage permissions on it.
+ */
+@Generated
+public class CredentialsAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(CredentialsAPI.class);
+
+ private final CredentialsService impl;
+
+ /** Regular-use constructor */
+ public CredentialsAPI(ApiClient apiClient) {
+ impl = new CredentialsImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public CredentialsAPI(CredentialsService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Create a credential.
+ *
+ * Creates a new credential.
+ */
+ public CredentialInfo createCredential(CreateCredentialRequest request) {
+ return impl.createCredential(request);
+ }
+
+ public void deleteCredential(String nameArg) {
+ deleteCredential(new DeleteCredentialRequest().setNameArg(nameArg));
+ }
+
+ /**
+ * Delete a credential.
+ *
+ * Deletes a credential from the metastore. The caller must be an owner of the credential.
+ */
+ public void deleteCredential(DeleteCredentialRequest request) {
+ impl.deleteCredential(request);
+ }
+
+ /**
+ * Generate a temporary service credential.
+ *
+ * Returns a set of temporary credentials generated using the specified service credential. The
+ * caller must be a metastore admin or have the metastore privilege **ACCESS** on the service
+ * credential.
+ */
+ public TemporaryCredentials generateTemporaryServiceCredential(
+ GenerateTemporaryServiceCredentialRequest request) {
+ return impl.generateTemporaryServiceCredential(request);
+ }
+
+ public CredentialInfo getCredential(String nameArg) {
+ return getCredential(new GetCredentialRequest().setNameArg(nameArg));
+ }
+
+ /**
+ * Get a credential.
+ *
+ * Gets a credential from the metastore. The caller must be a metastore admin, the owner of the
+ * credential, or have any permission on the credential.
+ */
+ public CredentialInfo getCredential(GetCredentialRequest request) {
+ return impl.getCredential(request);
+ }
+
+ /**
+ * List credentials.
+ *
+ * Gets an array of credentials (as __CredentialInfo__ objects).
+ *
+ * The array is limited to only the credentials that the caller has permission to access. If
+ * the caller is a metastore admin, retrieval of credentials is unrestricted. There is no
+ * guarantee of a specific ordering of the elements in the array.
+ */
+ public Iterable Updates a credential on the metastore.
+ *
+ * The caller must be the owner of the credential or a metastore admin or have the `MANAGE`
+ * permission. If the caller is a metastore admin, only the __owner__ field can be changed.
+ */
+ public CredentialInfo updateCredential(UpdateCredentialRequest request) {
+ return impl.updateCredential(request);
+ }
+
+ /**
+ * Validate a credential.
+ *
+ * Validates a credential.
+ *
+ * Either the __credential_name__ or the cloud-specific credential must be provided.
+ *
+ * The caller must be a metastore admin or the credential owner.
+ */
+ public ValidateCredentialResponse validateCredential(ValidateCredentialRequest request) {
+ return impl.validateCredential(request);
+ }
+
+ public CredentialsService impl() {
+ return impl;
+ }
+}
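
A minimal usage sketch for the new API, assuming a configured WorkspaceClient and that the generated CredentialInfo model exposes getName(); the credential name is illustrative:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CredentialInfo;
import com.databricks.sdk.service.catalog.ListCredentialsRequest;

public class ListServiceCredentials {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // default authentication resolution
    // List the credentials visible to the caller, then fetch one by name.
    for (CredentialInfo c : w.credentials().listCredentials(new ListCredentialsRequest())) {
      System.out.println(c.getName());
    }
    CredentialInfo byName = w.credentials().getCredential("my-service-credential");
    System.out.println(byName);
  }
}
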
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java
new file mode 100755
index 000000000..b2aad2644
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of Credentials */
+@Generated
+class CredentialsImpl implements CredentialsService {
+ private final ApiClient apiClient;
+
+ public CredentialsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public CredentialInfo createCredential(CreateCredentialRequest request) {
+ String path = "/api/2.1/unity-catalog/credentials";
+ Map To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE
+ * CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user
+ * or group to manage permissions on it.
+ *
+ * This is the high-level interface that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface CredentialsService {
+ /**
+ * Create a credential.
+ *
+ * Creates a new credential.
+ */
+ CredentialInfo createCredential(CreateCredentialRequest createCredentialRequest);
+
+ /**
+ * Delete a credential.
+ *
+ * Deletes a credential from the metastore. The caller must be an owner of the credential.
+ */
+ void deleteCredential(DeleteCredentialRequest deleteCredentialRequest);
+
+ /**
+ * Generate a temporary service credential.
+ *
+ * Returns a set of temporary credentials generated using the specified service credential. The
+ * caller must be a metastore admin or have the metastore privilege **ACCESS** on the service
+ * credential.
+ */
+ TemporaryCredentials generateTemporaryServiceCredential(
+ GenerateTemporaryServiceCredentialRequest generateTemporaryServiceCredentialRequest);
+
+ /**
+ * Get a credential.
+ *
+ * Gets a credential from the metastore. The caller must be a metastore admin, the owner of the
+ * credential, or have any permission on the credential.
+ */
+ CredentialInfo getCredential(GetCredentialRequest getCredentialRequest);
+
+ /**
+ * List credentials.
+ *
+ * Gets an array of credentials (as __CredentialInfo__ objects).
+ *
+ * The array is limited to only the credentials that the caller has permission to access. If
+ * the caller is a metastore admin, retrieval of credentials is unrestricted. There is no
+ * guarantee of a specific ordering of the elements in the array.
+ */
+ ListCredentialsResponse listCredentials(ListCredentialsRequest listCredentialsRequest);
+
+ /**
+ * Update a credential.
+ *
+ * Updates a credential on the metastore.
+ *
+ * The caller must be the owner of the credential or a metastore admin or have the `MANAGE`
+ * permission. If the caller is a metastore admin, only the __owner__ field can be changed.
+ */
+ CredentialInfo updateCredential(UpdateCredentialRequest updateCredentialRequest);
+
+ /**
+ * Validate a credential.
+ *
+ * Validates a credential.
+ *
+ * Either the __credential_name__ or the cloud-specific credential must be provided.
+ *
+ * The caller must be a metastore admin or the credential owner.
+ */
+ ValidateCredentialResponse validateCredential(
+ ValidateCredentialRequest validateCredentialRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java
new file mode 100755
index 000000000..a3549cb5b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a credential */
+@Generated
+public class DeleteCredentialRequest {
+ /** Force deletion even if there are dependent services. */
+ @JsonIgnore
+ @QueryParam("force")
+ private Boolean force;
+
+ /** Name of the credential. */
+ @JsonIgnore private String nameArg;
+
+ public DeleteCredentialRequest setForce(Boolean force) {
+ this.force = force;
+ return this;
+ }
+
+ public Boolean getForce() {
+ return force;
+ }
+
+ public DeleteCredentialRequest setNameArg(String nameArg) {
+ this.nameArg = nameArg;
+ return this;
+ }
+
+ public String getNameArg() {
+ return nameArg;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteCredentialRequest that = (DeleteCredentialRequest) o;
+ return Objects.equals(force, that.force) && Objects.equals(nameArg, that.nameArg);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(force, nameArg);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteCredentialRequest.class)
+ .add("force", force)
+ .add("nameArg", nameArg)
+ .toString();
+ }
+}
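
Like the other generated request types, this one is built with fluent setters. For example, a forced delete could look like the following sketch, assuming a configured WorkspaceClient; the credential name is illustrative:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.DeleteCredentialRequest;

class DeleteCredentialExample {
  static void forceDelete(WorkspaceClient w, String credentialName) {
    DeleteCredentialRequest request =
        new DeleteCredentialRequest()
            .setNameArg(credentialName)
            .setForce(true); // also delete when dependent services exist
    w.credentials().deleteCredential(request);
  }
}
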
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java
new file mode 100755
index 000000000..1ad278759
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class DeleteCredentialResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteCredentialResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java
index 051a796ae..3fe7a3650 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java
@@ -52,10 +52,7 @@ public class ExternalLocationInfo {
@JsonProperty("fallback")
private Boolean fallback;
- /**
- * Whether the current securable is accessible from all workspaces or a specific set of
- * workspaces.
- */
+ /** */
@JsonProperty("isolation_mode")
private IsolationMode isolationMode;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java
index d4ca791fc..5b45675b4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfoSecurityType.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Function security type. */
+/** The security type of the function. */
@Generated
public enum FunctionInfoSecurityType {
DEFINER,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java
new file mode 100755
index 000000000..31dca4b95
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialAzureOptions.java
@@ -0,0 +1,51 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Options to customize the requested temporary credential */
+@Generated
+public class GenerateTemporaryServiceCredentialAzureOptions {
+ /**
+ * The resources to which the temporary Azure credential should apply. These resources are the
+ * scopes that are passed to the token provider (see
+ * https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)
+ */
+ @JsonProperty("resources")
+ private Collection Create a new Online Table.
*/
- public OnlineTable create(CreateOnlineTableRequest request) {
- return impl.create(request);
+ public Wait Sets permissions on a cluster policy. Cluster policies can inherit permissions from their
- * root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public ClusterPolicyPermissions setPermissions(ClusterPolicyPermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java
index 10654b89f..64f2a13f3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java
@@ -87,8 +87,9 @@ ClusterPolicyPermissions getPermissions(
/**
* Set cluster policy permissions.
*
- * Sets permissions on a cluster policy. Cluster policies can inherit permissions from their
- * root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
ClusterPolicyPermissions setPermissions(
ClusterPolicyPermissionsRequest clusterPolicyPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
index 2fe2801c5..285d93495 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
@@ -394,7 +394,9 @@ public ClusterPermissions setPermissions(String clusterId) {
/**
* Set cluster permissions.
*
- * Sets permissions on a cluster. Clusters can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public ClusterPermissions setPermissions(ClusterPermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java
index f257b5f61..b85b439b9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java
@@ -180,7 +180,9 @@ GetClusterPermissionLevelsResponse getPermissionLevels(
/**
* Set cluster permissions.
*
- * Sets permissions on a cluster. Clusters can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
ClusterPermissions setPermissions(ClusterPermissionsRequest clusterPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java
index ff2644f73..886970e07 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionAPI.java
@@ -93,34 +93,27 @@ public CommandStatusResponse waitCommandStatusCommandExecutionCancelled(
throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage));
}
- public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError(
- String clusterId, String commandId, String contextId) throws TimeoutException {
- return waitCommandStatusCommandExecutionFinishedOrError(
- clusterId, commandId, contextId, Duration.ofMinutes(20), null);
+ public ContextStatusResponse waitContextStatusCommandExecutionRunning(
+ String clusterId, String contextId) throws TimeoutException {
+ return waitContextStatusCommandExecutionRunning(
+ clusterId, contextId, Duration.ofMinutes(20), null);
}
- public CommandStatusResponse waitCommandStatusCommandExecutionFinishedOrError(
+ public ContextStatusResponse waitContextStatusCommandExecutionRunning(
String clusterId,
- String commandId,
String contextId,
Duration timeout,
- Consumer Sets permissions on an instance pool. Instance pools can inherit permissions from their root
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
* object.
*/
public InstancePoolPermissions setPermissions(InstancePoolPermissionsRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java
index ae5e55252..0a7d03ead 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsService.java
@@ -82,7 +82,8 @@ InstancePoolPermissions getPermissions(
/**
* Set instance pool permissions.
*
- * Sets permissions on an instance pool. Instance pools can inherit permissions from their root
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
* object.
*/
InstancePoolPermissions setPermissions(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java
index 62d74c45d..9c6ddecd1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java
@@ -7,68 +7,20 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Create dashboard */
@Generated
public class CreateDashboardRequest {
- /** The display name of the dashboard. */
- @JsonProperty("display_name")
- private String displayName;
+ /** */
+ @JsonProperty("dashboard")
+ private Dashboard dashboard;
- /**
- * The workspace path of the folder containing the dashboard. Includes leading slash and no
- * trailing slash. This field is excluded in List Dashboards responses.
- */
- @JsonProperty("parent_path")
- private String parentPath;
-
- /**
- * The contents of the dashboard in serialized string form. This field is excluded in List
- * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
- * includes the `serialized_dashboard` field. This field provides the structure of the JSON string
- * that represents the dashboard's layout and components.
- *
- * [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
- */
- @JsonProperty("serialized_dashboard")
- private String serializedDashboard;
-
- /** The warehouse ID used to run the dashboard. */
- @JsonProperty("warehouse_id")
- private String warehouseId;
-
- public CreateDashboardRequest setDisplayName(String displayName) {
- this.displayName = displayName;
- return this;
- }
-
- public String getDisplayName() {
- return displayName;
- }
-
- public CreateDashboardRequest setParentPath(String parentPath) {
- this.parentPath = parentPath;
- return this;
- }
-
- public String getParentPath() {
- return parentPath;
- }
-
- public CreateDashboardRequest setSerializedDashboard(String serializedDashboard) {
- this.serializedDashboard = serializedDashboard;
- return this;
- }
-
- public String getSerializedDashboard() {
- return serializedDashboard;
- }
-
- public CreateDashboardRequest setWarehouseId(String warehouseId) {
- this.warehouseId = warehouseId;
+ public CreateDashboardRequest setDashboard(Dashboard dashboard) {
+ this.dashboard = dashboard;
return this;
}
- public String getWarehouseId() {
- return warehouseId;
+ public Dashboard getDashboard() {
+ return dashboard;
}
@Override
@@ -76,24 +28,16 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateDashboardRequest that = (CreateDashboardRequest) o;
- return Objects.equals(displayName, that.displayName)
- && Objects.equals(parentPath, that.parentPath)
- && Objects.equals(serializedDashboard, that.serializedDashboard)
- && Objects.equals(warehouseId, that.warehouseId);
+ return Objects.equals(dashboard, that.dashboard);
}
@Override
public int hashCode() {
- return Objects.hash(displayName, parentPath, serializedDashboard, warehouseId);
+ return Objects.hash(dashboard);
}
@Override
public String toString() {
- return new ToStringer(CreateDashboardRequest.class)
- .add("displayName", displayName)
- .add("parentPath", parentPath)
- .add("serializedDashboard", serializedDashboard)
- .add("warehouseId", warehouseId)
- .toString();
+ return new ToStringer(CreateDashboardRequest.class).add("dashboard", dashboard).toString();
}
}
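
Callers now pass the full Dashboard model instead of flat request fields. Under the assumption that the Dashboard model carries the fields that previously lived on the request (display name, parent path, warehouse ID), creation looks roughly like this sketch; the values are illustrative:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.CreateDashboardRequest;
import com.databricks.sdk.service.dashboards.Dashboard;

class CreateDashboardExample {
  static Dashboard create(WorkspaceClient w) {
    Dashboard dashboard =
        new Dashboard()
            .setDisplayName("Sales overview")          // formerly CreateDashboardRequest.setDisplayName
            .setParentPath("/Workspace/Users/someone") // formerly setParentPath
            .setWarehouseId("warehouse-id");           // formerly setWarehouseId
    return w.lakeview().create(new CreateDashboardRequest().setDashboard(dashboard));
  }
}
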
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java
index 8e1d57167..1c364865f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java
@@ -8,31 +8,15 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Create dashboard schedule */
@Generated
public class CreateScheduleRequest {
- /** The cron expression describing the frequency of the periodic refresh for this schedule. */
- @JsonProperty("cron_schedule")
- private CronSchedule cronSchedule;
-
/** UUID identifying the dashboard to which the schedule belongs. */
@JsonIgnore private String dashboardId;
- /** The display name for schedule. */
- @JsonProperty("display_name")
- private String displayName;
-
- /** The status indicates whether this schedule is paused or not. */
- @JsonProperty("pause_status")
- private SchedulePauseStatus pauseStatus;
-
- public CreateScheduleRequest setCronSchedule(CronSchedule cronSchedule) {
- this.cronSchedule = cronSchedule;
- return this;
- }
-
- public CronSchedule getCronSchedule() {
- return cronSchedule;
- }
+ /** */
+ @JsonProperty("schedule")
+ private Schedule schedule;
public CreateScheduleRequest setDashboardId(String dashboardId) {
this.dashboardId = dashboardId;
@@ -43,22 +27,13 @@ public String getDashboardId() {
return dashboardId;
}
- public CreateScheduleRequest setDisplayName(String displayName) {
- this.displayName = displayName;
- return this;
- }
-
- public String getDisplayName() {
- return displayName;
- }
-
- public CreateScheduleRequest setPauseStatus(SchedulePauseStatus pauseStatus) {
- this.pauseStatus = pauseStatus;
+ public CreateScheduleRequest setSchedule(Schedule schedule) {
+ this.schedule = schedule;
return this;
}
- public SchedulePauseStatus getPauseStatus() {
- return pauseStatus;
+ public Schedule getSchedule() {
+ return schedule;
}
@Override
@@ -66,24 +41,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateScheduleRequest that = (CreateScheduleRequest) o;
- return Objects.equals(cronSchedule, that.cronSchedule)
- && Objects.equals(dashboardId, that.dashboardId)
- && Objects.equals(displayName, that.displayName)
- && Objects.equals(pauseStatus, that.pauseStatus);
+ return Objects.equals(dashboardId, that.dashboardId) && Objects.equals(schedule, that.schedule);
}
@Override
public int hashCode() {
- return Objects.hash(cronSchedule, dashboardId, displayName, pauseStatus);
+ return Objects.hash(dashboardId, schedule);
}
@Override
public String toString() {
return new ToStringer(CreateScheduleRequest.class)
- .add("cronSchedule", cronSchedule)
.add("dashboardId", dashboardId)
- .add("displayName", displayName)
- .add("pauseStatus", pauseStatus)
+ .add("schedule", schedule)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java
index 9ece761be..66ce04221 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java
@@ -8,6 +8,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Create schedule subscription */
@Generated
public class CreateSubscriptionRequest {
/** UUID identifying the dashboard to which the subscription belongs. */
@@ -16,9 +17,9 @@ public class CreateSubscriptionRequest {
/** UUID identifying the schedule to which the subscription belongs. */
@JsonIgnore private String scheduleId;
- /** Subscriber details for users and destinations to be added as subscribers to the schedule. */
- @JsonProperty("subscriber")
- private Subscriber subscriber;
+ /** */
+ @JsonProperty("subscription")
+ private Subscription subscription;
public CreateSubscriptionRequest setDashboardId(String dashboardId) {
this.dashboardId = dashboardId;
@@ -38,13 +39,13 @@ public String getScheduleId() {
return scheduleId;
}
- public CreateSubscriptionRequest setSubscriber(Subscriber subscriber) {
- this.subscriber = subscriber;
+ public CreateSubscriptionRequest setSubscription(Subscription subscription) {
+ this.subscription = subscription;
return this;
}
- public Subscriber getSubscriber() {
- return subscriber;
+ public Subscription getSubscription() {
+ return subscription;
}
@Override
@@ -54,12 +55,12 @@ public boolean equals(Object o) {
CreateSubscriptionRequest that = (CreateSubscriptionRequest) o;
return Objects.equals(dashboardId, that.dashboardId)
&& Objects.equals(scheduleId, that.scheduleId)
- && Objects.equals(subscriber, that.subscriber);
+ && Objects.equals(subscription, that.subscription);
}
@Override
public int hashCode() {
- return Objects.hash(dashboardId, scheduleId, subscriber);
+ return Objects.hash(dashboardId, scheduleId, subscription);
}
@Override
@@ -67,7 +68,7 @@ public String toString() {
return new ToStringer(CreateSubscriptionRequest.class)
.add("dashboardId", dashboardId)
.add("scheduleId", scheduleId)
- .add("subscriber", subscriber)
+ .add("subscription", subscription)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
index c81ff268b..4b8dd23c6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
@@ -124,7 +124,7 @@ public Wait Execute the SQL query in the message.
*/
public GenieGetMessageQueryResultResponse executeMessageQuery(
- ExecuteMessageQueryRequest request) {
+ GenieExecuteMessageQueryRequest request) {
return impl.executeMessageQuery(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java
similarity index 78%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java
index 9c47c9e63..4ad41a28b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteMessageQueryRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java
@@ -9,7 +9,7 @@
/** Execute SQL query in a conversation message */
@Generated
-public class ExecuteMessageQueryRequest {
+public class GenieExecuteMessageQueryRequest {
/** Conversation ID */
@JsonIgnore private String conversationId;
@@ -19,7 +19,7 @@ public class ExecuteMessageQueryRequest {
/** Genie space ID */
@JsonIgnore private String spaceId;
- public ExecuteMessageQueryRequest setConversationId(String conversationId) {
+ public GenieExecuteMessageQueryRequest setConversationId(String conversationId) {
this.conversationId = conversationId;
return this;
}
@@ -28,7 +28,7 @@ public String getConversationId() {
return conversationId;
}
- public ExecuteMessageQueryRequest setMessageId(String messageId) {
+ public GenieExecuteMessageQueryRequest setMessageId(String messageId) {
this.messageId = messageId;
return this;
}
@@ -37,7 +37,7 @@ public String getMessageId() {
return messageId;
}
- public ExecuteMessageQueryRequest setSpaceId(String spaceId) {
+ public GenieExecuteMessageQueryRequest setSpaceId(String spaceId) {
this.spaceId = spaceId;
return this;
}
@@ -50,7 +50,7 @@ public String getSpaceId() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- ExecuteMessageQueryRequest that = (ExecuteMessageQueryRequest) o;
+ GenieExecuteMessageQueryRequest that = (GenieExecuteMessageQueryRequest) o;
return Objects.equals(conversationId, that.conversationId)
&& Objects.equals(messageId, that.messageId)
&& Objects.equals(spaceId, that.spaceId);
@@ -63,7 +63,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(ExecuteMessageQueryRequest.class)
+ return new ToStringer(GenieExecuteMessageQueryRequest.class)
.add("conversationId", conversationId)
.add("messageId", messageId)
.add("spaceId", spaceId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
index 7a282ccd9..fc0c9236f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
@@ -29,7 +29,7 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request)
@Override
public GenieGetMessageQueryResultResponse executeMessageQuery(
- ExecuteMessageQueryRequest request) {
+ GenieExecuteMessageQueryRequest request) {
String path =
String.format(
"/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/execute-query",
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
index 325c0df27..5dad69dc0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
@@ -30,7 +30,7 @@ GenieMessage createMessage(
* Execute the SQL query in the message.
*/
GenieGetMessageQueryResultResponse executeMessageQuery(
- ExecuteMessageQueryRequest executeMessageQueryRequest);
+ GenieExecuteMessageQueryRequest genieExecuteMessageQueryRequest);
/**
* Get conversation message.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
index 8f7f66050..6f4978b04 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
@@ -27,10 +27,6 @@ public LakeviewAPI(LakeviewService mock) {
impl = mock;
}
- public Dashboard create(String displayName) {
- return create(new CreateDashboardRequest().setDisplayName(displayName));
- }
-
/**
* Create dashboard.
*
@@ -40,9 +36,8 @@ public Dashboard create(CreateDashboardRequest request) {
return impl.create(request);
}
- public Schedule createSchedule(String dashboardId, CronSchedule cronSchedule) {
- return createSchedule(
- new CreateScheduleRequest().setDashboardId(dashboardId).setCronSchedule(cronSchedule));
+ public Schedule createSchedule(String dashboardId) {
+ return createSchedule(new CreateScheduleRequest().setDashboardId(dashboardId));
}
/** Create dashboard schedule. */
@@ -50,13 +45,9 @@ public Schedule createSchedule(CreateScheduleRequest request) {
return impl.createSchedule(request);
}
- public Subscription createSubscription(
- String dashboardId, String scheduleId, Subscriber subscriber) {
+ public Subscription createSubscription(String dashboardId, String scheduleId) {
return createSubscription(
- new CreateSubscriptionRequest()
- .setDashboardId(dashboardId)
- .setScheduleId(scheduleId)
- .setSubscriber(subscriber));
+ new CreateSubscriptionRequest().setDashboardId(dashboardId).setScheduleId(scheduleId));
}
/** Create schedule subscription. */
@@ -256,12 +247,9 @@ public Dashboard update(UpdateDashboardRequest request) {
return impl.update(request);
}
- public Schedule updateSchedule(String dashboardId, String scheduleId, CronSchedule cronSchedule) {
+ public Schedule updateSchedule(String dashboardId, String scheduleId) {
return updateSchedule(
- new UpdateScheduleRequest()
- .setDashboardId(dashboardId)
- .setScheduleId(scheduleId)
- .setCronSchedule(cronSchedule));
+ new UpdateScheduleRequest().setDashboardId(dashboardId).setScheduleId(scheduleId));
}
/** Update dashboard schedule. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
index f6b468526..3d9689b2d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java
@@ -21,7 +21,7 @@ public Dashboard create(CreateDashboardRequest request) {
Map [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
- */
- @JsonProperty("serialized_dashboard")
- private String serializedDashboard;
+ public UpdateDashboardRequest setDashboard(Dashboard dashboard) {
+ this.dashboard = dashboard;
+ return this;
+ }
- /** The warehouse ID used to run the dashboard. */
- @JsonProperty("warehouse_id")
- private String warehouseId;
+ public Dashboard getDashboard() {
+ return dashboard;
+ }
public UpdateDashboardRequest setDashboardId(String dashboardId) {
this.dashboardId = dashboardId;
@@ -48,67 +36,25 @@ public String getDashboardId() {
return dashboardId;
}
- public UpdateDashboardRequest setDisplayName(String displayName) {
- this.displayName = displayName;
- return this;
- }
-
- public String getDisplayName() {
- return displayName;
- }
-
- public UpdateDashboardRequest setEtag(String etag) {
- this.etag = etag;
- return this;
- }
-
- public String getEtag() {
- return etag;
- }
-
- public UpdateDashboardRequest setSerializedDashboard(String serializedDashboard) {
- this.serializedDashboard = serializedDashboard;
- return this;
- }
-
- public String getSerializedDashboard() {
- return serializedDashboard;
- }
-
- public UpdateDashboardRequest setWarehouseId(String warehouseId) {
- this.warehouseId = warehouseId;
- return this;
- }
-
- public String getWarehouseId() {
- return warehouseId;
- }
-
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
UpdateDashboardRequest that = (UpdateDashboardRequest) o;
- return Objects.equals(dashboardId, that.dashboardId)
- && Objects.equals(displayName, that.displayName)
- && Objects.equals(etag, that.etag)
- && Objects.equals(serializedDashboard, that.serializedDashboard)
- && Objects.equals(warehouseId, that.warehouseId);
+ return Objects.equals(dashboard, that.dashboard)
+ && Objects.equals(dashboardId, that.dashboardId);
}
@Override
public int hashCode() {
- return Objects.hash(dashboardId, displayName, etag, serializedDashboard, warehouseId);
+ return Objects.hash(dashboard, dashboardId);
}
@Override
public String toString() {
return new ToStringer(UpdateDashboardRequest.class)
+ .add("dashboard", dashboard)
.add("dashboardId", dashboardId)
- .add("displayName", displayName)
- .add("etag", etag)
- .add("serializedDashboard", serializedDashboard)
- .add("warehouseId", warehouseId)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java
index cfc48f115..cffdc6370 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java
@@ -8,43 +8,19 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Update dashboard schedule */
@Generated
public class UpdateScheduleRequest {
- /** The cron expression describing the frequency of the periodic refresh for this schedule. */
- @JsonProperty("cron_schedule")
- private CronSchedule cronSchedule;
-
/** UUID identifying the dashboard to which the schedule belongs. */
@JsonIgnore private String dashboardId;
- /** The display name for schedule. */
- @JsonProperty("display_name")
- private String displayName;
-
- /**
- * The etag for the schedule. Must be left empty on create, must be provided on updates to ensure
- * that the schedule has not been modified since the last read, and can be optionally provided on
- * delete.
- */
- @JsonProperty("etag")
- private String etag;
-
- /** The status indicates whether this schedule is paused or not. */
- @JsonProperty("pause_status")
- private SchedulePauseStatus pauseStatus;
+ /** */
+ @JsonProperty("schedule")
+ private Schedule schedule;
/** UUID identifying the schedule. */
@JsonIgnore private String scheduleId;
- public UpdateScheduleRequest setCronSchedule(CronSchedule cronSchedule) {
- this.cronSchedule = cronSchedule;
- return this;
- }
-
- public CronSchedule getCronSchedule() {
- return cronSchedule;
- }
-
public UpdateScheduleRequest setDashboardId(String dashboardId) {
this.dashboardId = dashboardId;
return this;
@@ -54,31 +30,13 @@ public String getDashboardId() {
return dashboardId;
}
- public UpdateScheduleRequest setDisplayName(String displayName) {
- this.displayName = displayName;
- return this;
- }
-
- public String getDisplayName() {
- return displayName;
- }
-
- public UpdateScheduleRequest setEtag(String etag) {
- this.etag = etag;
- return this;
- }
-
- public String getEtag() {
- return etag;
- }
-
- public UpdateScheduleRequest setPauseStatus(SchedulePauseStatus pauseStatus) {
- this.pauseStatus = pauseStatus;
+ public UpdateScheduleRequest setSchedule(Schedule schedule) {
+ this.schedule = schedule;
return this;
}
- public SchedulePauseStatus getPauseStatus() {
- return pauseStatus;
+ public Schedule getSchedule() {
+ return schedule;
}
public UpdateScheduleRequest setScheduleId(String scheduleId) {
@@ -95,27 +53,21 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
UpdateScheduleRequest that = (UpdateScheduleRequest) o;
- return Objects.equals(cronSchedule, that.cronSchedule)
- && Objects.equals(dashboardId, that.dashboardId)
- && Objects.equals(displayName, that.displayName)
- && Objects.equals(etag, that.etag)
- && Objects.equals(pauseStatus, that.pauseStatus)
+ return Objects.equals(dashboardId, that.dashboardId)
+ && Objects.equals(schedule, that.schedule)
&& Objects.equals(scheduleId, that.scheduleId);
}
@Override
public int hashCode() {
- return Objects.hash(cronSchedule, dashboardId, displayName, etag, pauseStatus, scheduleId);
+ return Objects.hash(dashboardId, schedule, scheduleId);
}
@Override
public String toString() {
return new ToStringer(UpdateScheduleRequest.class)
- .add("cronSchedule", cronSchedule)
.add("dashboardId", dashboardId)
- .add("displayName", displayName)
- .add("etag", etag)
- .add("pauseStatus", pauseStatus)
+ .add("schedule", schedule)
.add("scheduleId", scheduleId)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
index 2eb2601d9..521824587 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java
@@ -118,8 +118,9 @@ public ObjectPermissions set(String requestObjectType, String requestObjectId) {
/**
* Set object permissions.
*
- * Sets permissions on an object. Objects can inherit permissions from their parent objects or
- * root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their parent
+ * objects or root object.
*/
public ObjectPermissions set(PermissionsRequest request) {
return impl.set(request);
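
Because set() now documents full-replacement semantics, callers should send the complete ACL they want to keep rather than a delta. A sketch, assuming a configured WorkspaceClient and the generated iam request and model setters; the object ID and group name are illustrative:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.iam.AccessControlRequest;
import com.databricks.sdk.service.iam.PermissionLevel;
import com.databricks.sdk.service.iam.PermissionsRequest;
import java.util.Arrays;

class SetJobPermissionsExample {
  // Every direct permission not included in the list below is removed by the call.
  static void replaceAcl(WorkspaceClient w, String jobId) {
    w.permissions()
        .set(
            new PermissionsRequest()
                .setRequestObjectType("jobs")
                .setRequestObjectId(jobId)
                .setAccessControlList(
                    Arrays.asList(
                        new AccessControlRequest()
                            .setGroupName("data-eng")
                            .setPermissionLevel(PermissionLevel.CAN_MANAGE))));
  }
}
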
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
index e641809ea..03ccea94e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java
@@ -80,8 +80,9 @@ GetPermissionLevelsResponse getPermissionLevels(
/**
* Set object permissions.
*
- * Sets permissions on an object. Objects can inherit permissions from their parent objects or
- * root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their parent
+ * objects or root object.
*/
ObjectPermissions set(PermissionsRequest permissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java
index e2764fd02..d079aba02 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java
@@ -132,7 +132,9 @@ public void patch(PartialUpdate request) {
/**
* Set password permissions.
*
- * Sets permissions on all passwords. Passwords can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public PasswordPermissions setPermissions(PasswordPermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java
index f922d8310..83d9421eb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.java
@@ -76,7 +76,9 @@ public interface UsersService {
/**
* Set password permissions.
*
- * Sets permissions on all passwords. Passwords can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
PasswordPermissions setPermissions(PasswordPermissionsRequest passwordPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java
index 88691b4d7..bf870d3f6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunRequest.java
@@ -22,8 +22,8 @@ public class GetRunRequest {
private Boolean includeResolvedValues;
/**
- * To list the next page or the previous page of job tasks, set this field to the value of the
- * `next_page_token` or `prev_page_token` returned in the GetJob response.
+ * To list the next page of job tasks, set this field to the value of the `next_page_token`
+ * returned in the GetJob response.
*/
@JsonIgnore
@QueryParam("page_token")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
index a5ec8c8b3..5b0ce638a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
@@ -341,7 +341,9 @@ public JobPermissions setPermissions(String jobId) {
/**
* Set job permissions.
*
- * Sets permissions on a job. Jobs can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public JobPermissions setPermissions(JobPermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
index 57433b9a8..46696459b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
@@ -156,7 +156,9 @@ GetJobPermissionLevelsResponse getPermissionLevels(
/**
* Set job permissions.
*
- * Sets permissions on a job. Jobs can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
JobPermissions setPermissions(JobPermissionsRequest jobPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java
index d79a76992..7fb9ace32 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java
@@ -25,8 +25,9 @@ public class RepairRun {
* cannot be specified in conjunction with notebook_params. The JSON representation of this field
* (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- * Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters
- * containing information about job runs.
+ * Use [Task parameter variables] to set parameters containing information about job runs.
+ *
+ * [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
*/
@JsonProperty("jar_params")
private Collection<String> jarParams;
- * Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters
- * containing information about job runs.
+ * Use [Task parameter variables] to set parameters containing information about job runs.
+ *
+ * [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
*/
@JsonProperty("jar_params")
private Collection<String> jarParams;
- * Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters
- * containing information about job runs.
+ * Use [Task parameter variables] to set parameters containing information about job runs.
+ *
+ * [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
*/
@JsonProperty("jar_params")
private Collection<String> jarParams;
- * Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters
- * containing information about job runs.
+ * Use [Task parameter variables] to set parameters containing information about job runs.
+ *
+ * [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
*/
@JsonProperty("jar_params")
private Collection<String> jarParams;
* In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead,
* use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -228,7 +231,10 @@ public class RunTask {
@JsonProperty("spark_submit_task")
private SparkSubmitTask sparkSubmitTask;
- /** If sql_task, indicates that this job must execute a SQL task. */
+ /**
+ * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+ * the `sql_task` field is present.
+ */
@JsonProperty("sql_task")
private SqlTask sqlTask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java
index 6f8a12c10..be1e79187 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java
@@ -11,16 +11,16 @@
@Generated
public class SubmitTask {
/**
- * If condition_task, specifies a condition with an outcome that can be used to control the
- * execution of other tasks. Does not require a cluster to execute and does not support retries or
- * notifications.
+ * The task evaluates a condition that can be used to control the execution of other tasks when
+ * the `condition_task` field is present. The condition task does not require a cluster to execute
+ * and does not support retries or notifications.
*/
@JsonProperty("condition_task")
private ConditionTask conditionTask;
/**
- * If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
- * the ability to use a serverless or a pro SQL warehouse.
+ * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+ * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.
*/
@JsonProperty("dbt_task")
private DbtTask dbtTask;
@@ -59,7 +59,10 @@ public class SubmitTask {
@JsonProperty("existing_cluster_id")
private String existingClusterId;
- /** If for_each_task, indicates that this task must execute the nested task within it. */
+ /**
+ * The task executes a nested task for every input provided when the `for_each_task` field is
+ * present.
+ */
@JsonProperty("for_each_task")
private ForEachTask forEachTask;
@@ -78,10 +81,7 @@ public class SubmitTask {
@JsonProperty("new_cluster")
private com.databricks.sdk.service.compute.ClusterSpec newCluster;
- /**
- * If notebook_task, indicates that this task must run a notebook. This field may not be specified
- * in conjunction with spark_jar_task.
- */
+ /** The task runs a notebook when the `notebook_task` field is present. */
@JsonProperty("notebook_task")
private NotebookTask notebookTask;
@@ -92,11 +92,14 @@ public class SubmitTask {
@JsonProperty("notification_settings")
private TaskNotificationSettings notificationSettings;
- /** If pipeline_task, indicates that this task must execute a Pipeline. */
+ /**
+ * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+ * configured to use triggered mode are supported.
+ */
@JsonProperty("pipeline_task")
private PipelineTask pipelineTask;
- /** If python_wheel_task, indicates that this job must execute a PythonWheel. */
+ /** The task runs a Python wheel when the `python_wheel_task` field is present. */
@JsonProperty("python_wheel_task")
private PythonWheelTask pythonWheelTask;
@@ -108,21 +111,21 @@ public class SubmitTask {
@JsonProperty("run_if")
private RunIf runIf;
- /** If run_job_task, indicates that this task must execute another job. */
+ /** The task triggers another job when the `run_job_task` field is present. */
@JsonProperty("run_job_task")
private RunJobTask runJobTask;
- /** If spark_jar_task, indicates that this task must run a JAR. */
+ /** The task runs a JAR when the `spark_jar_task` field is present. */
@JsonProperty("spark_jar_task")
private SparkJarTask sparkJarTask;
- /** If spark_python_task, indicates that this task must run a Python file. */
+ /** The task runs a Python file when the `spark_python_task` field is present. */
@JsonProperty("spark_python_task")
private SparkPythonTask sparkPythonTask;
/**
- * If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
- * This task can run only on new clusters.
+ * (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+ * This task can run only on new clusters and is not compatible with serverless compute.
*
* In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead,
* use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -140,7 +143,10 @@ public class SubmitTask {
@JsonProperty("spark_submit_task")
private SparkSubmitTask sparkSubmitTask;
- /** If sql_task, indicates that this job must execute a SQL task. */
+ /**
+ * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+ * the `sql_task` field is present.
+ */
@JsonProperty("sql_task")
private SqlTask sqlTask;
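The reworded field docs all follow the same pattern: the task type is selected by whichever *_task field is present on the request. A minimal sketch of building a one-off notebook task from the classes above; the task key, cluster ID, and notebook path are placeholders, and submission itself would go through the existing JobsAPI submit call:

import com.databricks.sdk.service.jobs.NotebookTask;
import com.databricks.sdk.service.jobs.SubmitTask;

public class BuildSubmitTaskExample {
  public static void main(String[] args) {
    SubmitTask task =
        new SubmitTask()
            .setTaskKey("nightly-report")                 // placeholder task key
            .setExistingClusterId("1234-567890-abcde123") // placeholder cluster ID
            .setNotebookTask(new NotebookTask().setNotebookPath("/Repos/reports/nightly"));
    System.out.println(task); // only notebook_task is set, so this runs as a notebook task
  }
}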
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java
index 0f9026396..011b3ee30 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java
@@ -11,16 +11,16 @@
@Generated
public class Task {
/**
- * If condition_task, specifies a condition with an outcome that can be used to control the
- * execution of other tasks. Does not require a cluster to execute and does not support retries or
- * notifications.
+ * The task evaluates a condition that can be used to control the execution of other tasks when
+ * the `condition_task` field is present. The condition task does not require a cluster to execute
+ * and does not support retries or notifications.
*/
@JsonProperty("condition_task")
private ConditionTask conditionTask;
/**
- * If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
- * the ability to use a serverless or a pro SQL warehouse.
+ * The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+ * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.
*/
@JsonProperty("dbt_task")
private DbtTask dbtTask;
@@ -64,7 +64,10 @@ public class Task {
@JsonProperty("existing_cluster_id")
private String existingClusterId;
- /** If for_each_task, indicates that this task must execute the nested task within it. */
+ /**
+ * The task executes a nested task for every input provided when the `for_each_task` field is
+ * present.
+ */
@JsonProperty("for_each_task")
private ForEachTask forEachTask;
@@ -106,10 +109,7 @@ public class Task {
@JsonProperty("new_cluster")
private com.databricks.sdk.service.compute.ClusterSpec newCluster;
- /**
- * If notebook_task, indicates that this task must run a notebook. This field may not be specified
- * in conjunction with spark_jar_task.
- */
+ /** The task runs a notebook when the `notebook_task` field is present. */
@JsonProperty("notebook_task")
private NotebookTask notebookTask;
@@ -120,11 +120,14 @@ public class Task {
@JsonProperty("notification_settings")
private TaskNotificationSettings notificationSettings;
- /** If pipeline_task, indicates that this task must execute a Pipeline. */
+ /**
+ * The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+ * configured to use triggered mode are supported.
+ */
@JsonProperty("pipeline_task")
private PipelineTask pipelineTask;
- /** If python_wheel_task, indicates that this job must execute a PythonWheel. */
+ /** The task runs a Python wheel when the `python_wheel_task` field is present. */
@JsonProperty("python_wheel_task")
private PythonWheelTask pythonWheelTask;
@@ -148,21 +151,21 @@ public class Task {
@JsonProperty("run_if")
private RunIf runIf;
- /** If run_job_task, indicates that this task must execute another job. */
+ /** The task triggers another job when the `run_job_task` field is present. */
@JsonProperty("run_job_task")
private RunJobTask runJobTask;
- /** If spark_jar_task, indicates that this task must run a JAR. */
+ /** The task runs a JAR when the `spark_jar_task` field is present. */
@JsonProperty("spark_jar_task")
private SparkJarTask sparkJarTask;
- /** If spark_python_task, indicates that this task must run a Python file. */
+ /** The task runs a Python file when the `spark_python_task` field is present. */
@JsonProperty("spark_python_task")
private SparkPythonTask sparkPythonTask;
/**
- * If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
- * This task can run only on new clusters.
+ * (Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+ * This task can run only on new clusters and is not compatible with serverless compute.
*
* In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead,
* use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -180,7 +183,10 @@ public class Task {
@JsonProperty("spark_submit_task")
private SparkSubmitTask sparkSubmitTask;
- /** If sql_task, indicates that this job must execute a SQL task. */
+ /**
+ * The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+ * the `sql_task` field is present.
+ */
@JsonProperty("sql_task")
private SqlTask sqlTask;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java
index e37118d9e..699094949 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java
@@ -11,4 +11,5 @@ public enum AssetType {
ASSET_TYPE_MEDIA,
ASSET_TYPE_MODEL,
ASSET_TYPE_NOTEBOOK,
+ ASSET_TYPE_PARTNER_INTEGRATION,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
index 7f3923351..8c4c27e8c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
@@ -463,7 +463,8 @@ public ExperimentPermissions setPermissions(String experimentId) {
/**
* Set experiment permissions.
*
- * Sets permissions on an experiment. Experiments can inherit permissions from their root
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
* object.
*/
public ExperimentPermissions setPermissions(ExperimentPermissionsRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
index c4a641627..323c848c4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
@@ -274,7 +274,8 @@ ExperimentPermissions getPermissions(
/**
* Set experiment permissions.
*
- * Sets permissions on an experiment. Experiments can inherit permissions from their root
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
* object.
*/
ExperimentPermissions setPermissions(ExperimentPermissionsRequest experimentPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java
index e70d42047..9c4325eb9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java
@@ -478,8 +478,9 @@ public RegisteredModelPermissions setPermissions(String registeredModelId) {
/**
* Set registered model permissions.
*
- * Sets permissions on a registered model. Registered models can inherit permissions from their
- * root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public RegisteredModelPermissions setPermissions(RegisteredModelPermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java
index bf3535616..3f9dfae1a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryService.java
@@ -241,8 +241,9 @@ SearchModelVersionsResponse searchModelVersions(
/**
* Set registered model permissions.
*
- * Sets permissions on a registered model. Registered models can inherit permissions from their
- * root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
RegisteredModelPermissions setPermissions(
RegisteredModelPermissionsRequest registeredModelPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java
index 3ac7226b8..816fb09bb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsRequest.java
@@ -3,6 +3,7 @@
package com.databricks.sdk.service.oauth2;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
@@ -10,9 +11,31 @@
/** List service principal secrets */
@Generated
public class ListServicePrincipalSecretsRequest {
+ /**
+ * An opaque page token which was the `next_page_token` in the response of the previous request to
+ * list the secrets for this service principal. Provide this token to retrieve the next page of
+ * secret entries. When providing a `page_token`, all other parameters provided to the request
+ * must match the previous request. To list all of the secrets for a service principal, it is
+ * necessary to continue requesting pages of entries until the response contains no
+ * `next_page_token`. Note that the number of entries returned must not be used to determine when
+ * the listing is complete.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
/** The service principal ID. */
@JsonIgnore private Long servicePrincipalId;
+ public ListServicePrincipalSecretsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
public ListServicePrincipalSecretsRequest setServicePrincipalId(Long servicePrincipalId) {
this.servicePrincipalId = servicePrincipalId;
return this;
@@ -27,17 +50,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ListServicePrincipalSecretsRequest that = (ListServicePrincipalSecretsRequest) o;
- return Objects.equals(servicePrincipalId, that.servicePrincipalId);
+ return Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(servicePrincipalId, that.servicePrincipalId);
}
@Override
public int hashCode() {
- return Objects.hash(servicePrincipalId);
+ return Objects.hash(pageToken, servicePrincipalId);
}
@Override
public String toString() {
return new ToStringer(ListServicePrincipalSecretsRequest.class)
+ .add("pageToken", pageToken)
.add("servicePrincipalId", servicePrincipalId)
.toString();
}
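The new page_token field turns secret listing into a standard token loop. A hedged sketch, assuming an account-level servicePrincipalSecrets().list(...) entry point that returns the raw ListServicePrincipalSecretsResponse (newer SDK versions may instead expose an auto-paginating Iterable) and the generated getSecrets()/getNextPageToken() accessors:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest;
import com.databricks.sdk.service.oauth2.ListServicePrincipalSecretsResponse;

public class ListServicePrincipalSecretsExample {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();
    String pageToken = null;
    do {
      ListServicePrincipalSecretsResponse page =
          a.servicePrincipalSecrets()
              .list(
                  new ListServicePrincipalSecretsRequest()
                      .setServicePrincipalId(12345L) // placeholder service principal ID
                      .setPageToken(pageToken));
      page.getSecrets().forEach(s -> System.out.println(s.getId()));
      pageToken = page.getNextPageToken(); // absent on the last page
    } while (pageToken != null);
  }
}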
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java
index 1b0d38c8c..dd971e938 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ListServicePrincipalSecretsResponse.java
@@ -10,10 +10,23 @@
@Generated
public class ListServicePrincipalSecretsResponse {
+ /** A token, which can be sent as `page_token` to retrieve the next page. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
/** List of the secrets */
@JsonProperty("secrets")
private Collection<SecretInfo> secrets;
- * Sets permissions on a pipeline. Pipelines can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public PipelinePermissions setPermissions(PipelinePermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
index 127bbb06d..332eabdcf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
@@ -90,7 +90,9 @@ ListPipelineEventsResponse listPipelineEvents(
/**
* Set pipeline permissions.
*
- * Sets permissions on a pipeline. Pipelines can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
PipelinePermissions setPermissions(PipelinePermissionsRequest pipelinePermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java
new file mode 100755
index 000000000..3156277a6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java
@@ -0,0 +1,84 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class RestartWindow {
+ /**
+ * Days of week in which the restart is allowed to happen (within a five-hour window starting at
+ * start_hour). If not specified, all days of the week will be used.
+ */
+ @JsonProperty("days_of_week")
+ private RestartWindowDaysOfWeek daysOfWeek;
+
+ /**
+ * An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
+ * Continuous pipeline restart is triggered only within a five-hour window starting at this hour.
+ */
+ @JsonProperty("start_hour")
+ private Long startHour;
+
+ /**
+ * Time zone id of restart window. See
+ * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
+ * for details. If not specified, UTC will be used.
+ */
+ @JsonProperty("time_zone_id")
+ private String timeZoneId;
+
+ public RestartWindow setDaysOfWeek(RestartWindowDaysOfWeek daysOfWeek) {
+ this.daysOfWeek = daysOfWeek;
+ return this;
+ }
+
+ public RestartWindowDaysOfWeek getDaysOfWeek() {
+ return daysOfWeek;
+ }
+
+ public RestartWindow setStartHour(Long startHour) {
+ this.startHour = startHour;
+ return this;
+ }
+
+ public Long getStartHour() {
+ return startHour;
+ }
+
+ public RestartWindow setTimeZoneId(String timeZoneId) {
+ this.timeZoneId = timeZoneId;
+ return this;
+ }
+
+ public String getTimeZoneId() {
+ return timeZoneId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RestartWindow that = (RestartWindow) o;
+ return Objects.equals(daysOfWeek, that.daysOfWeek)
+ && Objects.equals(startHour, that.startHour)
+ && Objects.equals(timeZoneId, that.timeZoneId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(daysOfWeek, startHour, timeZoneId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RestartWindow.class)
+ .add("daysOfWeek", daysOfWeek)
+ .add("startHour", startHour)
+ .add("timeZoneId", timeZoneId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java
new file mode 100755
index 000000000..37bf738a0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindowDaysOfWeek.java
@@ -0,0 +1,20 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Days of week in which the restart is allowed to happen (within a five-hour window starting at
+ * start_hour). If not specified, all days of the week will be used.
+ */
+@Generated
+public enum RestartWindowDaysOfWeek {
+ FRIDAY,
+ MONDAY,
+ SATURDAY,
+ SUNDAY,
+ THURSDAY,
+ TUESDAY,
+ WEDNESDAY,
+}
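A minimal sketch of the new restart window, using only the setters defined above: allow continuous-pipeline restarts on Sundays within the five-hour window starting at 02:00 UTC. Attaching the window to a pipeline spec (for example on create or edit) is assumed to follow the usual builder pattern and is not shown here:

import com.databricks.sdk.service.pipelines.RestartWindow;
import com.databricks.sdk.service.pipelines.RestartWindowDaysOfWeek;

public class RestartWindowExample {
  public static void main(String[] args) {
    RestartWindow window =
        new RestartWindow()
            .setDaysOfWeek(RestartWindowDaysOfWeek.SUNDAY) // restrict restarts to Sundays
            .setStartHour(2L)                              // five-hour window opens at 02:00
            .setTimeZoneId("UTC");                         // UTC is also the default if omitted
    System.out.println(window);
  }
}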
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
index 1d928eaad..2cc6ec80a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
@@ -93,6 +93,10 @@ public class CreateWorkspaceRequest {
@JsonProperty("gke_config")
private GkeConfig gkeConfig;
+ /** Whether no public IP is enabled for the workspace. */
+ @JsonProperty("is_no_public_ip_enabled")
+ private Boolean isNoPublicIpEnabled;
+
/**
* The Google Cloud region of the workspace data plane in your Google account. For example,
* `us-east4`.
@@ -225,6 +229,15 @@ public GkeConfig getGkeConfig() {
return gkeConfig;
}
+ public CreateWorkspaceRequest setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) {
+ this.isNoPublicIpEnabled = isNoPublicIpEnabled;
+ return this;
+ }
+
+ public Boolean getIsNoPublicIpEnabled() {
+ return isNoPublicIpEnabled;
+ }
+
public CreateWorkspaceRequest setLocation(String location) {
this.location = location;
return this;
@@ -311,6 +324,7 @@ public boolean equals(Object o) {
&& Objects.equals(deploymentName, that.deploymentName)
&& Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig)
&& Objects.equals(gkeConfig, that.gkeConfig)
+ && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled)
&& Objects.equals(location, that.location)
&& Objects.equals(
managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId)
@@ -333,6 +347,7 @@ public int hashCode() {
deploymentName,
gcpManagedNetworkConfig,
gkeConfig,
+ isNoPublicIpEnabled,
location,
managedServicesCustomerManagedKeyId,
networkId,
@@ -354,6 +369,7 @@ public String toString() {
.add("deploymentName", deploymentName)
.add("gcpManagedNetworkConfig", gcpManagedNetworkConfig)
.add("gkeConfig", gkeConfig)
+ .add("isNoPublicIpEnabled", isNoPublicIpEnabled)
.add("location", location)
.add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId)
.add("networkId", networkId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java
new file mode 100755
index 000000000..7654c68e7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/ExternalCustomerInfo.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.provisioning;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ExternalCustomerInfo {
+ /** Email of the authoritative user. */
+ @JsonProperty("authoritative_user_email")
+ private String authoritativeUserEmail;
+
+ /** The authoritative user full name. */
+ @JsonProperty("authoritative_user_full_name")
+ private String authoritativeUserFullName;
+
+ /** The legal entity name for the external workspace */
+ @JsonProperty("customer_name")
+ private String customerName;
+
+ public ExternalCustomerInfo setAuthoritativeUserEmail(String authoritativeUserEmail) {
+ this.authoritativeUserEmail = authoritativeUserEmail;
+ return this;
+ }
+
+ public String getAuthoritativeUserEmail() {
+ return authoritativeUserEmail;
+ }
+
+ public ExternalCustomerInfo setAuthoritativeUserFullName(String authoritativeUserFullName) {
+ this.authoritativeUserFullName = authoritativeUserFullName;
+ return this;
+ }
+
+ public String getAuthoritativeUserFullName() {
+ return authoritativeUserFullName;
+ }
+
+ public ExternalCustomerInfo setCustomerName(String customerName) {
+ this.customerName = customerName;
+ return this;
+ }
+
+ public String getCustomerName() {
+ return customerName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ExternalCustomerInfo that = (ExternalCustomerInfo) o;
+ return Objects.equals(authoritativeUserEmail, that.authoritativeUserEmail)
+ && Objects.equals(authoritativeUserFullName, that.authoritativeUserFullName)
+ && Objects.equals(customerName, that.customerName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(authoritativeUserEmail, authoritativeUserFullName, customerName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ExternalCustomerInfo.class)
+ .add("authoritativeUserEmail", authoritativeUserEmail)
+ .add("authoritativeUserFullName", authoritativeUserFullName)
+ .add("customerName", customerName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java
index cfb8817af..a690adac9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java
@@ -52,6 +52,13 @@ public class UpdateWorkspaceRequest {
@JsonProperty("network_id")
private String networkId;
+ /**
+ * The ID of the workspace's private access settings configuration object. This parameter is
+ * available only for updating failed workspaces.
+ */
+ @JsonProperty("private_access_settings_id")
+ private String privateAccessSettingsId;
+
/**
* The ID of the workspace's storage configuration object. This parameter is available only for
* updating failed workspaces.
@@ -124,6 +131,15 @@ public String getNetworkId() {
return networkId;
}
+ public UpdateWorkspaceRequest setPrivateAccessSettingsId(String privateAccessSettingsId) {
+ this.privateAccessSettingsId = privateAccessSettingsId;
+ return this;
+ }
+
+ public String getPrivateAccessSettingsId() {
+ return privateAccessSettingsId;
+ }
+
public UpdateWorkspaceRequest setStorageConfigurationId(String storageConfigurationId) {
this.storageConfigurationId = storageConfigurationId;
return this;
@@ -163,6 +179,7 @@ public boolean equals(Object o) {
managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId)
&& Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId)
&& Objects.equals(networkId, that.networkId)
+ && Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId)
&& Objects.equals(storageConfigurationId, that.storageConfigurationId)
&& Objects.equals(storageCustomerManagedKeyId, that.storageCustomerManagedKeyId)
&& Objects.equals(workspaceId, that.workspaceId);
@@ -177,6 +194,7 @@ public int hashCode() {
managedServicesCustomerManagedKeyId,
networkConnectivityConfigId,
networkId,
+ privateAccessSettingsId,
storageConfigurationId,
storageCustomerManagedKeyId,
workspaceId);
@@ -191,6 +209,7 @@ public String toString() {
.add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId)
.add("networkConnectivityConfigId", networkConnectivityConfigId)
.add("networkId", networkId)
+ .add("privateAccessSettingsId", privateAccessSettingsId)
.add("storageConfigurationId", storageConfigurationId)
.add("storageCustomerManagedKeyId", storageCustomerManagedKeyId)
.add("workspaceId", workspaceId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java
index b6abc5a88..4d6b61c9d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java
@@ -55,6 +55,13 @@ public class Workspace {
@JsonProperty("deployment_name")
private String deploymentName;
+ /**
+ * If this workspace is for an external customer, then external_customer_info is populated. If
+ * this workspace is not for an external customer, then external_customer_info is empty.
+ */
+ @JsonProperty("external_customer_info")
+ private ExternalCustomerInfo externalCustomerInfo;
+
/**
* The network settings for the workspace. The configurations are only for Databricks-managed
* VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the
@@ -83,6 +90,10 @@ public class Workspace {
@JsonProperty("gke_config")
private GkeConfig gkeConfig;
+ /** Whether no public IP is enabled for the workspace. */
+ @JsonProperty("is_no_public_ip_enabled")
+ private Boolean isNoPublicIpEnabled;
+
/**
* The Google Cloud region of the workspace data plane in your Google account (for example,
* `us-east4`).
@@ -231,6 +242,15 @@ public String getDeploymentName() {
return deploymentName;
}
+ public Workspace setExternalCustomerInfo(ExternalCustomerInfo externalCustomerInfo) {
+ this.externalCustomerInfo = externalCustomerInfo;
+ return this;
+ }
+
+ public ExternalCustomerInfo getExternalCustomerInfo() {
+ return externalCustomerInfo;
+ }
+
public Workspace setGcpManagedNetworkConfig(GcpManagedNetworkConfig gcpManagedNetworkConfig) {
this.gcpManagedNetworkConfig = gcpManagedNetworkConfig;
return this;
@@ -249,6 +269,15 @@ public GkeConfig getGkeConfig() {
return gkeConfig;
}
+ public Workspace setIsNoPublicIpEnabled(Boolean isNoPublicIpEnabled) {
+ this.isNoPublicIpEnabled = isNoPublicIpEnabled;
+ return this;
+ }
+
+ public Boolean getIsNoPublicIpEnabled() {
+ return isNoPublicIpEnabled;
+ }
+
public Workspace setLocation(String location) {
this.location = location;
return this;
@@ -363,8 +392,10 @@ public boolean equals(Object o) {
&& Objects.equals(credentialsId, that.credentialsId)
&& Objects.equals(customTags, that.customTags)
&& Objects.equals(deploymentName, that.deploymentName)
+ && Objects.equals(externalCustomerInfo, that.externalCustomerInfo)
&& Objects.equals(gcpManagedNetworkConfig, that.gcpManagedNetworkConfig)
&& Objects.equals(gkeConfig, that.gkeConfig)
+ && Objects.equals(isNoPublicIpEnabled, that.isNoPublicIpEnabled)
&& Objects.equals(location, that.location)
&& Objects.equals(
managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId)
@@ -391,8 +422,10 @@ public int hashCode() {
credentialsId,
customTags,
deploymentName,
+ externalCustomerInfo,
gcpManagedNetworkConfig,
gkeConfig,
+ isNoPublicIpEnabled,
location,
managedServicesCustomerManagedKeyId,
networkId,
@@ -418,8 +451,10 @@ public String toString() {
.add("credentialsId", credentialsId)
.add("customTags", customTags)
.add("deploymentName", deploymentName)
+ .add("externalCustomerInfo", externalCustomerInfo)
.add("gcpManagedNetworkConfig", gcpManagedNetworkConfig)
.add("gkeConfig", gkeConfig)
+ .add("isNoPublicIpEnabled", isNoPublicIpEnabled)
.add("location", location)
.add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId)
.add("networkId", networkId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
index 5ef50b903..37c8b27b2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
@@ -276,8 +276,9 @@ public ServingEndpointPermissions setPermissions(String servingEndpointId) {
/**
* Set serving endpoint permissions.
*
- * Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their
- * root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public ServingEndpointPermissions setPermissions(ServingEndpointPermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
index f35cb2a7c..5a42d11ce 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
@@ -114,8 +114,9 @@ ServingEndpointPermissions getPermissions(
/**
* Set serving endpoint permissions.
*
- * Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their
- * root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
ServingEndpointPermissions setPermissions(
ServingEndpointPermissionsRequest servingEndpointPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java
new file mode 100755
index 000000000..698c78634
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicy.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AibiDashboardEmbeddingAccessPolicy {
+ /** */
+ @JsonProperty("access_policy_type")
+ private AibiDashboardEmbeddingAccessPolicyAccessPolicyType accessPolicyType;
+
+ public AibiDashboardEmbeddingAccessPolicy setAccessPolicyType(
+ AibiDashboardEmbeddingAccessPolicyAccessPolicyType accessPolicyType) {
+ this.accessPolicyType = accessPolicyType;
+ return this;
+ }
+
+ public AibiDashboardEmbeddingAccessPolicyAccessPolicyType getAccessPolicyType() {
+ return accessPolicyType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AibiDashboardEmbeddingAccessPolicy that = (AibiDashboardEmbeddingAccessPolicy) o;
+ return Objects.equals(accessPolicyType, that.accessPolicyType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(accessPolicyType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AibiDashboardEmbeddingAccessPolicy.class)
+ .add("accessPolicyType", accessPolicyType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java
new file mode 100755
index 000000000..35af0c8fa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAPI.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or
+ * disabled at the workspace level. By default, this setting is conditionally enabled
+ * (ALLOW_APPROVED_DOMAINS).
+ */
+@Generated
+public class AibiDashboardEmbeddingAccessPolicyAPI {
+ private static final Logger LOG =
+ LoggerFactory.getLogger(AibiDashboardEmbeddingAccessPolicyAPI.class);
+
+ private final AibiDashboardEmbeddingAccessPolicyService impl;
+
+ /** Regular-use constructor */
+ public AibiDashboardEmbeddingAccessPolicyAPI(ApiClient apiClient) {
+ impl = new AibiDashboardEmbeddingAccessPolicyImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public AibiDashboardEmbeddingAccessPolicyAPI(AibiDashboardEmbeddingAccessPolicyService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Retrieve the AI/BI dashboard embedding access policy.
+ *
+ * Retrieves the AI/BI dashboard embedding access policy. The default setting is
+ * ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains.
+ */
+ public AibiDashboardEmbeddingAccessPolicySetting get(
+ GetAibiDashboardEmbeddingAccessPolicySettingRequest request) {
+ return impl.get(request);
+ }
+
+ public AibiDashboardEmbeddingAccessPolicySetting update(
+ boolean allowMissing, AibiDashboardEmbeddingAccessPolicySetting setting, String fieldMask) {
+ return update(
+ new UpdateAibiDashboardEmbeddingAccessPolicySettingRequest()
+ .setAllowMissing(allowMissing)
+ .setSetting(setting)
+ .setFieldMask(fieldMask));
+ }
+
+ /**
+ * Update the AI/BI dashboard embedding access policy.
+ *
+ * Updates the AI/BI dashboard embedding access policy at the workspace level.
+ */
+ public AibiDashboardEmbeddingAccessPolicySetting update(
+ UpdateAibiDashboardEmbeddingAccessPolicySettingRequest request) {
+ return impl.update(request);
+ }
+
+ public AibiDashboardEmbeddingAccessPolicyService impl() {
+ return impl;
+ }
+}
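A hedged sketch of flipping the workspace embedding policy through the convenience update(allowMissing, setting, fieldMask) overload above. The settings() accessor name on WorkspaceClient and the field-mask string are assumptions based on the generated classes, not confirmed by this diff:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.AibiDashboardEmbeddingAccessPolicy;
import com.databricks.sdk.service.settings.AibiDashboardEmbeddingAccessPolicyAccessPolicyType;
import com.databricks.sdk.service.settings.AibiDashboardEmbeddingAccessPolicySetting;

public class UpdateEmbeddingPolicyExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    AibiDashboardEmbeddingAccessPolicySetting setting =
        new AibiDashboardEmbeddingAccessPolicySetting()
            .setAibiDashboardEmbeddingAccessPolicy(
                new AibiDashboardEmbeddingAccessPolicy()
                    .setAccessPolicyType(
                        AibiDashboardEmbeddingAccessPolicyAccessPolicyType.DENY_ALL_DOMAINS));
    w.settings()
        .aibiDashboardEmbeddingAccessPolicy() // accessor name assumed
        .update(true, setting, "aibi_dashboard_embedding_access_policy.access_policy_type"); // field mask assumed
  }
}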
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java
new file mode 100755
index 000000000..7fc964b78
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyAccessPolicyType.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum AibiDashboardEmbeddingAccessPolicyAccessPolicyType {
+ ALLOW_ALL_DOMAINS,
+ ALLOW_APPROVED_DOMAINS,
+ DENY_ALL_DOMAINS,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java
new file mode 100755
index 000000000..b27367992
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java
@@ -0,0 +1,36 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of AibiDashboardEmbeddingAccessPolicy */
+@Generated
+class AibiDashboardEmbeddingAccessPolicyImpl implements AibiDashboardEmbeddingAccessPolicyService {
+ private final ApiClient apiClient;
+
+ public AibiDashboardEmbeddingAccessPolicyImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public AibiDashboardEmbeddingAccessPolicySetting get(
+ GetAibiDashboardEmbeddingAccessPolicySettingRequest request) {
+ String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default";
+ Map<String, String> headers = new HashMap<>();
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface AibiDashboardEmbeddingAccessPolicyService {
+ /**
+ * Retrieve the AI/BI dashboard embedding access policy.
+ *
+ * Retrieves the AI/BI dashboard embedding access policy. The default setting is
+ * ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains.
+ */
+ AibiDashboardEmbeddingAccessPolicySetting get(
+ GetAibiDashboardEmbeddingAccessPolicySettingRequest
+ getAibiDashboardEmbeddingAccessPolicySettingRequest);
+
+ /**
+ * Update the AI/BI dashboard embedding access policy.
+ *
+ * Updates the AI/BI dashboard embedding access policy at the workspace level.
+ */
+ AibiDashboardEmbeddingAccessPolicySetting update(
+ UpdateAibiDashboardEmbeddingAccessPolicySettingRequest
+ updateAibiDashboardEmbeddingAccessPolicySettingRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java
new file mode 100755
index 000000000..ead0e1b14
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicySetting.java
@@ -0,0 +1,88 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AibiDashboardEmbeddingAccessPolicySetting {
+ /** */
+ @JsonProperty("aibi_dashboard_embedding_access_policy")
+ private AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy;
+
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+ * etag from a GET request, and pass it with the PATCH request to identify the setting version you
+ * are updating.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /**
+ * Name of the corresponding setting. This field is populated in the response, but it will not be
+ * respected even if it's set in the request body. The setting name in the path parameter will be
+ * respected instead. Setting name is required to be 'default' if the setting only has one
+ * instance per workspace.
+ */
+ @JsonProperty("setting_name")
+ private String settingName;
+
+ public AibiDashboardEmbeddingAccessPolicySetting setAibiDashboardEmbeddingAccessPolicy(
+ AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy) {
+ this.aibiDashboardEmbeddingAccessPolicy = aibiDashboardEmbeddingAccessPolicy;
+ return this;
+ }
+
+ public AibiDashboardEmbeddingAccessPolicy getAibiDashboardEmbeddingAccessPolicy() {
+ return aibiDashboardEmbeddingAccessPolicy;
+ }
+
+ public AibiDashboardEmbeddingAccessPolicySetting setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public AibiDashboardEmbeddingAccessPolicySetting setSettingName(String settingName) {
+ this.settingName = settingName;
+ return this;
+ }
+
+ public String getSettingName() {
+ return settingName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AibiDashboardEmbeddingAccessPolicySetting that = (AibiDashboardEmbeddingAccessPolicySetting) o;
+ return Objects.equals(
+ aibiDashboardEmbeddingAccessPolicy, that.aibiDashboardEmbeddingAccessPolicy)
+ && Objects.equals(etag, that.etag)
+ && Objects.equals(settingName, that.settingName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(aibiDashboardEmbeddingAccessPolicy, etag, settingName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AibiDashboardEmbeddingAccessPolicySetting.class)
+ .add("aibiDashboardEmbeddingAccessPolicy", aibiDashboardEmbeddingAccessPolicy)
+ .add("etag", etag)
+ .add("settingName", settingName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java
new file mode 100755
index 000000000..ecfa50971
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomains.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class AibiDashboardEmbeddingApprovedDomains {
+ /** */
+ @JsonProperty("approved_domains")
+ private Collection<String> approvedDomains;
+ * Retrieves the list of domains approved to host embedded AI/BI dashboards.
+ */
+ public AibiDashboardEmbeddingApprovedDomainsSetting get(
+ GetAibiDashboardEmbeddingApprovedDomainsSettingRequest request) {
+ return impl.get(request);
+ }
+
+ public AibiDashboardEmbeddingApprovedDomainsSetting update(
+ boolean allowMissing,
+ AibiDashboardEmbeddingApprovedDomainsSetting setting,
+ String fieldMask) {
+ return update(
+ new UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest()
+ .setAllowMissing(allowMissing)
+ .setSetting(setting)
+ .setFieldMask(fieldMask));
+ }
+
+ /**
+ * Update the list of domains approved to host embedded AI/BI dashboards.
+ *
+ * Updates the list of domains approved to host embedded AI/BI dashboards. This update will
+ * fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS.
+ */
+ public AibiDashboardEmbeddingApprovedDomainsSetting update(
+ UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest request) {
+ return impl.update(request);
+ }
+
+ public AibiDashboardEmbeddingApprovedDomainsService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java
new file mode 100755
index 000000000..e026484e4
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java
@@ -0,0 +1,39 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of AibiDashboardEmbeddingApprovedDomains */
+@Generated
+class AibiDashboardEmbeddingApprovedDomainsImpl
+ implements AibiDashboardEmbeddingApprovedDomainsService {
+ private final ApiClient apiClient;
+
+ public AibiDashboardEmbeddingApprovedDomainsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public AibiDashboardEmbeddingApprovedDomainsSetting get(
+ GetAibiDashboardEmbeddingApprovedDomainsSettingRequest request) {
+ String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default";
+ Map<String, String> headers = new HashMap<>();
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface AibiDashboardEmbeddingApprovedDomainsService {
+ /**
+ * Retrieve the list of domains approved to host embedded AI/BI dashboards.
+ *
+ * Retrieves the list of domains approved to host embedded AI/BI dashboards.
+ */
+ AibiDashboardEmbeddingApprovedDomainsSetting get(
+ GetAibiDashboardEmbeddingApprovedDomainsSettingRequest
+ getAibiDashboardEmbeddingApprovedDomainsSettingRequest);
+
+ /**
+ * Update the list of domains approved to host embedded AI/BI dashboards.
+ *
+ * Updates the list of domains approved to host embedded AI/BI dashboards. This update will
+ * fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS.
+ */
+ AibiDashboardEmbeddingApprovedDomainsSetting update(
+ UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest
+ updateAibiDashboardEmbeddingApprovedDomainsSettingRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java
new file mode 100755
index 000000000..14c060819
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsSetting.java
@@ -0,0 +1,89 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AibiDashboardEmbeddingApprovedDomainsSetting {
+ /** */
+ @JsonProperty("aibi_dashboard_embedding_approved_domains")
+ private AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains;
+
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+ * etag from a GET request, and pass it with the PATCH request to identify the setting version you
+ * are updating.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /**
+ * Name of the corresponding setting. This field is populated in the response, but it will not be
+ * respected even if it's set in the request body. The setting name in the path parameter will be
+ * respected instead. Setting name is required to be 'default' if the setting only has one
+ * instance per workspace.
+ */
+ @JsonProperty("setting_name")
+ private String settingName;
+
+ public AibiDashboardEmbeddingApprovedDomainsSetting setAibiDashboardEmbeddingApprovedDomains(
+ AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains) {
+ this.aibiDashboardEmbeddingApprovedDomains = aibiDashboardEmbeddingApprovedDomains;
+ return this;
+ }
+
+ public AibiDashboardEmbeddingApprovedDomains getAibiDashboardEmbeddingApprovedDomains() {
+ return aibiDashboardEmbeddingApprovedDomains;
+ }
+
+ public AibiDashboardEmbeddingApprovedDomainsSetting setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public AibiDashboardEmbeddingApprovedDomainsSetting setSettingName(String settingName) {
+ this.settingName = settingName;
+ return this;
+ }
+
+ public String getSettingName() {
+ return settingName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AibiDashboardEmbeddingApprovedDomainsSetting that =
+ (AibiDashboardEmbeddingApprovedDomainsSetting) o;
+ return Objects.equals(
+ aibiDashboardEmbeddingApprovedDomains, that.aibiDashboardEmbeddingApprovedDomains)
+ && Objects.equals(etag, that.etag)
+ && Objects.equals(settingName, that.settingName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(aibiDashboardEmbeddingApprovedDomains, etag, settingName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AibiDashboardEmbeddingApprovedDomainsSetting.class)
+ .add("aibiDashboardEmbeddingApprovedDomains", aibiDashboardEmbeddingApprovedDomains)
+ .add("etag", etag)
+ .add("settingName", settingName)
+ .toString();
+ }
+}
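The `etag` field above is intended for the read -> update pattern described in its javadoc. The sketch below illustrates that pattern under assumptions: the nested `AibiDashboardEmbeddingApprovedDomains` type, its `setApprovedDomains` builder, the field-mask path, and the example domain are not part of this diff and are illustrative only.

```java
import java.util.Arrays;

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.AibiDashboardEmbeddingApprovedDomains;
import com.databricks.sdk.service.settings.AibiDashboardEmbeddingApprovedDomainsAPI;
import com.databricks.sdk.service.settings.AibiDashboardEmbeddingApprovedDomainsSetting;
import com.databricks.sdk.service.settings.GetAibiDashboardEmbeddingApprovedDomainsSettingRequest;
import com.databricks.sdk.service.settings.UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest;

public class UpdateApprovedDomainsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    AibiDashboardEmbeddingApprovedDomainsAPI api =
        w.settings().AibiDashboardEmbeddingApprovedDomains();

    // Read the current setting to obtain its etag (read -> update pattern).
    AibiDashboardEmbeddingApprovedDomainsSetting current =
        api.get(new GetAibiDashboardEmbeddingApprovedDomainsSettingRequest());

    // Write the change back with the etag so a concurrent modification is rejected
    // rather than silently overwritten.
    api.update(
        new UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest()
            .setAllowMissing(true)
            .setFieldMask("aibi_dashboard_embedding_approved_domains.approved_domains")
            .setSetting(
                new AibiDashboardEmbeddingApprovedDomainsSetting()
                    .setEtag(current.getEtag())
                    .setAibiDashboardEmbeddingApprovedDomains(
                        new AibiDashboardEmbeddingApprovedDomains()
                            .setApprovedDomains(Arrays.asList("dashboards.example.com")))));
  }
}
```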
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java
new file mode 100755
index 000000000..48a8c3910
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingAccessPolicySettingRequest.java
@@ -0,0 +1,55 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Retrieve the AI/BI dashboard embedding access policy */
+@Generated
+public class GetAibiDashboardEmbeddingAccessPolicySettingRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetAibiDashboardEmbeddingAccessPolicySettingRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetAibiDashboardEmbeddingAccessPolicySettingRequest that =
+ (GetAibiDashboardEmbeddingAccessPolicySettingRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetAibiDashboardEmbeddingAccessPolicySettingRequest.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java
new file mode 100755
index 000000000..a9db24cfa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.java
@@ -0,0 +1,55 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Retrieve the list of domains approved to host embedded AI/BI dashboards */
+@Generated
+public class GetAibiDashboardEmbeddingApprovedDomainsSettingRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetAibiDashboardEmbeddingApprovedDomainsSettingRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetAibiDashboardEmbeddingApprovedDomainsSettingRequest that =
+ (GetAibiDashboardEmbeddingApprovedDomainsSettingRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetAibiDashboardEmbeddingApprovedDomainsSettingRequest.class)
+ .add("etag", etag)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
index e7f1c92a4..f1ba301de 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
@@ -13,6 +13,10 @@ public class SettingsAPI {
private final SettingsService impl;
+ private AibiDashboardEmbeddingAccessPolicyAPI aibiDashboardEmbeddingAccessPolicyAPI;
+
+ private AibiDashboardEmbeddingApprovedDomainsAPI aibiDashboardEmbeddingApprovedDomainsAPI;
+
private AutomaticClusterUpdateAPI automaticClusterUpdateAPI;
private ComplianceSecurityProfileAPI complianceSecurityProfileAPI;
@@ -31,6 +35,11 @@ public class SettingsAPI {
public SettingsAPI(ApiClient apiClient) {
impl = new SettingsImpl(apiClient);
+ aibiDashboardEmbeddingAccessPolicyAPI = new AibiDashboardEmbeddingAccessPolicyAPI(apiClient);
+
+ aibiDashboardEmbeddingApprovedDomainsAPI =
+ new AibiDashboardEmbeddingApprovedDomainsAPI(apiClient);
+
automaticClusterUpdateAPI = new AutomaticClusterUpdateAPI(apiClient);
complianceSecurityProfileAPI = new ComplianceSecurityProfileAPI(apiClient);
@@ -51,6 +60,19 @@ public SettingsAPI(SettingsService mock) {
impl = mock;
}
+ /**
+ * Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or
+ * disabled at the workspace level.
+ */
+ public AibiDashboardEmbeddingAccessPolicyAPI AibiDashboardEmbeddingAccessPolicy() {
+ return aibiDashboardEmbeddingAccessPolicyAPI;
+ }
+
+ /** Controls the list of domains approved to host the embedded AI/BI dashboards. */
+ public AibiDashboardEmbeddingApprovedDomainsAPI AibiDashboardEmbeddingApprovedDomains() {
+ return aibiDashboardEmbeddingApprovedDomainsAPI;
+ }
+
/** Controls whether automatic cluster update is enabled for the current workspace. */
public AutomaticClusterUpdateAPI AutomaticClusterUpdate() {
return automaticClusterUpdateAPI;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java
index 20c4a6bfa..08e227383 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java
@@ -29,6 +29,10 @@ public class TokenInfo {
@JsonProperty("expiry_time")
private Long expiryTime;
+ /** Approximate timestamp for the day the token was last used. Accurate up to 1 day. */
+ @JsonProperty("last_used_day")
+ private Long lastUsedDay;
+
/** User ID of the user that owns the token. */
@JsonProperty("owner_id")
private Long ownerId;
@@ -86,6 +90,15 @@ public Long getExpiryTime() {
return expiryTime;
}
+ public TokenInfo setLastUsedDay(Long lastUsedDay) {
+ this.lastUsedDay = lastUsedDay;
+ return this;
+ }
+
+ public Long getLastUsedDay() {
+ return lastUsedDay;
+ }
+
public TokenInfo setOwnerId(Long ownerId) {
this.ownerId = ownerId;
return this;
@@ -123,6 +136,7 @@ public boolean equals(Object o) {
&& Objects.equals(createdByUsername, that.createdByUsername)
&& Objects.equals(creationTime, that.creationTime)
&& Objects.equals(expiryTime, that.expiryTime)
+ && Objects.equals(lastUsedDay, that.lastUsedDay)
&& Objects.equals(ownerId, that.ownerId)
&& Objects.equals(tokenId, that.tokenId)
&& Objects.equals(workspaceId, that.workspaceId);
@@ -136,6 +150,7 @@ public int hashCode() {
createdByUsername,
creationTime,
expiryTime,
+ lastUsedDay,
ownerId,
tokenId,
workspaceId);
@@ -149,6 +164,7 @@ public String toString() {
.add("createdByUsername", createdByUsername)
.add("creationTime", creationTime)
.add("expiryTime", expiryTime)
+ .add("lastUsedDay", lastUsedDay)
.add("ownerId", ownerId)
.add("tokenId", tokenId)
.add("workspaceId", workspaceId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java
index f105eef54..5e5311539 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java
@@ -97,7 +97,9 @@ public Iterable<TokenInfo> list(ListTokenManagementRequest request) {
/**
* Set token permissions.
*
- * Sets permissions on all tokens. Tokens can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public TokenPermissions setPermissions(TokenPermissionsRequest request) {
return impl.setPermissions(request);
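Because `setPermissions` now documents replace semantics, a hedged sketch of a full replacement follows: any direct grant omitted from the request is removed, so incremental changes should go through `updatePermissions` instead. The group name is a placeholder and the access-control builder names are assumed from the generated settings classes.

```java
import java.util.Collections;

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.TokenAccessControlRequest;
import com.databricks.sdk.service.settings.TokenPermissionLevel;
import com.databricks.sdk.service.settings.TokenPermissionsRequest;

public class ReplaceTokenPermissionsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Replaces ALL direct token permissions with this single grant; anything not
    // listed here is deleted. Use updatePermissions to add grants incrementally.
    w.tokenManagement()
        .setPermissions(
            new TokenPermissionsRequest()
                .setAccessControlList(
                    Collections.singletonList(
                        new TokenAccessControlRequest()
                            .setGroupName("data-engineers") // placeholder group
                            .setPermissionLevel(TokenPermissionLevel.CAN_USE))));
  }
}
```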
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java
index ab02b56ae..ad46b6abf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java
@@ -58,7 +58,9 @@ public interface TokenManagementService {
/**
* Set token permissions.
*
- * Sets permissions on all tokens. Tokens can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
TokenPermissions setPermissions(TokenPermissionsRequest tokenPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java
new file mode 100755
index 000000000..9e8a2ff89
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.java
@@ -0,0 +1,82 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Details required to update a setting. */
+@Generated
+public class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest {
+ /** This should always be set to true for Settings API. Added for AIP compliance. */
+ @JsonProperty("allow_missing")
+ private Boolean allowMissing;
+
+ /**
+ * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
+ * of the setting payload will be updated. The field mask needs to be supplied as a single string.
+ * To specify multiple fields in the field mask, use comma as the separator (no space).
+ */
+ @JsonProperty("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonProperty("setting")
+ private AibiDashboardEmbeddingAccessPolicySetting setting;
+
+ public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setAllowMissing(
+ Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateAibiDashboardEmbeddingAccessPolicySettingRequest setSetting(
+ AibiDashboardEmbeddingAccessPolicySetting setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public AibiDashboardEmbeddingAccessPolicySetting getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateAibiDashboardEmbeddingAccessPolicySettingRequest that =
+ (UpdateAibiDashboardEmbeddingAccessPolicySettingRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, fieldMask, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateAibiDashboardEmbeddingAccessPolicySettingRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("fieldMask", fieldMask)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java
new file mode 100755
index 000000000..a3e7de0dd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.java
@@ -0,0 +1,82 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Details required to update a setting. */
+@Generated
+public class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest {
+ /** This should always be set to true for Settings API. Added for AIP compliance. */
+ @JsonProperty("allow_missing")
+ private Boolean allowMissing;
+
+ /**
+ * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
+ * of the setting payload will be updated. The field mask needs to be supplied as a single string.
+ * To specify multiple fields in the field mask, use comma as the separator (no space).
+ */
+ @JsonProperty("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonProperty("setting")
+ private AibiDashboardEmbeddingApprovedDomainsSetting setting;
+
+ public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setAllowMissing(
+ Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest setSetting(
+ AibiDashboardEmbeddingApprovedDomainsSetting setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public AibiDashboardEmbeddingApprovedDomainsSetting getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest that =
+ (UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, fieldMask, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("fieldMask", fieldMask)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java
deleted file mode 100755
index b9bc1f6b6..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CentralCleanRoomInfo.java
+++ /dev/null
@@ -1,109 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sharing;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Collection;
-import java.util.Objects;
-
-@Generated
-public class CentralCleanRoomInfo {
- /**
- * All assets from all collaborators that are available in the clean room. Only one of table_info
- * or notebook_info will be filled in.
- */
- @JsonProperty("clean_room_assets")
-  private Collection<CleanRoomAssetInfo> cleanRoomAssets;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java
deleted file mode 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsAPI.java
+++ /dev/null
- * To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM**
- * privilege.
- */
-@Generated
-public class CleanRoomsAPI {
- private static final Logger LOG = LoggerFactory.getLogger(CleanRoomsAPI.class);
-
- private final CleanRoomsService impl;
-
- /** Regular-use constructor */
- public CleanRoomsAPI(ApiClient apiClient) {
- impl = new CleanRoomsImpl(apiClient);
- }
-
- /** Constructor for mocks */
- public CleanRoomsAPI(CleanRoomsService mock) {
- impl = mock;
- }
-
- public CleanRoomInfo create(String name, CentralCleanRoomInfo remoteDetailedInfo) {
- return create(new CreateCleanRoom().setName(name).setRemoteDetailedInfo(remoteDetailedInfo));
- }
-
- /**
- * Create a clean room.
- *
- * Creates a new clean room with specified colaborators. The caller must be a metastore admin
- * or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
- */
- public CleanRoomInfo create(CreateCleanRoom request) {
- return impl.create(request);
- }
-
- public void delete(String name) {
- delete(new DeleteCleanRoomRequest().setName(name));
- }
-
- /**
- * Delete a clean room.
- *
- * Deletes a data object clean room from the metastore. The caller must be an owner of the
- * clean room.
- */
- public void delete(DeleteCleanRoomRequest request) {
- impl.delete(request);
- }
-
- public CleanRoomInfo get(String name) {
- return get(new GetCleanRoomRequest().setName(name));
- }
-
- /**
- * Get a clean room.
- *
- * Gets a data object clean room from the metastore. The caller must be a metastore admin or
- * the owner of the clean room.
- */
- public CleanRoomInfo get(GetCleanRoomRequest request) {
- return impl.get(request);
- }
-
- /**
- * List clean rooms.
- *
- * Gets an array of data object clean rooms from the metastore. The caller must be a metastore
- * admin or the owner of the clean room. There is no guarantee of a specific ordering of the
- * elements in the array.
- */
-  public Iterable<CleanRoomInfo> list(ListCleanRoomsRequest request) {
- /**
- * Update a clean room.
- *
- * Updates the clean room with the changes and data objects in the request. The caller must be
- * the owner of the clean room or a metastore admin.
- *
- * When the caller is a metastore admin, only the __owner__ field can be updated.
- *
- * In the case that the clean room name is changed **updateCleanRoom** requires that the caller
- * is both the clean room owner and a metastore admin.
- *
- * For each table that is added through this method, the clean room owner must also have
- * **SELECT** privilege on the table. The privilege must be maintained indefinitely for recipients
- * to be able to access the table. Typically, you should use a group as the clean room owner.
- *
- * Table removals through **update** do not require additional privileges.
- */
- public CleanRoomInfo update(UpdateCleanRoom request) {
- return impl.update(request);
- }
-
- public CleanRoomsService impl() {
- return impl;
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java
deleted file mode 100755
index 8e0e85e68..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsImpl.java
+++ /dev/null
@@ -1,59 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.sharing;
-
-import com.databricks.sdk.core.ApiClient;
-import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
-
-/** Package-local implementation of CleanRooms */
-@Generated
-class CleanRoomsImpl implements CleanRoomsService {
- private final ApiClient apiClient;
-
- public CleanRoomsImpl(ApiClient apiClient) {
- this.apiClient = apiClient;
- }
-
- @Override
- public CleanRoomInfo create(CreateCleanRoom request) {
- String path = "/api/2.1/unity-catalog/clean-rooms";
-    Map<String, String> headers = new HashMap<>();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java
deleted file mode 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CleanRoomsService.java
+++ /dev/null
- * To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM**
- * privilege.
- *
- * This is the high-level interface, that contains generated methods.
- *
- * Evolving: this interface is under development. Method signatures may change.
- */
-@Generated
-public interface CleanRoomsService {
- /**
- * Create a clean room.
- *
- * Creates a new clean room with specified colaborators. The caller must be a metastore admin
- * or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
- */
- CleanRoomInfo create(CreateCleanRoom createCleanRoom);
-
- /**
- * Delete a clean room.
- *
- * Deletes a data object clean room from the metastore. The caller must be an owner of the
- * clean room.
- */
- void delete(DeleteCleanRoomRequest deleteCleanRoomRequest);
-
- /**
- * Get a clean room.
- *
- * Gets a data object clean room from the metastore. The caller must be a metastore admin or
- * the owner of the clean room.
- */
- CleanRoomInfo get(GetCleanRoomRequest getCleanRoomRequest);
-
- /**
- * List clean rooms.
- *
- * Gets an array of data object clean rooms from the metastore. The caller must be a metastore
- * admin or the owner of the clean room. There is no guarantee of a specific ordering of the
- * elements in the array.
- */
- ListCleanRoomsResponse list(ListCleanRoomsRequest listCleanRoomsRequest);
-
- /**
- * Update a clean room.
- *
- * Updates the clean room with the changes and data objects in the request. The caller must be
- * the owner of the clean room or a metastore admin.
- *
- * When the caller is a metastore admin, only the __owner__ field can be updated.
- *
- * In the case that the clean room name is changed **updateCleanRoom** requires that the caller
- * is both the clean room owner and a metastore admin.
- *
- * For each table that is added through this method, the clean room owner must also have
- * **SELECT** privilege on the table. The privilege must be maintained indefinitely for recipients
- * to be able to access the table. Typically, you should use a group as the clean room owner.
- *
- * Table removals through **update** do not require additional privileges.
- */
- CleanRoomInfo update(UpdateCleanRoom updateCleanRoom);
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java
deleted file mode 100755
index 40abbd429..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnInfo.java
+++ /dev/null
@@ -1,221 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sharing;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class ColumnInfo {
- /** User-provided free-form text description. */
- @JsonProperty("comment")
- private String comment;
-
- /** */
- @JsonProperty("mask")
- private ColumnMask mask;
-
- /** Name of Column. */
- @JsonProperty("name")
- private String name;
-
- /** Whether field may be Null (default: true). */
- @JsonProperty("nullable")
- private Boolean nullable;
-
- /** Partition index for column. */
- @JsonProperty("partition_index")
- private Long partitionIndex;
-
- /** Ordinal position of column (starting at position 0). */
- @JsonProperty("position")
- private Long position;
-
- /** Format of IntervalType. */
- @JsonProperty("type_interval_type")
- private String typeIntervalType;
-
- /** Full data type specification, JSON-serialized. */
- @JsonProperty("type_json")
- private String typeJson;
-
- /** Name of type (INT, STRUCT, MAP, etc.). */
- @JsonProperty("type_name")
- private ColumnTypeName typeName;
-
- /** Digits of precision; required for DecimalTypes. */
- @JsonProperty("type_precision")
- private Long typePrecision;
-
- /** Digits to right of decimal; Required for DecimalTypes. */
- @JsonProperty("type_scale")
- private Long typeScale;
-
- /** Full data type specification as SQL/catalogString text. */
- @JsonProperty("type_text")
- private String typeText;
-
- public ColumnInfo setComment(String comment) {
- this.comment = comment;
- return this;
- }
-
- public String getComment() {
- return comment;
- }
-
- public ColumnInfo setMask(ColumnMask mask) {
- this.mask = mask;
- return this;
- }
-
- public ColumnMask getMask() {
- return mask;
- }
-
- public ColumnInfo setName(String name) {
- this.name = name;
- return this;
- }
-
- public String getName() {
- return name;
- }
-
- public ColumnInfo setNullable(Boolean nullable) {
- this.nullable = nullable;
- return this;
- }
-
- public Boolean getNullable() {
- return nullable;
- }
-
- public ColumnInfo setPartitionIndex(Long partitionIndex) {
- this.partitionIndex = partitionIndex;
- return this;
- }
-
- public Long getPartitionIndex() {
- return partitionIndex;
- }
-
- public ColumnInfo setPosition(Long position) {
- this.position = position;
- return this;
- }
-
- public Long getPosition() {
- return position;
- }
-
- public ColumnInfo setTypeIntervalType(String typeIntervalType) {
- this.typeIntervalType = typeIntervalType;
- return this;
- }
-
- public String getTypeIntervalType() {
- return typeIntervalType;
- }
-
- public ColumnInfo setTypeJson(String typeJson) {
- this.typeJson = typeJson;
- return this;
- }
-
- public String getTypeJson() {
- return typeJson;
- }
-
- public ColumnInfo setTypeName(ColumnTypeName typeName) {
- this.typeName = typeName;
- return this;
- }
-
- public ColumnTypeName getTypeName() {
- return typeName;
- }
-
- public ColumnInfo setTypePrecision(Long typePrecision) {
- this.typePrecision = typePrecision;
- return this;
- }
-
- public Long getTypePrecision() {
- return typePrecision;
- }
-
- public ColumnInfo setTypeScale(Long typeScale) {
- this.typeScale = typeScale;
- return this;
- }
-
- public Long getTypeScale() {
- return typeScale;
- }
-
- public ColumnInfo setTypeText(String typeText) {
- this.typeText = typeText;
- return this;
- }
-
- public String getTypeText() {
- return typeText;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- ColumnInfo that = (ColumnInfo) o;
- return Objects.equals(comment, that.comment)
- && Objects.equals(mask, that.mask)
- && Objects.equals(name, that.name)
- && Objects.equals(nullable, that.nullable)
- && Objects.equals(partitionIndex, that.partitionIndex)
- && Objects.equals(position, that.position)
- && Objects.equals(typeIntervalType, that.typeIntervalType)
- && Objects.equals(typeJson, that.typeJson)
- && Objects.equals(typeName, that.typeName)
- && Objects.equals(typePrecision, that.typePrecision)
- && Objects.equals(typeScale, that.typeScale)
- && Objects.equals(typeText, that.typeText);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(
- comment,
- mask,
- name,
- nullable,
- partitionIndex,
- position,
- typeIntervalType,
- typeJson,
- typeName,
- typePrecision,
- typeScale,
- typeText);
- }
-
- @Override
- public String toString() {
- return new ToStringer(ColumnInfo.class)
- .add("comment", comment)
- .add("mask", mask)
- .add("name", name)
- .add("nullable", nullable)
- .add("partitionIndex", partitionIndex)
- .add("position", position)
- .add("typeIntervalType", typeIntervalType)
- .add("typeJson", typeJson)
- .add("typeName", typeName)
- .add("typePrecision", typePrecision)
- .add("typeScale", typeScale)
- .add("typeText", typeText)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java
deleted file mode 100755
index 9c0e3e84b..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnMask.java
+++ /dev/null
@@ -1,64 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sharing;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Collection;
-import java.util.Objects;
-
-@Generated
-public class ColumnMask {
- /** The full name of the column mask SQL UDF. */
- @JsonProperty("function_name")
- private String functionName;
-
- /**
- * The list of additional table columns to be passed as input to the column mask function. The
- * first arg of the mask function should be of the type of the column being masked and the types
- * of the rest of the args should match the types of columns in 'using_column_names'.
- */
- @JsonProperty("using_column_names")
-  private Collection<String> usingColumnNames;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java
* [Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement
* Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java
index b22e9dbe0..5132b0354 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java
@@ -84,11 +84,11 @@
* completed execution when the cancel request arrives. Polling for status until a terminal state is
* reached is a reliable way to determine the final state. - Wait timeouts are approximate, occur
* server-side, and cannot account for things such as caller delays and network latency from caller
- * to service. - The system will auto-close a statement after one hour if the client stops polling
- * and thus you must poll at least once an hour. - The results are only available for one hour after
- * success; polling does not extend this. - The SQL Execution API must be used for the entire
- * lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and
- * then the SQL Execution API to cancel it.
+ * to service. - To guarantee that the statement is kept alive, you must poll at least once every 15
+ * minutes. - The results are only available for one hour after success; polling does not extend
+ * this. - The SQL Execution API must be used for the entire lifecycle of the statement. For
+ * example, you cannot use the Jobs API to execute the command, and then the SQL Execution API to
+ * cancel it.
*
* [Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement
* Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
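To make the revised keep-alive guidance concrete, here is a polling sketch. The warehouse ID is a placeholder and the request and enum names are assumed from the generated `sql` classes; the only requirement from the changed javadoc is that polls happen well within the 15-minute window until a terminal state is reached.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.ExecuteStatementRequest;
import com.databricks.sdk.service.sql.GetStatementRequest;
import com.databricks.sdk.service.sql.StatementState;

public class PollStatementExample {
  public static void main(String[] args) throws InterruptedException {
    WorkspaceClient w = new WorkspaceClient();

    // Submit asynchronously; "0s" returns immediately with a statement_id to poll.
    String statementId =
        w.statementExecution()
            .executeStatement(
                new ExecuteStatementRequest()
                    .setWarehouseId("your-warehouse-id") // placeholder
                    .setWaitTimeout("0s")
                    .setStatement("SELECT 1"))
            .getStatementId();

    // Poll far more often than the 15-minute keep-alive limit until a terminal state.
    StatementState state;
    do {
      Thread.sleep(5_000);
      state =
          w.statementExecution()
              .getStatement(new GetStatementRequest().setStatementId(statementId))
              .getStatus()
              .getState();
    } while (state == StatementState.PENDING || state == StatementState.RUNNING);

    System.out.println("Statement finished in state: " + state);
  }
}
```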
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java
index 9564b525a..00b0c3941 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java
@@ -233,7 +233,8 @@ public WarehousePermissions setPermissions(String warehouseId) {
/**
* Set SQL warehouse permissions.
*
- * Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
* object.
*/
public WarehousePermissions setPermissions(WarehousePermissionsRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java
index c1da3aedc..8b18fcca2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesService.java
@@ -76,7 +76,8 @@ WarehousePermissions getPermissions(
/**
* Set SQL warehouse permissions.
*
- * Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
* object.
*/
WarehousePermissions setPermissions(WarehousePermissionsRequest warehousePermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java
index dab9e912e..8a51b47a3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java
@@ -130,7 +130,9 @@ public RepoPermissions setPermissions(String repoId) {
/**
* Set repo permissions.
*
- * Sets permissions on a repo. Repos can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
public RepoPermissions setPermissions(RepoPermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java
index 313477542..188c2d30d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java
@@ -70,7 +70,9 @@ GetRepoPermissionLevelsResponse getPermissionLevels(
/**
* Set repo permissions.
*
- * Sets permissions on a repo. Repos can inherit permissions from their root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their root
+ * object.
*/
RepoPermissions setPermissions(RepoPermissionsRequest repoPermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java
index 486826182..3933eeff3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java
@@ -175,8 +175,9 @@ public WorkspaceObjectPermissions setPermissions(
/**
* Set workspace object permissions.
*
- * Sets permissions on a workspace object. Workspace objects can inherit permissions from their
- * parent objects or root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their parent
+ * objects or root object.
*/
public WorkspaceObjectPermissions setPermissions(WorkspaceObjectPermissionsRequest request) {
return impl.setPermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java
index e6e7be354..93dc98423 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java
@@ -98,8 +98,9 @@ WorkspaceObjectPermissions getPermissions(
/**
* Set workspace object permissions.
*
- * Sets permissions on a workspace object. Workspace objects can inherit permissions from their
- * parent objects or root object.
+ * Sets permissions on an object, replacing existing permissions if they exist. Deletes all
+ * direct permissions if none are specified. Objects can inherit permissions from their parent
+ * objects or root object.
*/
WorkspaceObjectPermissions setPermissions(
WorkspaceObjectPermissionsRequest workspaceObjectPermissionsRequest);
diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml
index 32731d879..89e86caf3 100644
--- a/examples/docs/pom.xml
+++ b/examples/docs/pom.xml
@@ -24,7 +24,7 @@