diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 57c80b78c..d4d96fe05 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -c0e83a0de9da6aaad4cd6924fb65b1496bfdedcb \ No newline at end of file +62870287fa7d3f2ba570333dd2645e86cb8e1a82 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 31bcf3f71..934e4720c 100644 --- a/.gitattributes +++ b/.gitattributes @@ -20,6 +20,8 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentState.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentStatus.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpec.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpecExperimentPermission.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceJobSpec.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceJobSpecJobPermission.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSecretSpec.java linguist-generated=true @@ -40,6 +42,8 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabase.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabaseDatabasePermission.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperiment.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperimentExperimentPermission.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpace.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceGenieSpaceGenieSpacePermission.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceJob.java linguist-generated=true @@ -81,6 +85,8 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppRequest.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppUpdateRequest.java linguist-generated=true 
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetCustomTemplateRequest.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitRepository.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitSource.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsRequest.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppDeploymentsResponse.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsRequest.java linguist-generated=true @@ -2067,7 +2073,9 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateStateInfoState.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Branch.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchOperationMetadata.java linguist-generated=true -/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchState.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchSpec.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatus.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatusState.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchOperation.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateBranchRequest.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/CreateEndpointOperation.java linguist-generated=true @@ -2082,7 +2090,9 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointPoolerMode.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java linguist-generated=true -/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java linguist-generated=true 
+/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatusState.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ErrorCode.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/GetBranchRequest.java linguist-generated=true @@ -2103,6 +2113,8 @@ /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectDefaultEndpointSettings.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectOperationMetadata.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSettings.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSpec.java linguist-generated=true +/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchOperation.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateBranchRequest.java linguist-generated=true /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/UpdateEndpointOperation.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 09184f38a..ce52a0f48 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -13,3 +13,22 @@ ### Internal Changes ### API Changes +* Add `gitRepository` field for `com.databricks.sdk.service.apps.App`. +* Add `gitSource` field for `com.databricks.sdk.service.apps.AppDeployment`. +* Add `experimentSpec` field for `com.databricks.sdk.service.apps.AppManifestAppResourceSpec`. +* Add `experiment` field for `com.databricks.sdk.service.apps.AppResource`. +* Add `gitRepository` field for `com.databricks.sdk.service.apps.AppUpdate`. +* Add `excludedTableFullNames` field for `com.databricks.sdk.service.dataquality.AnomalyDetectionConfig`. +* Add `groupName` field for `com.databricks.sdk.service.jobs.JobRunAs`. +* Add `rowFilter` field for `com.databricks.sdk.service.pipelines.TableSpecificConfig`. +* Add `spec` and `status` fields for `com.databricks.sdk.service.postgres.Branch`. +* Add `spec` and `status` fields for `com.databricks.sdk.service.postgres.Endpoint`. +* Add `spec` and `status` fields for `com.databricks.sdk.service.postgres.Project`. +* Add `excludedTableFullNames` field for `com.databricks.sdk.service.qualitymonitorv2.AnomalyDetectionConfig`. +* Add `EXECUTE` and `USE_CONNECTION` enum values for `com.databricks.sdk.service.apps.AppManifestAppResourceUcSecurableSpecUcSecurablePermission`. +* Add `FUNCTION` and `CONNECTION` enum values for `com.databricks.sdk.service.apps.AppManifestAppResourceUcSecurableSpecUcSecurableType`. 
+* Add `SELECT`, `EXECUTE` and `USE_CONNECTION` enum values for `com.databricks.sdk.service.apps.AppResourceUcSecurableUcSecurablePermission`. +* Add `TABLE`, `FUNCTION` and `CONNECTION` enum values for `com.databricks.sdk.service.apps.AppResourceUcSecurableUcSecurableType`. +* [Breaking] Remove `currentState`, `default`, `effectiveDefault`, `effectiveIsProtected`, `effectiveSourceBranch`, `effectiveSourceBranchLsn`, `effectiveSourceBranchTime`, `isProtected`, `logicalSizeBytes`, `pendingState`, `sourceBranch`, `sourceBranchLsn`, `sourceBranchTime` and `stateChangeTime` fields for `com.databricks.sdk.service.postgres.Branch`. +* [Breaking] Remove `autoscalingLimitMaxCu`, `autoscalingLimitMinCu`, `currentState`, `disabled`, `effectiveAutoscalingLimitMaxCu`, `effectiveAutoscalingLimitMinCu`, `effectiveDisabled`, `effectivePoolerMode`, `effectiveSettings`, `effectiveSuspendTimeoutDuration`, `endpointType`, `host`, `lastActiveTime`, `pendingState`, `poolerMode`, `settings`, `startTime`, `suspendTime` and `suspendTimeoutDuration` fields for `com.databricks.sdk.service.postgres.Endpoint`. +* [Breaking] Remove `branchLogicalSizeLimitBytes`, `computeLastActiveTime`, `defaultEndpointSettings`, `displayName`, `effectiveDefaultEndpointSettings`, `effectiveDisplayName`, `effectiveHistoryRetentionDuration`, `effectivePgVersion`, `effectiveSettings`, `historyRetentionDuration`, `pgVersion`, `settings` and `syntheticStorageSizeBytes` fields for `com.databricks.sdk.service.postgres.Project`. \ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java index d7ef3d38a..d403abc7f 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java @@ -64,6 +64,13 @@ public class App { @JsonProperty("effective_user_api_scopes") private Collection effectiveUserApiScopes; + /** + * Git repository configuration for app deployments. When specified, deployments can reference + * code from this repository by providing only the git reference (branch, tag, or commit). + */ + @JsonProperty("git_repository") + private GitRepository gitRepository; + /** The unique identifier of the app. 
*/ @JsonProperty("id") private String id; @@ -234,6 +241,15 @@ public Collection getEffectiveUserApiScopes() { return effectiveUserApiScopes; } + public App setGitRepository(GitRepository gitRepository) { + this.gitRepository = gitRepository; + return this; + } + + public GitRepository getGitRepository() { + return gitRepository; + } + public App setId(String id) { this.id = id; return this; @@ -377,6 +393,7 @@ public boolean equals(Object o) { && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) && Objects.equals(effectiveUsagePolicyId, that.effectiveUsagePolicyId) && Objects.equals(effectiveUserApiScopes, that.effectiveUserApiScopes) + && Objects.equals(gitRepository, that.gitRepository) && Objects.equals(id, that.id) && Objects.equals(name, that.name) && Objects.equals(oauth2AppClientId, that.oauth2AppClientId) @@ -408,6 +425,7 @@ public int hashCode() { effectiveBudgetPolicyId, effectiveUsagePolicyId, effectiveUserApiScopes, + gitRepository, id, name, oauth2AppClientId, @@ -439,6 +457,7 @@ public String toString() { .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) .add("effectiveUsagePolicyId", effectiveUsagePolicyId) .add("effectiveUserApiScopes", effectiveUserApiScopes) + .add("gitRepository", gitRepository) .add("id", id) .add("name", name) .add("oauth2AppClientId", oauth2AppClientId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java old mode 100755 new mode 100644 index 0961135b1..f127a9538 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeployment.java @@ -25,6 +25,10 @@ public class AppDeployment { @JsonProperty("deployment_id") private String deploymentId; + /** Git repository to use as the source for the app deployment. */ + @JsonProperty("git_source") + private GitSource gitSource; + /** The mode of which the deployment will manage the source code. 
*/ @JsonProperty("mode") private AppDeploymentMode mode; @@ -83,6 +87,15 @@ public String getDeploymentId() { return deploymentId; } + public AppDeployment setGitSource(GitSource gitSource) { + this.gitSource = gitSource; + return this; + } + + public GitSource getGitSource() { + return gitSource; + } + public AppDeployment setMode(AppDeploymentMode mode) { this.mode = mode; return this; @@ -128,6 +141,7 @@ public boolean equals(Object o) { && Objects.equals(creator, that.creator) && Objects.equals(deploymentArtifacts, that.deploymentArtifacts) && Objects.equals(deploymentId, that.deploymentId) + && Objects.equals(gitSource, that.gitSource) && Objects.equals(mode, that.mode) && Objects.equals(sourceCodePath, that.sourceCodePath) && Objects.equals(status, that.status) @@ -141,6 +155,7 @@ public int hashCode() { creator, deploymentArtifacts, deploymentId, + gitSource, mode, sourceCodePath, status, @@ -154,6 +169,7 @@ public String toString() { .add("creator", creator) .add("deploymentArtifacts", deploymentArtifacts) .add("deploymentId", deploymentId) + .add("gitSource", gitSource) .add("mode", mode) .add("sourceCodePath", sourceCodePath) .add("status", status) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpec.java new file mode 100644 index 000000000..2df07f818 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpec.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppManifestAppResourceExperimentSpec { + /** */ + @JsonProperty("permission") + private AppManifestAppResourceExperimentSpecExperimentPermission permission; + + public AppManifestAppResourceExperimentSpec setPermission( + AppManifestAppResourceExperimentSpecExperimentPermission permission) { + this.permission = permission; + return this; + } + + public AppManifestAppResourceExperimentSpecExperimentPermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppManifestAppResourceExperimentSpec that = (AppManifestAppResourceExperimentSpec) o; + return Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(permission); + } + + @Override + public String toString() { + return new ToStringer(AppManifestAppResourceExperimentSpec.class) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpecExperimentPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpecExperimentPermission.java new file mode 100644 index 000000000..27db3de37 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceExperimentSpecExperimentPermission.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
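/*
 * Illustrative usage sketch (not generated code) of the experiment resource spec added to the
 * app manifest model in this change; it relies only on the fluent setters shown in this diff,
 * and the permission value chosen below is a placeholder, not a prescribed default.
 */
import com.databricks.sdk.service.apps.AppManifestAppResourceExperimentSpec;
import com.databricks.sdk.service.apps.AppManifestAppResourceExperimentSpecExperimentPermission;
import com.databricks.sdk.service.apps.AppManifestAppResourceSpec;

public class ManifestExperimentSpecSketch {
  public static void main(String[] args) {
    // Declare an experiment resource requirement with read-only access in the manifest spec.
    AppManifestAppResourceSpec resourceSpec =
        new AppManifestAppResourceSpec()
            .setExperimentSpec(
                new AppManifestAppResourceExperimentSpec()
                    .setPermission(
                        AppManifestAppResourceExperimentSpecExperimentPermission.CAN_READ));
    // The generated ToStringer-based toString() makes the spec easy to log.
    System.out.println(resourceSpec);
  }
}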
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppManifestAppResourceExperimentSpecExperimentPermission { + CAN_EDIT, + CAN_MANAGE, + CAN_READ, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSpec.java old mode 100755 new mode 100644 index f4487f6e5..70c87f518 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceSpec.java @@ -17,6 +17,10 @@ public class AppManifestAppResourceSpec { @JsonProperty("description") private String description; + /** */ + @JsonProperty("experiment_spec") + private AppManifestAppResourceExperimentSpec experimentSpec; + /** */ @JsonProperty("job_spec") private AppManifestAppResourceJobSpec jobSpec; @@ -50,6 +54,16 @@ public String getDescription() { return description; } + public AppManifestAppResourceSpec setExperimentSpec( + AppManifestAppResourceExperimentSpec experimentSpec) { + this.experimentSpec = experimentSpec; + return this; + } + + public AppManifestAppResourceExperimentSpec getExperimentSpec() { + return experimentSpec; + } + public AppManifestAppResourceSpec setJobSpec(AppManifestAppResourceJobSpec jobSpec) { this.jobSpec = jobSpec; return this; @@ -113,6 +127,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; AppManifestAppResourceSpec that = (AppManifestAppResourceSpec) o; return Objects.equals(description, that.description) + && Objects.equals(experimentSpec, that.experimentSpec) && Objects.equals(jobSpec, that.jobSpec) && Objects.equals(name, that.name) && Objects.equals(secretSpec, that.secretSpec) @@ -125,6 +140,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( description, + experimentSpec, jobSpec, name, secretSpec, @@ -137,6 +153,7 @@ public int hashCode() { public String toString() { return new ToStringer(AppManifestAppResourceSpec.class) .add("description", description) + .add("experimentSpec", experimentSpec) .add("jobSpec", jobSpec) .add("name", name) .add("secretSpec", secretSpec) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurablePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurablePermission.java old mode 100755 new mode 100644 index 1a614264f..a5c0749fa --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurablePermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurablePermission.java @@ -6,8 +6,10 @@ @Generated public enum AppManifestAppResourceUcSecurableSpecUcSecurablePermission { + EXECUTE, MANAGE, READ_VOLUME, SELECT, + USE_CONNECTION, WRITE_VOLUME, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurableType.java old mode 100755 new mode 100644 index 45cccb2d5..d8d452c05 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurableType.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppManifestAppResourceUcSecurableSpecUcSecurableType.java @@ -6,6 +6,8 @@ @Generated public enum AppManifestAppResourceUcSecurableSpecUcSecurableType { + CONNECTION, + FUNCTION, TABLE, VOLUME, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java old mode 100755 new mode 100644 index 2761c1651..e46cb6e8e --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java @@ -17,6 +17,10 @@ public class AppResource { @JsonProperty("description") private String description; + /** */ + @JsonProperty("experiment") + private AppResourceExperiment experiment; + /** */ @JsonProperty("genie_space") private AppResourceGenieSpace genieSpace; @@ -63,6 +67,15 @@ public String getDescription() { return description; } + public AppResource setExperiment(AppResourceExperiment experiment) { + this.experiment = experiment; + return this; + } + + public AppResourceExperiment getExperiment() { + return experiment; + } + public AppResource setGenieSpace(AppResourceGenieSpace genieSpace) { this.genieSpace = genieSpace; return this; @@ -133,6 +146,7 @@ public boolean equals(Object o) { AppResource that = (AppResource) o; return Objects.equals(database, that.database) && Objects.equals(description, that.description) + && Objects.equals(experiment, that.experiment) && Objects.equals(genieSpace, that.genieSpace) && Objects.equals(job, that.job) && Objects.equals(name, that.name) @@ -147,6 +161,7 @@ public int hashCode() { return Objects.hash( database, description, + experiment, genieSpace, job, name, @@ -161,6 +176,7 @@ public String toString() { return new ToStringer(AppResource.class) .add("database", database) .add("description", description) + .add("experiment", experiment) .add("genieSpace", genieSpace) .add("job", job) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperiment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperiment.java new file mode 100644 index 000000000..6f194ec5c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperiment.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AppResourceExperiment { + /** */ + @JsonProperty("experiment_id") + private String experimentId; + + /** */ + @JsonProperty("permission") + private AppResourceExperimentExperimentPermission permission; + + public AppResourceExperiment setExperimentId(String experimentId) { + this.experimentId = experimentId; + return this; + } + + public String getExperimentId() { + return experimentId; + } + + public AppResourceExperiment setPermission(AppResourceExperimentExperimentPermission permission) { + this.permission = permission; + return this; + } + + public AppResourceExperimentExperimentPermission getPermission() { + return permission; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppResourceExperiment that = (AppResourceExperiment) o; + return Objects.equals(experimentId, that.experimentId) + && Objects.equals(permission, that.permission); + } + + @Override + public int hashCode() { + return Objects.hash(experimentId, permission); + } + + @Override + public String toString() { + return new ToStringer(AppResourceExperiment.class) + .add("experimentId", experimentId) + .add("permission", permission) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperimentExperimentPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperimentExperimentPermission.java new file mode 100644 index 000000000..db12ec2b0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperimentExperimentPermission.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
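/*
 * Illustrative usage sketch (not generated code) of the new `experiment` app resource
 * introduced in this change; the experiment ID and permission value are placeholder
 * assumptions.
 */
import com.databricks.sdk.service.apps.AppResource;
import com.databricks.sdk.service.apps.AppResourceExperiment;
import com.databricks.sdk.service.apps.AppResourceExperimentExperimentPermission;

public class AppResourceExperimentSketch {
  public static void main(String[] args) {
    // Attach an MLflow experiment to an app resource with read-only access.
    AppResource resource =
        new AppResource()
            .setExperiment(
                new AppResourceExperiment()
                    .setExperimentId("1234567890") // placeholder experiment ID
                    .setPermission(AppResourceExperimentExperimentPermission.CAN_READ));
    System.out.println(resource);
  }
}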
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppResourceExperimentExperimentPermission { + CAN_EDIT, + CAN_MANAGE, + CAN_READ, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java old mode 100755 new mode 100644 index 551de2d21..6ed176b3c --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java @@ -6,6 +6,9 @@ @Generated public enum AppResourceUcSecurableUcSecurablePermission { + EXECUTE, READ_VOLUME, + SELECT, + USE_CONNECTION, WRITE_VOLUME, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java old mode 100755 new mode 100644 index b05b5435e..c97224882 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java @@ -6,5 +6,8 @@ @Generated public enum AppResourceUcSecurableUcSecurableType { + CONNECTION, + FUNCTION, + TABLE, VOLUME, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java old mode 100755 new mode 100644 index b34c390e7..ff4970a5a --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java @@ -22,6 +22,10 @@ public class AppUpdate { @JsonProperty("description") private String description; + /** */ + @JsonProperty("git_repository") + private GitRepository gitRepository; + /** */ @JsonProperty("resources") private Collection resources; @@ -65,6 +69,15 @@ public String getDescription() { return description; } + public AppUpdate setGitRepository(GitRepository gitRepository) { + this.gitRepository = gitRepository; + return this; + } + + public GitRepository getGitRepository() { + return gitRepository; + } + public AppUpdate setResources(Collection resources) { this.resources = resources; return this; @@ -109,6 +122,7 @@ public boolean equals(Object o) { return Objects.equals(budgetPolicyId, that.budgetPolicyId) && Objects.equals(computeSize, that.computeSize) && Objects.equals(description, that.description) + && Objects.equals(gitRepository, that.gitRepository) && Objects.equals(resources, that.resources) && Objects.equals(status, that.status) && Objects.equals(usagePolicyId, that.usagePolicyId) @@ -118,7 +132,14 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - budgetPolicyId, computeSize, description, resources, status, usagePolicyId, userApiScopes); + budgetPolicyId, + computeSize, + description, + gitRepository, + resources, + status, + usagePolicyId, + userApiScopes); } @Override @@ -127,6 +148,7 @@ public String toString() { .add("budgetPolicyId", budgetPolicyId) .add("computeSize", computeSize) .add("description", description) + .add("gitRepository", gitRepository) .add("resources", resources) .add("status", 
status) .add("usagePolicyId", usagePolicyId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitRepository.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitRepository.java new file mode 100644 index 000000000..bd523a790 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitRepository.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Git repository configuration specifying the location of the repository. */ +@Generated +public class GitRepository { + /** + * Git provider. Case insensitive. Supported values: gitHub, gitHubEnterprise, bitbucketCloud, + * bitbucketServer, azureDevOpsServices, gitLab, gitLabEnterpriseEdition, awsCodeCommit. + */ + @JsonProperty("provider") + private String provider; + + /** URL of the Git repository. */ + @JsonProperty("url") + private String url; + + public GitRepository setProvider(String provider) { + this.provider = provider; + return this; + } + + public String getProvider() { + return provider; + } + + public GitRepository setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GitRepository that = (GitRepository) o; + return Objects.equals(provider, that.provider) && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(provider, url); + } + + @Override + public String toString() { + return new ToStringer(GitRepository.class).add("provider", provider).add("url", url).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitSource.java new file mode 100644 index 000000000..6b9728c1b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GitSource.java @@ -0,0 +1,128 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Complete git source specification including repository location and reference. */ +@Generated +public class GitSource { + /** Git branch to checkout. */ + @JsonProperty("branch") + private String branch; + + /** Git commit SHA to checkout. */ + @JsonProperty("commit") + private String commit; + + /** Git repository configuration. Populated from the app's git_repository configuration. */ + @JsonProperty("git_repository") + private GitRepository gitRepository; + + /** + * The resolved commit SHA that was actually used for the deployment. This is populated by the + * system after resolving the reference (branch, tag, or commit). If commit is specified directly, + * this will match commit. If a branch or tag is specified, this contains the commit SHA that the + * branch or tag pointed to at deployment time. 
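/*
 * Illustrative usage sketch (not generated code) tying together the Git deployment types in
 * this change: a GitRepository is configured once on the App, and each AppDeployment then
 * supplies a GitSource carrying only the reference (branch, tag, or commit) plus an optional
 * path. The provider string follows the values documented on GitRepository; the URL, branch,
 * and source path below are placeholder assumptions.
 */
import com.databricks.sdk.service.apps.App;
import com.databricks.sdk.service.apps.AppDeployment;
import com.databricks.sdk.service.apps.GitRepository;
import com.databricks.sdk.service.apps.GitSource;

public class GitDeploymentSketch {
  public static void main(String[] args) {
    GitRepository repo =
        new GitRepository()
            .setProvider("gitHub") // one of the documented provider values
            .setUrl("https://github.com/example-org/example-app"); // placeholder repository

    // The repository is configured on the app itself.
    App app = new App().setGitRepository(repo);

    // A deployment that checks out a branch; resolved_commit is populated by the system after
    // the reference is resolved, per the GitSource field documentation.
    AppDeployment deployment =
        new AppDeployment()
            .setGitSource(
                new GitSource()
                    .setGitRepository(repo)
                    .setBranch("main")
                    .setSourceCodePath("apps/example")); // relative path within the repository

    System.out.println(app);
    System.out.println(deployment);
  }
}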
+ */ + @JsonProperty("resolved_commit") + private String resolvedCommit; + + /** + * Relative path to the app source code within the Git repository. If not specified, the root of + * the repository is used. + */ + @JsonProperty("source_code_path") + private String sourceCodePath; + + /** Git tag to checkout. */ + @JsonProperty("tag") + private String tag; + + public GitSource setBranch(String branch) { + this.branch = branch; + return this; + } + + public String getBranch() { + return branch; + } + + public GitSource setCommit(String commit) { + this.commit = commit; + return this; + } + + public String getCommit() { + return commit; + } + + public GitSource setGitRepository(GitRepository gitRepository) { + this.gitRepository = gitRepository; + return this; + } + + public GitRepository getGitRepository() { + return gitRepository; + } + + public GitSource setResolvedCommit(String resolvedCommit) { + this.resolvedCommit = resolvedCommit; + return this; + } + + public String getResolvedCommit() { + return resolvedCommit; + } + + public GitSource setSourceCodePath(String sourceCodePath) { + this.sourceCodePath = sourceCodePath; + return this; + } + + public String getSourceCodePath() { + return sourceCodePath; + } + + public GitSource setTag(String tag) { + this.tag = tag; + return this; + } + + public String getTag() { + return tag; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GitSource that = (GitSource) o; + return Objects.equals(branch, that.branch) + && Objects.equals(commit, that.commit) + && Objects.equals(gitRepository, that.gitRepository) + && Objects.equals(resolvedCommit, that.resolvedCommit) + && Objects.equals(sourceCodePath, that.sourceCodePath) + && Objects.equals(tag, that.tag); + } + + @Override + public int hashCode() { + return Objects.hash(branch, commit, gitRepository, resolvedCommit, sourceCodePath, tag); + } + + @Override + public String toString() { + return new ToStringer(GitSource.class) + .add("branch", branch) + .add("commit", commit) + .add("gitRepository", gitRepository) + .add("resolvedCommit", resolvedCommit) + .add("sourceCodePath", sourceCodePath) + .add("tag", tag) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java old mode 100755 new mode 100644 index 04137ed3e..44c7b1bc9 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java @@ -8,7 +8,7 @@ import java.util.Map; import java.util.Objects; -/** Next ID: 43 */ +/** Next ID: 44 */ @Generated public class SchemaInfo { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java old mode 100755 new mode 100644 index 182c3d0f0..cbe5577c2 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java @@ -160,7 +160,8 @@ public class TableInfo { * View dependencies (when table_type == **VIEW** or **MATERIALIZED_VIEW**, **STREAMING_TABLE**) - * when DependencyList is None, the dependency is not provided; - when DependencyList is an empty * list, the dependency is provided but is empty; - when 
DependencyList is not an empty list, - * dependencies are provided and recorded. + * dependencies are provided and recorded. Note: this field is not set in the output of the + * __listTables__ API. */ @JsonProperty("view_dependencies") private DependencyList viewDependencies; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java old mode 100755 new mode 100644 index ac41bdf5a..5db6a605a --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java @@ -117,6 +117,9 @@ public Iterable list(String catalogName, String schemaName) { * privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is * no guarantee of a specific ordering of the elements in the array. * + *
<p>NOTE: **view_dependencies** and **table_constraints** are not returned by ListTables + * queries. + * * <p>
NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated * calls will be deprecated soon. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java old mode 100755 new mode 100644 index 34f2b78ed..1efde490f --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java @@ -79,6 +79,9 @@ public interface TablesService { * privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is * no guarantee of a specific ordering of the elements in the array. * + *
<p>NOTE: **view_dependencies** and **table_constraints** are not returned by ListTables + * queries. + * * <p>
NOTE: we recommend using max_results=0 to use the paginated version of this API. Unpaginated * calls will be deprecated soon. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java old mode 100755 new mode 100644 index 6fd6e421c..3230194c5 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/AnomalyDetectionConfig.java @@ -4,26 +4,44 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; /** Anomaly Detection Configurations. */ @Generated public class AnomalyDetectionConfig { + /** List of fully qualified table names to exclude from anomaly detection. */ + @JsonProperty("excluded_table_full_names") + private Collection excludedTableFullNames; + + public AnomalyDetectionConfig setExcludedTableFullNames( + Collection excludedTableFullNames) { + this.excludedTableFullNames = excludedTableFullNames; + return this; + } + + public Collection getExcludedTableFullNames() { + return excludedTableFullNames; + } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - return true; + AnomalyDetectionConfig that = (AnomalyDetectionConfig) o; + return Objects.equals(excludedTableFullNames, that.excludedTableFullNames); } @Override public int hashCode() { - return Objects.hash(); + return Objects.hash(excludedTableFullNames); } @Override public String toString() { - return new ToStringer(AnomalyDetectionConfig.class).toString(); + return new ToStringer(AnomalyDetectionConfig.class) + .add("excludedTableFullNames", excludedTableFullNames) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java old mode 100755 new mode 100644 index 53dc83891..dcbf39048 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java @@ -16,6 +16,13 @@ */ @Generated public class JobRunAs { + /** + * Group name of an account group assigned to the workspace. Setting this field requires being a + * member of the group. + */ + @JsonProperty("group_name") + private String groupName; + /** * Application ID of an active service principal. Setting this field requires the * `servicePrincipal/user` role. 
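/*
 * Illustrative usage sketch (not generated code) of two model additions in this change:
 * excluding tables from anomaly detection and running a job as a workspace group. The table
 * names and group name are placeholder assumptions; per the field documentation, setting
 * group_name requires the caller to be a member of that group.
 */
import com.databricks.sdk.service.dataquality.AnomalyDetectionConfig;
import com.databricks.sdk.service.jobs.JobRunAs;
import java.util.Arrays;

public class RunAsAndAnomalyConfigSketch {
  public static void main(String[] args) {
    // Skip scratch tables when running anomaly detection.
    AnomalyDetectionConfig anomalyConfig =
        new AnomalyDetectionConfig()
            .setExcludedTableFullNames(
                Arrays.asList("main.staging.tmp_events", "main.staging.tmp_users"));

    // Run the job under a workspace-assigned account group instead of a single principal.
    JobRunAs runAs = new JobRunAs().setGroupName("data-platform-engineers");

    System.out.println(anomalyConfig);
    System.out.println(runAs);
  }
}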
@@ -30,6 +37,15 @@ public class JobRunAs { @JsonProperty("user_name") private String userName; + public JobRunAs setGroupName(String groupName) { + this.groupName = groupName; + return this; + } + + public String getGroupName() { + return groupName; + } + public JobRunAs setServicePrincipalName(String servicePrincipalName) { this.servicePrincipalName = servicePrincipalName; return this; @@ -53,18 +69,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; JobRunAs that = (JobRunAs) o; - return Objects.equals(servicePrincipalName, that.servicePrincipalName) + return Objects.equals(groupName, that.groupName) + && Objects.equals(servicePrincipalName, that.servicePrincipalName) && Objects.equals(userName, that.userName); } @Override public int hashCode() { - return Objects.hash(servicePrincipalName, userName); + return Objects.hash(groupName, servicePrincipalName, userName); } @Override public String toString() { return new ToStringer(JobRunAs.class) + .add("groupName", groupName) .add("servicePrincipalName", servicePrincipalName) .add("userName", userName) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java old mode 100755 new mode 100644 index 4e8dc6013..5a1f380e3 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java @@ -37,6 +37,13 @@ public class TableSpecificConfig { private IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig queryBasedConnectorConfig; + /** + * (Optional, Immutable) The row filter condition to be applied to the table. It must not contain + * the WHERE keyword, only the actual filter condition. It must be in DBSQL format. + */ + @JsonProperty("row_filter") + private String rowFilter; + /** * If true, formula fields defined in the table are included in the ingestion. 
This setting is * only valid for the Salesforce connector @@ -98,6 +105,15 @@ public TableSpecificConfig setQueryBasedConnectorConfig( return queryBasedConnectorConfig; } + public TableSpecificConfig setRowFilter(String rowFilter) { + this.rowFilter = rowFilter; + return this; + } + + public String getRowFilter() { + return rowFilter; + } + public TableSpecificConfig setSalesforceIncludeFormulaFields( Boolean salesforceIncludeFormulaFields) { this.salesforceIncludeFormulaFields = salesforceIncludeFormulaFields; @@ -145,6 +161,7 @@ public boolean equals(Object o) { && Objects.equals(includeColumns, that.includeColumns) && Objects.equals(primaryKeys, that.primaryKeys) && Objects.equals(queryBasedConnectorConfig, that.queryBasedConnectorConfig) + && Objects.equals(rowFilter, that.rowFilter) && Objects.equals(salesforceIncludeFormulaFields, that.salesforceIncludeFormulaFields) && Objects.equals(scdType, that.scdType) && Objects.equals(sequenceBy, that.sequenceBy) @@ -158,6 +175,7 @@ public int hashCode() { includeColumns, primaryKeys, queryBasedConnectorConfig, + rowFilter, salesforceIncludeFormulaFields, scdType, sequenceBy, @@ -171,6 +189,7 @@ public String toString() { .add("includeColumns", includeColumns) .add("primaryKeys", primaryKeys) .add("queryBasedConnectorConfig", queryBasedConnectorConfig) + .add("rowFilter", rowFilter) .add("salesforceIncludeFormulaFields", salesforceIncludeFormulaFields) .add("scdType", scdType) .add("sequenceBy", sequenceBy) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Branch.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Branch.java index 9bdb82f5d..ee3f7d02d 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Branch.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Branch.java @@ -14,49 +14,6 @@ public class Branch { @JsonProperty("create_time") private Timestamp createTime; - /** The branch's state, indicating if it is initializing, ready for use, or archived. */ - @JsonProperty("current_state") - private BranchState currentState; - - /** - * Whether the branch is the project's default branch. This field is only returned on - * create/update responses. See effective_default for the value that is actually applied to the - * branch. - */ - @JsonProperty("default") - private Boolean defaultValue; - - /** Whether the branch is the project's default branch. */ - @JsonProperty("effective_default") - private Boolean effectiveDefault; - - /** Whether the branch is protected. */ - @JsonProperty("effective_is_protected") - private Boolean effectiveIsProtected; - - /** - * The name of the source branch from which this branch was created. Format: - * projects/{project_id}/branches/{branch_id} - */ - @JsonProperty("effective_source_branch") - private String effectiveSourceBranch; - - /** The Log Sequence Number (LSN) on the source branch from which this branch was created. */ - @JsonProperty("effective_source_branch_lsn") - private String effectiveSourceBranchLsn; - - /** The point in time on the source branch from which this branch was created. */ - @JsonProperty("effective_source_branch_time") - private Timestamp effectiveSourceBranchTime; - - /** Whether the branch is protected. */ - @JsonProperty("is_protected") - private Boolean isProtected; - - /** The logical size of the branch. */ - @JsonProperty("logical_size_bytes") - private Long logicalSizeBytes; - /** The resource name of the branch. 
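/*
 * Illustrative usage sketch (not generated code) of the row_filter option added to
 * TableSpecificConfig in this change: per the field documentation it takes a bare DBSQL
 * condition without the WHERE keyword. The filter expression below is a placeholder.
 */
import com.databricks.sdk.service.pipelines.TableSpecificConfig;

public class RowFilterSketch {
  public static void main(String[] args) {
    TableSpecificConfig config =
        new TableSpecificConfig()
            .setRowFilter("region = 'EMEA' AND is_deleted = false"); // condition only, no WHERE
    System.out.println(config);
  }
}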
Format: projects/{project_id}/branches/{branch_id} */ @JsonProperty("name") private String name; @@ -65,28 +22,13 @@ public class Branch { @JsonProperty("parent") private String parent; - /** The pending state of the branch, if a state transition is in progress. */ - @JsonProperty("pending_state") - private BranchState pendingState; + /** The desired state of a Branch. */ + @JsonProperty("spec") + private BranchSpec spec; - /** - * The name of the source branch from which this branch was created. Format: - * projects/{project_id}/branches/{branch_id} - */ - @JsonProperty("source_branch") - private String sourceBranch; - - /** The Log Sequence Number (LSN) on the source branch from which this branch was created. */ - @JsonProperty("source_branch_lsn") - private String sourceBranchLsn; - - /** The point in time on the source branch from which this branch was created. */ - @JsonProperty("source_branch_time") - private Timestamp sourceBranchTime; - - /** A timestamp indicating when the `current_state` began. */ - @JsonProperty("state_change_time") - private Timestamp stateChangeTime; + /** The current status of a Branch. */ + @JsonProperty("status") + private BranchStatus status; /** System generated unique ID for the branch. */ @JsonProperty("uid") @@ -105,87 +47,6 @@ public Timestamp getCreateTime() { return createTime; } - public Branch setCurrentState(BranchState currentState) { - this.currentState = currentState; - return this; - } - - public BranchState getCurrentState() { - return currentState; - } - - public Branch setDefault(Boolean defaultValue) { - this.defaultValue = defaultValue; - return this; - } - - public Boolean getDefault() { - return defaultValue; - } - - public Branch setEffectiveDefault(Boolean effectiveDefault) { - this.effectiveDefault = effectiveDefault; - return this; - } - - public Boolean getEffectiveDefault() { - return effectiveDefault; - } - - public Branch setEffectiveIsProtected(Boolean effectiveIsProtected) { - this.effectiveIsProtected = effectiveIsProtected; - return this; - } - - public Boolean getEffectiveIsProtected() { - return effectiveIsProtected; - } - - public Branch setEffectiveSourceBranch(String effectiveSourceBranch) { - this.effectiveSourceBranch = effectiveSourceBranch; - return this; - } - - public String getEffectiveSourceBranch() { - return effectiveSourceBranch; - } - - public Branch setEffectiveSourceBranchLsn(String effectiveSourceBranchLsn) { - this.effectiveSourceBranchLsn = effectiveSourceBranchLsn; - return this; - } - - public String getEffectiveSourceBranchLsn() { - return effectiveSourceBranchLsn; - } - - public Branch setEffectiveSourceBranchTime(Timestamp effectiveSourceBranchTime) { - this.effectiveSourceBranchTime = effectiveSourceBranchTime; - return this; - } - - public Timestamp getEffectiveSourceBranchTime() { - return effectiveSourceBranchTime; - } - - public Branch setIsProtected(Boolean isProtected) { - this.isProtected = isProtected; - return this; - } - - public Boolean getIsProtected() { - return isProtected; - } - - public Branch setLogicalSizeBytes(Long logicalSizeBytes) { - this.logicalSizeBytes = logicalSizeBytes; - return this; - } - - public Long getLogicalSizeBytes() { - return logicalSizeBytes; - } - public Branch setName(String name) { this.name = name; return this; @@ -204,49 +65,22 @@ public String getParent() { return parent; } - public Branch setPendingState(BranchState pendingState) { - this.pendingState = pendingState; - return this; - } - - public BranchState getPendingState() { - return pendingState; 
- } - - public Branch setSourceBranch(String sourceBranch) { - this.sourceBranch = sourceBranch; - return this; - } - - public String getSourceBranch() { - return sourceBranch; - } - - public Branch setSourceBranchLsn(String sourceBranchLsn) { - this.sourceBranchLsn = sourceBranchLsn; - return this; - } - - public String getSourceBranchLsn() { - return sourceBranchLsn; - } - - public Branch setSourceBranchTime(Timestamp sourceBranchTime) { - this.sourceBranchTime = sourceBranchTime; + public Branch setSpec(BranchSpec spec) { + this.spec = spec; return this; } - public Timestamp getSourceBranchTime() { - return sourceBranchTime; + public BranchSpec getSpec() { + return spec; } - public Branch setStateChangeTime(Timestamp stateChangeTime) { - this.stateChangeTime = stateChangeTime; + public Branch setStatus(BranchStatus status) { + this.status = status; return this; } - public Timestamp getStateChangeTime() { - return stateChangeTime; + public BranchStatus getStatus() { + return status; } public Branch setUid(String uid) { @@ -273,70 +107,27 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Branch that = (Branch) o; return Objects.equals(createTime, that.createTime) - && Objects.equals(currentState, that.currentState) - && Objects.equals(defaultValue, that.defaultValue) - && Objects.equals(effectiveDefault, that.effectiveDefault) - && Objects.equals(effectiveIsProtected, that.effectiveIsProtected) - && Objects.equals(effectiveSourceBranch, that.effectiveSourceBranch) - && Objects.equals(effectiveSourceBranchLsn, that.effectiveSourceBranchLsn) - && Objects.equals(effectiveSourceBranchTime, that.effectiveSourceBranchTime) - && Objects.equals(isProtected, that.isProtected) - && Objects.equals(logicalSizeBytes, that.logicalSizeBytes) && Objects.equals(name, that.name) && Objects.equals(parent, that.parent) - && Objects.equals(pendingState, that.pendingState) - && Objects.equals(sourceBranch, that.sourceBranch) - && Objects.equals(sourceBranchLsn, that.sourceBranchLsn) - && Objects.equals(sourceBranchTime, that.sourceBranchTime) - && Objects.equals(stateChangeTime, that.stateChangeTime) + && Objects.equals(spec, that.spec) + && Objects.equals(status, that.status) && Objects.equals(uid, that.uid) && Objects.equals(updateTime, that.updateTime); } @Override public int hashCode() { - return Objects.hash( - createTime, - currentState, - defaultValue, - effectiveDefault, - effectiveIsProtected, - effectiveSourceBranch, - effectiveSourceBranchLsn, - effectiveSourceBranchTime, - isProtected, - logicalSizeBytes, - name, - parent, - pendingState, - sourceBranch, - sourceBranchLsn, - sourceBranchTime, - stateChangeTime, - uid, - updateTime); + return Objects.hash(createTime, name, parent, spec, status, uid, updateTime); } @Override public String toString() { return new ToStringer(Branch.class) .add("createTime", createTime) - .add("currentState", currentState) - .add("defaultValue", defaultValue) - .add("effectiveDefault", effectiveDefault) - .add("effectiveIsProtected", effectiveIsProtected) - .add("effectiveSourceBranch", effectiveSourceBranch) - .add("effectiveSourceBranchLsn", effectiveSourceBranchLsn) - .add("effectiveSourceBranchTime", effectiveSourceBranchTime) - .add("isProtected", isProtected) - .add("logicalSizeBytes", logicalSizeBytes) .add("name", name) .add("parent", parent) - .add("pendingState", pendingState) - .add("sourceBranch", sourceBranch) - .add("sourceBranchLsn", sourceBranchLsn) - .add("sourceBranchTime", sourceBranchTime) - 
.add("stateChangeTime", stateChangeTime) + .add("spec", spec) + .add("status", status) .add("uid", uid) .add("updateTime", updateTime) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchSpec.java new file mode 100644 index 000000000..28830ce4f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchSpec.java @@ -0,0 +1,108 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +@Generated +public class BranchSpec { + /** Whether the branch is the project's default branch. */ + @JsonProperty("default") + private Boolean defaultValue; + + /** Whether the branch is protected. */ + @JsonProperty("is_protected") + private Boolean isProtected; + + /** + * The name of the source branch from which this branch was created. Format: + * projects/{project_id}/branches/{branch_id} + */ + @JsonProperty("source_branch") + private String sourceBranch; + + /** The Log Sequence Number (LSN) on the source branch from which this branch was created. */ + @JsonProperty("source_branch_lsn") + private String sourceBranchLsn; + + /** The point in time on the source branch from which this branch was created. */ + @JsonProperty("source_branch_time") + private Timestamp sourceBranchTime; + + public BranchSpec setDefault(Boolean defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public Boolean getDefault() { + return defaultValue; + } + + public BranchSpec setIsProtected(Boolean isProtected) { + this.isProtected = isProtected; + return this; + } + + public Boolean getIsProtected() { + return isProtected; + } + + public BranchSpec setSourceBranch(String sourceBranch) { + this.sourceBranch = sourceBranch; + return this; + } + + public String getSourceBranch() { + return sourceBranch; + } + + public BranchSpec setSourceBranchLsn(String sourceBranchLsn) { + this.sourceBranchLsn = sourceBranchLsn; + return this; + } + + public String getSourceBranchLsn() { + return sourceBranchLsn; + } + + public BranchSpec setSourceBranchTime(Timestamp sourceBranchTime) { + this.sourceBranchTime = sourceBranchTime; + return this; + } + + public Timestamp getSourceBranchTime() { + return sourceBranchTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BranchSpec that = (BranchSpec) o; + return Objects.equals(defaultValue, that.defaultValue) + && Objects.equals(isProtected, that.isProtected) + && Objects.equals(sourceBranch, that.sourceBranch) + && Objects.equals(sourceBranchLsn, that.sourceBranchLsn) + && Objects.equals(sourceBranchTime, that.sourceBranchTime); + } + + @Override + public int hashCode() { + return Objects.hash(defaultValue, isProtected, sourceBranch, sourceBranchLsn, sourceBranchTime); + } + + @Override + public String toString() { + return new ToStringer(BranchSpec.class) + .add("defaultValue", defaultValue) + .add("isProtected", isProtected) + .add("sourceBranch", sourceBranch) + .add("sourceBranchLsn", sourceBranchLsn) + .add("sourceBranchTime", sourceBranchTime) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatus.java new file mode 100644 index 000000000..041beb73e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatus.java @@ -0,0 +1,177 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +@Generated +public class BranchStatus { + /** The branch's state, indicating if it is initializing, ready for use, or archived. */ + @JsonProperty("current_state") + private BranchStatusState currentState; + + /** Whether the branch is the project's default branch. */ + @JsonProperty("default") + private Boolean defaultValue; + + /** Whether the branch is protected. */ + @JsonProperty("is_protected") + private Boolean isProtected; + + /** The logical size of the branch. */ + @JsonProperty("logical_size_bytes") + private Long logicalSizeBytes; + + /** The pending state of the branch, if a state transition is in progress. */ + @JsonProperty("pending_state") + private BranchStatusState pendingState; + + /** + * The name of the source branch from which this branch was created. Format: + * projects/{project_id}/branches/{branch_id} + */ + @JsonProperty("source_branch") + private String sourceBranch; + + /** The Log Sequence Number (LSN) on the source branch from which this branch was created. */ + @JsonProperty("source_branch_lsn") + private String sourceBranchLsn; + + /** The point in time on the source branch from which this branch was created. */ + @JsonProperty("source_branch_time") + private Timestamp sourceBranchTime; + + /** A timestamp indicating when the `current_state` began. 
*/ + @JsonProperty("state_change_time") + private Timestamp stateChangeTime; + + public BranchStatus setCurrentState(BranchStatusState currentState) { + this.currentState = currentState; + return this; + } + + public BranchStatusState getCurrentState() { + return currentState; + } + + public BranchStatus setDefault(Boolean defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public Boolean getDefault() { + return defaultValue; + } + + public BranchStatus setIsProtected(Boolean isProtected) { + this.isProtected = isProtected; + return this; + } + + public Boolean getIsProtected() { + return isProtected; + } + + public BranchStatus setLogicalSizeBytes(Long logicalSizeBytes) { + this.logicalSizeBytes = logicalSizeBytes; + return this; + } + + public Long getLogicalSizeBytes() { + return logicalSizeBytes; + } + + public BranchStatus setPendingState(BranchStatusState pendingState) { + this.pendingState = pendingState; + return this; + } + + public BranchStatusState getPendingState() { + return pendingState; + } + + public BranchStatus setSourceBranch(String sourceBranch) { + this.sourceBranch = sourceBranch; + return this; + } + + public String getSourceBranch() { + return sourceBranch; + } + + public BranchStatus setSourceBranchLsn(String sourceBranchLsn) { + this.sourceBranchLsn = sourceBranchLsn; + return this; + } + + public String getSourceBranchLsn() { + return sourceBranchLsn; + } + + public BranchStatus setSourceBranchTime(Timestamp sourceBranchTime) { + this.sourceBranchTime = sourceBranchTime; + return this; + } + + public Timestamp getSourceBranchTime() { + return sourceBranchTime; + } + + public BranchStatus setStateChangeTime(Timestamp stateChangeTime) { + this.stateChangeTime = stateChangeTime; + return this; + } + + public Timestamp getStateChangeTime() { + return stateChangeTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BranchStatus that = (BranchStatus) o; + return Objects.equals(currentState, that.currentState) + && Objects.equals(defaultValue, that.defaultValue) + && Objects.equals(isProtected, that.isProtected) + && Objects.equals(logicalSizeBytes, that.logicalSizeBytes) + && Objects.equals(pendingState, that.pendingState) + && Objects.equals(sourceBranch, that.sourceBranch) + && Objects.equals(sourceBranchLsn, that.sourceBranchLsn) + && Objects.equals(sourceBranchTime, that.sourceBranchTime) + && Objects.equals(stateChangeTime, that.stateChangeTime); + } + + @Override + public int hashCode() { + return Objects.hash( + currentState, + defaultValue, + isProtected, + logicalSizeBytes, + pendingState, + sourceBranch, + sourceBranchLsn, + sourceBranchTime, + stateChangeTime); + } + + @Override + public String toString() { + return new ToStringer(BranchStatus.class) + .add("currentState", currentState) + .add("defaultValue", defaultValue) + .add("isProtected", isProtected) + .add("logicalSizeBytes", logicalSizeBytes) + .add("pendingState", pendingState) + .add("sourceBranch", sourceBranch) + .add("sourceBranchLsn", sourceBranchLsn) + .add("sourceBranchTime", sourceBranchTime) + .add("stateChangeTime", stateChangeTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatusState.java similarity index 89% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchState.java 
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatusState.java index 0a0cec938..5fa60cdd7 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/BranchStatusState.java @@ -6,7 +6,7 @@ /** The state of the database branch. */ @Generated -public enum BranchState { +public enum BranchStatusState { ARCHIVED, IMPORTING, INIT, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java index b8adfb44f..925c3766e 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java @@ -5,79 +5,15 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.protobuf.Duration; import com.google.protobuf.Timestamp; import java.util.Objects; @Generated public class Endpoint { - /** The maximum number of Compute Units. */ - @JsonProperty("autoscaling_limit_max_cu") - private Double autoscalingLimitMaxCu; - - /** The minimum number of Compute Units. */ - @JsonProperty("autoscaling_limit_min_cu") - private Double autoscalingLimitMinCu; - /** A timestamp indicating when the compute endpoint was created. */ @JsonProperty("create_time") private Timestamp createTime; - /** */ - @JsonProperty("current_state") - private EndpointState currentState; - - /** - * Whether to restrict connections to the compute endpoint. Enabling this option schedules a - * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or - * console action. - */ - @JsonProperty("disabled") - private Boolean disabled; - - /** The maximum number of Compute Units. */ - @JsonProperty("effective_autoscaling_limit_max_cu") - private Double effectiveAutoscalingLimitMaxCu; - - /** The minimum number of Compute Units. */ - @JsonProperty("effective_autoscaling_limit_min_cu") - private Double effectiveAutoscalingLimitMinCu; - - /** - * Whether to restrict connections to the compute endpoint. Enabling this option schedules a - * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or - * console action. - */ - @JsonProperty("effective_disabled") - private Boolean effectiveDisabled; - - /** */ - @JsonProperty("effective_pooler_mode") - private EndpointPoolerMode effectivePoolerMode; - - /** */ - @JsonProperty("effective_settings") - private EndpointSettings effectiveSettings; - - /** Duration of inactivity after which the compute endpoint is automatically suspended. */ - @JsonProperty("effective_suspend_timeout_duration") - private Duration effectiveSuspendTimeoutDuration; - - /** The endpoint type. There could be only one READ_WRITE endpoint per branch. */ - @JsonProperty("endpoint_type") - private EndpointType endpointType; - - /** - * The hostname of the compute endpoint. This is the hostname specified when connecting to a - * database. - */ - @JsonProperty("host") - private String host; - - /** A timestamp indicating when the compute endpoint was last active. */ - @JsonProperty("last_active_time") - private Timestamp lastActiveTime; - /** * The resource name of the endpoint. 
Format: * projects/{project_id}/branches/{branch_id}/endpoints/{endpoint_id} @@ -89,29 +25,13 @@ public class Endpoint { @JsonProperty("parent") private String parent; - /** */ - @JsonProperty("pending_state") - private EndpointState pendingState; - - /** */ - @JsonProperty("pooler_mode") - private EndpointPoolerMode poolerMode; - - /** */ - @JsonProperty("settings") - private EndpointSettings settings; + /** The desired state of an Endpoint. */ + @JsonProperty("spec") + private EndpointSpec spec; - /** A timestamp indicating when the compute endpoint was last started. */ - @JsonProperty("start_time") - private Timestamp startTime; - - /** A timestamp indicating when the compute endpoint was last suspended. */ - @JsonProperty("suspend_time") - private Timestamp suspendTime; - - /** Duration of inactivity after which the compute endpoint is automatically suspended. */ - @JsonProperty("suspend_timeout_duration") - private Duration suspendTimeoutDuration; + /** The current status of an Endpoint. */ + @JsonProperty("status") + private EndpointStatus status; /** System generated unique ID for the endpoint. */ @JsonProperty("uid") @@ -121,24 +41,6 @@ public class Endpoint { @JsonProperty("update_time") private Timestamp updateTime; - public Endpoint setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) { - this.autoscalingLimitMaxCu = autoscalingLimitMaxCu; - return this; - } - - public Double getAutoscalingLimitMaxCu() { - return autoscalingLimitMaxCu; - } - - public Endpoint setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) { - this.autoscalingLimitMinCu = autoscalingLimitMinCu; - return this; - } - - public Double getAutoscalingLimitMinCu() { - return autoscalingLimitMinCu; - } - public Endpoint setCreateTime(Timestamp createTime) { this.createTime = createTime; return this; @@ -148,105 +50,6 @@ public Timestamp getCreateTime() { return createTime; } - public Endpoint setCurrentState(EndpointState currentState) { - this.currentState = currentState; - return this; - } - - public EndpointState getCurrentState() { - return currentState; - } - - public Endpoint setDisabled(Boolean disabled) { - this.disabled = disabled; - return this; - } - - public Boolean getDisabled() { - return disabled; - } - - public Endpoint setEffectiveAutoscalingLimitMaxCu(Double effectiveAutoscalingLimitMaxCu) { - this.effectiveAutoscalingLimitMaxCu = effectiveAutoscalingLimitMaxCu; - return this; - } - - public Double getEffectiveAutoscalingLimitMaxCu() { - return effectiveAutoscalingLimitMaxCu; - } - - public Endpoint setEffectiveAutoscalingLimitMinCu(Double effectiveAutoscalingLimitMinCu) { - this.effectiveAutoscalingLimitMinCu = effectiveAutoscalingLimitMinCu; - return this; - } - - public Double getEffectiveAutoscalingLimitMinCu() { - return effectiveAutoscalingLimitMinCu; - } - - public Endpoint setEffectiveDisabled(Boolean effectiveDisabled) { - this.effectiveDisabled = effectiveDisabled; - return this; - } - - public Boolean getEffectiveDisabled() { - return effectiveDisabled; - } - - public Endpoint setEffectivePoolerMode(EndpointPoolerMode effectivePoolerMode) { - this.effectivePoolerMode = effectivePoolerMode; - return this; - } - - public EndpointPoolerMode getEffectivePoolerMode() { - return effectivePoolerMode; - } - - public Endpoint setEffectiveSettings(EndpointSettings effectiveSettings) { - this.effectiveSettings = effectiveSettings; - return this; - } - - public EndpointSettings getEffectiveSettings() { - return effectiveSettings; - } - - public Endpoint 
setEffectiveSuspendTimeoutDuration(Duration effectiveSuspendTimeoutDuration) { - this.effectiveSuspendTimeoutDuration = effectiveSuspendTimeoutDuration; - return this; - } - - public Duration getEffectiveSuspendTimeoutDuration() { - return effectiveSuspendTimeoutDuration; - } - - public Endpoint setEndpointType(EndpointType endpointType) { - this.endpointType = endpointType; - return this; - } - - public EndpointType getEndpointType() { - return endpointType; - } - - public Endpoint setHost(String host) { - this.host = host; - return this; - } - - public String getHost() { - return host; - } - - public Endpoint setLastActiveTime(Timestamp lastActiveTime) { - this.lastActiveTime = lastActiveTime; - return this; - } - - public Timestamp getLastActiveTime() { - return lastActiveTime; - } - public Endpoint setName(String name) { this.name = name; return this; @@ -265,58 +68,22 @@ public String getParent() { return parent; } - public Endpoint setPendingState(EndpointState pendingState) { - this.pendingState = pendingState; - return this; - } - - public EndpointState getPendingState() { - return pendingState; - } - - public Endpoint setPoolerMode(EndpointPoolerMode poolerMode) { - this.poolerMode = poolerMode; - return this; - } - - public EndpointPoolerMode getPoolerMode() { - return poolerMode; - } - - public Endpoint setSettings(EndpointSettings settings) { - this.settings = settings; - return this; - } - - public EndpointSettings getSettings() { - return settings; - } - - public Endpoint setStartTime(Timestamp startTime) { - this.startTime = startTime; - return this; - } - - public Timestamp getStartTime() { - return startTime; - } - - public Endpoint setSuspendTime(Timestamp suspendTime) { - this.suspendTime = suspendTime; + public Endpoint setSpec(EndpointSpec spec) { + this.spec = spec; return this; } - public Timestamp getSuspendTime() { - return suspendTime; + public EndpointSpec getSpec() { + return spec; } - public Endpoint setSuspendTimeoutDuration(Duration suspendTimeoutDuration) { - this.suspendTimeoutDuration = suspendTimeoutDuration; + public Endpoint setStatus(EndpointStatus status) { + this.status = status; return this; } - public Duration getSuspendTimeoutDuration() { - return suspendTimeoutDuration; + public EndpointStatus getStatus() { + return status; } public Endpoint setUid(String uid) { @@ -342,86 +109,28 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Endpoint that = (Endpoint) o; - return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu) - && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) - && Objects.equals(createTime, that.createTime) - && Objects.equals(currentState, that.currentState) - && Objects.equals(disabled, that.disabled) - && Objects.equals(effectiveAutoscalingLimitMaxCu, that.effectiveAutoscalingLimitMaxCu) - && Objects.equals(effectiveAutoscalingLimitMinCu, that.effectiveAutoscalingLimitMinCu) - && Objects.equals(effectiveDisabled, that.effectiveDisabled) - && Objects.equals(effectivePoolerMode, that.effectivePoolerMode) - && Objects.equals(effectiveSettings, that.effectiveSettings) - && Objects.equals(effectiveSuspendTimeoutDuration, that.effectiveSuspendTimeoutDuration) - && Objects.equals(endpointType, that.endpointType) - && Objects.equals(host, that.host) - && Objects.equals(lastActiveTime, that.lastActiveTime) + return Objects.equals(createTime, that.createTime) && Objects.equals(name, that.name) && Objects.equals(parent, that.parent) - && 
Objects.equals(pendingState, that.pendingState) - && Objects.equals(poolerMode, that.poolerMode) - && Objects.equals(settings, that.settings) - && Objects.equals(startTime, that.startTime) - && Objects.equals(suspendTime, that.suspendTime) - && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration) + && Objects.equals(spec, that.spec) + && Objects.equals(status, that.status) && Objects.equals(uid, that.uid) && Objects.equals(updateTime, that.updateTime); } @Override public int hashCode() { - return Objects.hash( - autoscalingLimitMaxCu, - autoscalingLimitMinCu, - createTime, - currentState, - disabled, - effectiveAutoscalingLimitMaxCu, - effectiveAutoscalingLimitMinCu, - effectiveDisabled, - effectivePoolerMode, - effectiveSettings, - effectiveSuspendTimeoutDuration, - endpointType, - host, - lastActiveTime, - name, - parent, - pendingState, - poolerMode, - settings, - startTime, - suspendTime, - suspendTimeoutDuration, - uid, - updateTime); + return Objects.hash(createTime, name, parent, spec, status, uid, updateTime); } @Override public String toString() { return new ToStringer(Endpoint.class) - .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu) - .add("autoscalingLimitMinCu", autoscalingLimitMinCu) .add("createTime", createTime) - .add("currentState", currentState) - .add("disabled", disabled) - .add("effectiveAutoscalingLimitMaxCu", effectiveAutoscalingLimitMaxCu) - .add("effectiveAutoscalingLimitMinCu", effectiveAutoscalingLimitMinCu) - .add("effectiveDisabled", effectiveDisabled) - .add("effectivePoolerMode", effectivePoolerMode) - .add("effectiveSettings", effectiveSettings) - .add("effectiveSuspendTimeoutDuration", effectiveSuspendTimeoutDuration) - .add("endpointType", endpointType) - .add("host", host) - .add("lastActiveTime", lastActiveTime) .add("name", name) .add("parent", parent) - .add("pendingState", pendingState) - .add("poolerMode", poolerMode) - .add("settings", settings) - .add("startTime", startTime) - .add("suspendTime", suspendTime) - .add("suspendTimeoutDuration", suspendTimeoutDuration) + .add("spec", spec) + .add("status", status) .add("uid", uid) .add("updateTime", updateTime) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java new file mode 100644 index 000000000..74fe3672b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java @@ -0,0 +1,146 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Duration; +import java.util.Objects; + +@Generated +public class EndpointSpec { + /** The maximum number of Compute Units. */ + @JsonProperty("autoscaling_limit_max_cu") + private Double autoscalingLimitMaxCu; + + /** The minimum number of Compute Units. */ + @JsonProperty("autoscaling_limit_min_cu") + private Double autoscalingLimitMinCu; + + /** + * Whether to restrict connections to the compute endpoint. Enabling this option schedules a + * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or + * console action. + */ + @JsonProperty("disabled") + private Boolean disabled; + + /** The endpoint type. A branch can only have one READ_WRITE endpoint. 
*/ + @JsonProperty("endpoint_type") + private EndpointType endpointType; + + /** */ + @JsonProperty("pooler_mode") + private EndpointPoolerMode poolerMode; + + /** */ + @JsonProperty("settings") + private EndpointSettings settings; + + /** Duration of inactivity after which the compute endpoint is automatically suspended. */ + @JsonProperty("suspend_timeout_duration") + private Duration suspendTimeoutDuration; + + public EndpointSpec setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) { + this.autoscalingLimitMaxCu = autoscalingLimitMaxCu; + return this; + } + + public Double getAutoscalingLimitMaxCu() { + return autoscalingLimitMaxCu; + } + + public EndpointSpec setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) { + this.autoscalingLimitMinCu = autoscalingLimitMinCu; + return this; + } + + public Double getAutoscalingLimitMinCu() { + return autoscalingLimitMinCu; + } + + public EndpointSpec setDisabled(Boolean disabled) { + this.disabled = disabled; + return this; + } + + public Boolean getDisabled() { + return disabled; + } + + public EndpointSpec setEndpointType(EndpointType endpointType) { + this.endpointType = endpointType; + return this; + } + + public EndpointType getEndpointType() { + return endpointType; + } + + public EndpointSpec setPoolerMode(EndpointPoolerMode poolerMode) { + this.poolerMode = poolerMode; + return this; + } + + public EndpointPoolerMode getPoolerMode() { + return poolerMode; + } + + public EndpointSpec setSettings(EndpointSettings settings) { + this.settings = settings; + return this; + } + + public EndpointSettings getSettings() { + return settings; + } + + public EndpointSpec setSuspendTimeoutDuration(Duration suspendTimeoutDuration) { + this.suspendTimeoutDuration = suspendTimeoutDuration; + return this; + } + + public Duration getSuspendTimeoutDuration() { + return suspendTimeoutDuration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointSpec that = (EndpointSpec) o; + return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu) + && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) + && Objects.equals(disabled, that.disabled) + && Objects.equals(endpointType, that.endpointType) + && Objects.equals(poolerMode, that.poolerMode) + && Objects.equals(settings, that.settings) + && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration); + } + + @Override + public int hashCode() { + return Objects.hash( + autoscalingLimitMaxCu, + autoscalingLimitMinCu, + disabled, + endpointType, + poolerMode, + settings, + suspendTimeoutDuration); + } + + @Override + public String toString() { + return new ToStringer(EndpointSpec.class) + .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu) + .add("autoscalingLimitMinCu", autoscalingLimitMinCu) + .add("disabled", disabled) + .add("endpointType", endpointType) + .add("poolerMode", poolerMode) + .add("settings", settings) + .add("suspendTimeoutDuration", suspendTimeoutDuration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java new file mode 100644 index 000000000..4781f087d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java @@ -0,0 +1,246 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Duration; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +@Generated +public class EndpointStatus { + /** The maximum number of Compute Units. */ + @JsonProperty("autoscaling_limit_max_cu") + private Double autoscalingLimitMaxCu; + + /** The minimum number of Compute Units. */ + @JsonProperty("autoscaling_limit_min_cu") + private Double autoscalingLimitMinCu; + + /** */ + @JsonProperty("current_state") + private EndpointStatusState currentState; + + /** + * Whether to restrict connections to the compute endpoint. Enabling this option schedules a + * suspend compute operation. A disabled compute endpoint cannot be enabled by a connection or + * console action. + */ + @JsonProperty("disabled") + private Boolean disabled; + + /** The endpoint type. A branch can only have one READ_WRITE endpoint. */ + @JsonProperty("endpoint_type") + private EndpointType endpointType; + + /** + * The hostname of the compute endpoint. This is the hostname specified when connecting to a + * database. + */ + @JsonProperty("host") + private String host; + + /** A timestamp indicating when the compute endpoint was last active. */ + @JsonProperty("last_active_time") + private Timestamp lastActiveTime; + + /** */ + @JsonProperty("pending_state") + private EndpointStatusState pendingState; + + /** */ + @JsonProperty("pooler_mode") + private EndpointPoolerMode poolerMode; + + /** */ + @JsonProperty("settings") + private EndpointSettings settings; + + /** A timestamp indicating when the compute endpoint was last started. */ + @JsonProperty("start_time") + private Timestamp startTime; + + /** A timestamp indicating when the compute endpoint was last suspended. */ + @JsonProperty("suspend_time") + private Timestamp suspendTime; + + /** Duration of inactivity after which the compute endpoint is automatically suspended. 
*/ + @JsonProperty("suspend_timeout_duration") + private Duration suspendTimeoutDuration; + + public EndpointStatus setAutoscalingLimitMaxCu(Double autoscalingLimitMaxCu) { + this.autoscalingLimitMaxCu = autoscalingLimitMaxCu; + return this; + } + + public Double getAutoscalingLimitMaxCu() { + return autoscalingLimitMaxCu; + } + + public EndpointStatus setAutoscalingLimitMinCu(Double autoscalingLimitMinCu) { + this.autoscalingLimitMinCu = autoscalingLimitMinCu; + return this; + } + + public Double getAutoscalingLimitMinCu() { + return autoscalingLimitMinCu; + } + + public EndpointStatus setCurrentState(EndpointStatusState currentState) { + this.currentState = currentState; + return this; + } + + public EndpointStatusState getCurrentState() { + return currentState; + } + + public EndpointStatus setDisabled(Boolean disabled) { + this.disabled = disabled; + return this; + } + + public Boolean getDisabled() { + return disabled; + } + + public EndpointStatus setEndpointType(EndpointType endpointType) { + this.endpointType = endpointType; + return this; + } + + public EndpointType getEndpointType() { + return endpointType; + } + + public EndpointStatus setHost(String host) { + this.host = host; + return this; + } + + public String getHost() { + return host; + } + + public EndpointStatus setLastActiveTime(Timestamp lastActiveTime) { + this.lastActiveTime = lastActiveTime; + return this; + } + + public Timestamp getLastActiveTime() { + return lastActiveTime; + } + + public EndpointStatus setPendingState(EndpointStatusState pendingState) { + this.pendingState = pendingState; + return this; + } + + public EndpointStatusState getPendingState() { + return pendingState; + } + + public EndpointStatus setPoolerMode(EndpointPoolerMode poolerMode) { + this.poolerMode = poolerMode; + return this; + } + + public EndpointPoolerMode getPoolerMode() { + return poolerMode; + } + + public EndpointStatus setSettings(EndpointSettings settings) { + this.settings = settings; + return this; + } + + public EndpointSettings getSettings() { + return settings; + } + + public EndpointStatus setStartTime(Timestamp startTime) { + this.startTime = startTime; + return this; + } + + public Timestamp getStartTime() { + return startTime; + } + + public EndpointStatus setSuspendTime(Timestamp suspendTime) { + this.suspendTime = suspendTime; + return this; + } + + public Timestamp getSuspendTime() { + return suspendTime; + } + + public EndpointStatus setSuspendTimeoutDuration(Duration suspendTimeoutDuration) { + this.suspendTimeoutDuration = suspendTimeoutDuration; + return this; + } + + public Duration getSuspendTimeoutDuration() { + return suspendTimeoutDuration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointStatus that = (EndpointStatus) o; + return Objects.equals(autoscalingLimitMaxCu, that.autoscalingLimitMaxCu) + && Objects.equals(autoscalingLimitMinCu, that.autoscalingLimitMinCu) + && Objects.equals(currentState, that.currentState) + && Objects.equals(disabled, that.disabled) + && Objects.equals(endpointType, that.endpointType) + && Objects.equals(host, that.host) + && Objects.equals(lastActiveTime, that.lastActiveTime) + && Objects.equals(pendingState, that.pendingState) + && Objects.equals(poolerMode, that.poolerMode) + && Objects.equals(settings, that.settings) + && Objects.equals(startTime, that.startTime) + && Objects.equals(suspendTime, that.suspendTime) + && Objects.equals(suspendTimeoutDuration, 
that.suspendTimeoutDuration); + } + + @Override + public int hashCode() { + return Objects.hash( + autoscalingLimitMaxCu, + autoscalingLimitMinCu, + currentState, + disabled, + endpointType, + host, + lastActiveTime, + pendingState, + poolerMode, + settings, + startTime, + suspendTime, + suspendTimeoutDuration); + } + + @Override + public String toString() { + return new ToStringer(EndpointStatus.class) + .add("autoscalingLimitMaxCu", autoscalingLimitMaxCu) + .add("autoscalingLimitMinCu", autoscalingLimitMinCu) + .add("currentState", currentState) + .add("disabled", disabled) + .add("endpointType", endpointType) + .add("host", host) + .add("lastActiveTime", lastActiveTime) + .add("pendingState", pendingState) + .add("poolerMode", poolerMode) + .add("settings", settings) + .add("startTime", startTime) + .add("suspendTime", suspendTime) + .add("suspendTimeoutDuration", suspendTimeoutDuration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatusState.java similarity index 88% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatusState.java index b0d141670..57714e82b 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatusState.java @@ -6,7 +6,7 @@ /** The state of the compute endpoint. */ @Generated -public enum EndpointState { +public enum EndpointStatusState { ACTIVE, IDLE, INIT, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Project.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Project.java index 0f7827672..034c68aea 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Project.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Project.java @@ -5,77 +5,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.protobuf.Duration; import com.google.protobuf.Timestamp; import java.util.Objects; @Generated public class Project { - /** The logical size limit for a branch. */ - @JsonProperty("branch_logical_size_limit_bytes") - private Long branchLogicalSizeLimitBytes; - - /** The most recent time when any endpoint of this project was active. */ - @JsonProperty("compute_last_active_time") - private Timestamp computeLastActiveTime; - /** A timestamp indicating when the project was created. */ @JsonProperty("create_time") private Timestamp createTime; - /** */ - @JsonProperty("default_endpoint_settings") - private ProjectDefaultEndpointSettings defaultEndpointSettings; - - /** Human-readable project name. 
*/ - @JsonProperty("display_name") - private String displayName; - - /** */ - @JsonProperty("effective_default_endpoint_settings") - private ProjectDefaultEndpointSettings effectiveDefaultEndpointSettings; - - /** */ - @JsonProperty("effective_display_name") - private String effectiveDisplayName; - - /** */ - @JsonProperty("effective_history_retention_duration") - private Duration effectiveHistoryRetentionDuration; - - /** */ - @JsonProperty("effective_pg_version") - private Long effectivePgVersion; - - /** */ - @JsonProperty("effective_settings") - private ProjectSettings effectiveSettings; - - /** - * The number of seconds to retain the shared history for point in time recovery for all branches - * in this project. - */ - @JsonProperty("history_retention_duration") - private Duration historyRetentionDuration; - /** The resource name of the project. Format: projects/{project_id} */ @JsonProperty("name") private String name; - /** The major Postgres version number. */ - @JsonProperty("pg_version") - private Long pgVersion; - - /** */ - @JsonProperty("settings") - private ProjectSettings settings; + /** The desired state of a Project. */ + @JsonProperty("spec") + private ProjectSpec spec; - /** - * The current space occupied by the project in storage. Synthetic storage size combines the - * logical data size and Write-Ahead Log (WAL) size for all branches in a project. - */ - @JsonProperty("synthetic_storage_size_bytes") - private Long syntheticStorageSizeBytes; + /** The current status of a Project. */ + @JsonProperty("status") + private ProjectStatus status; /** System generated unique ID for the project. */ @JsonProperty("uid") @@ -85,24 +34,6 @@ public class Project { @JsonProperty("update_time") private Timestamp updateTime; - public Project setBranchLogicalSizeLimitBytes(Long branchLogicalSizeLimitBytes) { - this.branchLogicalSizeLimitBytes = branchLogicalSizeLimitBytes; - return this; - } - - public Long getBranchLogicalSizeLimitBytes() { - return branchLogicalSizeLimitBytes; - } - - public Project setComputeLastActiveTime(Timestamp computeLastActiveTime) { - this.computeLastActiveTime = computeLastActiveTime; - return this; - } - - public Timestamp getComputeLastActiveTime() { - return computeLastActiveTime; - } - public Project setCreateTime(Timestamp createTime) { this.createTime = createTime; return this; @@ -112,80 +43,6 @@ public Timestamp getCreateTime() { return createTime; } - public Project setDefaultEndpointSettings( - ProjectDefaultEndpointSettings defaultEndpointSettings) { - this.defaultEndpointSettings = defaultEndpointSettings; - return this; - } - - public ProjectDefaultEndpointSettings getDefaultEndpointSettings() { - return defaultEndpointSettings; - } - - public Project setDisplayName(String displayName) { - this.displayName = displayName; - return this; - } - - public String getDisplayName() { - return displayName; - } - - public Project setEffectiveDefaultEndpointSettings( - ProjectDefaultEndpointSettings effectiveDefaultEndpointSettings) { - this.effectiveDefaultEndpointSettings = effectiveDefaultEndpointSettings; - return this; - } - - public ProjectDefaultEndpointSettings getEffectiveDefaultEndpointSettings() { - return effectiveDefaultEndpointSettings; - } - - public Project setEffectiveDisplayName(String effectiveDisplayName) { - this.effectiveDisplayName = effectiveDisplayName; - return this; - } - - public String getEffectiveDisplayName() { - return effectiveDisplayName; - } - - public Project setEffectiveHistoryRetentionDuration(Duration 
effectiveHistoryRetentionDuration) { - this.effectiveHistoryRetentionDuration = effectiveHistoryRetentionDuration; - return this; - } - - public Duration getEffectiveHistoryRetentionDuration() { - return effectiveHistoryRetentionDuration; - } - - public Project setEffectivePgVersion(Long effectivePgVersion) { - this.effectivePgVersion = effectivePgVersion; - return this; - } - - public Long getEffectivePgVersion() { - return effectivePgVersion; - } - - public Project setEffectiveSettings(ProjectSettings effectiveSettings) { - this.effectiveSettings = effectiveSettings; - return this; - } - - public ProjectSettings getEffectiveSettings() { - return effectiveSettings; - } - - public Project setHistoryRetentionDuration(Duration historyRetentionDuration) { - this.historyRetentionDuration = historyRetentionDuration; - return this; - } - - public Duration getHistoryRetentionDuration() { - return historyRetentionDuration; - } - public Project setName(String name) { this.name = name; return this; @@ -195,31 +52,22 @@ public String getName() { return name; } - public Project setPgVersion(Long pgVersion) { - this.pgVersion = pgVersion; - return this; - } - - public Long getPgVersion() { - return pgVersion; - } - - public Project setSettings(ProjectSettings settings) { - this.settings = settings; + public Project setSpec(ProjectSpec spec) { + this.spec = spec; return this; } - public ProjectSettings getSettings() { - return settings; + public ProjectSpec getSpec() { + return spec; } - public Project setSyntheticStorageSizeBytes(Long syntheticStorageSizeBytes) { - this.syntheticStorageSizeBytes = syntheticStorageSizeBytes; + public Project setStatus(ProjectStatus status) { + this.status = status; return this; } - public Long getSyntheticStorageSizeBytes() { - return syntheticStorageSizeBytes; + public ProjectStatus getStatus() { + return status; } public Project setUid(String uid) { @@ -245,65 +93,26 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Project that = (Project) o; - return Objects.equals(branchLogicalSizeLimitBytes, that.branchLogicalSizeLimitBytes) - && Objects.equals(computeLastActiveTime, that.computeLastActiveTime) - && Objects.equals(createTime, that.createTime) - && Objects.equals(defaultEndpointSettings, that.defaultEndpointSettings) - && Objects.equals(displayName, that.displayName) - && Objects.equals(effectiveDefaultEndpointSettings, that.effectiveDefaultEndpointSettings) - && Objects.equals(effectiveDisplayName, that.effectiveDisplayName) - && Objects.equals(effectiveHistoryRetentionDuration, that.effectiveHistoryRetentionDuration) - && Objects.equals(effectivePgVersion, that.effectivePgVersion) - && Objects.equals(effectiveSettings, that.effectiveSettings) - && Objects.equals(historyRetentionDuration, that.historyRetentionDuration) + return Objects.equals(createTime, that.createTime) && Objects.equals(name, that.name) - && Objects.equals(pgVersion, that.pgVersion) - && Objects.equals(settings, that.settings) - && Objects.equals(syntheticStorageSizeBytes, that.syntheticStorageSizeBytes) + && Objects.equals(spec, that.spec) + && Objects.equals(status, that.status) && Objects.equals(uid, that.uid) && Objects.equals(updateTime, that.updateTime); } @Override public int hashCode() { - return Objects.hash( - branchLogicalSizeLimitBytes, - computeLastActiveTime, - createTime, - defaultEndpointSettings, - displayName, - effectiveDefaultEndpointSettings, - effectiveDisplayName, - effectiveHistoryRetentionDuration, 
- effectivePgVersion, - effectiveSettings, - historyRetentionDuration, - name, - pgVersion, - settings, - syntheticStorageSizeBytes, - uid, - updateTime); + return Objects.hash(createTime, name, spec, status, uid, updateTime); } @Override public String toString() { return new ToStringer(Project.class) - .add("branchLogicalSizeLimitBytes", branchLogicalSizeLimitBytes) - .add("computeLastActiveTime", computeLastActiveTime) .add("createTime", createTime) - .add("defaultEndpointSettings", defaultEndpointSettings) - .add("displayName", displayName) - .add("effectiveDefaultEndpointSettings", effectiveDefaultEndpointSettings) - .add("effectiveDisplayName", effectiveDisplayName) - .add("effectiveHistoryRetentionDuration", effectiveHistoryRetentionDuration) - .add("effectivePgVersion", effectivePgVersion) - .add("effectiveSettings", effectiveSettings) - .add("historyRetentionDuration", historyRetentionDuration) .add("name", name) - .add("pgVersion", pgVersion) - .add("settings", settings) - .add("syntheticStorageSizeBytes", syntheticStorageSizeBytes) + .add("spec", spec) + .add("status", status) .add("uid", uid) .add("updateTime", updateTime) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSpec.java new file mode 100644 index 000000000..d823fbc78 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectSpec.java @@ -0,0 +1,110 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Duration; +import java.util.Objects; + +@Generated +public class ProjectSpec { + /** */ + @JsonProperty("default_endpoint_settings") + private ProjectDefaultEndpointSettings defaultEndpointSettings; + + /** Human-readable project name. */ + @JsonProperty("display_name") + private String displayName; + + /** + * The number of seconds to retain the shared history for point in time recovery for all branches + * in this project. + */ + @JsonProperty("history_retention_duration") + private Duration historyRetentionDuration; + + /** The major Postgres version number. 
*/ + @JsonProperty("pg_version") + private Long pgVersion; + + /** */ + @JsonProperty("settings") + private ProjectSettings settings; + + public ProjectSpec setDefaultEndpointSettings( + ProjectDefaultEndpointSettings defaultEndpointSettings) { + this.defaultEndpointSettings = defaultEndpointSettings; + return this; + } + + public ProjectDefaultEndpointSettings getDefaultEndpointSettings() { + return defaultEndpointSettings; + } + + public ProjectSpec setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public ProjectSpec setHistoryRetentionDuration(Duration historyRetentionDuration) { + this.historyRetentionDuration = historyRetentionDuration; + return this; + } + + public Duration getHistoryRetentionDuration() { + return historyRetentionDuration; + } + + public ProjectSpec setPgVersion(Long pgVersion) { + this.pgVersion = pgVersion; + return this; + } + + public Long getPgVersion() { + return pgVersion; + } + + public ProjectSpec setSettings(ProjectSettings settings) { + this.settings = settings; + return this; + } + + public ProjectSettings getSettings() { + return settings; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ProjectSpec that = (ProjectSpec) o; + return Objects.equals(defaultEndpointSettings, that.defaultEndpointSettings) + && Objects.equals(displayName, that.displayName) + && Objects.equals(historyRetentionDuration, that.historyRetentionDuration) + && Objects.equals(pgVersion, that.pgVersion) + && Objects.equals(settings, that.settings); + } + + @Override + public int hashCode() { + return Objects.hash( + defaultEndpointSettings, displayName, historyRetentionDuration, pgVersion, settings); + } + + @Override + public String toString() { + return new ToStringer(ProjectSpec.class) + .add("defaultEndpointSettings", defaultEndpointSettings) + .add("displayName", displayName) + .add("historyRetentionDuration", historyRetentionDuration) + .add("pgVersion", pgVersion) + .add("settings", settings) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java new file mode 100644 index 000000000..3c6eb9012 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java @@ -0,0 +1,160 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.postgres; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Duration; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +@Generated +public class ProjectStatus { + /** The logical size limit for a branch. */ + @JsonProperty("branch_logical_size_limit_bytes") + private Long branchLogicalSizeLimitBytes; + + /** The most recent time when any endpoint of this project was active. */ + @JsonProperty("compute_last_active_time") + private Timestamp computeLastActiveTime; + + /** The effective default endpoint settings. */ + @JsonProperty("default_endpoint_settings") + private ProjectDefaultEndpointSettings defaultEndpointSettings; + + /** The effective human-readable project name. 
*/ + @JsonProperty("display_name") + private String displayName; + + /** The effective number of seconds to retain the shared history for point in time recovery. */ + @JsonProperty("history_retention_duration") + private Duration historyRetentionDuration; + + /** The effective major Postgres version number. */ + @JsonProperty("pg_version") + private Long pgVersion; + + /** The effective project settings. */ + @JsonProperty("settings") + private ProjectSettings settings; + + /** The current space occupied by the project in storage. */ + @JsonProperty("synthetic_storage_size_bytes") + private Long syntheticStorageSizeBytes; + + public ProjectStatus setBranchLogicalSizeLimitBytes(Long branchLogicalSizeLimitBytes) { + this.branchLogicalSizeLimitBytes = branchLogicalSizeLimitBytes; + return this; + } + + public Long getBranchLogicalSizeLimitBytes() { + return branchLogicalSizeLimitBytes; + } + + public ProjectStatus setComputeLastActiveTime(Timestamp computeLastActiveTime) { + this.computeLastActiveTime = computeLastActiveTime; + return this; + } + + public Timestamp getComputeLastActiveTime() { + return computeLastActiveTime; + } + + public ProjectStatus setDefaultEndpointSettings( + ProjectDefaultEndpointSettings defaultEndpointSettings) { + this.defaultEndpointSettings = defaultEndpointSettings; + return this; + } + + public ProjectDefaultEndpointSettings getDefaultEndpointSettings() { + return defaultEndpointSettings; + } + + public ProjectStatus setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public ProjectStatus setHistoryRetentionDuration(Duration historyRetentionDuration) { + this.historyRetentionDuration = historyRetentionDuration; + return this; + } + + public Duration getHistoryRetentionDuration() { + return historyRetentionDuration; + } + + public ProjectStatus setPgVersion(Long pgVersion) { + this.pgVersion = pgVersion; + return this; + } + + public Long getPgVersion() { + return pgVersion; + } + + public ProjectStatus setSettings(ProjectSettings settings) { + this.settings = settings; + return this; + } + + public ProjectSettings getSettings() { + return settings; + } + + public ProjectStatus setSyntheticStorageSizeBytes(Long syntheticStorageSizeBytes) { + this.syntheticStorageSizeBytes = syntheticStorageSizeBytes; + return this; + } + + public Long getSyntheticStorageSizeBytes() { + return syntheticStorageSizeBytes; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ProjectStatus that = (ProjectStatus) o; + return Objects.equals(branchLogicalSizeLimitBytes, that.branchLogicalSizeLimitBytes) + && Objects.equals(computeLastActiveTime, that.computeLastActiveTime) + && Objects.equals(defaultEndpointSettings, that.defaultEndpointSettings) + && Objects.equals(displayName, that.displayName) + && Objects.equals(historyRetentionDuration, that.historyRetentionDuration) + && Objects.equals(pgVersion, that.pgVersion) + && Objects.equals(settings, that.settings) + && Objects.equals(syntheticStorageSizeBytes, that.syntheticStorageSizeBytes); + } + + @Override + public int hashCode() { + return Objects.hash( + branchLogicalSizeLimitBytes, + computeLastActiveTime, + defaultEndpointSettings, + displayName, + historyRetentionDuration, + pgVersion, + settings, + syntheticStorageSizeBytes); + } + + @Override + public String toString() { + return new ToStringer(ProjectStatus.class) + 
.add("branchLogicalSizeLimitBytes", branchLogicalSizeLimitBytes) + .add("computeLastActiveTime", computeLastActiveTime) + .add("defaultEndpointSettings", defaultEndpointSettings) + .add("displayName", displayName) + .add("historyRetentionDuration", historyRetentionDuration) + .add("pgVersion", pgVersion) + .add("settings", settings) + .add("syntheticStorageSizeBytes", syntheticStorageSizeBytes) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java old mode 100755 new mode 100644 index 5b14da636..c3d49125e --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java @@ -5,10 +5,15 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated public class AnomalyDetectionConfig { + /** List of fully qualified table names to exclude from anomaly detection. */ + @JsonProperty("excluded_table_full_names") + private Collection<String> excludedTableFullNames; + /** Run id of the last run of the workflow */ @JsonProperty("last_run_id") private String lastRunId; @@ -17,6 +22,16 @@ public class AnomalyDetectionConfig { @JsonProperty("latest_run_status") private AnomalyDetectionRunStatus latestRunStatus; + public AnomalyDetectionConfig setExcludedTableFullNames( + Collection<String> excludedTableFullNames) { + this.excludedTableFullNames = excludedTableFullNames; + return this; + } + + public Collection<String> getExcludedTableFullNames() { + return excludedTableFullNames; + } + public AnomalyDetectionConfig setLastRunId(String lastRunId) { this.lastRunId = lastRunId; return this; @@ -40,18 +55,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AnomalyDetectionConfig that = (AnomalyDetectionConfig) o; - return Objects.equals(lastRunId, that.lastRunId) + return Objects.equals(excludedTableFullNames, that.excludedTableFullNames) + && Objects.equals(lastRunId, that.lastRunId) && Objects.equals(latestRunStatus, that.latestRunStatus); } @Override public int hashCode() { - return Objects.hash(lastRunId, latestRunStatus); + return Objects.hash(excludedTableFullNames, lastRunId, latestRunStatus); } @Override public String toString() { return new ToStringer(AnomalyDetectionConfig.class) + .add("excludedTableFullNames", excludedTableFullNames) .add("lastRunId", lastRunId) .add("latestRunStatus", latestRunStatus) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java old mode 100755 new mode 100644 index f6e090ef5..071f11697 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java @@ -58,10 +58,29 @@ public class SharedDataObject { private Collection partitions; /** - * A user-provided new name for the data object within the share. If this new - * name is not provided, the object's original name will be used as the - * `shared_as` name. 
The `shared_as` name must be unique within a share. For - * tables, the new name must follow the format of `<schema>.<table>`. + * A user-provided alias name for table-like data objects within the share. + * + * Use this field for table-like objects (for example: TABLE, VIEW, + * MATERIALIZED_VIEW, STREAMING_TABLE, FOREIGN_TABLE). For non-table objects + * (for example: VOLUME, MODEL, NOTEBOOK_FILE, FUNCTION), use + * `string_shared_as` instead. + * + * Important: For non-table objects, this field must be omitted entirely. + * + * Format: Must be a 2-part name `<schema>.<table>` (e.g., + * "sales_schema.orders_table") - Both schema and table names must contain + * only alphanumeric characters and underscores - No periods, spaces, forward + * slashes, or control characters are allowed within each part - Do not + * include the catalog name (use 2 parts, not 3) + * + * Behavior: - If not provided, the service automatically generates the alias + * as `<schema>.<table>
` from the object's original name - If you don't want + * to specify this field, omit it entirely from the request (do not pass an + * empty string) - The `shared_as` name must be unique within the share + * + * Examples: - Valid: "analytics_schema.customer_view" - Invalid: + * "catalog.analytics_schema.customer_view" (3 parts not allowed) - Invalid: + * "analytics-schema.customer-view" (hyphens not allowed) */ + @JsonProperty("shared_as") + private String sharedAs; @@ -82,10 +101,30 @@ public class SharedDataObject { private SharedDataObjectStatus status; /** - * A user-provided new name for the shared object within the share. If this new name is not not - * provided, the object's original name will be used as the `string_shared_as` name. The - * `string_shared_as` name must be unique for objects of the same type within a Share. For - * notebooks, the new name should be the new notebook file name. + * A user-provided alias name for non-table data objects within the share. + * + * <p>Use this field for non-table objects (for example: VOLUME, MODEL, NOTEBOOK_FILE, FUNCTION). + * For table-like objects (for example: TABLE, VIEW, MATERIALIZED_VIEW, STREAMING_TABLE, + * FOREIGN_TABLE), use `shared_as` instead. + * + * <p>Important: For table-like objects, this field must be omitted entirely. + * + * <p>Format: - For VOLUME: Must be a 2-part name `<schema>.<volume>` (e.g., + * "data_schema.ml_models") - For FUNCTION: Must be a 2-part name `<schema>.<function>` + * (e.g., "udf_schema.calculate_tax") - For MODEL: Must be a 2-part name + * `<schema>.<model>` (e.g., "models.prediction_model") - For NOTEBOOK_FILE: Should be + * the notebook file name (e.g., "analysis_notebook.py") - All names must contain only + * alphanumeric characters and underscores - No periods, spaces, forward slashes, or control + * characters are allowed within each part + * + * <p>Behavior: - If not provided, the service automatically generates the alias from the object's + * original name - If you don't want to specify this field, omit it entirely from the request (do + * not pass an empty string) - The `string_shared_as` name must be unique for objects of the same + * type within the share + * + * <p>Examples: - Valid for VOLUME: "data_schema.training_data" - Valid for FUNCTION: + * "analytics.calculate_revenue" - Invalid: "catalog.data_schema.training_data" (3 parts not + * allowed for volumes) - Invalid: "data-schema.training-data" (hyphens not allowed) */ + @JsonProperty("string_shared_as") + private String stringSharedAs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java old mode 100755 new mode 100644 index 98d3df913..62727e197 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java @@ -53,7 +53,14 @@ public class QueryVectorIndexRequest { @JsonProperty("query_vector") private Collection queryVector; - /** */ + /** + * If set, the top 50 results are reranked with the Databricks Reranker model before returning the + * `num_results` results to the user. The setting `columns_to_rerank` selects which columns are + * used for reranking. For each datapoint, the columns selected are concatenated before being sent + * to the reranking model. See + * https://docs.databricks.com/aws/en/vector-search/query-vector-search#rerank for more + * information. + */ + @JsonProperty("reranker") + private RerankerConfig reranker;
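
Usage sketches (editorial additions, not generated code). First, how the reshaped postgres Branch model reads after this change: desired state moves onto BranchSpec and observed state onto BranchStatus; Endpoint and Project follow the same spec/status split. The sketch only exercises setters and getters visible in this diff; the resource names are hypothetical, and the client call that would actually create the branch is assumed and not part of this change.

import com.databricks.sdk.service.postgres.Branch;
import com.databricks.sdk.service.postgres.BranchSpec;
import com.databricks.sdk.service.postgres.BranchStatus;
import com.databricks.sdk.service.postgres.BranchStatusState;

public class BranchSpecStatusSketch {
  public static void main(String[] args) {
    // Desired state: protection and branch-point fields now live on BranchSpec.
    BranchSpec spec =
        new BranchSpec()
            .setIsProtected(true)
            // Hypothetical resource name, following the documented
            // projects/{project_id}/branches/{branch_id} format.
            .setSourceBranch("projects/my-project/branches/main");

    Branch branch = new Branch().setSpec(spec);

    // Observed state: current/pending state and logical size are read from BranchStatus,
    // which is populated by the service rather than by the caller.
    BranchStatus status = branch.getStatus();
    if (status != null && status.getCurrentState() != BranchStatusState.ARCHIVED) {
      System.out.println("logical size: " + status.getLogicalSizeBytes());
    }
  }
}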
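
Similarly, a minimal sketch of populating the new EndpointSpec: the autoscaling limits, disabled flag, and suspend timeout that previously sat directly on Endpoint are now set on the spec. The READ_WRITE endpoint type is taken from the field's own doc comment; the setters used are those visible in this diff, with com.google.protobuf.Duration for the timeout, and the endpoint name is hypothetical.

import com.databricks.sdk.service.postgres.Endpoint;
import com.databricks.sdk.service.postgres.EndpointSpec;
import com.databricks.sdk.service.postgres.EndpointType;
import com.google.protobuf.Duration;

public class EndpointSpecSketch {
  public static void main(String[] args) {
    EndpointSpec spec =
        new EndpointSpec()
            // Per the field docs, a branch can only have one READ_WRITE endpoint.
            .setEndpointType(EndpointType.READ_WRITE)
            .setAutoscalingLimitMinCu(0.25)
            .setAutoscalingLimitMaxCu(4.0)
            // Suspend the compute endpoint after five minutes of inactivity.
            .setSuspendTimeoutDuration(Duration.newBuilder().setSeconds(300).build());

    Endpoint endpoint =
        new Endpoint()
            // Hypothetical resource name in the documented endpoint format.
            .setName("projects/my-project/branches/main/endpoints/rw")
            .setSpec(spec);
    System.out.println(endpoint);
  }
}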
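
The reworked shared_as / string_shared_as documentation is easier to follow with a concrete sketch. The fluent setters used here (setName, setSharedAs, setStringSharedAs) follow the generated pattern of this class but are not themselves shown in the diff, so treat them as assumptions; the alias values follow the 2-part format described in the doc comments, and the object names are hypothetical.

import com.databricks.sdk.service.sharing.SharedDataObject;

public class SharedAsSketch {
  public static void main(String[] args) {
    // Table-like object: alias it with shared_as, a 2-part <schema>.<table> name.
    SharedDataObject table =
        new SharedDataObject()
            .setName("main.sales_schema.orders_table")
            .setSharedAs("sales_schema.orders_table");

    // Non-table object (a volume here): use string_shared_as and omit shared_as entirely.
    SharedDataObject volume =
        new SharedDataObject()
            .setName("main.data_schema.training_data")
            .setStringSharedAs("data_schema.training_data");

    System.out.println(table + "\n" + volume);
  }
}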
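
Finally, a sketch of the reranker option documented on QueryVectorIndexRequest. The request setters (setIndexName, setQueryText, setNumResults, setReranker) follow the usual generated pattern but are not shown in this diff, and the fields of RerankerConfig, including how columns_to_rerank is supplied, are likewise not shown here, so the construction below is an assumption about shape rather than the confirmed API.

import com.databricks.sdk.service.vectorsearch.QueryVectorIndexRequest;
import com.databricks.sdk.service.vectorsearch.RerankerConfig;

public class RerankedQuerySketch {
  public static void main(String[] args) {
    QueryVectorIndexRequest request =
        new QueryVectorIndexRequest()
            .setIndexName("main.default.docs_index") // hypothetical index name
            .setQueryText("quarterly revenue by region")
            .setNumResults(10L)
            // If set, the top 50 candidates are reranked with the Databricks Reranker model
            // before num_results results are returned; column selection for reranking is
            // configured via the columns_to_rerank setting described in the doc comment.
            .setReranker(new RerankerConfig());

    System.out.println(request);
  }
}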