From 1a0b28f158b60c396fa7548ca23a90bf86da928f Mon Sep 17 00:00:00 2001
From: "databricks-ci-ghec-1[bot]" <184311507+databricks-ci-ghec-1[bot]@users.noreply.github.com>
Date: Fri, 16 Jan 2026 09:44:44 +0000
Subject: [PATCH] Update SDK to d6ecfb0633332a524f52f6ab319b073dd3f7493e

---
 .codegen/_openapi_sha | 2 +-
 .gitattributes | 3 +
 NEXT_CHANGELOG.md | 12 +++
 .../pipelines/AutoFullRefreshPolicy.java | 64 ++++++++++++++
 .../IngestionPipelineDefinition.java | 16 ++++
 .../pipelines/OperationTimeWindow.java | 83 +++++++++++++++++++
 .../pipelines/TableSpecificConfig.java | 24 +++++-
 .../sdk/service/postgres/EndpointHosts.java | 47 +++++++++++
 .../sdk/service/postgres/EndpointStatus.java | 72 +++------------
 .../sdk/service/postgres/EndpointType.java | 4 +-
 .../sdk/service/postgres/ProjectStatus.java | 17 ----
 .../sdk/service/sharing/RecipientsAPI.java | 10 +--
 .../service/sharing/RecipientsService.java | 10 +--
 .../vectorsearch/EndpointStatusState.java | 1 +
 14 files changed, 272 insertions(+), 93 deletions(-)
 create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/AutoFullRefreshPolicy.java
 mode change 100755 => 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
 create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OperationTimeWindow.java
 create mode 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointHosts.java
 mode change 100755 => 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
 mode change 100755 => 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java
 mode change 100755 => 100644 databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 932e3cdd2..a2eb0f1e1 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-5757e4a5f208a1f416f8f94b00febb3118fdb940
\ No newline at end of file
+d6ecfb0633332a524f52f6ab319b073dd3f7493e
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index daa5607e0..a77167576 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1980,6 +1980,7 @@
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true
+/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/AutoFullRefreshPolicy.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineResponse.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java linguist-generated=true
@@ -2021,6 +2022,7 @@
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/MaturityLevel.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibrary.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java linguist-generated=true
+/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OperationTimeWindow.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java linguist-generated=true
@@ -2095,6 +2097,7 @@
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleOperation.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleRequest.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java linguist-generated=true
+/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointHosts.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java linguist-generated=true
 /home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java linguist-generated=true
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index e3d9cda43..1bee575b5 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -13,3 +13,15 @@
 ### Internal Changes
 
 ### API Changes
+* Add `fullRefreshWindow` field for `com.databricks.sdk.service.pipelines.IngestionPipelineDefinition`.
+* Add `autoFullRefreshPolicy` field for `com.databricks.sdk.service.pipelines.TableSpecificConfig`.
+* Add `hosts` field for `com.databricks.sdk.service.postgres.EndpointStatus`.
+* Add `ENDPOINT_TYPE_READ_WRITE` and `ENDPOINT_TYPE_READ_ONLY` enum values for `com.databricks.sdk.service.postgres.EndpointType`.
+* Add `DELETED` enum value for `com.databricks.sdk.service.vectorsearch.EndpointStatusState`.
+* [Breaking] Change `createBranch()`, `createEndpoint()` and `createProject()` methods for `workspaceClient.postgres()` service with new required argument order.
+* Change `branchId` field for `com.databricks.sdk.service.postgres.CreateBranchRequest` to no longer be required.
+* Change `endpointId` field for `com.databricks.sdk.service.postgres.CreateEndpointRequest` to no longer be required.
+* Change `projectId` field for `com.databricks.sdk.service.postgres.CreateProjectRequest` to no longer be required.
+* [Breaking] Remove `host`, `lastActiveTime`, `startTime` and `suspendTime` fields for `com.databricks.sdk.service.postgres.EndpointStatus`.
+* [Breaking] Remove `computeLastActiveTime` field for `com.databricks.sdk.service.postgres.ProjectStatus`.
+* [Breaking] Remove `READ_WRITE` and `READ_ONLY` enum values for `com.databricks.sdk.service.postgres.EndpointType`.
\ No newline at end of file
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/AutoFullRefreshPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/AutoFullRefreshPolicy.java
new file mode 100644
index 000000000..4df67193a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/AutoFullRefreshPolicy.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Policy for auto full refresh. */
+@Generated
+public class AutoFullRefreshPolicy {
+  /** (Required, Mutable) Whether to enable auto full refresh or not. */
+  @JsonProperty("enabled")
+  private Boolean enabled;
+
+  /**
+   * (Optional, Mutable) Specifies the minimum interval in hours between the timestamp at which a
+   * table was last fully refreshed and the current timestamp for triggering an auto full refresh.
+   * If unspecified and auto full refresh is enabled, min_interval_hours defaults to 24 hours.
+   */
+  @JsonProperty("min_interval_hours")
+  private Long minIntervalHours;
+
+  public AutoFullRefreshPolicy setEnabled(Boolean enabled) {
+    this.enabled = enabled;
+    return this;
+  }
+
+  public Boolean getEnabled() {
+    return enabled;
+  }
+
+  public AutoFullRefreshPolicy setMinIntervalHours(Long minIntervalHours) {
+    this.minIntervalHours = minIntervalHours;
+    return this;
+  }
+
+  public Long getMinIntervalHours() {
+    return minIntervalHours;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    AutoFullRefreshPolicy that = (AutoFullRefreshPolicy) o;
+    return Objects.equals(enabled, that.enabled)
+        && Objects.equals(minIntervalHours, that.minIntervalHours);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(enabled, minIntervalHours);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(AutoFullRefreshPolicy.class)
+        .add("enabled", enabled)
+        .add("minIntervalHours", minIntervalHours)
+        .toString();
+  }
+}
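As a usage sketch of the new policy (not generated code; the pipeline-spec wiring around it is assumed):

    import com.databricks.sdk.service.pipelines.AutoFullRefreshPolicy;

    // Let retry runs trigger an automatic full refresh, but at most once every 48 hours.
    AutoFullRefreshPolicy policy =
        new AutoFullRefreshPolicy().setEnabled(true).setMinIntervalHours(48L);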
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
old mode 100755
new mode 100644
index 1e96af349..90077192e
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
@@ -17,6 +17,10 @@ public class IngestionPipelineDefinition {
   @JsonProperty("connection_name")
   private String connectionName;
 
+  /** (Optional) A window that specifies a set of time ranges for snapshot queries in CDC. */
+  @JsonProperty("full_refresh_window")
+  private OperationTimeWindow fullRefreshWindow;
+
   /**
    * Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs
    * directly without the need to specify a UC connection or ingestion gateway. The `source_catalog`
@@ -73,6 +77,15 @@ public String getConnectionName() {
     return connectionName;
   }
 
+  public IngestionPipelineDefinition setFullRefreshWindow(OperationTimeWindow fullRefreshWindow) {
+    this.fullRefreshWindow = fullRefreshWindow;
+    return this;
+  }
+
+  public OperationTimeWindow getFullRefreshWindow() {
+    return fullRefreshWindow;
+  }
+
   public IngestionPipelineDefinition setIngestFromUcForeignCatalog(
       Boolean ingestFromUcForeignCatalog) {
     this.ingestFromUcForeignCatalog = ingestFromUcForeignCatalog;
@@ -144,6 +157,7 @@ public boolean equals(Object o) {
     if (o == null || getClass() != o.getClass()) return false;
     IngestionPipelineDefinition that = (IngestionPipelineDefinition) o;
     return Objects.equals(connectionName, that.connectionName)
+        && Objects.equals(fullRefreshWindow, that.fullRefreshWindow)
         && Objects.equals(ingestFromUcForeignCatalog, that.ingestFromUcForeignCatalog)
         && Objects.equals(ingestionGatewayId, that.ingestionGatewayId)
         && Objects.equals(netsuiteJarPath, that.netsuiteJarPath)
@@ -157,6 +171,7 @@ public boolean equals(Object o) {
   public int hashCode() {
     return Objects.hash(
         connectionName,
+        fullRefreshWindow,
         ingestFromUcForeignCatalog,
         ingestionGatewayId,
         netsuiteJarPath,
@@ -170,6 +185,7 @@ public int hashCode() {
   public String toString() {
     return new ToStringer(IngestionPipelineDefinition.class)
         .add("connectionName", connectionName)
+        .add("fullRefreshWindow", fullRefreshWindow)
         .add("ingestFromUcForeignCatalog", ingestFromUcForeignCatalog)
         .add("ingestionGatewayId", ingestionGatewayId)
         .add("netsuiteJarPath", netsuiteJarPath)
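A hedged sketch of wiring the new field (the generated setter pattern is assumed for setConnectionName; the connection name is hypothetical):

    import com.databricks.sdk.service.pipelines.IngestionPipelineDefinition;
    import com.databricks.sdk.service.pipelines.OperationTimeWindow;

    // Allow CDC snapshot (full refresh) queries only from 02:00 UTC onward; days_of_week
    // is left unset, so per the field docs the window applies on every day of the week.
    OperationTimeWindow window = new OperationTimeWindow().setStartHour(2L).setTimeZoneId("UTC");
    IngestionPipelineDefinition definition =
        new IngestionPipelineDefinition()
            .setConnectionName("my-postgres-connection") // hypothetical
            .setFullRefreshWindow(window);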
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OperationTimeWindow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OperationTimeWindow.java
new file mode 100644
index 000000000..1977f2078
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OperationTimeWindow.java
@@ -0,0 +1,83 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Proto representing a time window. */
+@Generated
+public class OperationTimeWindow {
+  /**
+   * Days of the week on which the window is allowed to happen. If not specified, all days of the
+   * week will be used.
+   */
+  @JsonProperty("days_of_week")
+  private Collection<DayOfWeek> daysOfWeek;
+
+  /** An integer between 0 and 23 denoting the start hour for the window in the 24-hour day. */
+  @JsonProperty("start_hour")
+  private Long startHour;
+
+  /**
+   * Time zone id of the window. See
+   * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
+   * for details. If not specified, UTC will be used.
+   */
+  @JsonProperty("time_zone_id")
+  private String timeZoneId;
+
+  public OperationTimeWindow setDaysOfWeek(Collection<DayOfWeek> daysOfWeek) {
+    this.daysOfWeek = daysOfWeek;
+    return this;
+  }
+
+  public Collection<DayOfWeek> getDaysOfWeek() {
+    return daysOfWeek;
+  }
+
+  public OperationTimeWindow setStartHour(Long startHour) {
+    this.startHour = startHour;
+    return this;
+  }
+
+  public Long getStartHour() {
+    return startHour;
+  }
+
+  public OperationTimeWindow setTimeZoneId(String timeZoneId) {
+    this.timeZoneId = timeZoneId;
+    return this;
+  }
+
+  public String getTimeZoneId() {
+    return timeZoneId;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    OperationTimeWindow that = (OperationTimeWindow) o;
+    return Objects.equals(daysOfWeek, that.daysOfWeek)
+        && Objects.equals(startHour, that.startHour)
+        && Objects.equals(timeZoneId, that.timeZoneId);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(daysOfWeek, startHour, timeZoneId);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(OperationTimeWindow.class)
+        .add("daysOfWeek", daysOfWeek)
+        .add("startHour", startHour)
+        .add("timeZoneId", timeZoneId)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
index 5a1f380e3..a063aa86c 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
@@ -10,6 +10,16 @@
 @Generated
 public class TableSpecificConfig {
+  /**
+   * (Optional, Mutable) Policy for auto full refresh. If enabled, the pipeline will automatically
+   * try to fix issues by doing a full refresh of the table in the retry run. A policy set in the
+   * table configuration overrides one set at a higher level. For example: {
+   * "auto_full_refresh_policy": { "enabled": true, "min_interval_hours": 23 } } If unspecified,
+   * auto full refresh is disabled.
+   */
+  @JsonProperty("auto_full_refresh_policy")
+  private AutoFullRefreshPolicy autoFullRefreshPolicy;
+
   /**
    * A list of column names to be excluded for the ingestion. When not specified, include_columns
    * fully controls what columns to be ingested. When specified, all other columns including future
@@ -66,6 +76,15 @@ public class TableSpecificConfig {
   @JsonProperty("workday_report_parameters")
   private IngestionPipelineDefinitionWorkdayReportParameters workdayReportParameters;
 
+  public TableSpecificConfig setAutoFullRefreshPolicy(AutoFullRefreshPolicy autoFullRefreshPolicy) {
+    this.autoFullRefreshPolicy = autoFullRefreshPolicy;
+    return this;
+  }
+
+  public AutoFullRefreshPolicy getAutoFullRefreshPolicy() {
+    return autoFullRefreshPolicy;
+  }
+
   public TableSpecificConfig setExcludeColumns(Collection<String> excludeColumns) {
     this.excludeColumns = excludeColumns;
     return this;
@@ -157,7 +176,8 @@ public boolean equals(Object o) {
     if (this == o) return true;
     if (o == null || getClass() != o.getClass()) return false;
     TableSpecificConfig that = (TableSpecificConfig) o;
-    return Objects.equals(excludeColumns, that.excludeColumns)
+    return Objects.equals(autoFullRefreshPolicy, that.autoFullRefreshPolicy)
+        && Objects.equals(excludeColumns, that.excludeColumns)
         && Objects.equals(includeColumns, that.includeColumns)
         && Objects.equals(primaryKeys, that.primaryKeys)
         && Objects.equals(queryBasedConnectorConfig, that.queryBasedConnectorConfig)
@@ -171,6 +191,7 @@ public boolean equals(Object o) {
   @Override
   public int hashCode() {
     return Objects.hash(
+        autoFullRefreshPolicy,
         excludeColumns,
         includeColumns,
         primaryKeys,
@@ -185,6 +206,7 @@ public int hashCode() {
   @Override
   public String toString() {
     return new ToStringer(TableSpecificConfig.class)
+        .add("autoFullRefreshPolicy", autoFullRefreshPolicy)
         .add("excludeColumns", excludeColumns)
         .add("includeColumns", includeColumns)
         .add("primaryKeys", primaryKeys)
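A hedged sketch of the per-table override described in the Javadoc above (column name hypothetical; only setters that appear in this patch are used):

    import java.util.Arrays;
    import com.databricks.sdk.service.pipelines.AutoFullRefreshPolicy;
    import com.databricks.sdk.service.pipelines.TableSpecificConfig;

    // A policy set here overrides one configured at a higher level: this table may be
    // auto full refreshed at most once every 23 hours.
    TableSpecificConfig tableConfig =
        new TableSpecificConfig()
            .setExcludeColumns(Arrays.asList("_audit_col")) // hypothetical column
            .setAutoFullRefreshPolicy(
                new AutoFullRefreshPolicy().setEnabled(true).setMinIntervalHours(23L));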
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointHosts.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointHosts.java
new file mode 100644
index 000000000..3c859f22e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointHosts.java
@@ -0,0 +1,47 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.postgres;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Encapsulates the various hostnames (read-write or read-only, pooled or not) for an endpoint. */
+@Generated
+public class EndpointHosts {
+  /**
+   * The hostname to connect to this endpoint. For read-write endpoints, this is a read-write
+   * hostname which connects to the primary compute. For read-only endpoints, this is a read-only
+   * hostname which allows read-only operations.
+   */
+  @JsonProperty("host")
+  private String host;
+
+  public EndpointHosts setHost(String host) {
+    this.host = host;
+    return this;
+  }
+
+  public String getHost() {
+    return host;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    EndpointHosts that = (EndpointHosts) o;
+    return Objects.equals(host, that.host);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(host);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(EndpointHosts.class).add("host", host).toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java
index 53654a5cc..d989aeac1 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointStatus.java
@@ -6,7 +6,6 @@
 import com.databricks.sdk.support.ToStringer;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.protobuf.Duration;
-import com.google.protobuf.Timestamp;
 import java.util.Objects;
 
 @Generated
@@ -35,16 +34,9 @@ public class EndpointStatus {
   @JsonProperty("endpoint_type")
   private EndpointType endpointType;
 
-  /**
-   * The hostname of the compute endpoint. This is the hostname specified when connecting to a
-   * database.
-   */
-  @JsonProperty("host")
-  private String host;
-
-  /** A timestamp indicating when the compute endpoint was last active. */
-  @JsonProperty("last_active_time")
-  private Timestamp lastActiveTime;
+  /** Contains host information for connecting to the endpoint. */
+  @JsonProperty("hosts")
+  private EndpointHosts hosts;
 
   /** */
   @JsonProperty("pending_state")
@@ -54,14 +46,6 @@ public class EndpointStatus {
   @JsonProperty("settings")
   private EndpointSettings settings;
 
-  /** A timestamp indicating when the compute endpoint was last started. */
-  @JsonProperty("start_time")
-  private Timestamp startTime;
-
-  /** A timestamp indicating when the compute endpoint was last suspended. */
-  @JsonProperty("suspend_time")
-  private Timestamp suspendTime;
-
   /** Duration of inactivity after which the compute endpoint is automatically suspended. */
   @JsonProperty("suspend_timeout_duration")
   private Duration suspendTimeoutDuration;
@@ -111,22 +95,13 @@ public EndpointType getEndpointType() {
     return endpointType;
   }
 
-  public EndpointStatus setHost(String host) {
-    this.host = host;
-    return this;
-  }
-
-  public String getHost() {
-    return host;
-  }
-
-  public EndpointStatus setLastActiveTime(Timestamp lastActiveTime) {
-    this.lastActiveTime = lastActiveTime;
+  public EndpointStatus setHosts(EndpointHosts hosts) {
+    this.hosts = hosts;
     return this;
   }
 
-  public Timestamp getLastActiveTime() {
-    return lastActiveTime;
+  public EndpointHosts getHosts() {
+    return hosts;
   }
 
   public EndpointStatus setPendingState(EndpointStatusState pendingState) {
@@ -147,24 +122,6 @@ public EndpointSettings getSettings() {
     return settings;
   }
 
-  public EndpointStatus setStartTime(Timestamp startTime) {
-    this.startTime = startTime;
-    return this;
-  }
-
-  public Timestamp getStartTime() {
-    return startTime;
-  }
-
-  public EndpointStatus setSuspendTime(Timestamp suspendTime) {
-    this.suspendTime = suspendTime;
-    return this;
-  }
-
-  public Timestamp getSuspendTime() {
-    return suspendTime;
-  }
-
   public EndpointStatus setSuspendTimeoutDuration(Duration suspendTimeoutDuration) {
     this.suspendTimeoutDuration = suspendTimeoutDuration;
     return this;
@@ -184,12 +141,9 @@ public boolean equals(Object o) {
         && Objects.equals(currentState, that.currentState)
         && Objects.equals(disabled, that.disabled)
         && Objects.equals(endpointType, that.endpointType)
-        && Objects.equals(host, that.host)
-        && Objects.equals(lastActiveTime, that.lastActiveTime)
+        && Objects.equals(hosts, that.hosts)
         && Objects.equals(pendingState, that.pendingState)
         && Objects.equals(settings, that.settings)
-        && Objects.equals(startTime, that.startTime)
-        && Objects.equals(suspendTime, that.suspendTime)
         && Objects.equals(suspendTimeoutDuration, that.suspendTimeoutDuration);
   }
 
@@ -201,12 +155,9 @@ public int hashCode() {
         currentState,
         disabled,
         endpointType,
-        host,
-        lastActiveTime,
+        hosts,
         pendingState,
         settings,
-        startTime,
-        suspendTime,
         suspendTimeoutDuration);
   }
 
@@ -218,12 +169,9 @@ public String toString() {
         .add("currentState", currentState)
         .add("disabled", disabled)
         .add("endpointType", endpointType)
-        .add("host", host)
-        .add("lastActiveTime", lastActiveTime)
+        .add("hosts", hosts)
         .add("pendingState", pendingState)
         .add("settings", settings)
-        .add("startTime", startTime)
-        .add("suspendTime", suspendTime)
         .add("suspendTimeoutDuration", suspendTimeoutDuration)
         .toString();
   }
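Client code that previously read EndpointStatus.getHost() now goes through the nested message; a minimal migration sketch:

    import com.databricks.sdk.service.postgres.EndpointHosts;
    import com.databricks.sdk.service.postgres.EndpointStatus;

    // Replaces the removed EndpointStatus.getHost(); null-safe against an unset hosts field.
    static String connectionHost(EndpointStatus status) {
      EndpointHosts hosts = status.getHosts();
      return hosts == null ? null : hosts.getHost();
    }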
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java
index a548140e3..680c4ea97 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointType.java
@@ -7,6 +7,6 @@
 /** The compute endpoint type. Either `read_write` or `read_only`. */
 @Generated
 public enum EndpointType {
-  READ_ONLY,
-  READ_WRITE,
+  ENDPOINT_TYPE_READ_ONLY,
+  ENDPOINT_TYPE_READ_WRITE,
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java
index 2f76598e2..f0be6807d 100644
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/ProjectStatus.java
@@ -6,7 +6,6 @@
 import com.databricks.sdk.support.ToStringer;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.protobuf.Duration;
-import com.google.protobuf.Timestamp;
 import java.util.Objects;
 
 @Generated
@@ -15,10 +14,6 @@ public class ProjectStatus {
   @JsonProperty("branch_logical_size_limit_bytes")
   private Long branchLogicalSizeLimitBytes;
 
-  /** The most recent time when any endpoint of this project was active. */
-  @JsonProperty("compute_last_active_time")
-  private Timestamp computeLastActiveTime;
-
   /** The effective default endpoint settings. */
   @JsonProperty("default_endpoint_settings")
   private ProjectDefaultEndpointSettings defaultEndpointSettings;
@@ -56,15 +51,6 @@ public Long getBranchLogicalSizeLimitBytes() {
     return branchLogicalSizeLimitBytes;
   }
 
-  public ProjectStatus setComputeLastActiveTime(Timestamp computeLastActiveTime) {
-    this.computeLastActiveTime = computeLastActiveTime;
-    return this;
-  }
-
-  public Timestamp getComputeLastActiveTime() {
-    return computeLastActiveTime;
-  }
-
   public ProjectStatus setDefaultEndpointSettings(
       ProjectDefaultEndpointSettings defaultEndpointSettings) {
     this.defaultEndpointSettings = defaultEndpointSettings;
@@ -135,7 +121,6 @@ public boolean equals(Object o) {
     if (o == null || getClass() != o.getClass()) return false;
     ProjectStatus that = (ProjectStatus) o;
     return Objects.equals(branchLogicalSizeLimitBytes, that.branchLogicalSizeLimitBytes)
-        && Objects.equals(computeLastActiveTime, that.computeLastActiveTime)
         && Objects.equals(defaultEndpointSettings, that.defaultEndpointSettings)
         && Objects.equals(displayName, that.displayName)
         && Objects.equals(historyRetentionDuration, that.historyRetentionDuration)
@@ -149,7 +134,6 @@ public boolean equals(Object o) {
   public int hashCode() {
     return Objects.hash(
         branchLogicalSizeLimitBytes,
-        computeLastActiveTime,
         defaultEndpointSettings,
         displayName,
         historyRetentionDuration,
@@ -163,7 +147,6 @@ public int hashCode() {
   public String toString() {
     return new ToStringer(ProjectStatus.class)
         .add("branchLogicalSizeLimitBytes", branchLogicalSizeLimitBytes)
-        .add("computeLastActiveTime", computeLastActiveTime)
         .add("defaultEndpointSettings", defaultEndpointSettings)
         .add("displayName", displayName)
         .add("historyRetentionDuration", historyRetentionDuration)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
old mode 100755
new mode 100644
index 0d95cd1c0..07fe0f094
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java
@@ -64,9 +64,9 @@ public RecipientInfo get(String name) {
   }
 
   /**
-   * Gets a share recipient from the metastore if:
-   *
-   * <p>* the caller is the owner of the share recipient, or: * is a metastore admin
+   * Gets a share recipient from the metastore. The caller must be one of: * A user with
+   * **USE_RECIPIENT** privilege on the metastore * The owner of the share recipient * A metastore
+   * admin
    */
   public RecipientInfo get(GetRecipientRequest request) {
     return impl.get(request);
@@ -109,8 +109,8 @@ public GetRecipientSharePermissionsResponse sharePermissions(String name) {
   }
 
   /**
-   * Gets the share permissions for the specified Recipient. The caller must have the USE_RECIPIENT
-   * privilege on the metastore or be the owner of the Recipient.
+   * Gets the share permissions for the specified Recipient. The caller must have the
+   * **USE_RECIPIENT** privilege on the metastore or be the owner of the Recipient.
    */
   public GetRecipientSharePermissionsResponse sharePermissions(SharePermissionsRequest request) {
     return impl.sharePermissions(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java
old mode 100755
new mode 100644
index 29d35db4c..a85d11b93
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsService.java
@@ -38,9 +38,9 @@ public interface RecipientsService {
   void delete(DeleteRecipientRequest deleteRecipientRequest);
 
   /**
-   * Gets a share recipient from the metastore if:
-   *
-   * <p>* the caller is the owner of the share recipient, or: * is a metastore admin
+   * Gets a share recipient from the metastore. The caller must be one of: * A user with
+   * **USE_RECIPIENT** privilege on the metastore * The owner of the share recipient * A metastore
+   * admin
    */
   RecipientInfo get(GetRecipientRequest getRecipientRequest);
 
@@ -59,8 +59,8 @@ public interface RecipientsService {
   RecipientInfo rotateToken(RotateRecipientToken rotateRecipientToken);
 
   /**
-   * Gets the share permissions for the specified Recipient. The caller must have the USE_RECIPIENT
-   * privilege on the metastore or be the owner of the Recipient.
+   * Gets the share permissions for the specified Recipient. The caller must have the
+   * **USE_RECIPIENT** privilege on the metastore or be the owner of the Recipient.
    */
   GetRecipientSharePermissionsResponse sharePermissions(
       SharePermissionsRequest sharePermissionsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java
old mode 100755
new mode 100644
index 302e74033..ac0cb0ac2
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java
@@ -7,6 +7,7 @@
 /** Current state of the endpoint */
 @Generated
 public enum EndpointStatusState {
+  DELETED,
   OFFLINE,
   ONLINE,
   PROVISIONING,
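The postgres EndpointType rename earlier in this patch is source-breaking for compiled clients; the migration is mechanical:

    import com.databricks.sdk.service.postgres.EndpointType;

    // READ_ONLY / READ_WRITE were removed; the values now carry an ENDPOINT_TYPE_ prefix.
    static boolean isReadOnly(EndpointType type) {
      return type == EndpointType.ENDPOINT_TYPE_READ_ONLY;
    }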
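For the vector search change, a sketch of treating the new DELETED value as terminal when inspecting endpoint state (treating ONLINE and OFFLINE as terminal too is an assumption, not something this patch states):

    import com.databricks.sdk.service.vectorsearch.EndpointStatusState;

    // Stop polling once the endpoint reaches a settled state, including the new DELETED.
    static boolean isSettled(EndpointStatusState state) {
      return state == EndpointStatusState.ONLINE
          || state == EndpointStatusState.OFFLINE
          || state == EndpointStatusState.DELETED;
    }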