Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
Original file line number Diff line number Diff line change
@@ -1 +1 @@
5757e4a5f208a1f416f8f94b00febb3118fdb940
d6ecfb0633332a524f52f6ab319b073dd3f7493e
3 changes: 3 additions & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
Expand Up @@ -1980,6 +1980,7 @@
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/AutoFullRefreshPolicy.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineResponse.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java linguist-generated=true
Expand Down Expand Up @@ -2021,6 +2022,7 @@
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/MaturityLevel.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/NotebookLibrary.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Notifications.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/OperationTimeWindow.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PathPattern.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java linguist-generated=true
Expand Down Expand Up @@ -2095,6 +2097,7 @@
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleOperation.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleRequest.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/Endpoint.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointHosts.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointOperationMetadata.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSettings.java linguist-generated=true
/home/ubuntu/workspace/databricks-sdk-java/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/EndpointSpec.java linguist-generated=true
Expand Down
12 changes: 12 additions & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,15 @@
### Internal Changes

### API Changes
* Add `fullRefreshWindow` field for `com.databricks.sdk.service.pipelines.IngestionPipelineDefinition`.
* Add `autoFullRefreshPolicy` field for `com.databricks.sdk.service.pipelines.TableSpecificConfig`.
* Add `hosts` field for `com.databricks.sdk.service.postgres.EndpointStatus`.
* Add `ENDPOINT_TYPE_READ_WRITE` and `ENDPOINT_TYPE_READ_ONLY` enum values for `com.databricks.sdk.service.postgres.EndpointType`.
* Add `DELETED` enum value for `com.databricks.sdk.service.vectorsearch.EndpointStatusState`.
* [Breaking] Change `createBranch()`, `createEndpoint()` and `createProject()` methods for `workspaceClient.postgres()` service with new required argument order.
* Change `branchId` field for `com.databricks.sdk.service.postgres.CreateBranchRequest` to no longer be required.
* Change `endpointId` field for `com.databricks.sdk.service.postgres.CreateEndpointRequest` to no longer be required.
* Change `projectId` field for `com.databricks.sdk.service.postgres.CreateProjectRequest` to no longer be required.
* [Breaking] Remove `host`, `lastActiveTime`, `startTime` and `suspendTime` fields for `com.databricks.sdk.service.postgres.EndpointStatus`.
* [Breaking] Remove `computeLastActiveTime` field for `com.databricks.sdk.service.postgres.ProjectStatus`.
* [Breaking] Remove `READ_WRITE` and `READ_ONLY` enum values for `com.databricks.sdk.service.postgres.EndpointType`.
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.pipelines;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Policy controlling automatic full refresh of a table.
 *
 * <p>Plain data-transfer object serialized via Jackson: carries the {@code enabled} flag and the
 * minimum interval (in hours) between automatic full refreshes.
 */
@Generated
public class AutoFullRefreshPolicy {
  /** (Required, Mutable) Whether auto full refresh is enabled. */
  @JsonProperty("enabled")
  private Boolean enabled;

  /**
   * (Optional, Mutable) Minimum interval, in hours, between the timestamp at which the table was
   * last fully refreshed and the current timestamp, required before an auto full refresh is
   * triggered. If unspecified while auto full refresh is enabled, it defaults to 24 hours.
   */
  @JsonProperty("min_interval_hours")
  private Long minIntervalHours;

  public Boolean getEnabled() {
    return enabled;
  }

  /** Fluent setter for {@code enabled}; returns {@code this} for chaining. */
  public AutoFullRefreshPolicy setEnabled(Boolean enabled) {
    this.enabled = enabled;
    return this;
  }

  public Long getMinIntervalHours() {
    return minIntervalHours;
  }

  /** Fluent setter for {@code min_interval_hours}; returns {@code this} for chaining. */
  public AutoFullRefreshPolicy setMinIntervalHours(Long minIntervalHours) {
    this.minIntervalHours = minIntervalHours;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    AutoFullRefreshPolicy other = (AutoFullRefreshPolicy) o;
    return Objects.equals(enabled, other.enabled)
        && Objects.equals(minIntervalHours, other.minIntervalHours);
  }

  @Override
  public int hashCode() {
    return Objects.hash(enabled, minIntervalHours);
  }

  @Override
  public String toString() {
    return new ToStringer(AutoFullRefreshPolicy.class)
        .add("enabled", enabled)
        .add("minIntervalHours", minIntervalHours)
        .toString();
  }
}
16 changes: 16 additions & 0 deletions ...-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,10 @@ public class IngestionPipelineDefinition {
@JsonProperty("connection_name")
private String connectionName;

/** (Optional) A window that specifies a set of time ranges for snapshot queries in CDC. */
@JsonProperty("full_refresh_window")
private OperationTimeWindow fullRefreshWindow;

/**
* Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs
* directly without the need to specify a UC connection or ingestion gateway. The `source_catalog`
Expand Down Expand Up @@ -73,6 +77,15 @@ public String getConnectionName() {
return connectionName;
}

/**
 * Sets the optional full-refresh window — a set of time ranges for snapshot queries in CDC —
 * and returns {@code this} for chaining.
 */
public IngestionPipelineDefinition setFullRefreshWindow(OperationTimeWindow fullRefreshWindow) {
this.fullRefreshWindow = fullRefreshWindow;
return this;
}

/** Returns the configured full-refresh window, or {@code null} if unset. */
public OperationTimeWindow getFullRefreshWindow() {
return fullRefreshWindow;
}

public IngestionPipelineDefinition setIngestFromUcForeignCatalog(
Boolean ingestFromUcForeignCatalog) {
this.ingestFromUcForeignCatalog = ingestFromUcForeignCatalog;
Expand Down Expand Up @@ -144,6 +157,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
IngestionPipelineDefinition that = (IngestionPipelineDefinition) o;
return Objects.equals(connectionName, that.connectionName)
&& Objects.equals(fullRefreshWindow, that.fullRefreshWindow)
&& Objects.equals(ingestFromUcForeignCatalog, that.ingestFromUcForeignCatalog)
&& Objects.equals(ingestionGatewayId, that.ingestionGatewayId)
&& Objects.equals(netsuiteJarPath, that.netsuiteJarPath)
Expand All @@ -157,6 +171,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
connectionName,
fullRefreshWindow,
ingestFromUcForeignCatalog,
ingestionGatewayId,
netsuiteJarPath,
Expand All @@ -170,6 +185,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(IngestionPipelineDefinition.class)
.add("connectionName", connectionName)
.add("fullRefreshWindow", fullRefreshWindow)
.add("ingestFromUcForeignCatalog", ingestFromUcForeignCatalog)
.add("ingestionGatewayId", ingestionGatewayId)
.add("netsuiteJarPath", netsuiteJarPath)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.pipelines;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * A time window in which an operation is allowed to run.
 *
 * <p>Plain data-transfer object serialized via Jackson: selects days of the week, a start hour,
 * and a time zone.
 */
@Generated
public class OperationTimeWindow {
  /**
   * Days of the week on which the window may occur. If not specified, all days of the week are
   * used.
   */
  @JsonProperty("days_of_week")
  private Collection<DayOfWeek> daysOfWeek;

  /** Start hour of the window: an integer between 0 and 23 in the 24-hour day. */
  @JsonProperty("start_hour")
  private Long startHour;

  /**
   * Time zone id of the window. See
   * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
   * for details. If not specified, UTC will be used.
   */
  @JsonProperty("time_zone_id")
  private String timeZoneId;

  public Collection<DayOfWeek> getDaysOfWeek() {
    return daysOfWeek;
  }

  /** Fluent setter for {@code days_of_week}; returns {@code this} for chaining. */
  public OperationTimeWindow setDaysOfWeek(Collection<DayOfWeek> daysOfWeek) {
    this.daysOfWeek = daysOfWeek;
    return this;
  }

  public Long getStartHour() {
    return startHour;
  }

  /** Fluent setter for {@code start_hour}; returns {@code this} for chaining. */
  public OperationTimeWindow setStartHour(Long startHour) {
    this.startHour = startHour;
    return this;
  }

  public String getTimeZoneId() {
    return timeZoneId;
  }

  /** Fluent setter for {@code time_zone_id}; returns {@code this} for chaining. */
  public OperationTimeWindow setTimeZoneId(String timeZoneId) {
    this.timeZoneId = timeZoneId;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    OperationTimeWindow other = (OperationTimeWindow) o;
    return Objects.equals(daysOfWeek, other.daysOfWeek)
        && Objects.equals(startHour, other.startHour)
        && Objects.equals(timeZoneId, other.timeZoneId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(daysOfWeek, startHour, timeZoneId);
  }

  @Override
  public String toString() {
    return new ToStringer(OperationTimeWindow.class)
        .add("daysOfWeek", daysOfWeek)
        .add("startHour", startHour)
        .add("timeZoneId", timeZoneId)
        .toString();
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,16 @@

@Generated
public class TableSpecificConfig {
/**
* (Optional, Mutable) Policy for auto full refresh: if enabled, the pipeline will automatically
* try to fix issues by performing a full refresh of the table in the retry run. An
* auto_full_refresh_policy set in the table configuration overrides the higher-level
* auto_full_refresh_policy. For example: { "auto_full_refresh_policy": { "enabled": true,
* "min_interval_hours": 23 } } If unspecified, auto full refresh is disabled.
*/
@JsonProperty("auto_full_refresh_policy")
private AutoFullRefreshPolicy autoFullRefreshPolicy;

/**
* A list of column names to be excluded for the ingestion. When not specified, include_columns
* fully controls what columns to be ingested. When specified, all other columns including future
Expand Down Expand Up @@ -66,6 +76,15 @@ public class TableSpecificConfig {
@JsonProperty("workday_report_parameters")
private IngestionPipelineDefinitionWorkdayReportParameters workdayReportParameters;

/**
 * Sets the optional auto-full-refresh policy for this table and returns {@code this} for
 * chaining.
 */
public TableSpecificConfig setAutoFullRefreshPolicy(AutoFullRefreshPolicy autoFullRefreshPolicy) {
this.autoFullRefreshPolicy = autoFullRefreshPolicy;
return this;
}

/** Returns the configured auto-full-refresh policy, or {@code null} if unset. */
public AutoFullRefreshPolicy getAutoFullRefreshPolicy() {
return autoFullRefreshPolicy;
}

public TableSpecificConfig setExcludeColumns(Collection<String> excludeColumns) {
this.excludeColumns = excludeColumns;
return this;
Expand Down Expand Up @@ -157,7 +176,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TableSpecificConfig that = (TableSpecificConfig) o;
return Objects.equals(excludeColumns, that.excludeColumns)
return Objects.equals(autoFullRefreshPolicy, that.autoFullRefreshPolicy)
&& Objects.equals(excludeColumns, that.excludeColumns)
&& Objects.equals(includeColumns, that.includeColumns)
&& Objects.equals(primaryKeys, that.primaryKeys)
&& Objects.equals(queryBasedConnectorConfig, that.queryBasedConnectorConfig)
Expand All @@ -171,6 +191,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
autoFullRefreshPolicy,
excludeColumns,
includeColumns,
primaryKeys,
Expand All @@ -185,6 +206,7 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(TableSpecificConfig.class)
.add("autoFullRefreshPolicy", autoFullRefreshPolicy)
.add("excludeColumns", excludeColumns)
.add("includeColumns", includeColumns)
.add("primaryKeys", primaryKeys)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.postgres;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Encapsulates various hostnames (r/w or r/o, pooled or not) for an endpoint.
 *
 * <p>Plain data-transfer object serialized via Jackson.
 */
@Generated
public class EndpointHosts {
  /**
   * The hostname used to connect to this endpoint. For read-write endpoints this is a read-write
   * hostname connecting to the primary compute; for read-only endpoints it is a read-only
   * hostname allowing read-only operations.
   */
  @JsonProperty("host")
  private String host;

  public String getHost() {
    return host;
  }

  /** Fluent setter for {@code host}; returns {@code this} for chaining. */
  public EndpointHosts setHost(String host) {
    this.host = host;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    EndpointHosts other = (EndpointHosts) o;
    return Objects.equals(host, other.host);
  }

  @Override
  public int hashCode() {
    return Objects.hash(host);
  }

  @Override
  public String toString() {
    return new ToStringer(EndpointHosts.class).add("host", host).toString();
  }
}
Loading
Loading