Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
Original file line number Diff line number Diff line change
@@ -1 +1 @@
8f5eedbc991c4f04ce1284406577b0c92d59a224
e1ea3f5ba0bc5b53be94f56535a67ba701a52a52
6,367 changes: 3,191 additions & 3,176 deletions .gitattributes

Large diffs are not rendered by default.

13 changes: 13 additions & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,16 @@
### Internal Changes

### API Changes
* Add `workspaceClient.workspaceEntityTagAssignments()` service.
* Add `clone()` method for `workspaceClient.pipelines()` service.
* Add `datasetCatalog` and `datasetSchema` fields for `com.databricks.sdk.service.dashboards.CreateDashboardRequest`.
* Add `datasetCatalog` and `datasetSchema` fields for `com.databricks.sdk.service.dashboards.UpdateDashboardRequest`.
* Add `purgeData` field for `com.databricks.sdk.service.database.DeleteSyncedDatabaseTableRequest`.
* Add `cronSchedule` field for `com.databricks.sdk.service.ml.MaterializedFeature`.
* Add `truncation` field for `com.databricks.sdk.service.pipelines.PipelineEvent`.
* Add `gcpServiceAccount` field for `com.databricks.sdk.service.provisioning.CreateGcpKeyInfo`.
* Add `gcpServiceAccount` field for `com.databricks.sdk.service.provisioning.GcpKeyInfo`.
* Add `hasDeltaUniformIceberg` field for `com.databricks.sdk.service.sharing.TableInternalAttributes`.
* Add `FOREIGN_TABLE` and `VOLUME` enum values for `com.databricks.sdk.service.sharing.SharedDataObjectDataObjectType`.
* [Breaking] Change `timeWindow` field for `com.databricks.sdk.service.ml.Feature` to no longer be required.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import com.databricks.sdk.support.Generated;

/** Next Id: 48 */
/** Next Id: 51 */
@Generated
public enum ConnectionType {
BIGQUERY,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import com.databricks.sdk.support.Generated;

/** Latest kind: SECRET_EXTERNAL_AWS_SECRETS_MANAGER = 273; Next id:274 */
/** Latest kind: CONNECTION_CROWDSTRIKE_EVENT_STREAM_M2M = 281; Next id: 282 */
@Generated
public enum SecurableKind {
TABLE_DB_STORAGE,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,8 +96,7 @@ public class ClusterAttributes {

/**
* Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
* space when its Spark workers are running low on disk space. This feature requires specific AWS
* permissions to function correctly - refer to the User Guide for more details.
* space when its Spark workers are running low on disk space.
*/
@JsonProperty("enable_elastic_disk")
private Boolean enableElasticDisk;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -160,8 +160,7 @@ public class ClusterDetails {

/**
* Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
* space when its Spark workers are running low on disk space. This feature requires specific AWS
* permissions to function correctly - refer to the User Guide for more details.
* space when its Spark workers are running low on disk space.
*/
@JsonProperty("enable_elastic_disk")
private Boolean enableElasticDisk;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,8 +110,7 @@ public class ClusterSpec {

/**
* Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
* space when its Spark workers are running low on disk space. This feature requires specific AWS
* permissions to function correctly - refer to the User Guide for more details.
* space when its Spark workers are running low on disk space.
*/
@JsonProperty("enable_elastic_disk")
private Boolean enableElasticDisk;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -113,8 +113,7 @@ public class CreateCluster {

/**
* Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
* space when its Spark workers are running low on disk space. This feature requires specific AWS
* permissions to function correctly - refer to the User Guide for more details.
* space when its Spark workers are running low on disk space.
*/
@JsonProperty("enable_elastic_disk")
private Boolean enableElasticDisk;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,8 +110,7 @@ public class EditCluster {

/**
* Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
* space when its Spark workers are running low on disk space. This feature requires specific AWS
* permissions to function correctly - refer to the User Guide for more details.
* space when its Spark workers are running low on disk space.
*/
@JsonProperty("enable_elastic_disk")
private Boolean enableElasticDisk;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -99,8 +99,7 @@ public class UpdateClusterResource {

/**
* Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
* space when its Spark workers are running low on disk space. This feature requires specific AWS
* permissions to function correctly - refer to the User Guide for more details.
* space when its Spark workers are running low on disk space.
*/
@JsonProperty("enable_elastic_disk")
private Boolean enableElasticDisk;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@
package com.databricks.sdk.service.dashboards;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

Expand All @@ -13,6 +15,24 @@ public class CreateDashboardRequest {
@JsonProperty("dashboard")
private Dashboard dashboard;

/**
* Sets the default catalog for all datasets in this dashboard. Does not impact table references
* that use fully qualified catalog names (ex: samples.nyctaxi.trips). Leave blank to keep each
* dataset’s existing configuration.
*/
@JsonIgnore
@QueryParam("dataset_catalog")
private String datasetCatalog;

/**
* Sets the default schema for all datasets in this dashboard. Does not impact table references
* that use fully qualified schema names (ex: nyctaxi.trips). Leave blank to keep each dataset’s
* existing configuration.
*/
@JsonIgnore
@QueryParam("dataset_schema")
private String datasetSchema;

public CreateDashboardRequest setDashboard(Dashboard dashboard) {
this.dashboard = dashboard;
return this;
Expand All @@ -22,21 +42,45 @@ public Dashboard getDashboard() {
return dashboard;
}

/**
 * Sets the default catalog override applied to every dataset in the dashboard.
 *
 * @param catalog catalog name sent as the {@code dataset_catalog} query parameter
 * @return this request, for call chaining
 */
public CreateDashboardRequest setDatasetCatalog(String catalog) {
  datasetCatalog = catalog;
  return this;
}

/** Returns the default dataset catalog for this request, or {@code null} when unset. */
public String getDatasetCatalog() {
  return this.datasetCatalog;
}

/**
 * Sets the default schema override applied to every dataset in the dashboard.
 *
 * @param schema schema name sent as the {@code dataset_schema} query parameter
 * @return this request, for call chaining
 */
public CreateDashboardRequest setDatasetSchema(String schema) {
  datasetSchema = schema;
  return this;
}

/** Returns the default dataset schema for this request, or {@code null} when unset. */
public String getDatasetSchema() {
  return this.datasetSchema;
}

/** Structural equality: all request fields must match. */
@Override
public boolean equals(Object o) {
  if (o == this) {
    return true;
  }
  // Exact-class comparison (not instanceof), matching the generated-code convention.
  if (o == null || o.getClass() != getClass()) {
    return false;
  }
  CreateDashboardRequest other = (CreateDashboardRequest) o;
  return Objects.equals(this.dashboard, other.dashboard)
      && Objects.equals(this.datasetCatalog, other.datasetCatalog)
      && Objects.equals(this.datasetSchema, other.datasetSchema);
}

/** Hash over the same fields compared in {@link #equals(Object)}, in the same order. */
@Override
public int hashCode() {
  return Objects.hash(dashboard, datasetCatalog, datasetSchema);
}

/** Human-readable rendering of this request; field order matches declaration order. */
@Override
public String toString() {
  ToStringer stringer = new ToStringer(CreateDashboardRequest.class);
  stringer.add("dashboard", dashboard);
  stringer.add("datasetCatalog", datasetCatalog);
  stringer.add("datasetSchema", datasetSchema);
  return stringer.toString();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
package com.databricks.sdk.service.dashboards;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
Expand All @@ -17,6 +18,24 @@ public class UpdateDashboardRequest {
/** UUID identifying the dashboard. */
@JsonIgnore private String dashboardId;

/**
* Sets the default catalog for all datasets in this dashboard. Does not impact table references
* that use fully qualified catalog names (ex: samples.nyctaxi.trips). Leave blank to keep each
* dataset’s existing configuration.
*/
@JsonIgnore
@QueryParam("dataset_catalog")
private String datasetCatalog;

/**
* Sets the default schema for all datasets in this dashboard. Does not impact table references
* that use fully qualified schema names (ex: nyctaxi.trips). Leave blank to keep each dataset’s
* existing configuration.
*/
@JsonIgnore
@QueryParam("dataset_schema")
private String datasetSchema;

public UpdateDashboardRequest setDashboard(Dashboard dashboard) {
this.dashboard = dashboard;
return this;
Expand All @@ -35,25 +54,47 @@ public String getDashboardId() {
return dashboardId;
}

/**
 * Sets the default catalog override applied to every dataset in the dashboard.
 *
 * @param catalog catalog name sent as the {@code dataset_catalog} query parameter
 * @return this request, for call chaining
 */
public UpdateDashboardRequest setDatasetCatalog(String catalog) {
  datasetCatalog = catalog;
  return this;
}

/** Returns the default dataset catalog for this request, or {@code null} when unset. */
public String getDatasetCatalog() {
  return this.datasetCatalog;
}

/**
 * Sets the default schema override applied to every dataset in the dashboard.
 *
 * @param schema schema name sent as the {@code dataset_schema} query parameter
 * @return this request, for call chaining
 */
public UpdateDashboardRequest setDatasetSchema(String schema) {
  datasetSchema = schema;
  return this;
}

/** Returns the default dataset schema for this request, or {@code null} when unset. */
public String getDatasetSchema() {
  return this.datasetSchema;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
UpdateDashboardRequest that = (UpdateDashboardRequest) o;
return Objects.equals(dashboard, that.dashboard)
&& Objects.equals(dashboardId, that.dashboardId);
&& Objects.equals(dashboardId, that.dashboardId)
&& Objects.equals(datasetCatalog, that.datasetCatalog)
&& Objects.equals(datasetSchema, that.datasetSchema);
}

/** Hash over the same fields compared in {@link #equals(Object)}, in the same order. */
@Override
public int hashCode() {
  return Objects.hash(dashboard, dashboardId, datasetCatalog, datasetSchema);
}

/** Human-readable rendering of this request; field order matches declaration order. */
@Override
public String toString() {
  ToStringer stringer = new ToStringer(UpdateDashboardRequest.class);
  stringer.add("dashboard", dashboard);
  stringer.add("dashboardId", dashboardId);
  stringer.add("datasetCatalog", datasetCatalog);
  stringer.add("datasetSchema", datasetSchema);
  return stringer.toString();
}
}
Loading
Loading