Merged
14 changes: 7 additions & 7 deletions packages/cubejs-backend-native/Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions packages/cubejs-backend-native/src/sql4sql.rs
@@ -131,6 +131,7 @@ async fn get_sql(
        .generate_sql(
            session.server.transport.clone(),
            Arc::new(session.state.get_load_request_meta("sql")),
+           Arc::clone(&session.state),
        )
        .await?;

24 changes: 23 additions & 1 deletion packages/cubejs-bigquery-driver/src/BigQueryDriver.ts
@@ -55,6 +55,13 @@ interface BigQueryDriverOptions extends BigQueryOptions {
type BigQueryDriverOptionsInitialized =
  Required<BigQueryDriverOptions, 'pollTimeout' | 'pollMaxInterval'>;

+// BigQuery type mappings for types not in the base DbTypeToGenericType
+const BigQueryToGenericType: Record<string, string> = {
+  bignumeric: 'decimal',
+  bigdecimal: 'decimal',
+  decimal: 'decimal'
+};
+
/**
 * BigQuery driver.
 */
@@ -294,7 +301,17 @@ export class BigQueryDriver extends BaseDriver implements DriverInterface {
  public async tableColumnTypes(table: string) {
    const [schema, name] = table.split('.');
    const [bigQueryTable] = await this.bigquery.dataset(schema).table(name).getMetadata();
-   return bigQueryTable.schema.fields.map((c: any) => ({ name: c.name, type: this.toGenericType(c.type) }));
+   return bigQueryTable.schema.fields.map((c: any) => {
+     // BigQuery NUMERIC is always (38, 9), BIGNUMERIC is (76, 38)
+     // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#decimal_types
+     if (c.type === 'NUMERIC' || c.type === 'DECIMAL') {
+       return { name: c.name, type: this.toGenericType(c.type, 38, 9) };
+     }
+     if (c.type === 'BIGNUMERIC' || c.type === 'BIGDECIMAL') {
+       return { name: c.name, type: this.toGenericType(c.type, 76, 38) };
+     }
+     return { name: c.name, type: this.toGenericType(c.type) };
+   });
  }

public async createSchemaIfNotExists(schemaName: string): Promise<void> {
@@ -437,4 +454,9 @@ export class BigQueryDriver extends BaseDriver implements DriverInterface {
      incrementalSchemaLoading: true,
    };
  }
+
+  protected override toGenericType(columnType: string, precision?: number | null, scale?: number | null): string {
+    const mappedType = BigQueryToGenericType[columnType.toLowerCase()] || columnType;
+    return super.toGenericType(mappedType, precision, scale);
+  }
}
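For context on the driver change above: as the inline comment notes, BigQuery NUMERIC/DECIMAL columns always carry precision 38 and scale 9, and BIGNUMERIC/BIGDECIMAL carry 76 and 38, so the driver can pass those fixed values to toGenericType instead of leaving the scale unknown. The sketch below is not part of the PR; it is a minimal standalone illustration of the intended mapping, and it assumes the base driver renders a known fixed-point type as decimal(precision,scale).

// Standalone sketch of the expected type mapping (simplified; assumes the
// base driver formats known fixed-point types as decimal(precision,scale)).
const BigQueryToGenericTypeSketch: Record<string, string> = {
  bignumeric: 'decimal',
  bigdecimal: 'decimal',
  decimal: 'decimal',
};

function toGenericTypeSketch(columnType: string, precision?: number, scale?: number): string {
  // Normalize BigQuery-specific aliases (BIGNUMERIC, BIGDECIMAL) to 'decimal' first.
  const mapped = BigQueryToGenericTypeSketch[columnType.toLowerCase()] || columnType.toLowerCase();
  // Keep the fixed precision/scale when it is known.
  if (mapped === 'decimal' && precision !== undefined && scale !== undefined) {
    return `decimal(${precision},${scale})`;
  }
  return mapped;
}

console.log(toGenericTypeSketch('NUMERIC', 38, 9));     // decimal(38,9)
console.log(toGenericTypeSketch('BIGNUMERIC', 76, 38)); // decimal(76,38)
console.log(toGenericTypeSketch('STRING'));             // string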
14 changes: 7 additions & 7 deletions rust/cubesql/Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions rust/cubesql/cubesql/Cargo.toml
@@ -10,14 +10,14 @@ homepage = "https://cube.dev"

[dependencies]
arc-swap = "1"
-datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = "e7d183cb3686377810f240dd851e943b4c926f0f", default-features = false, features = [
+datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = "3171c65655464732abc6182377d2750342a6a0c6", default-features = false, features = [
    "regex_expressions",
    "unicode_expressions",
] }
thiserror = "2"
cubeclient = { path = "../cubeclient" }
pg-srv = { path = "../pg-srv" }
-sqlparser = { git = 'https://github.com/cube-js/sqlparser-rs.git', rev = "16f051486de78a23a0ff252155dd59fc2d35497d" }
+sqlparser = { git = 'https://github.com/cube-js/sqlparser-rs.git', rev = "1423a0c16ebba665bdbfdd0e2b8d36f417baa514" }
base64 = "0.13.0"
tokio = { version = "^1.35", features = ["full", "rt", "tracing"] }
serde = { version = "^1.0", features = ["derive"] }
25 changes: 23 additions & 2 deletions rust/cubesql/cubesql/src/compile/engine/df/wrapper.rs
@@ -14,7 +14,7 @@ use crate::{
        },
    },
    config::ConfigObj,
-   sql::AuthContextRef,
+   sql::{AuthContextRef, SessionState},
    transport::{
        AliasedColumn, DataSource, LoadRequestMeta, MetaContext, SpanId, SqlGenerator,
        SqlTemplates, TransportLoadRequestQuery, TransportService,
@@ -642,13 +642,15 @@ impl CubeScanWrapperNode {
        &self,
        transport: Arc<dyn TransportService>,
        load_request_meta: Arc<LoadRequestMeta>,
+       state: Arc<SessionState>,
    ) -> result::Result<CubeScanWrappedSqlNode, CubeError> {
        let schema = self.schema();
        let wrapped_plan = self.wrapped_plan.clone();
        let (sql, request, member_fields) = Self::generate_sql_for_node(
            &self.meta,
            transport,
            load_request_meta,
+           state,
            self.clone().set_max_limit_for_node(wrapped_plan),
            true,
            Vec::new(),
@@ -920,6 +922,7 @@ impl CubeScanWrapperNode {
        meta: &MetaContext,
        transport: Arc<dyn TransportService>,
        load_request_meta: Arc<LoadRequestMeta>,
+       state: Arc<SessionState>,
        node: Arc<LogicalPlan>,
        can_rename_columns: bool,
        values: Vec<Option<String>>,
@@ -964,6 +967,7 @@ impl CubeScanWrapperNode {
            meta,
            transport,
            load_request_meta,
+           state,
            node,
            can_rename_columns,
            values,
@@ -997,6 +1001,7 @@ impl CubeScanWrapperNode {
        meta: &'ctx MetaContext,
        transport: Arc<dyn TransportService>,
        load_request_meta: Arc<LoadRequestMeta>,
+       state: Arc<SessionState>,
        node: Arc<LogicalPlan>,
        can_rename_columns: bool,
        values: Vec<Option<String>>,
@@ -1007,6 +1012,7 @@ impl CubeScanWrapperNode {
            meta,
            transport,
            load_request_meta,
+           state,
            node,
            can_rename_columns,
            values,
@@ -1141,6 +1147,7 @@ impl WrappedSelectNode {
        meta: &MetaContext,
        transport: Arc<dyn TransportService>,
        load_request_meta: Arc<LoadRequestMeta>,
+       state: Arc<SessionState>,
        sql: &mut SqlQuery,
        data_source: Option<&str>,
    ) -> result::Result<HashMap<String, String>, CubeError> {
@@ -1156,6 +1163,7 @@ impl WrappedSelectNode {
            meta,
            transport.clone(),
            load_request_meta.clone(),
+           state.clone(),
            subquery.clone(),
            true,
            sql.values.clone(),
@@ -3100,6 +3108,7 @@ impl WrappedSelectNode {
        meta: &MetaContext,
        transport: Arc<dyn TransportService>,
        load_request_meta: Arc<LoadRequestMeta>,
+       state: Arc<SessionState>,
        node: &Arc<dyn UserDefinedLogicalNode + Send + Sync>,
        can_rename_columns: bool,
        values: Vec<Option<String>>,
@@ -3198,6 +3207,7 @@ impl WrappedSelectNode {
            meta,
            transport.clone(),
            load_request_meta.clone(),
+           state.clone(),
            &mut sql,
            Some(data_source),
        )
@@ -3257,6 +3267,7 @@ impl WrappedSelectNode {
            meta,
            transport.clone(),
            load_request_meta.clone(),
+           state.clone(),
            lp.clone(),
            true,
            sql.values.clone(),
@@ -3402,8 +3413,14 @@ impl WrappedSelectNode {
                .all(|member| meta.find_dimension_with_name(member).is_some())
        });

+       let timezone = state
+           .query_timezone
+           .read()
+           .map_err(|_| CubeError::internal("Failed to acquire timezone read lock".to_string()))?
+           .clone();
+
        let load_request = V1LoadRequestQuery {
-           timezone: None,
+           timezone,
            measures: Some(
                aggregate
                    .iter()
@@ -3553,6 +3570,7 @@ impl WrappedSelectNode {
        meta: &MetaContext,
        transport: Arc<dyn TransportService>,
        load_request_meta: Arc<LoadRequestMeta>,
+       state: Arc<SessionState>,
        node: &Arc<dyn UserDefinedLogicalNode + Send + Sync>,
        can_rename_columns: bool,
        values: Vec<Option<String>>,
@@ -3564,6 +3582,7 @@ impl WrappedSelectNode {
            meta,
            transport,
            load_request_meta,
+           state,
            node,
            can_rename_columns,
            values,
@@ -3581,6 +3600,7 @@ impl WrappedSelectNode {
            meta,
            transport.clone(),
            load_request_meta.clone(),
+           state.clone(),
            self.from.clone(),
            true,
            values.clone(),
@@ -3593,6 +3613,7 @@ impl WrappedSelectNode {
            meta,
            transport.clone(),
            load_request_meta.clone(),
+           state,
            &mut sql,
            data_source.as_deref(),
        )
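The `state: Arc<SessionState>` parameter threaded through the cubesql changes above exists so the innermost SQL generation can read the session's query timezone and forward it in the `V1LoadRequestQuery`, which was previously hard-coded to `timezone: None`. Below is a small, self-contained sketch of that read pattern with simplified stand-in types (the real `SessionState` and `CubeError` differ); it only illustrates cloning the value out of the lock and surfacing a poisoned `RwLock` as an error instead of panicking.

// Standalone sketch (not cubesql code): reading a session-scoped timezone
// from behind an RwLock, with simplified stand-in types.
use std::sync::{Arc, RwLock};

struct SessionState {
    query_timezone: RwLock<Option<String>>,
}

#[derive(Debug)]
struct CubeError(String);

fn read_timezone(state: &Arc<SessionState>) -> Result<Option<String>, CubeError> {
    // Surface a poisoned lock as an error instead of panicking.
    let tz = state
        .query_timezone
        .read()
        .map_err(|_| CubeError("Failed to acquire timezone read lock".to_string()))?
        .clone();
    Ok(tz)
}

fn main() -> Result<(), CubeError> {
    let state = Arc::new(SessionState {
        query_timezone: RwLock::new(Some("Europe/Berlin".to_string())),
    });
    // The load request previously used `timezone: None`; now the session value is forwarded.
    let timezone = read_timezone(&state)?;
    println!("timezone for the load request: {:?}", timezone);
    Ok(())
}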