fix(databricks-jdbc-driver): Fix extract epoch from timestamp SQL Generation #9160

Merged: 2 commits, Jan 30, 2025
@@ -197,7 +197,7 @@ export class DatabricksQuery extends BaseQuery {
templates.functions.GREATEST = 'GREATEST({{ args_concat }})';
templates.functions.TRUNC = 'CASE WHEN ({{ args[0] }}) >= 0 THEN FLOOR({{ args_concat }}) ELSE CEIL({{ args_concat }}) END';
templates.expressions.timestamp_literal = 'from_utc_timestamp(\'{{ value }}\', \'UTC\')';
- templates.expressions.extract = 'EXTRACT({{ date_part }} FROM {{ expr }})';
+ templates.expressions.extract = '{% if date_part|lower == "epoch" %}unix_timestamp({{ expr }}){% else %}EXTRACT({{ date_part }} FROM {{ expr }}){% endif %}';
templates.expressions.interval_single_date_part = 'INTERVAL \'{{ num }}\' {{ date_part }}';
templates.quotes.identifiers = '`';
templates.quotes.escape = '``';
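The new extract template branches on the date part: EPOCH, which Databricks' EXTRACT does not support, falls back to unix_timestamp (Unix time in whole seconds), while every other field keeps the standard EXTRACT syntax. A rough sketch of the rendered output, assuming a timestamp column named order_date:

EXTRACT(EPOCH FROM order_date)  =>  unix_timestamp(order_date)
EXTRACT(YEAR FROM order_date)   =>  EXTRACT(YEAR FROM order_date)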
rust/cubesql/cubesql/src/compile/mod.rs (51 additions, 0 deletions)
@@ -15945,6 +15945,57 @@ LIMIT {{ limit }}{% endif %}"#.to_string(),
assert!(sql.contains(" AS DECIMAL(38,10))"));
}

#[tokio::test]
async fn test_extract_epoch_pushdown() {
    if !Rewriter::sql_push_down_enabled() {
        return;
    }
    init_testing_logger();

    let query = "
        SELECT LOWER(customer_gender),
               MAX(CAST(FLOOR(EXTRACT(EPOCH FROM order_date) / 31536000) AS bigint)) AS max_years
        FROM KibanaSampleDataEcommerce
        GROUP BY 1
    ";

    // Generic
    let query_plan =
        convert_select_to_query_plan(query.to_string(), DatabaseProtocol::PostgreSQL).await;

    let physical_plan = query_plan.as_physical_plan().await.unwrap();
    println!(
        "Physical plan: {}",
        displayable(physical_plan.as_ref()).indent()
    );

    let logical_plan = query_plan.as_logical_plan();
    let sql = logical_plan.find_cube_scan_wrapped_sql().wrapped_sql.sql;
    assert!(sql.contains("EXTRACT(EPOCH"));

    // Databricks
    let query_plan = convert_select_to_query_plan_customized(
        query.to_string(),
        DatabaseProtocol::PostgreSQL,
        vec![
            (
                "expressions/timestamp_literal".to_string(),
                "from_utc_timestamp('{{ value }}', 'UTC')".to_string(),
            ),
            (
                "expressions/extract".to_string(),
                "{% if date_part|lower == \"epoch\" %}unix_timestamp({{ expr }}){% else %}EXTRACT({{ date_part }} FROM {{ expr }}){% endif %}".to_string(),
            ),
        ],
    )
    .await;

    let physical_plan = query_plan.as_physical_plan().await.unwrap();
    println!(
        "Physical plan: {}",
        displayable(physical_plan.as_ref()).indent()
    );

    let logical_plan = query_plan.as_logical_plan();
    let sql = logical_plan.find_cube_scan_wrapped_sql().wrapped_sql.sql;
    assert!(!sql.contains("EXTRACT(EPOCH"));
    assert!(sql.contains("unix_timestamp"));
}

#[tokio::test]
async fn test_push_down_to_grouped_query_with_filters() {
if !Rewriter::sql_push_down_enabled() {
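For the test query above, the Databricks template overrides should rewrite the pushed-down epoch arithmetic into roughly the following fragment (a sketch of the expected wrapped SQL, assuming the order_date member resolves to a plain column reference; exact quoting and aliasing depend on the generated query). 31536000 is the number of seconds in a 365-day year, so the expression yields approximately whole years since the Unix epoch:

MAX(CAST(FLOOR(unix_timestamp(order_date) / 31536000) AS bigint)) AS max_years

With the generic PostgreSQL templates the same expression keeps EXTRACT(EPOCH FROM order_date), which is what the two contains assertions verify.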