diff --git a/datafusion/spark/src/function/datetime/date_sub.rs b/datafusion/spark/src/function/datetime/date_sub.rs index aa10c05b8a99..c19d04e617a4 100644 --- a/datafusion/spark/src/function/datetime/date_sub.rs +++ b/datafusion/spark/src/function/datetime/date_sub.rs @@ -114,7 +114,7 @@ fn spark_date_sub(args: &[ArrayRef]) -> Result { } _ => { return internal_err!( - "Spark `date_add` function: argument must be int8, int16, int32, got {:?}", + "Spark `date_sub` function: argument must be int8, int16, int32, got {:?}", days_arg.data_type() ); } diff --git a/datafusion/spark/src/function/datetime/make_interval.rs b/datafusion/spark/src/function/datetime/make_interval.rs new file mode 100644 index 000000000000..ae0e63cc9921 --- /dev/null +++ b/datafusion/spark/src/function/datetime/make_interval.rs @@ -0,0 +1,581 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +use std::any::Any; +use std::sync::Arc; + +use arrow::array::{Array, ArrayRef, IntervalMonthDayNanoBuilder, PrimitiveArray}; +use arrow::datatypes::DataType::Interval; +use arrow::datatypes::IntervalUnit::MonthDayNano; +use arrow::datatypes::{DataType, IntervalMonthDayNano}; +use datafusion_common::{ + exec_err, plan_datafusion_err, DataFusionError, Result, ScalarValue, +}; +use datafusion_expr::{ + ColumnarValue, ScalarFunctionArgs, ScalarUDFImpl, Signature, Volatility, +}; +use datafusion_functions::utils::make_scalar_function; + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct SparkMakeInterval { + signature: Signature, +} + +impl Default for SparkMakeInterval { + fn default() -> Self { + Self::new() + } +} + +impl SparkMakeInterval { + pub fn new() -> Self { + Self { + signature: Signature::user_defined(Volatility::Immutable), + } + } +} + +impl ScalarUDFImpl for SparkMakeInterval { + fn as_any(&self) -> &dyn Any { + self + } + + fn name(&self) -> &str { + "make_interval" + } + + fn signature(&self) -> &Signature { + &self.signature + } + + fn return_type(&self, _arg_types: &[DataType]) -> Result { + Ok(Interval(MonthDayNano)) + } + + fn invoke_with_args(&self, args: ScalarFunctionArgs) -> Result { + if args.args.is_empty() { + return Ok(ColumnarValue::Scalar(ScalarValue::IntervalMonthDayNano( + Some(IntervalMonthDayNano::new(0, 0, 0)), + ))); + } + make_scalar_function(make_interval_kernel, vec![])(&args.args) + } + + fn coerce_types(&self, arg_types: &[DataType]) -> Result> { + let length = arg_types.len(); + match length { + x if x > 7 => { + exec_err!( + "make_interval expects between 0 and 7 arguments, got {}", + arg_types.len() + ) + } + _ => Ok((0..arg_types.len()) + .map(|i| { + if i == 6 { + DataType::Float64 + } else { + DataType::Int32 + } + }) + .collect()), + } + } +} + +fn make_interval_kernel(args: &[ArrayRef]) -> Result { + use arrow::array::AsArray; + use arrow::datatypes::{Float64Type, Int32Type}; + + let n_rows = args[0].len(); + + let 
years = args[0]
+        .as_primitive_opt::<Int32Type>()
+        .ok_or_else(|| plan_datafusion_err!("make_interval arg[0] must be Int32"))?;
+    let months = args
+        .get(1)
+        .map(|a| {
+            a.as_primitive_opt::<Int32Type>().ok_or_else(|| {
+                plan_datafusion_err!("make_interval arg[1] must be Int32")
+            })
+        })
+        .transpose()?;
+    let weeks = args
+        .get(2)
+        .map(|a| {
+            a.as_primitive_opt::<Int32Type>().ok_or_else(|| {
+                plan_datafusion_err!("make_interval arg[2] must be Int32")
+            })
+        })
+        .transpose()?;
+    let days: Option<&PrimitiveArray<Int32Type>> = args
+        .get(3)
+        .map(|a| {
+            a.as_primitive_opt::<Int32Type>().ok_or_else(|| {
+                plan_datafusion_err!("make_interval arg[3] must be Int32")
+            })
+        })
+        .transpose()?;
+    let hours: Option<&PrimitiveArray<Int32Type>> = args
+        .get(4)
+        .map(|a| {
+            a.as_primitive_opt::<Int32Type>().ok_or_else(|| {
+                plan_datafusion_err!("make_interval arg[4] must be Int32")
+            })
+        })
+        .transpose()?;
+    let mins: Option<&PrimitiveArray<Int32Type>> = args
+        .get(5)
+        .map(|a| {
+            a.as_primitive_opt::<Int32Type>().ok_or_else(|| {
+                plan_datafusion_err!("make_interval arg[5] must be Int32")
+            })
+        })
+        .transpose()?;
+    let secs: Option<&PrimitiveArray<Float64Type>> = args
+        .get(6)
+        .map(|a| {
+            a.as_primitive_opt::<Float64Type>().ok_or_else(|| {
+                plan_datafusion_err!("make_interval arg[6] must be Float64")
+            })
+        })
+        .transpose()?;
+
+    let mut builder = IntervalMonthDayNanoBuilder::with_capacity(n_rows);
+
+    for i in 0..n_rows {
+        // any NULL input (or non-finite secs) → result NULL
+        let any_null_present = years.is_null(i)
+            || months.as_ref().is_some_and(|a| a.is_null(i))
+            || weeks.as_ref().is_some_and(|a| a.is_null(i))
+            || days.as_ref().is_some_and(|a| a.is_null(i))
+            || hours.as_ref().is_some_and(|a| a.is_null(i))
+            || mins.as_ref().is_some_and(|a| a.is_null(i))
+            || secs
+                .as_ref()
+                .is_some_and(|a| a.is_null(i) || !a.value(i).is_finite());
+
+        if any_null_present {
+            builder.append_null();
+            continue;
+        }
+
+        // default values 0 or 0.0
+        let y = years.value(i);
+        let mo = months.as_ref().map_or(0, |a| a.value(i));
+        let w = weeks.as_ref().map_or(0, |a| 
a.value(i)); + let d = days.as_ref().map_or(0, |a| a.value(i)); + let h = hours.as_ref().map_or(0, |a| a.value(i)); + let mi = mins.as_ref().map_or(0, |a| a.value(i)); + let s = secs.as_ref().map_or(0.0, |a| a.value(i)); + + match make_interval_month_day_nano(y, mo, w, d, h, mi, s) { + Some(v) => builder.append_value(v), + None => { + builder.append_null(); + continue; + } + } + } + + Ok(Arc::new(builder.finish())) +} + +pub fn make_interval_month_day_nano( + year: i32, + month: i32, + week: i32, + day: i32, + hour: i32, + min: i32, + sec: f64, +) -> Option { + // checks if overflow + let months = year.checked_mul(12).and_then(|v| v.checked_add(month))?; + let total_days = week.checked_mul(7).and_then(|v| v.checked_add(day))?; + + let hours_nanos = (hour as i64).checked_mul(3_600_000_000_000)?; + let mins_nanos = (min as i64).checked_mul(60_000_000_000)?; + + let sec_int = sec.trunc() as i64; + let frac = sec - sec.trunc(); + let mut frac_nanos = (frac * 1_000_000_000.0).round() as i64; + + if frac_nanos.abs() >= 1_000_000_000 { + if frac_nanos > 0 { + frac_nanos -= 1_000_000_000; + } else { + frac_nanos += 1_000_000_000; + } + } + + let secs_nanos = sec_int.checked_mul(1_000_000_000)?; + + let total_nanos = hours_nanos + .checked_add(mins_nanos) + .and_then(|v| v.checked_add(secs_nanos)) + .and_then(|v| v.checked_add(frac_nanos))?; + + Some(IntervalMonthDayNano::new(months, total_days, total_nanos)) +} + +#[cfg(test)] +mod tests { + use arrow::array::{Float64Array, Int32Array, IntervalMonthDayNanoArray}; + use arrow::datatypes::Field; + use datafusion_common::config::ConfigOptions; + use datafusion_common::Result; + + use super::*; + fn run_make_interval_month_day_nano(arrs: Vec) -> Result { + make_interval_kernel(&arrs) + } + + #[test] + fn nulls_propagate_per_row() { + let year = Arc::new(Int32Array::from(vec![ + None, + Some(2), + Some(3), + Some(4), + Some(5), + Some(6), + Some(7), + Some(8), + Some(9), + ])); + let month = Arc::new(Int32Array::from(vec![ + 
Some(1), + None, + Some(3), + Some(4), + Some(5), + Some(6), + Some(7), + Some(8), + Some(9), + ])); + let week = Arc::new(Int32Array::from(vec![ + Some(1), + Some(2), + None, + Some(4), + Some(5), + Some(6), + Some(7), + Some(8), + Some(9), + ])); + let day = Arc::new(Int32Array::from(vec![ + Some(1), + Some(2), + Some(3), + None, + Some(5), + Some(6), + Some(7), + Some(8), + Some(9), + ])); + let hour = Arc::new(Int32Array::from(vec![ + Some(1), + Some(2), + Some(3), + Some(4), + None, + Some(6), + Some(7), + Some(8), + Some(9), + ])); + let min = Arc::new(Int32Array::from(vec![ + Some(1), + Some(2), + Some(3), + Some(4), + Some(5), + None, + Some(7), + Some(8), + Some(9), + ])); + let sec = Arc::new(Float64Array::from(vec![ + Some(1.0), + Some(2.0), + Some(3.0), + Some(4.0), + Some(5.0), + Some(6.0), + None, + Some(f64::INFINITY), + Some(f64::NEG_INFINITY), + ])); + + let out = run_make_interval_month_day_nano(vec![ + year, month, week, day, hour, min, sec, + ]) + .unwrap(); + let out = out + .as_any() + .downcast_ref::() + .ok_or_else(|| { + DataFusionError::Internal("expected IntervalMonthDayNano".into()) + }) + .unwrap(); + + for i in 0..out.len() { + assert!(out.is_null(i), "row {i} should be NULL"); + } + } + + #[test] + fn error_months_overflow_should_be_null() { + // months = year*12 + month → NULL + let year = Arc::new(Int32Array::from(vec![Some(i32::MAX)])) as ArrayRef; + let month = Arc::new(Int32Array::from(vec![Some(1)])) as ArrayRef; + let week = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let day = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let hour = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let min = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let sec = Arc::new(Float64Array::from(vec![Some(0.0)])) as ArrayRef; + + let out = run_make_interval_month_day_nano(vec![ + year, month, week, day, hour, min, sec, + ]) + .unwrap(); + let out = out + .as_any() + .downcast_ref::() + .ok_or_else(|| { + 
DataFusionError::Internal("expected IntervalMonthDayNano".into())
+            })
+            .unwrap();
+
+        for i in 0..out.len() {
+            assert!(out.is_null(i), "row {i} should be NULL");
+        }
+    }
+    #[test]
+    fn error_days_overflow_should_be_null() {
+        // total_days = week*7 + day → overflow → NULL
+        let year = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef;
+        let month = Arc::new(Int32Array::from(vec![Some(1)])) as ArrayRef;
+        let week = Arc::new(Int32Array::from(vec![Some(i32::MAX)])) as ArrayRef;
+        let day = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef;
+        let hour = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef;
+        let min = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef;
+        let sec = Arc::new(Float64Array::from(vec![Some(0.0)])) as ArrayRef;
+
+        let out = run_make_interval_month_day_nano(vec![
+            year, month, week, day, hour, min, sec,
+        ])
+        .unwrap();
+        let out = out
+            .as_any()
+            .downcast_ref::<IntervalMonthDayNanoArray>()
+            .ok_or_else(|| {
+                DataFusionError::Internal("expected IntervalMonthDayNano".into())
+            })
+            .unwrap();
+
+        for i in 0..out.len() {
+            assert!(out.is_null(i), "row {i} should be NULL");
+        }
+    }
+    #[test]
+    fn error_min_overflow_should_be_null() {
+        let year = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef;
+        let month = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef;
+        let week = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef;
+        let day = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef;
+        let hour = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef;
+        let min = Arc::new(Int32Array::from(vec![Some(i32::MAX)])) as ArrayRef;
+        let sec = Arc::new(Float64Array::from(vec![Some(0.0)])) as ArrayRef;
+
+        let out = run_make_interval_month_day_nano(vec![
+            year, month, week, day, hour, min, sec,
+        ])
+        .unwrap();
+        let out = out
+            .as_any()
+            .downcast_ref::<IntervalMonthDayNanoArray>()
+            .ok_or_else(|| {
+                DataFusionError::Internal("expected IntervalMonthDayNano".into())
+            })
+            .unwrap();
+
+        for i in 0..out.len() {
+            assert!(out.is_null(i), "row {i} should be NULL");
+        }
+    }
+    
#[test] + fn error_sec_overflow_should_be_null() { + let year = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let month = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let week = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let day = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let hour = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let min = Arc::new(Int32Array::from(vec![Some(0)])) as ArrayRef; + let sec = Arc::new(Float64Array::from(vec![Some(f64::MAX)])) as ArrayRef; + + let out = run_make_interval_month_day_nano(vec![ + year, month, week, day, hour, min, sec, + ]) + .unwrap(); + let out = out + .as_any() + .downcast_ref::() + .ok_or_else(|| { + DataFusionError::Internal("expected IntervalMonthDayNano".into()) + }) + .unwrap(); + + for i in 0..out.len() { + assert!(out.is_null(i), "row {i} should be NULL"); + } + } + + #[test] + fn happy_path_all_present_single_row() { + // 1y 2m 3w 4d 5h 6m 7.25s + let year = Arc::new(Int32Array::from(vec![Some(1)])) as ArrayRef; + let month = Arc::new(Int32Array::from(vec![Some(2)])) as ArrayRef; + let week = Arc::new(Int32Array::from(vec![Some(3)])) as ArrayRef; + let day = Arc::new(Int32Array::from(vec![Some(4)])) as ArrayRef; + let hour = Arc::new(Int32Array::from(vec![Some(5)])) as ArrayRef; + let mins = Arc::new(Int32Array::from(vec![Some(6)])) as ArrayRef; + let secs = Arc::new(Float64Array::from(vec![Some(7.25)])) as ArrayRef; + + let out = run_make_interval_month_day_nano(vec![ + year, month, week, day, hour, mins, secs, + ]) + .unwrap(); + assert_eq!(out.data_type(), &Interval(MonthDayNano)); + + let out = out + .as_any() + .downcast_ref::() + .unwrap(); + assert_eq!(out.len(), 1); + assert_eq!(out.null_count(), 0); + + let v: IntervalMonthDayNano = out.value(0); + assert_eq!(v.months, 12 + 2); // 14 + assert_eq!(v.days, 3 * 7 + 4); // 25 + let expected_nanos = (5_i64 * 3600 + 6 * 60 + 7) * 1_000_000_000 + 250_000_000; + assert_eq!(v.nanoseconds, 
expected_nanos);
+    }
+
+    #[test]
+    fn negative_components_and_fractional_seconds() {
+        // -1y -2m -1w -1d -1h -1m -1.5s
+        let year = Arc::new(Int32Array::from(vec![Some(-1)])) as ArrayRef;
+        let month = Arc::new(Int32Array::from(vec![Some(-2)])) as ArrayRef;
+        let week = Arc::new(Int32Array::from(vec![Some(-1)])) as ArrayRef;
+        let day = Arc::new(Int32Array::from(vec![Some(-1)])) as ArrayRef;
+        let hour = Arc::new(Int32Array::from(vec![Some(-1)])) as ArrayRef;
+        let mins = Arc::new(Int32Array::from(vec![Some(-1)])) as ArrayRef;
+        let secs = Arc::new(Float64Array::from(vec![Some(-1.5)])) as ArrayRef;
+
+        let out = run_make_interval_month_day_nano(vec![
+            year, month, week, day, hour, mins, secs,
+        ])
+        .unwrap();
+        let out = out
+            .as_any()
+            .downcast_ref::<IntervalMonthDayNanoArray>()
+            .unwrap();
+
+        assert_eq!(out.len(), 1);
+        assert_eq!(out.null_count(), 0);
+        let v = out.value(0);
+
+        assert_eq!(v.months, -12 + (-2)); // -14
+        assert_eq!(v.days, -7 + (-1)); // -8
+
+        // -(1h + 1m + 1.5s) in nanos
+        let expected_nanos = -((3600_i64 + 60 + 1) * 1_000_000_000 + 500_000_000);
+        assert_eq!(v.nanoseconds, expected_nanos);
+    }
+
+    fn invoke_make_interval_with_args(
+        args: Vec<ArrayRef>,
+        number_rows: usize,
+    ) -> Result<ColumnarValue> {
+        let arg_fields = args
+            .iter()
+            .map(|arg| Field::new("a", arg.data_type(), true).into())
+            .collect::<Vec<_>>();
+        let args = ScalarFunctionArgs {
+            args,
+            arg_fields,
+            number_rows,
+            return_field: Field::new("f", Interval(MonthDayNano), true).into(),
+            config_options: Arc::new(ConfigOptions::default()),
+        };
+        SparkMakeInterval::new().invoke_with_args(args)
+    }
+
+    #[test]
+    fn zero_args_returns_zero_seconds() -> Result<()> {
+        let number_rows = 2;
+        let res: ColumnarValue = invoke_make_interval_with_args(vec![], number_rows)?;
+
+        match res {
+            ColumnarValue::Array(arr) => {
+                let arr = arr
+                    .as_any()
+                    .downcast_ref::<IntervalMonthDayNanoArray>()
+                    .ok_or_else(|| {
+                        DataFusionError::Internal(
+                            "expected IntervalMonthDayNanoArray".into(),
+                        )
+                    })?;
+                if arr.len() != number_rows {
+                    return 
Err(DataFusionError::Internal(format!( + "expected array length {number_rows}, got {}", + arr.len() + ))); + } + for i in 0..number_rows { + let iv = arr.value(i); + if (iv.months, iv.days, iv.nanoseconds) != (0, 0, 0) { + return Err(DataFusionError::Internal(format!( + "row {i}: expected (0,0,0), got ({},{},{})", + iv.months, iv.days, iv.nanoseconds + ))); + } + } + } + ColumnarValue::Scalar(ScalarValue::IntervalMonthDayNano(Some(iv))) => { + if (iv.months, iv.days, iv.nanoseconds) != (0, 0, 0) { + return Err(DataFusionError::Internal(format!( + "expected scalar 0s, got ({},{},{})", + iv.months, iv.days, iv.nanoseconds + ))); + } + } + other => { + return Err(DataFusionError::Internal(format!( + "expected Array or Scalar IntervalMonthDayNano, got {other:?}" + ))); + } + } + + Ok(()) + } +} diff --git a/datafusion/spark/src/function/datetime/mod.rs b/datafusion/spark/src/function/datetime/mod.rs index 0e37284cc6ea..c4dee81a2cd2 100644 --- a/datafusion/spark/src/function/datetime/mod.rs +++ b/datafusion/spark/src/function/datetime/mod.rs @@ -18,6 +18,7 @@ pub mod date_add; pub mod date_sub; pub mod last_day; +pub mod make_interval; pub mod next_day; use datafusion_expr::ScalarUDF; @@ -27,6 +28,7 @@ use std::sync::Arc; make_udf_function!(date_add::SparkDateAdd, date_add); make_udf_function!(date_sub::SparkDateSub, date_sub); make_udf_function!(last_day::SparkLastDay, last_day); +make_udf_function!(make_interval::SparkMakeInterval, make_interval); make_udf_function!(next_day::SparkNextDay, next_day); pub mod expr_fn { @@ -47,6 +49,11 @@ pub mod expr_fn { "Returns the last day of the month which the date belongs to.", arg1 )); + export_functions!(( + make_interval, + "Make interval from years, months, weeks, days, hours, mins and secs.", + arg1 arg2 + )); // TODO: add once ANSI support is added: // "When both of the input parameters are not NULL and day_of_week is an invalid input, the function throws SparkIllegalArgumentException if spark.sql.ansi.enabled is set to 
true, otherwise NULL." export_functions!(( @@ -57,5 +64,11 @@ pub mod expr_fn { } pub fn functions() -> Vec> { - vec![date_add(), date_sub(), last_day(), next_day()] + vec![ + date_add(), + date_sub(), + last_day(), + make_interval(), + next_day(), + ] } diff --git a/datafusion/sqllogictest/test_files/spark/datetime/make_interval.slt b/datafusion/sqllogictest/test_files/spark/datetime/make_interval.slt new file mode 100644 index 000000000000..d6c5199b87b7 --- /dev/null +++ b/datafusion/sqllogictest/test_files/spark/datetime/make_interval.slt @@ -0,0 +1,112 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This file was originally created by a porting script from: +# https://github.com/lakehq/sail/tree/43b6ed8221de5c4c4adbedbb267ae1351158b43c/crates/sail-spark-connect/tests/gold_data/function +# This file is part of the implementation of the datafusion-spark function library. +# For more information, please see: +# https://github.com/apache/datafusion/issues/15914 + +query IIIIIIR? 
+SELECT + y, m, w, d, h, mi, s, + make_interval(y, m, w, d, h, mi, s) AS interval +FROM VALUES + (NULL,2, 3, 4, 5, 6, 7.5), + (1, NULL,3, 4, 5, 6, 7.5), + (1, 2, NULL,4, 5, 6, 7.5), + (1, 2, 3, NULL,5, 6, 7.5), + (1, 2, 3, 4, NULL,6, 7.5), + (1, 2, 3, 4, 5, NULL,7.5), + (1, 2, 3, 4, 5, 6, CAST(NULL AS DOUBLE)), + (1, 1, 1, 1, 1, 1, 1.0) +AS v(y, m, w, d, h, mi, s); +---- +NULL 2 3 4 5 6 7.5 NULL +1 NULL 3 4 5 6 7.5 NULL +1 2 NULL 4 5 6 7.5 NULL +1 2 3 NULL 5 6 7.5 NULL +1 2 3 4 NULL 6 7.5 NULL +1 2 3 4 5 NULL 7.5 NULL +1 2 3 4 5 6 NULL NULL +1 1 1 1 1 1 1 13 mons 8 days 1 hours 1 mins 1.000000000 secs + +query IIIIIIR? +SELECT + y, m, w, d, h, mi, s, + make_interval(y, m, w, d, h, mi, s) AS interval +FROM VALUES + (0, 0, 0, 0, 0, 0, arrow_cast('NaN','Float64')) +AS v(y, m, w, d, h, mi, s); +---- +0 0 0 0 0 0 NaN NULL + +query IIIIIIR? +SELECT + y, m, w, d, h, mi, s, + make_interval(y, m, w, d, h, mi, s) AS interval +FROM VALUES + (0, 0, 0, 0, 0, 0, CAST('Infinity' AS DOUBLE)) +AS v(y, m, w, d, h, mi, s); +---- +0 0 0 0 0 0 Infinity NULL + +query IIIIIIR? +SELECT + y, m, w, d, h, mi, s, + make_interval(y, m, w, d, h, mi, s) AS interval +FROM VALUES + (0, 0, 0, 0, 0, 0, CAST('-Infinity' AS DOUBLE)) +AS v(y, m, w, d, h, mi, s); +---- +0 0 0 0 0 0 -Infinity NULL + +query ? +SELECT make_interval(2147483647, 1, 0, 0, 0, 0, 0.0); +---- +NULL + +query ? +SELECT make_interval(0, 0, 2147483647, 1, 0, 0, 0.0); +---- +NULL + +query ? +SELECT make_interval(0, 0, 0, 0, 2147483647, 1, 0.0); +---- +NULL + +# Intervals being rendered as empty string, see issue: +# https://github.com/apache/datafusion/issues/17455 +# We expect something like 0.00 secs with query ? +query T +SELECT make_interval(0, 0, 0, 0, 0, 0, 0.0) || ''; +---- +(empty) + +# Intervals being rendered as empty string, see issue: +# https://github.com/apache/datafusion/issues/17455 +# We expect something like 0.00 secs with query ? +query T +SELECT make_interval() || ''; +---- +(empty) + +query ? 
+SELECT INTERVAL '1' SECOND AS iv; +---- +1.000000000 secs