Skip to content

[SPARK-52586][SQL] Introduce AnyTimeType #51293

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ class DataTypeAstBuilder extends SqlBaseParserBaseVisitor[AnyRef] {
*/
override def visitTimeDataType(ctx: TimeDataTypeContext): DataType = withOrigin(ctx) {
val precision = if (ctx.precision == null) {
TimeType.MICROS_PRECISION
TimeType.DEFAULT_PRECISION
} else {
ctx.precision.getText.toInt
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,3 +175,17 @@ private[spark] object AnsiIntervalType extends AbstractDataType {

override private[sql] def defaultConcreteType: DataType = DayTimeIntervalType()
}

/**
 * The base type of the TIME type family: a TIME value of any valid precision
 * (see the concrete `TimeType(precision)` subclass). Exists so expressions can
 * accept every TIME precision via a single abstract type instead of enumerating
 * a `TypeCollection` of all precisions.
 */
private[sql] abstract class AnyTimeType extends DatetimeType

private[spark] object AnyTimeType extends AbstractDataType {
  // User-facing name shown in error messages, e.g. requiredType "TIME".
  override private[sql] def simpleString: String = "time"

  // Accept every member of the TIME family regardless of precision.
  override private[sql] def acceptsType(other: DataType): Boolean = other match {
    case _: AnyTimeType => true
    case _ => false
  }

  // When a concrete type must be materialized, fall back to the default precision.
  override private[sql] def defaultConcreteType: DataType =
    TimeType(TimeType.DEFAULT_PRECISION)
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ import org.apache.spark.sql.errors.DataTypeErrors
* @since 4.1.0
*/
@Unstable
case class TimeType(precision: Int) extends DatetimeType {
case class TimeType(precision: Int) extends AnyTimeType {

if (precision < TimeType.MIN_PRECISION || precision > TimeType.MAX_PRECISION) {
throw DataTypeErrors.unsupportedTimePrecisionError(precision)
Expand All @@ -51,6 +51,7 @@ object TimeType {
val MIN_PRECISION: Int = 0
val MICROS_PRECISION: Int = 6
val MAX_PRECISION: Int = MICROS_PRECISION
val DEFAULT_PRECISION: Int = MICROS_PRECISION

def apply(): TimeType = new TimeType(MICROS_PRECISION)
def apply(): TimeType = new TimeType(DEFAULT_PRECISION)
}
Original file line number Diff line number Diff line change
Expand Up @@ -2568,10 +2568,7 @@ case class MakeTimestampNTZ(left: Expression, right: Expression)
Seq(left.dataType, right.dataType)
)

override def inputTypes: Seq[AbstractDataType] =
Seq(
DateType,
TypeCollection(TimeType.MIN_PRECISION to TimeType.MAX_PRECISION map TimeType.apply: _*))
override def inputTypes: Seq[AbstractDataType] = Seq(DateType, AnyTimeType)

override def prettyName: String = "make_timestamp_ntz"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ import org.apache.spark.sql.catalyst.util.TimeFormatter
import org.apache.spark.sql.catalyst.util.TypeUtils.ordinalNumber
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.internal.types.StringTypeWithCollation
import org.apache.spark.sql.types.{AbstractDataType, DataType, DecimalType, IntegerType, ObjectType, TimeType, TypeCollection}
import org.apache.spark.sql.types.{AbstractDataType, AnyTimeType, DataType, DecimalType, IntegerType, ObjectType, TimeType}
import org.apache.spark.unsafe.types.UTF8String

/**
Expand Down Expand Up @@ -208,8 +208,7 @@ case class MinutesOfTime(child: Expression)
Seq(child.dataType)
)

override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(TimeType.MIN_PRECISION to TimeType.MAX_PRECISION map TimeType.apply: _*))
override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimeType)

override def children: Seq[Expression] = Seq(child)

Expand Down Expand Up @@ -268,8 +267,7 @@ case class HoursOfTime(child: Expression)
Seq(child.dataType)
)

override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(TimeType.MIN_PRECISION to TimeType.MAX_PRECISION map TimeType.apply: _*))
override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimeType)

override def children: Seq[Expression] = Seq(child)

Expand Down Expand Up @@ -330,8 +328,7 @@ case class SecondsOfTimeWithFraction(child: Expression)
Seq(child.dataType, IntegerType))
}

override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(TimeType.MIN_PRECISION to TimeType.MAX_PRECISION map TimeType.apply: _*))
override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimeType)

override def children: Seq[Expression] = Seq(child)

Expand All @@ -353,8 +350,7 @@ case class SecondsOfTime(child: Expression)
Seq(child.dataType)
)

override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(TimeType.MIN_PRECISION to TimeType.MAX_PRECISION map TimeType.apply: _*))
override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimeType)

override def children: Seq[Expression] = Seq(child)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
"inputSql" : "\"0:0:0\"",
"inputType" : "\"STRING\"",
"paramIndex" : "second",
"requiredType" : "(\"TIME(0)\" or \"TIME(1)\" or \"TIME(2)\" or \"TIME(3)\" or \"TIME(4)\" or \"TIME(5)\" or \"TIME(6)\")",
"requiredType" : "\"TIME\"",
"sqlExpr" : "\"make_timestamp_ntz(DATE '2025-06-20', 0:0:0)\""
},
"queryContext" : [ {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
"inputSql" : "\"0:0:0\"",
"inputType" : "\"STRING\"",
"paramIndex" : "second",
"requiredType" : "(\"TIME(0)\" or \"TIME(1)\" or \"TIME(2)\" or \"TIME(3)\" or \"TIME(4)\" or \"TIME(5)\" or \"TIME(6)\")",
"requiredType" : "\"TIME\"",
"sqlExpr" : "\"make_timestamp_ntz(DATE '2025-06-20', 0:0:0)\""
},
"queryContext" : [ {
Expand Down