Skip to content

Commit c2942b7

Browse files
committed
[SPARK-52460][SQL][FOLLOWUP] Rename timeToMicros to makeTime
### What changes were proposed in this pull request? In the PR, I propose to rename the internal method `timeToMicros()` to `makeTime()` because: 1. It produces nanosecond values, not microsecond values. 2. It actually makes a TIME value from time fields, but does not convert TIME values to micros. ### Why are the changes needed? To improve code maintenance, and to avoid confusing other devs. ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? By running the related test suites: ``` $ build/sbt "test:testOnly *DateTimeUtilsSuite" ``` ### Was this patch authored or co-authored using generative AI tooling? No. Closes #51380 from MaxGekk/rename-timeToMicros. Authored-by: Max Gekk <max.gekk@gmail.com> Signed-off-by: Max Gekk <max.gekk@gmail.com>
1 parent 077b27c commit c2942b7

File tree

3 files changed

+9
-9
lines changed

3 files changed

+9
-9
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -555,7 +555,7 @@ case class MakeTime(
555555
override def replacement: Expression = StaticInvoke(
556556
classOf[DateTimeUtils.type],
557557
TimeType(TimeType.MICROS_PRECISION),
558-
"timeToMicros",
558+
"makeTime",
559559
children,
560560
inputTypes
561561
)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -788,14 +788,14 @@ object DateTimeUtils extends SparkDateTimeUtils {
788788
}
789789

790790
/**
791-
* Converts separate time fields in a long that represents microseconds since the start of
791+
* Converts separate time fields into a long that represents nanoseconds since the start of
792792
* the day
793793
* @param hours the hour, from 0 to 23
794794
* @param minutes the minute, from 0 to 59
795795
* @param secsAndMicros the second, from 0 to 59.999999
796-
* @return A time value represented as microseconds since the start of the day
796+
* @return A time value represented as nanoseconds since the start of the day
797797
*/
798-
def timeToMicros(hours: Int, minutes: Int, secsAndMicros: Decimal): Long = {
798+
def makeTime(hours: Int, minutes: Int, secsAndMicros: Decimal): Long = {
799799
try {
800800
val unscaledSecFrac = secsAndMicros.toUnscaledLong
801801
val fullSecs = Math.floorDiv(unscaledSecFrac, MICROS_PER_SECOND)

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1179,21 +1179,21 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
11791179
val secAndMicros = Decimal(sec + (micros / MICROS_PER_SECOND.toFloat), 16, 6)
11801180

11811181
// Valid case
1182-
val microSecsTime = timeToMicros(hour, min, secAndMicros)
1183-
assert(microSecsTime === localTime(hour.toByte, min.toByte, sec.toByte, micros))
1182+
val nanoSecsTime = makeTime(hour, min, secAndMicros)
1183+
assert(nanoSecsTime === localTime(hour.toByte, min.toByte, sec.toByte, micros))
11841184

11851185
// Invalid hour
11861186
checkError(
11871187
exception = intercept[SparkDateTimeException] {
1188-
timeToMicros(-1, min, secAndMicros)
1188+
makeTime(-1, min, secAndMicros)
11891189
},
11901190
condition = "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
11911191
parameters = Map("rangeMessage" -> "Invalid value for HourOfDay (valid values 0 - 23): -1"))
11921192

11931193
// Invalid minute
11941194
checkError(
11951195
exception = intercept[SparkDateTimeException] {
1196-
timeToMicros(hour, -1, secAndMicros)
1196+
makeTime(hour, -1, secAndMicros)
11971197
},
11981198
condition = "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
11991199
parameters = Map("rangeMessage" ->
@@ -1209,7 +1209,7 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
12091209
).foreach { invalidSecond =>
12101210
checkError(
12111211
exception = intercept[SparkDateTimeException] {
1212-
timeToMicros(hour, min, Decimal(invalidSecond, 16, 6))
1212+
makeTime(hour, min, Decimal(invalidSecond, 16, 6))
12131213
},
12141214
condition = "DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION",
12151215
parameters = Map("rangeMessage" ->

0 commit comments

Comments
 (0)