Skip to content

Commit

Permalink
fix timestamp type precision loss
Browse files Browse the repository at this point in the history
  • Loading branch information
gnehil committed Dec 1, 2023
1 parent 3efe6ff commit cf8d35d
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,9 @@ import org.apache.spark.sql.types._
import org.slf4j.LoggerFactory

import java.sql.Timestamp
import java.time.format.DateTimeFormatter
import java.time.{LocalDateTime, ZoneOffset}
import java.util.Locale
import scala.collection.JavaConversions._
import scala.collection.mutable

Expand Down Expand Up @@ -164,8 +166,7 @@ private[spark] object SchemaUtils {
case DoubleType => row.getDouble(ordinal)
case StringType => Option(row.getUTF8String(ordinal)).map(_.toString).getOrElse(DataUtil.NULL_VALUE)
case TimestampType =>
LocalDateTime.ofEpochSecond(row.getLong(ordinal) / 100000, (row.getLong(ordinal) % 1000).toInt, ZoneOffset.UTC)
new Timestamp(row.getLong(ordinal) / 1000).toString
DateTimeUtils.toJavaTimestamp(row.getLong(ordinal)).toString
case DateType => DateTimeUtils.toJavaDate(row.getInt(ordinal)).toString
case BinaryType => row.getBinary(ordinal)
case dt: DecimalType => row.getDecimal(ordinal, dt.precision, dt.scale).toJavaBigDecimal
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,15 @@ package org.apache.doris.spark.sql
import org.apache.doris.sdk.thrift.{TPrimitiveType, TScanColumnDesc}
import org.apache.doris.spark.exception.DorisException
import org.apache.doris.spark.rest.models.{Field, Schema}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.types._
import org.hamcrest.core.StringStartsWith.startsWith
import org.junit.{Assert, Ignore, Test}

import java.sql.Timestamp
import java.time.temporal.ChronoField
import scala.collection.JavaConverters._

@Ignore
class TestSchemaUtils extends ExpectedExceptionTest {
@Test
def testConvertToStruct(): Unit = {
Expand Down Expand Up @@ -54,8 +56,8 @@ class TestSchemaUtils extends ExpectedExceptionTest {
Assert.assertEquals(DataTypes.LongType, SchemaUtils.getCatalystType("BIGINT", 0, 0))
Assert.assertEquals(DataTypes.FloatType, SchemaUtils.getCatalystType("FLOAT", 0, 0))
Assert.assertEquals(DataTypes.DoubleType, SchemaUtils.getCatalystType("DOUBLE", 0, 0))
Assert.assertEquals(DataTypes.StringType, SchemaUtils.getCatalystType("DATE", 0, 0))
Assert.assertEquals(DataTypes.StringType, SchemaUtils.getCatalystType("DATETIME", 0, 0))
Assert.assertEquals(DataTypes.DateType, SchemaUtils.getCatalystType("DATE", 0, 0))
Assert.assertEquals(DataTypes.TimestampType, SchemaUtils.getCatalystType("DATETIME", 0, 0))
Assert.assertEquals(DataTypes.BinaryType, SchemaUtils.getCatalystType("BINARY", 0, 0))
Assert.assertEquals(DecimalType(9, 3), SchemaUtils.getCatalystType("DECIMAL", 9, 3))
Assert.assertEquals(DataTypes.StringType, SchemaUtils.getCatalystType("CHAR", 0, 0))
Expand Down Expand Up @@ -113,4 +115,16 @@ class TestSchemaUtils extends ExpectedExceptionTest {

}

@Test
def rowColumnValueTest(): Unit = {

  // Spark stores TimestampType internally as microseconds since the epoch.
  // Encode the java.sql.Timestamp the same way: whole seconds (getTime with
  // the millisecond part truncated) scaled to micros, plus the sub-second
  // nanos converted to micros.
  val ts = Timestamp.valueOf("2021-01-01 11:12:23.345678")
  val epochMicros = ts.getTime / 1000 * 1000000 + ts.getNanos / 1000
  val internalRow = InternalRow.fromSeq(Seq(epochMicros))

  // Converting the internal value back to a string must keep the full
  // microsecond precision (the bug this commit fixes).
  Assert.assertEquals("2021-01-01 11:12:23.345678", SchemaUtils.rowColumnValue(internalRow, 0, TimestampType))

}

}

0 comments on commit cf8d35d

Please sign in to comment.