Skip to content

Commit

Permalink
[SPARK-41481][CORE][SQL] Reuse INVALID_TYPED_LITERAL instead of `_L…
Browse files Browse the repository at this point in the history
…EGACY_ERROR_TEMP_0020`

### What changes were proposed in this pull request?
This PR aims to reuse error class `INVALID_TYPED_LITERAL` instead of `_LEGACY_ERROR_TEMP_0020`.

### Why are the changes needed?
Proper names of error classes to improve user experience with Spark SQL.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
Pass GitHub Actions.

Closes #39025 from LuciferYang/SPARK-41481.

Authored-by: yangjie01 <yangjie01@baidu.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
  • Loading branch information
LuciferYang authored and MaxGekk committed Dec 13, 2022
1 parent af8dd41 commit 9b69331
Show file tree
Hide file tree
Showing 6 changed files with 96 additions and 59 deletions.
5 changes: 0 additions & 5 deletions core/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -1610,11 +1610,6 @@
"Function trim doesn't support with type <trimOption>. Please use BOTH, LEADING or TRAILING as trim type."
]
},
"_LEGACY_ERROR_TEMP_0020" : {
"message" : [
"Cannot parse the INTERVAL value: <value>."
]
},
"_LEGACY_ERROR_TEMP_0022" : {
"message" : [
"<msg>."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2417,7 +2417,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
IntervalUtils.stringToInterval(UTF8String.fromString(value))
} catch {
case e: IllegalArgumentException =>
val ex = QueryParsingErrors.cannotParseIntervalValueError(value, ctx)
val ex = QueryParsingErrors.cannotParseValueTypeError(valueType, value, ctx)
ex.setStackTrace(e.getStackTrace)
throw ex
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -219,13 +219,6 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase {
ctx)
}

// Removed by this commit: superseded by cannotParseValueTypeError, which raises the
// named error class INVALID_TYPED_LITERAL instead of the legacy placeholder below.
/**
 * Builds the parse-time error raised when a SQL INTERVAL literal cannot be parsed.
 *
 * @param value the raw interval string that failed to parse (becomes the `value`
 *              message parameter of the error template)
 * @param ctx   the typed-literal parser context used to attach the query position
 * @return a [[ParseException]] carrying error class `_LEGACY_ERROR_TEMP_0020`
 */
def cannotParseIntervalValueError(value: String, ctx: TypeConstructorContext): Throwable = {
  new ParseException(
    errorClass = "_LEGACY_ERROR_TEMP_0020",
    messageParameters = Map("value" -> value),
    ctx)
}

def literalValueTypeUnsupportedError(
unsupportedType: String,
supportedTypes: Seq[String],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -602,8 +602,11 @@ class ExpressionParserSuite extends AnalysisTest {
assertEqual("INTERVAL '1 year 2 month'", ymIntervalLiteral)
checkError(
exception = parseException("Interval 'interval 1 yearsss 2 monthsss'"),
errorClass = "_LEGACY_ERROR_TEMP_0020",
parameters = Map("value" -> "interval 1 yearsss 2 monthsss"),
errorClass = "INVALID_TYPED_LITERAL",
parameters = Map(
"valueType" -> "\"INTERVAL\"",
"value" -> "'interval 1 yearsss 2 monthsss'"
),
context = ExpectedContext(
fragment = "Interval 'interval 1 yearsss 2 monthsss'",
start = 0,
Expand All @@ -616,8 +619,11 @@ class ExpressionParserSuite extends AnalysisTest {
assertEqual("INTERVAL '1 day 2 hour 3 minute 4.005006 second'", dtIntervalLiteral)
checkError(
exception = parseException("Interval 'interval 1 daysss 2 hoursss'"),
errorClass = "_LEGACY_ERROR_TEMP_0020",
parameters = Map("value" -> "interval 1 daysss 2 hoursss"),
errorClass = "INVALID_TYPED_LITERAL",
parameters = Map(
"valueType" -> "\"INTERVAL\"",
"value" -> "'interval 1 daysss 2 hoursss'"
),
context = ExpectedContext(
fragment = "Interval 'interval 1 daysss 2 hoursss'",
start = 0,
Expand All @@ -639,8 +645,11 @@ class ExpressionParserSuite extends AnalysisTest {
assertEqual("INTERVAL '3 month 1 hour'", intervalLiteral)
checkError(
exception = parseException("Interval 'interval 3 monthsss 1 hoursss'"),
errorClass = "_LEGACY_ERROR_TEMP_0020",
parameters = Map("value" -> "interval 3 monthsss 1 hoursss"),
errorClass = "INVALID_TYPED_LITERAL",
parameters = Map(
"valueType" -> "\"INTERVAL\"",
"value" -> "'interval 3 monthsss 1 hoursss'"
),
context = ExpectedContext(
fragment = "Interval 'interval 3 monthsss 1 hoursss'",
start = 0,
Expand Down
60 changes: 40 additions & 20 deletions sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
Original file line number Diff line number Diff line change
Expand Up @@ -2398,9 +2398,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "中文 interval 1 day"
"value" : "'中文 interval 1 day'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2419,9 +2421,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "interval中文 1 day"
"value" : "'interval中文 1 day'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2440,9 +2444,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "interval 1中文day"
"value" : "'interval 1中文day'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down Expand Up @@ -2579,9 +2585,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "+"
"value" : "'+'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2600,9 +2608,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "+."
"value" : "'+.'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2621,9 +2631,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "1"
"value" : "'1'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2642,9 +2654,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "1.2"
"value" : "'1.2'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2663,9 +2677,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "- 2"
"value" : "'- 2'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2684,9 +2700,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "1 day -"
"value" : "'1 day -'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2705,9 +2723,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "1 day 1"
"value" : "'1 day 1'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down
60 changes: 40 additions & 20 deletions sql/core/src/test/resources/sql-tests/results/interval.sql.out
Original file line number Diff line number Diff line change
Expand Up @@ -2211,9 +2211,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "中文 interval 1 day"
"value" : "'中文 interval 1 day'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2232,9 +2234,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "interval中文 1 day"
"value" : "'interval中文 1 day'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2253,9 +2257,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "interval 1中文day"
"value" : "'interval 1中文day'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down Expand Up @@ -2392,9 +2398,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "+"
"value" : "'+'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2413,9 +2421,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "+."
"value" : "'+.'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2434,9 +2444,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "1"
"value" : "'1'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2455,9 +2467,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "1.2"
"value" : "'1.2'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2476,9 +2490,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "- 2"
"value" : "'- 2'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2497,9 +2513,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "1 day -"
"value" : "'1 day -'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -2518,9 +2536,11 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
{
"errorClass" : "_LEGACY_ERROR_TEMP_0020",
"errorClass" : "INVALID_TYPED_LITERAL",
"sqlState" : "42000",
"messageParameters" : {
"value" : "1 day 1"
"value" : "'1 day 1'",
"valueType" : "\"INTERVAL\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down

0 comments on commit 9b69331

Please sign in to comment.