From e5140b045133226e5d98638518bee4a6f830b115 Mon Sep 17 00:00:00 2001
From: SandishKumarHN
Date: Fri, 28 Oct 2022 13:43:18 -0700
Subject: [PATCH] Rename Protobuf error classes and add more detail to error
 messages

Rename Protobuf error classes and add more detail to error messages.
---
 .../sql/protobuf/ProtobufDeserializer.scala   |   3 +-
 .../sql/protobuf/utils/ProtobufUtils.scala    |   4 +-
 .../sql/protobuf/ProtobufSerdeSuite.scala     |  20 ++--
 .../main/resources/error/error-classes.json   | 110 +++++++++---------
 .../sql/errors/QueryCompilationErrors.scala   |  54 +++++----
 .../sql/errors/QueryExecutionErrors.scala     |   2 +-
 6 files changed, 99 insertions(+), 94 deletions(-)

diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDeserializer.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDeserializer.scala
index 23ef0e21a6ddb..46366ba268b09 100644
--- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDeserializer.scala
+++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDeserializer.scala
@@ -195,7 +195,8 @@ private[sql] class ProtobufDeserializer(
       (updater, ordinal, value) =>
         val byte_array = value match {
           case s: ByteString => s.toByteArray
-          case _ => throw QueryCompilationErrors.invalidByteStringFormatError()
+          case unsupported =>
+            throw QueryCompilationErrors.invalidByteStringFormatError(unsupported)
         }
         updater.set(ordinal, byte_array)

diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala
index 49d4acb1aed9c..9e99c2f6a9045 100644
--- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala
+++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala
@@ -201,7 +201,7 @@ private[sql] object ProtobufUtils extends Logging {
       fileDescriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(dscFile)
     } catch {
       case ex: InvalidProtocolBufferException =>
-        throw QueryCompilationErrors.descrioptorParseError(ex)
+        throw QueryCompilationErrors.descriptorParseError(descFilePath, ex)
       case ex: IOException =>
         throw QueryCompilationErrors.cannotFindDescriptorFileError(descFilePath, ex)
     }
@@ -214,7 +214,7 @@ private[sql] object ProtobufUtils extends Logging {
       fileDescriptorList
     } catch {
       case e: Descriptors.DescriptorValidationException =>
-        throw QueryCompilationErrors.failedParsingDescriptorError(e)
+        throw QueryCompilationErrors.failedParsingDescriptorError(descFilePath, e)
     }
   }

diff --git a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala
index 207df011b4e75..840535654ed6a 100644
--- a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala
+++ b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala
@@ -71,7 +71,7 @@ class ProtobufSerdeSuite extends SharedSparkSession {
       protoFile,
       Deserializer,
       fieldMatch,
-      errorClass = "PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR",
+      errorClass = "CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE",
       params = Map(
         "protobufType" -> "MissMatchTypeInRoot",
         "toType" -> toSQLType(CATALYST_STRUCT)))
@@ -80,7 +80,7 @@ class ProtobufSerdeSuite extends SharedSparkSession {
       protoFile,
       Serializer,
       fieldMatch,
-      errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
+      errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE",
"UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE", params = Map( "protobufType" -> "MissMatchTypeInRoot", "toType" -> toSQLType(CATALYST_STRUCT))) @@ -98,7 +98,7 @@ class ProtobufSerdeSuite extends SharedSparkSession { protoFile, Serializer, BY_NAME, - errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE", + errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE", params = Map( "protobufType" -> "FieldMissingInProto", "toType" -> toSQLType(CATALYST_STRUCT))) @@ -106,7 +106,7 @@ class ProtobufSerdeSuite extends SharedSparkSession { assertFailedConversionMessage(protoFile, Serializer, BY_NAME, - errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE", + errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE", params = Map( "protobufType" -> "FieldMissingInProto", "toType" -> toSQLType(nonnullCatalyst))) @@ -124,7 +124,7 @@ class ProtobufSerdeSuite extends SharedSparkSession { Deserializer, fieldMatch, catalyst, - errorClass = "PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR", + errorClass = "CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE", params = Map( "protobufType" -> "MissMatchTypeInDeepNested", "toType" -> toSQLType(catalyst))) @@ -134,7 +134,7 @@ class ProtobufSerdeSuite extends SharedSparkSession { Serializer, fieldMatch, catalyst, - errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE", + errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE", params = Map( "protobufType" -> "MissMatchTypeInDeepNested", "toType" -> toSQLType(catalyst))) @@ -149,7 +149,7 @@ class ProtobufSerdeSuite extends SharedSparkSession { protoFile, Serializer, BY_NAME, - errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE", + errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE", params = Map( "protobufType" -> "FieldMissingInSQLRoot", "toType" -> toSQLType(CATALYST_STRUCT))) @@ -166,7 +166,7 @@ class ProtobufSerdeSuite extends SharedSparkSession { protoNestedFile, Serializer, BY_NAME, - errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE", + errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE", params = Map( "protobufType" -> "FieldMissingInSQLNested", "toType" -> toSQLType(CATALYST_STRUCT))) @@ -196,10 +196,10 @@ class ProtobufSerdeSuite extends SharedSparkSession { val expectMsg = serdeFactory match { case Deserializer => - s"[PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR] Unable to convert" + + s"[CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE] Unable to convert" + s" ${protoSchema.getName} of Protobuf to SQL type ${toSQLType(catalystSchema)}." case Serializer => - s"[UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE] Unable to convert SQL type" + + s"[UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE] Unable to convert SQL type" + s" ${toSQLType(catalystSchema)} to Protobuf type ${protoSchema.getName}." } diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index eab9a6f3ea1b7..7df7946d28fde 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -17,29 +17,49 @@ ], "sqlState" : "22005" }, + "CANNOT_CONSTRUCT_PROTOBUF_DESCRIPTOR" : { + "message" : [ + "Error constructing FileDescriptor for " + ] + }, + "CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE" : { + "message" : [ + "Unable to convert of Protobuf to SQL type ." + ] + }, + "CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_FIELD_TYPE" : { + "message" : [ + "Cannot convert SQL to Protobuf because schema is incompatible (protobufType = , sqlType = )." + ] + }, "CANNOT_DECODE_URL" : { "message" : [ "Cannot decode url : ." 
], "sqlState" : "42000" }, - "CANNOT_FIND_PROTOBUF_DESCRIPTOR_FILE_ERROR" : { - "message" : [ - "Error reading Protobuf descriptor file at path: " - ] - }, "CANNOT_INFER_DATE" : { "message" : [ "Cannot infer date in schema inference when LegacyTimeParserPolicy is \"LEGACY\". Legacy Date formatter does not support strict date format matching which is required to avoid inferring timestamps and other non-date entries to date." ], "sqlState" : "22007" }, + "CANNOT_LOAD_PROTOBUF_CLASS" : { + "message" : [ + "Could not load Protobuf class with name " + ] + }, "CANNOT_PARSE_DECIMAL" : { "message" : [ "Cannot parse decimal" ], "sqlState" : "42000" }, + "CANNOT_PARSE_PROTOBUF_DESCRIPTOR" : { + "message" : [ + "Error parsing file descriptor byte[] into Descriptor object" + ] + }, "CANNOT_PARSE_TIMESTAMP" : { "message" : [ ". If necessary set to \"false\" to bypass this error." @@ -70,11 +90,6 @@ ], "sqlState" : "22005" }, - "CATALYST_TYPE_TO_PROTOBUF_ENUM_TYPE_ERROR" : { - "message" : [ - "Cannot convert SQL to Protobuf because cannot be written since it's not defined in ENUM " - ] - }, "COLUMN_NOT_IN_GROUP_BY_CLAUSE" : { "message" : [ "The expression is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in `first()` (or `first_value()`) if you don't care which value you get." @@ -527,9 +542,9 @@ "Invalid bucket file: " ] }, - "INVALID_BYTE_STRING_ERROR" : { + "INVALID_BYTE_STRING" : { "message" : [ - "Invalid ByteString format" + "The expected format is ByteString, but was ()." ] }, "INVALID_COLUMN_OR_FIELD_DATA_TYPE" : { @@ -576,6 +591,11 @@ " is an invalid property value, please use quotes, e.g. SET =" ] }, + "INVALID_PROTOBUF_MESSAGE_TYPE" : { + "message" : [ + " is not a Protobuf message type" + ] + }, "INVALID_SQL_SYNTAX" : { "message" : [ "Invalid SQL syntax: " @@ -594,7 +614,7 @@ } } }, - "MALFORMED_PROTOBUF_MESSAGE_ERROR" : { + "MALFORMED_PROTOBUF_MESSAGE" : { "message" : [ "Malformed Protobuf messages are detected in message deserialization. Parse Mode: . To process malformed protobuf message as null result, try setting the option 'mode' as 'PERMISSIVE'." ] @@ -649,22 +669,22 @@ ], "sqlState" : "42000" }, - "NO_CATALYST_TYPE_IN_PROTOBUF_SCHEMA" : { - "message" : [ - "Cannot find in Protobuf schema" - ] - }, "NO_HANDLER_FOR_UDAF" : { "message" : [ "No handler for UDAF ''. Use sparkSession.udf.register(...) instead." ] }, - "NO_PROTOBUF_MESSAGE_TYPE_ERROR" : { + "NO_PROTOBUF_MESSAGE_TYPE" : { "message" : [ "No MessageTypes returned, " ] }, - "NO_UDF_INTERFACE_ERROR" : { + "NO_SQL_TYPE_IN_PROTOBUF_SCHEMA" : { + "message" : [ + "Cannot find in Protobuf schema" + ] + }, + "NO_UDF_INTERFACE" : { "message" : [ "UDF class doesn't implement any UDF interface" ] @@ -736,32 +756,22 @@ ], "sqlState" : "42000" }, - "PROTOBUF_CLASS_LOAD_ERROR" : { - "message" : [ - "Could not load Protobuf class with name " - ] - }, - "PROTOBUF_DEPENDENCY_ERROR" : { + "PROTOBUF_DEPENDENCY_NOT_FOUND" : { "message" : [ "Could not find dependency: " ] }, - "PROTOBUF_DESCRIPTOR_ERROR" : { - "message" : [ - "Error parsing descriptor byte[] into Descriptor object" - ] - }, - "PROTOBUF_DESCRIPTOR_PARSING_ERROR" : { + "PROTOBUF_DESCRIPTOR_FILE_NOT_FOUND" : { "message" : [ - "Error constructing FileDescriptor" + "Error reading Protobuf descriptor file at path: " ] }, - "PROTOBUF_FIELD_MISSING_ERROR" : { + "PROTOBUF_FIELD_MISSING" : { "message" : [ "Searching for in Protobuf schema at gave matches. 
Candidates: " ] }, - "PROTOBUF_FIELD_MISSING_IN_CATALYST_SCHEMA" : { + "PROTOBUF_FIELD_MISSING_IN_SQL_SCHEMA" : { "message" : [ "Found in Protobuf schema but there is no match in the SQL schema" ] @@ -771,29 +781,24 @@ "Type mismatch encountered for field: " ] }, - "PROTOBUF_MESSAGE_TYPE_ERROR" : { + "PROTOBUF_FIELD_TYPE_TO_SQL_TYPE_ERROR" : { "message" : [ - " is not a Protobuf message type" + "Cannot convert Protobuf to SQL because schema is incompatible (protobufType = , sqlType = )." ] }, - "PROTOBUF_RECURSION_ERROR" : { + "PROTOBUF_MESSAGE_NOT_FOUND" : { "message" : [ - "Found recursive reference in Protobuf schema, which can not be processed by Spark: " + "Unable to locate Message in Descriptor" ] }, - "PROTOBUF_TYPE_NOT_SUPPORT_ERROR" : { + "PROTOBUF_TYPE_NOT_SUPPORT" : { "message" : [ "Protobuf type not yet supported: ." ] }, - "PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR" : { - "message" : [ - "Unable to convert of Protobuf to SQL type ." - ] - }, - "PROTOBUF_TYPE_TO_SQL_TYPE_ERROR" : { + "RECURSIVE_PROTOBUF_SCHEMA" : { "message" : [ - "Cannot convert Protobuf to SQL because schema is incompatible (protobufType = , sqlType = )." + "Found recursive reference in Protobuf schema, which can not be processed by Spark: " ] }, "RENAME_SRC_PATH_NOT_FOUND" : { @@ -850,9 +855,9 @@ ], "sqlState" : "22023" }, - "SQL_TYPE_TO_PROTOBUF_TYPE_ERROR" : { + "SQL_TYPE_TO_PROTOBUF_ENUM_TYPE_ERROR" : { "message" : [ - "Cannot convert SQL to Protobuf because schema is incompatible (protobufType = , sqlType = )." + "Cannot convert SQL to Protobuf because cannot be written since it's not defined in ENUM " ] }, "TABLE_OR_VIEW_ALREADY_EXISTS" : { @@ -887,16 +892,11 @@ "Unable to acquire bytes of memory, got " ] }, - "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE" : { + "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE" : { "message" : [ "Unable to convert SQL type to Protobuf type ." 
     ]
   },
-  "UNABLE_TO_LOCATE_PROTOBUF_MESSAGE_ERROR" : {
-    "message" : [
-      "Unable to locate Message <messageName> in Descriptor"
-    ]
-  },
   "UNKNOWN_PROTOBUF_MESSAGE_TYPE" : {
     "message" : [
       "Attempting to treat <descriptorName> as a Message, but it was <containingType>"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 699084bad986d..9b80057181b54 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3005,7 +3005,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
 
   def udfClassDoesNotImplementAnyUDFInterfaceError(className: String): Throwable = {
     new AnalysisException(
-      errorClass = "NO_UDF_INTERFACE_ERROR",
+      errorClass = "NO_UDF_INTERFACE",
       messageParameters = Map("className" -> className))
   }
 
@@ -3217,7 +3217,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       protobufType: String,
       sqlType: DataType): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_TYPE_TO_SQL_TYPE_ERROR",
+      errorClass = "PROTOBUF_FIELD_TYPE_TO_SQL_TYPE_ERROR",
       messageParameters = Map(
         "protobufColumn" -> protobufColumn,
         "sqlColumn" -> toSQLId(sqlColumn),
@@ -3231,7 +3231,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       sqlType: DataType,
       protobufType: String): Throwable = {
     new AnalysisException(
-      errorClass = "SQL_TYPE_TO_PROTOBUF_TYPE_ERROR",
+      errorClass = "CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_FIELD_TYPE",
       messageParameters = Map(
         "sqlColumn" -> toSQLId(sqlColumn),
         "protobufColumn" -> protobufColumn,
@@ -3245,7 +3245,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       data: String,
       enumString: String): Throwable = {
     new AnalysisException(
-      errorClass = "CATALYST_TYPE_TO_PROTOBUF_ENUM_TYPE_ERROR",
+      errorClass = "SQL_TYPE_TO_PROTOBUF_ENUM_TYPE_ERROR",
       messageParameters = Map(
         "sqlColumn" -> toSQLId(sqlColumn),
         "protobufColumn" -> protobufColumn,
@@ -3258,7 +3258,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       sqlType: DataType,
       cause: Throwable): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR",
+      errorClass = "CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE",
       messageParameters = Map(
         "protobufType" -> protobufType,
         "toType" -> toSQLType(sqlType)),
@@ -3270,7 +3270,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       sqlType: DataType,
       cause: Throwable): Throwable = {
     new AnalysisException(
-      errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
+      errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE",
       messageParameters = Map(
         "protobufType" -> protobufType,
         "toType" -> toSQLType(sqlType)),
@@ -3279,7 +3279,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
 
   def protobufTypeUnsupportedYetError(protobufType: String): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_TYPE_NOT_SUPPORT_ERROR",
+      errorClass = "PROTOBUF_TYPE_NOT_SUPPORT",
       messageParameters = Map("protobufType" -> protobufType))
   }
 
@@ -3295,13 +3295,13 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
 
   def cannotFindCatalystTypeInProtobufSchemaError(catalystFieldPath: String): Throwable = {
     new AnalysisException(
-      errorClass = "NO_CATALYST_TYPE_IN_PROTOBUF_SCHEMA",
+      errorClass = "NO_SQL_TYPE_IN_PROTOBUF_SCHEMA",
       messageParameters = Map("catalystFieldPath" -> catalystFieldPath))
   }
 
   def cannotFindProtobufFieldInCatalystError(field: String): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_FIELD_MISSING_IN_CATALYST_SCHEMA",
+      errorClass = "PROTOBUF_FIELD_MISSING_IN_SQL_SCHEMA",
       messageParameters = Map("field" -> field))
   }
 
@@ -3310,7 +3310,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       matchSize: String,
       matches: String): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_FIELD_MISSING_ERROR",
+      errorClass = "PROTOBUF_FIELD_MISSING",
       messageParameters = Map(
         "field" -> field,
         "protobufSchema" -> protobufSchema,
@@ -3320,40 +3320,40 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
 
   def unableToLocateProtobufMessageError(messageName: String): Throwable = {
     new AnalysisException(
-      errorClass = "UNABLE_TO_LOCATE_PROTOBUF_MESSAGE_ERROR",
+      errorClass = "PROTOBUF_MESSAGE_NOT_FOUND",
       messageParameters = Map("messageName" -> messageName))
   }
 
-  def descrioptorParseError(cause: Throwable): Throwable = {
+  def descriptorParseError(descFilePath: String, cause: Throwable): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_DESCRIPTOR_ERROR",
-      messageParameters = Map.empty(),
+      errorClass = "CANNOT_PARSE_PROTOBUF_DESCRIPTOR",
+      messageParameters = Map("descFilePath" -> descFilePath),
       cause = Option(cause.getCause))
   }
 
   def cannotFindDescriptorFileError(filePath: String, cause: Throwable): Throwable = {
     new AnalysisException(
-      errorClass = "CANNOT_FIND_PROTOBUF_DESCRIPTOR_FILE_ERROR",
+      errorClass = "PROTOBUF_DESCRIPTOR_FILE_NOT_FOUND",
       messageParameters = Map("filePath" -> filePath),
       cause = Option(cause.getCause))
   }
 
   def noProtobufMessageTypeReturnError(descriptorName: String): Throwable = {
     new AnalysisException(
-      errorClass = "NO_PROTOBUF_MESSAGE_TYPE_ERROR",
+      errorClass = "NO_PROTOBUF_MESSAGE_TYPE",
       messageParameters = Map("descriptorName" -> descriptorName))
   }
 
-  def failedParsingDescriptorError(cause: Throwable): Throwable = {
+  def failedParsingDescriptorError(descFilePath: String, cause: Throwable): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_DESCRIPTOR_PARSING_ERROR",
-      messageParameters = Map.empty(),
+      errorClass = "CANNOT_CONSTRUCT_PROTOBUF_DESCRIPTOR",
+      messageParameters = Map("descFilePath" -> descFilePath),
       cause = Option(cause.getCause))
   }
 
   def foundRecursionInProtobufSchema(fieldDescriptor: String): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_RECURSION_ERROR",
+      errorClass = "RECURSIVE_PROTOBUF_SCHEMA",
       messageParameters = Map("fieldDescriptor" -> fieldDescriptor))
   }
 
@@ -3369,24 +3369,28 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       cause: Throwable): Throwable = {
     val message = if (hasDots) "" else ". Ensure the class name includes package prefix."
     new AnalysisException(
-      errorClass = "PROTOBUF_CLASS_LOAD_ERROR",
+      errorClass = "CANNOT_LOAD_PROTOBUF_CLASS",
       messageParameters = Map("protobufClassName" -> protobufClassName, "message" -> message),
       cause = Option(cause.getCause))
   }
 
   def protobufMessageTypeError(protobufClassName: String): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_MESSAGE_TYPE_ERROR",
+      errorClass = "INVALID_PROTOBUF_MESSAGE_TYPE",
       messageParameters = Map("protobufClassName" -> protobufClassName))
   }
 
   def protobufDescriptorDependencyError(dependencyName: String): Throwable = {
     new AnalysisException(
-      errorClass = "PROTOBUF_DEPENDENCY_ERROR",
+      errorClass = "PROTOBUF_DEPENDENCY_NOT_FOUND",
       messageParameters = Map("dependencyName" -> dependencyName))
   }
 
-  def invalidByteStringFormatError(): Throwable = {
-    new AnalysisException(errorClass = "INVALID_BYTE_STRING_ERROR", messageParameters = Map.empty)
+  def invalidByteStringFormatError(unsupported: Any): Throwable = {
+    new AnalysisException(
+      errorClass = "INVALID_BYTE_STRING",
+      messageParameters = Map(
+        "unsupported" -> unsupported.toString,
+        "class" -> unsupported.getClass.toString))
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index d9a27a61ad82f..0afeee97e9227 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -2707,7 +2707,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
 
   def malformedProtobufMessageDetectedInMessageParsingError(e: Throwable): Throwable = {
     new SparkException(
-      errorClass = "MALFORMED_PROTOBUF_MESSAGE_ERROR",
+      errorClass = "MALFORMED_PROTOBUF_MESSAGE",
       messageParameters = Map(
         "failFastMode" -> FailFastMode.name),
       cause = e)
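
---

A minimal sketch of what the richer INVALID_BYTE_STRING message looks like after
this change. It is not part of the patch: it assumes a Spark build that includes
the change, and it lives in the org.apache.spark.sql package only because
QueryCompilationErrors is private[sql]; the object name is made up for
illustration.

    package org.apache.spark.sql

    import org.apache.spark.sql.errors.QueryCompilationErrors

    object InvalidByteStringExample {
      def main(args: Array[String]): Unit = {
        // Pass a plain String where a com.google.protobuf.ByteString is expected.
        val err = QueryCompilationErrors.invalidByteStringFormatError("not-a-ByteString")
        // Given the INVALID_BYTE_STRING template above, this prints roughly:
        //   [INVALID_BYTE_STRING] The expected format is ByteString,
        //   but was not-a-ByteString (class java.lang.String).
        // The old error was just "[INVALID_BYTE_STRING_ERROR] Invalid ByteString format",
        // with no hint about the offending value or its runtime class.
        println(err.getMessage)
      }
    }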