Skip to content

Commit

Permalink
error class name changes, more details to error message
Browse files Browse the repository at this point in the history
error class name changes, more details to error message
  • Loading branch information
SandishKumarHN committed Oct 29, 2022
1 parent dd63be8 commit e5140b0
Show file tree
Hide file tree
Showing 6 changed files with 99 additions and 94 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,8 @@ private[sql] class ProtobufDeserializer(
(updater, ordinal, value) =>
val byte_array = value match {
case s: ByteString => s.toByteArray
case _ => throw QueryCompilationErrors.invalidByteStringFormatError()
case unsupported =>
throw QueryCompilationErrors.invalidByteStringFormatError(unsupported)
}
updater.set(ordinal, byte_array)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ private[sql] object ProtobufUtils extends Logging {
fileDescriptorSet = DescriptorProtos.FileDescriptorSet.parseFrom(dscFile)
} catch {
case ex: InvalidProtocolBufferException =>
throw QueryCompilationErrors.descriptorParseError(ex)
throw QueryCompilationErrors.descriptorParseError(descFilePath, ex)
case ex: IOException =>
throw QueryCompilationErrors.cannotFindDescriptorFileError(descFilePath, ex)
}
Expand All @@ -214,7 +214,7 @@ private[sql] object ProtobufUtils extends Logging {
fileDescriptorList
} catch {
case e: Descriptors.DescriptorValidationException =>
throw QueryCompilationErrors.failedParsingDescriptorError(e)
throw QueryCompilationErrors.failedParsingDescriptorError(descFilePath, e)
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ class ProtobufSerdeSuite extends SharedSparkSession {
protoFile,
Deserializer,
fieldMatch,
errorClass = "PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR",
errorClass = "CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE",
params = Map(
"protobufType" -> "MissMatchTypeInRoot",
"toType" -> toSQLType(CATALYST_STRUCT)))
Expand All @@ -80,7 +80,7 @@ class ProtobufSerdeSuite extends SharedSparkSession {
protoFile,
Serializer,
fieldMatch,
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE",
params = Map(
"protobufType" -> "MissMatchTypeInRoot",
"toType" -> toSQLType(CATALYST_STRUCT)))
Expand All @@ -98,15 +98,15 @@ class ProtobufSerdeSuite extends SharedSparkSession {
protoFile,
Serializer,
BY_NAME,
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE",
params = Map(
"protobufType" -> "FieldMissingInProto",
"toType" -> toSQLType(CATALYST_STRUCT)))

assertFailedConversionMessage(protoFile,
Serializer,
BY_NAME,
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE",
params = Map(
"protobufType" -> "FieldMissingInProto",
"toType" -> toSQLType(nonnullCatalyst)))
Expand All @@ -124,7 +124,7 @@ class ProtobufSerdeSuite extends SharedSparkSession {
Deserializer,
fieldMatch,
catalyst,
errorClass = "PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR",
errorClass = "CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE",
params = Map(
"protobufType" -> "MissMatchTypeInDeepNested",
"toType" -> toSQLType(catalyst)))
Expand All @@ -134,7 +134,7 @@ class ProtobufSerdeSuite extends SharedSparkSession {
Serializer,
fieldMatch,
catalyst,
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE",
params = Map(
"protobufType" -> "MissMatchTypeInDeepNested",
"toType" -> toSQLType(catalyst)))
Expand All @@ -149,7 +149,7 @@ class ProtobufSerdeSuite extends SharedSparkSession {
protoFile,
Serializer,
BY_NAME,
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE",
params = Map(
"protobufType" -> "FieldMissingInSQLRoot",
"toType" -> toSQLType(CATALYST_STRUCT)))
Expand All @@ -166,7 +166,7 @@ class ProtobufSerdeSuite extends SharedSparkSession {
protoNestedFile,
Serializer,
BY_NAME,
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE",
params = Map(
"protobufType" -> "FieldMissingInSQLNested",
"toType" -> toSQLType(CATALYST_STRUCT)))
Expand Down Expand Up @@ -196,10 +196,10 @@ class ProtobufSerdeSuite extends SharedSparkSession {

val expectMsg = serdeFactory match {
case Deserializer =>
s"[PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR] Unable to convert" +
s"[CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE] Unable to convert" +
s" ${protoSchema.getName} of Protobuf to SQL type ${toSQLType(catalystSchema)}."
case Serializer =>
s"[UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE] Unable to convert SQL type" +
s"[UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE] Unable to convert SQL type" +
s" ${toSQLType(catalystSchema)} to Protobuf type ${protoSchema.getName}."
}

Expand Down
110 changes: 55 additions & 55 deletions core/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,29 +17,49 @@
],
"sqlState" : "22005"
},
"CANNOT_CONSTRUCT_PROTOBUF_DESCRIPTOR" : {
"message" : [
"Error constructing FileDescriptor for <descFilePath>"
]
},
"CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE" : {
"message" : [
"Unable to convert <protobufType> of Protobuf to SQL type <toType>."
]
},
"CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_FIELD_TYPE" : {
"message" : [
"Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
]
},
"CANNOT_DECODE_URL" : {
"message" : [
"Cannot decode url : <url>."
],
"sqlState" : "42000"
},
"CANNOT_FIND_PROTOBUF_DESCRIPTOR_FILE_ERROR" : {
"message" : [
"Error reading Protobuf descriptor file at path: <filePath>"
]
},
"CANNOT_INFER_DATE" : {
"message" : [
"Cannot infer date in schema inference when LegacyTimeParserPolicy is \"LEGACY\". Legacy Date formatter does not support strict date format matching which is required to avoid inferring timestamps and other non-date entries to date."
],
"sqlState" : "22007"
},
"CANNOT_LOAD_PROTOBUF_CLASS" : {
"message" : [
"Could not load Protobuf class with name <protobufClassName><message>"
]
},
"CANNOT_PARSE_DECIMAL" : {
"message" : [
"Cannot parse decimal"
],
"sqlState" : "42000"
},
"CANNOT_PARSE_PROTOBUF_DESCRIPTOR" : {
"message" : [
"Error parsing file <descFilePath> descriptor byte[] into Descriptor object"
]
},
"CANNOT_PARSE_TIMESTAMP" : {
"message" : [
"<message>. If necessary set <ansiConfig> to \"false\" to bypass this error."
Expand Down Expand Up @@ -70,11 +90,6 @@
],
"sqlState" : "22005"
},
"CATALYST_TYPE_TO_PROTOBUF_ENUM_TYPE_ERROR" : {
"message" : [
"Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because <data> cannot be written since it's not defined in ENUM <enumString>"
]
},
"COLUMN_NOT_IN_GROUP_BY_CLAUSE" : {
"message" : [
"The expression <expression> is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in `first()` (or `first_value()`) if you don't care which value you get."
Expand Down Expand Up @@ -527,9 +542,9 @@
"Invalid bucket file: <path>"
]
},
"INVALID_BYTE_STRING_ERROR" : {
"INVALID_BYTE_STRING" : {
"message" : [
"Invalid ByteString format"
"The expected format is ByteString, but was <unsupported> (<class>)."
]
},
"INVALID_COLUMN_OR_FIELD_DATA_TYPE" : {
Expand Down Expand Up @@ -576,6 +591,11 @@
"<value> is an invalid property value, please use quotes, e.g. SET <key>=<value>"
]
},
"INVALID_PROTOBUF_MESSAGE_TYPE" : {
"message" : [
"<protobufClassName> is not a Protobuf message type"
]
},
"INVALID_SQL_SYNTAX" : {
"message" : [
"Invalid SQL syntax: <inputString>"
Expand All @@ -594,7 +614,7 @@
}
}
},
"MALFORMED_PROTOBUF_MESSAGE_ERROR" : {
"MALFORMED_PROTOBUF_MESSAGE" : {
"message" : [
"Malformed Protobuf messages are detected in message deserialization. Parse Mode: <failFastMode>. To process malformed protobuf message as null result, try setting the option 'mode' as 'PERMISSIVE'."
]
Expand Down Expand Up @@ -649,22 +669,22 @@
],
"sqlState" : "42000"
},
"NO_CATALYST_TYPE_IN_PROTOBUF_SCHEMA" : {
"message" : [
"Cannot find <catalystFieldPath> in Protobuf schema"
]
},
"NO_HANDLER_FOR_UDAF" : {
"message" : [
"No handler for UDAF '<functionName>'. Use sparkSession.udf.register(...) instead."
]
},
"NO_PROTOBUF_MESSAGE_TYPE_ERROR" : {
"NO_PROTOBUF_MESSAGE_TYPE" : {
"message" : [
"No MessageTypes returned, <descriptorName>"
]
},
"NO_UDF_INTERFACE_ERROR" : {
"NO_SQL_TYPE_IN_PROTOBUF_SCHEMA" : {
"message" : [
"Cannot find <catalystFieldPath> in Protobuf schema"
]
},
"NO_UDF_INTERFACE" : {
"message" : [
"UDF class <className> doesn't implement any UDF interface"
]
Expand Down Expand Up @@ -736,32 +756,22 @@
],
"sqlState" : "42000"
},
"PROTOBUF_CLASS_LOAD_ERROR" : {
"message" : [
"Could not load Protobuf class with name <protobufClassName><message>"
]
},
"PROTOBUF_DEPENDENCY_ERROR" : {
"PROTOBUF_DEPENDENCY_NOT_FOUND" : {
"message" : [
"Could not find dependency: <dependencyName>"
]
},
"PROTOBUF_DESCRIPTOR_ERROR" : {
"message" : [
"Error parsing descriptor byte[] into Descriptor object"
]
},
"PROTOBUF_DESCRIPTOR_PARSING_ERROR" : {
"PROTOBUF_DESCRIPTOR_FILE_NOT_FOUND" : {
"message" : [
"Error constructing FileDescriptor"
"Error reading Protobuf descriptor file at path: <filePath>"
]
},
"PROTOBUF_FIELD_MISSING_ERROR" : {
"PROTOBUF_FIELD_MISSING" : {
"message" : [
"Searching for <field> in Protobuf schema at <protobufSchema> gave <matchSize> matches. Candidates: <matches>"
]
},
"PROTOBUF_FIELD_MISSING_IN_CATALYST_SCHEMA" : {
"PROTOBUF_FIELD_MISSING_IN_SQL_SCHEMA" : {
"message" : [
"Found <field> in Protobuf schema but there is no match in the SQL schema"
]
Expand All @@ -771,29 +781,24 @@
"Type mismatch encountered for field: <field>"
]
},
"PROTOBUF_MESSAGE_TYPE_ERROR" : {
"PROTOBUF_FIELD_TYPE_TO_SQL_TYPE_ERROR" : {
"message" : [
"<protobufClassName> is not a Protobuf message type"
"Cannot convert Protobuf <protobufColumn> to SQL <sqlColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
]
},
"PROTOBUF_RECURSION_ERROR" : {
"PROTOBUF_MESSAGE_NOT_FOUND" : {
"message" : [
"Found recursive reference in Protobuf schema, which can not be processed by Spark: <fieldDescriptor>"
"Unable to locate Message <messageName> in Descriptor"
]
},
"PROTOBUF_TYPE_NOT_SUPPORT_ERROR" : {
"PROTOBUF_TYPE_NOT_SUPPORT" : {
"message" : [
"Protobuf type not yet supported: <protobufType>."
]
},
"PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR" : {
"message" : [
"Unable to convert <protobufType> of Protobuf to SQL type <toType>."
]
},
"PROTOBUF_TYPE_TO_SQL_TYPE_ERROR" : {
"RECURSIVE_PROTOBUF_SCHEMA" : {
"message" : [
"Cannot convert Protobuf <protobufColumn> to SQL <sqlColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
"Found recursive reference in Protobuf schema, which can not be processed by Spark: <fieldDescriptor>"
]
},
"RENAME_SRC_PATH_NOT_FOUND" : {
Expand Down Expand Up @@ -850,9 +855,9 @@
],
"sqlState" : "22023"
},
"SQL_TYPE_TO_PROTOBUF_TYPE_ERROR" : {
"SQL_TYPE_TO_PROTOBUF_ENUM_TYPE_ERROR" : {
"message" : [
"Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
"Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because <data> cannot be written since it's not defined in ENUM <enumString>"
]
},
"TABLE_OR_VIEW_ALREADY_EXISTS" : {
Expand Down Expand Up @@ -887,16 +892,11 @@
"Unable to acquire <requestedBytes> bytes of memory, got <receivedBytes>"
]
},
"UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE" : {
"UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE" : {
"message" : [
"Unable to convert SQL type <toType> to Protobuf type <protobufType>."
]
},
"UNABLE_TO_LOCATE_PROTOBUF_MESSAGE_ERROR" : {
"message" : [
"Unable to locate Message <messageName> in Descriptor"
]
},
"UNKNOWN_PROTOBUF_MESSAGE_TYPE" : {
"message" : [
"Attempting to treat <descriptorName> as a Message, but it was <containingType>"
Expand Down
Loading

0 comments on commit e5140b0

Please sign in to comment.