diff --git a/connector/protobuf/pom.xml b/connector/protobuf/pom.xml
index 9296506577ba4..14d790072d41c 100644
--- a/connector/protobuf/pom.xml
+++ b/connector/protobuf/pom.xml
@@ -123,6 +123,7 @@
               com.google.protobuf:protoc:${protobuf.version}
               ${protobuf.version}
+              direct
               src/test/resources/protobuf
diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDeserializer.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDeserializer.scala
index 685f21c1915c3..1c6dbf54ad08e 100644
--- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDeserializer.scala
+++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDeserializer.scala
@@ -64,7 +64,7 @@ private[sql] class ProtobufDeserializer(
       case ise: AnalysisException =>
         throw QueryCompilationErrors.cannotConvertProtobufTypeToCatalystTypeError(
           rootDescriptor.getName,
-          rootCatalystType.sql,
+          rootCatalystType,
           ise)
   }
@@ -170,8 +170,9 @@ private[sql] class ProtobufDeserializer(
       case (INT, ShortType) =>
         (updater, ordinal, value) => updater.setShort(ordinal, value.asInstanceOf[Short])

-      case (BOOLEAN | INT | FLOAT | DOUBLE | LONG | STRING | ENUM | BYTE_STRING,
-          ArrayType(dataType: DataType, containsNull)) if protoType.isRepeated =>
+      case (
+            BOOLEAN | INT | FLOAT | DOUBLE | LONG | STRING | ENUM | BYTE_STRING,
+            ArrayType(dataType: DataType, containsNull)) if protoType.isRepeated =>
         newArrayWriter(protoType, protoPath, catalystPath, dataType, containsNull)

       case (LONG, LongType) =>
@@ -245,7 +246,7 @@ private[sql] class ProtobufDeserializer(
           toFieldStr(catalystPath),
           s"${protoType} ${protoType.toProto.getLabel} ${protoType.getJavaType}" +
             s" ${protoType.getType}",
-          catalystType.sql)
+          catalystType)
     }
   }
diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala
index ee64916dfe14e..b09db6b6b2c23 100644
--- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala
+++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala
@@ -57,7 +57,7 @@ private[sql] class ProtobufSerializer(
       case ise: AnalysisException =>
        throw QueryCompilationErrors.cannotConvertSqlTypeToProtobufError(
          rootDescriptor.getName,
-          rootCatalystType.sql,
+          rootCatalystType,
          ise)
    }
    if (nullable) { (data: Any) =>
@@ -104,7 +104,9 @@ private[sql] class ProtobufSerializer(
        val data = getter.getUTF8String(ordinal).toString
        if (!enumSymbols.contains(data)) {
          throw QueryCompilationErrors.cannotConvertCatalystTypeToProtobufEnumTypeError(
-            toFieldStr(catalystPath), toFieldStr(protoPath), data,
+            toFieldStr(catalystPath),
+            toFieldStr(protoPath),
+            data,
            enumSymbols.mkString("\"", "\", \"", "\""))
        }
        fieldDescriptor.getEnumType.findValueByName(data)
@@ -122,7 +124,8 @@ private[sql] class ProtobufSerializer(
      case (TimestampType, MESSAGE) =>
        (getter, ordinal) =>
          val millis = DateTimeUtils.microsToMillis(getter.getLong(ordinal))
-          Timestamp.newBuilder()
+          Timestamp
+            .newBuilder()
            .setSeconds((millis / 1000))
            .setNanos(((millis % 1000) * 1000000).toInt)
            .build()
@@ -199,7 +202,8 @@ private[sql] class ProtobufSerializer(
          val calendarInterval = IntervalUtils.fromIntervalString(dayTimeIntervalString)
          val millis = DateTimeUtils.microsToMillis(calendarInterval.microseconds)
-          val duration = Duration.newBuilder()
+          val duration = Duration
+            .newBuilder()
            .setSeconds((millis / 1000))
            .setNanos(((millis % 1000) * 1000000).toInt)
@@ -216,7 +220,7 @@ private[sql] class ProtobufSerializer(
        throw QueryCompilationErrors.cannotConvertCatalystTypeToProtobufTypeError(
          toFieldStr(catalystPath),
          toFieldStr(protoPath),
-          catalystType.sql,
+          catalystType,
          s"${fieldDescriptor} ${fieldDescriptor.toProto.getLabel} ${fieldDescriptor.getJavaType}" +
            s" ${fieldDescriptor.getType}")
    }
diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala
index f2127bbf9dc0f..5d7c1f033ba3b 100644
--- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala
+++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala
@@ -79,9 +79,9 @@ private[sql] object ProtobufUtils extends Logging {

    /**
     * Validate that there are no Catalyst fields which don't have a matching Protobuf field,
-     * throwing [[AnalysisException]] if such extra fields are found. If
-     * `ignoreNullable` is false, consider nullable Catalyst fields to be eligible to be an extra
-     * field; otherwise, ignore nullable Catalyst fields when checking for extras.
+     * throwing [[AnalysisException]] if such extra fields are found. If `ignoreNullable` is
+     * false, consider nullable Catalyst fields to be eligible to be an extra field; otherwise,
+     * ignore nullable Catalyst fields when checking for extras.
     */
    def validateNoExtraCatalystFields(ignoreNullable: Boolean): Unit =
      catalystSchema.fields.foreach { sqlField =>
@@ -94,8 +94,8 @@ private[sql] object ProtobufUtils extends Logging {

    /**
     * Validate that there are no Protobuf fields which don't have a matching Catalyst field,
-     * throwing [[AnalysisException]] if such extra fields are found. Only required
-     * (non-nullable) fields are checked; nullable fields are ignored.
+     * throwing [[AnalysisException]] if such extra fields are found. Only required (non-nullable)
+     * fields are checked; nullable fields are ignored.
     */
    def validateNoExtraRequiredProtoFields(): Unit = {
      val extraFields = protoFieldArray.toSet -- matchedFields.map(_.fieldDescriptor)
@@ -183,7 +183,8 @@ private[sql] object ProtobufUtils extends Logging {
    descriptor match {
      case Some(d) => d
      case None =>
-        throw QueryCompilationErrors.unableToLocateProtobuMessageError(messageName) }
+        throw QueryCompilationErrors.unableToLocateProtobufMessageError(messageName)
+    }
  }

  private def parseFileDescriptor(descFilePath: String): Descriptors.FileDescriptor = {
@@ -199,7 +200,7 @@ private[sql] object ProtobufUtils extends Logging {
    }

    val descriptorProto: DescriptorProtos.FileDescriptorProto =
-      fileDescriptorSet.getFile(fileDescriptorSet.getFileList.size() - 1)
+      fileDescriptorSet.getFileList.asScala.last

    var fileDescriptorList = List[Descriptors.FileDescriptor]()
    for (fd <- fileDescriptorSet.getFileList.asScala) {
@@ -209,9 +210,8 @@ private[sql] object ProtobufUtils extends Logging {
      }
    }
    try {
-      val fileDescriptor: Descriptors.FileDescriptor = Descriptors.FileDescriptor.buildFrom(
-        descriptorProto,
-        fileDescriptorList.toArray)
+      val fileDescriptor: Descriptors.FileDescriptor =
+        Descriptors.FileDescriptor.buildFrom(descriptorProto, fileDescriptorList.toArray)
      if (fileDescriptor.getMessageTypes().isEmpty()) {
        throw QueryCompilationErrors.noProtobufMessageTypeReturnError(fileDescriptor.getName())
      }
diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/SchemaConverters.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/SchemaConverters.scala
index 286cb36fb7096..0c1542c39bfc7 100644
--- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/SchemaConverters.scala
+++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/SchemaConverters.scala
@@ -63,12 +63,14 @@ object SchemaConverters {
      case STRING => Some(StringType)
      case BYTE_STRING => Some(BinaryType)
      case ENUM => Some(StringType)
-      case MESSAGE if fd.getMessageType.getName == "Duration" &&
+      case MESSAGE
+          if fd.getMessageType.getName == "Duration" &&
            fd.getMessageType.getFields.size() == 2 &&
            fd.getMessageType.getFields.get(0).getName.equals("seconds") &&
            fd.getMessageType.getFields.get(1).getName.equals("nanos") =>
        Some(DayTimeIntervalType.defaultConcreteType)
-      case MESSAGE if fd.getMessageType.getName == "Timestamp" &&
+      case MESSAGE
+          if fd.getMessageType.getName == "Timestamp" &&
            fd.getMessageType.getFields.size() == 2 &&
            fd.getMessageType.getFields.get(0).getName.equals("seconds") &&
            fd.getMessageType.getFields.get(1).getName.equals("nanos") =>
diff --git a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala
index faa3296c02aa4..182486e3c67b1 100644
--- a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala
+++ b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark.sql.protobuf
 import com.google.protobuf.Descriptors.Descriptor
 import com.google.protobuf.DynamicMessage

-import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.NoopFilters
+import org.apache.spark.sql.catalyst.expressions.Cast.toSQLType
 import org.apache.spark.sql.protobuf.utils.ProtobufUtils
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{IntegerType, StructType}
@@ -163,15 +163,16 @@ class ProtobufSerdeSuite extends SharedSparkSession {
      fieldMatchType: MatchType,
      expectedCauseMessage: String,
      catalystSchema: StructType = CATALYST_STRUCT): Unit = {
-    val e = intercept[AnalysisException] {
+    val e = intercept[Exception] {
      serdeFactory.create(catalystSchema, protoSchema, fieldMatchType)
    }
    val expectMsg = serdeFactory match {
      case Deserializer =>
-        s"Unable to convert ${protoSchema.getName} of Protobuf to SQL type ${catalystSchema.sql}."
+        s"[PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR] Unable to convert" +
+          s" ${protoSchema.getName} of Protobuf to SQL type ${toSQLType(catalystSchema)}."
      case Serializer =>
-        s"Unable to convert SQL type ${catalystSchema.sql} to Protobuf type" +
-          s" ${protoSchema.getName}."
+        s"[UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE] Unable to convert SQL type" +
+          s" ${toSQLType(catalystSchema)} to Protobuf type ${protoSchema.getName}."
    }

    assert(e.getMessage === expectMsg)
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index f14f7e3041d3e..9ce037e961924 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -23,6 +23,11 @@
    ],
    "sqlState" : "42000"
  },
+  "CANNOT_FIND_PROTOBUF_DESCRIPTOR_FILE_ERROR" : {
+    "message" : [
+      "Error reading Protobuf descriptor file at path: <filePath>"
+    ]
+  },
  "CANNOT_INFER_DATE" : {
    "message" : [
      "Cannot infer date in schema inference when LegacyTimeParserPolicy is \"LEGACY\". Legacy Date formatter does not support strict date format matching which is required to avoid inferring timestamps and other non-date entries to date."
@@ -65,6 +70,11 @@
    ],
    "sqlState" : "22005"
  },
+  "CATALYST_TYPE_TO_PROTOBUF_ENUM_TYPE_ERROR" : {
+    "message" : [
+      "Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because <data> cannot be written since it's not defined in ENUM <enumString>"
+    ]
+  },
  "COLUMN_NOT_IN_GROUP_BY_CLAUSE" : {
    "message" : [
      "The expression is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in `first()` (or `first_value()`) if you don't care which value you get."
@@ -460,6 +470,11 @@
      "Invalid bucket file: "
    ]
  },
+  "INVALID_BYTE_STRING_ERROR" : {
+    "message" : [
+      "Invalid ByteString format"
+    ]
+  },
  "INVALID_COLUMN_OR_FIELD_DATA_TYPE" : {
    "message" : [
      "Column or field is of type while it's required to be ."
@@ -572,11 +587,21 @@
    ],
    "sqlState" : "42000"
  },
+  "NO_CATALYST_TYPE_IN_PROTOBUF_SCHEMA" : {
+    "message" : [
+      "Cannot find <catalystFieldPath> in Protobuf schema"
+    ]
+  },
  "NO_HANDLER_FOR_UDAF" : {
    "message" : [
      "No handler for UDAF ''. Use sparkSession.udf.register(...) instead."
    ]
  },
+  "NO_PROTOBUF_MESSAGE_TYPE_ERROR" : {
+    "message" : [
+      "No MessageTypes returned, <descriptorName>"
+    ]
+  },
  "NO_UDF_INTERFACE_ERROR" : {
    "message" : [
      "UDF class doesn't implement any UDF interface"
    ]
@@ -644,6 +669,61 @@
    ],
    "sqlState" : "42000"
  },
+  "PROTOBUF_CLASS_LOAD_ERROR" : {
+    "message" : [
+      "Could not load Protobuf class with name <protobufClassName> <errorMessage>"
+    ]
+  },
+  "PROTOBUF_DESCRIPTOR_ERROR" : {
+    "message" : [
+      "Error parsing descriptor byte[] into Descriptor object Error: <errorMessage>"
+    ]
+  },
+  "PROTOBUF_DESCRIPTOR_PARSING_ERROR" : {
+    "message" : [
+      "Error constructing FileDescriptor, Error: <errorMessage>"
+    ]
+  },
+  "PROTOBUF_FIELD_MISSING_ERROR" : {
+    "message" : [
+      "Searching for <field> in Protobuf schema at <protobufSchema> gave <matchSize> matches. Candidates: <matches>"
+    ]
+  },
+  "PROTOBUF_FIELD_MISSING_IN_CATALYST_SCHEMA" : {
+    "message" : [
+      "Found <field> in Protobuf schema but there is no match in the SQL schema"
+    ]
+  },
+  "PROTOBUF_FIELD_TYPE_MISMATCH" : {
+    "message" : [
+      "Type mismatch encountered for field: <field>"
+    ]
+  },
+  "PROTOBUF_MESSAGE_TYPE_ERROR" : {
+    "message" : [
+      "<protobufClassName> is not a Protobuf message type"
+    ]
+  },
+  "PROTOBUF_RECURSION_ERROR" : {
+    "message" : [
+      "Found recursive reference in Protobuf schema, which can not be processed by Spark: <fieldDescriptor>"
+    ]
+  },
+  "PROTOBUF_TYPE_NOT_SUPPORT_ERROR" : {
+    "message" : [
+      "Protobuf type not yet supported: <protobufType>."
+    ]
+  },
+  "PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR" : {
+    "message" : [
+      "Unable to convert <protobufType> of Protobuf to SQL type <toType>."
+    ]
+  },
+  "PROTOBUF_TYPE_TO_SQL_TYPE_ERROR" : {
+    "message" : [
+      "Cannot convert Protobuf <protobufColumn> to SQL <sqlColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
+    ]
+  },
  "RENAME_SRC_PATH_NOT_FOUND" : {
    "message" : [
      "Failed to rename as was not found"
@@ -698,6 +778,11 @@
    ],
    "sqlState" : "22023"
  },
+  "SQL_TYPE_TO_PROTOBUF_TYPE_ERROR" : {
+    "message" : [
+      "Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
+    ]
+  },
  "TABLE_OR_VIEW_ALREADY_EXISTS" : {
    "message" : [
      "Cannot create table or view because it already exists.",
@@ -730,6 +815,21 @@
      "Unable to acquire bytes of memory, got "
    ]
  },
+  "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE" : {
+    "message" : [
+      "Unable to convert SQL type <toType> to Protobuf type <protobufType>."
+    ]
+  },
+  "UNABLE_TO_LOCATE_PROTOBUF_MESSAGE_ERROR" : {
+    "message" : [
+      "Unable to locate Message <messageName> in Descriptor"
+    ]
+  },
+  "UNKNOWN_PROTOBUF_MESSAGE_TYPE" : {
+    "message" : [
+      "Attempting to treat <descriptorName> as a Message, but it was <containingType>"
+    ]
+  },
  "UNPIVOT_REQUIRES_ATTRIBUTES" : {
    "message" : [
      "UNPIVOT requires all given expressions to be columns when no expressions are given. These are not columns: []."
@@ -4275,105 +4375,5 @@
    "message" : [
      "Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting to -1 or increase the spark driver memory by setting to a higher value"
    ]
-  },
-  "_LEGACY_ERROR_TEMP_2251" : {
-    "message" : [
-      "Cannot convert Protobuf <protobufColumn> to SQL <sqlColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2252" : {
-    "message" : [
-      "Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2253" : {
-    "message" : [
-      "Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because <data> cannot be written since it's not defined in ENUM <enumString>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2254" : {
-    "message" : [
-      "Unable to convert <protobufType> of Protobuf to SQL type <toType>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2255" : {
-    "message" : [
-      "Unable to convert SQL type <toType> to Protobuf type <protobufType>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2256" : {
-    "message" : [
-      "Protobuf type not yet supported: <protobufType>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2257" : {
-    "message" : [
-      "Attempting to treat <descriptorName> as a Message, but it was <containingType>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2258" : {
-    "message" : [
-      "Cannot find <catalystFieldPath> in Protobuf schema"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2259" : {
-    "message" : [
-      "Found <field> in Protobuf schema but there is no match in the SQL schema"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2260" : {
-    "message" : [
-      "Searching for <field> in Protobuf schema at <protobufSchema> gave <matchSize> matches. Candidates: <matches>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2261" : {
-    "message" : [
-      "Unable to locate Message <messageName> in Descriptor"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2262" : {
-    "message" : [
-      "Error parsing descriptor byte[] into Descriptor object Error: <errorMessage>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2263" : {
-    "message" : [
-      "Error reading Protobuf descriptor file at path: <filePath>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2264" : {
-    "message" : [
-      "No MessageTypes returned, <descriptorName>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2265" : {
-    "message" : [
-      "Error constructing FileDescriptor, Error: <errorMessage>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2266" : {
-    "message" : [
-      "Found recursive reference in Protobuf schema, which can not be processed by Spark: <fieldDescriptor>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2267" : {
-    "message" : [
-      "Type mismatch encountered for field: <field>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2268" : {
-    "message" : [
-      "Could not load Protobuf class with name <protobufClassName> <errorMessage>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2269" : {
-    "message" : [
-      "<protobufClassName> is not a Protobuf message type"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2270" : {
-    "message" : [
-      "Invalid ByteString format"
-    ]
-  }
+  }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 7fc07e5d03ddd..14937c6149305 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3216,27 +3216,27 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
      protobufColumn: String,
      sqlColumn: String,
      protobufType: String,
-      sqlType: String): Throwable = {
+      sqlType: DataType): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2251",
+      errorClass = "PROTOBUF_TYPE_TO_SQL_TYPE_ERROR",
      messageParameters = Map(
        "protobufColumn" -> protobufColumn,
        "sqlColumn" -> sqlColumn,
        "protobufType" -> protobufType,
-        "sqlType" -> sqlType))
+        "sqlType" -> toSQLType(sqlType)))
  }

  def cannotConvertCatalystTypeToProtobufTypeError(
      sqlColumn: String,
      protobufColumn: String,
-      sqlType: String,
+      sqlType: DataType,
      protobufType: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2252",
+      errorClass = "SQL_TYPE_TO_PROTOBUF_TYPE_ERROR",
      messageParameters = Map(
        "sqlColumn" -> sqlColumn,
        "protobufColumn" -> protobufColumn,
-        "sqlType" -> sqlType,
+        "sqlType" -> toSQLType(sqlType),
        "protobufType" -> protobufType))
  }

@@ -3246,7 +3246,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
      data: String,
      enumString: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2253",
+      errorClass = "CATALYST_TYPE_TO_PROTOBUF_ENUM_TYPE_ERROR",
      messageParameters = Map(
        "sqlColumn" -> sqlColumn,
        "protobufColumn" -> protobufColumn,
@@ -3254,41 +3254,41 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
        "enumString" -> enumString))
  }

-  def protobufTypeUnsupportedYetError(protobufType: String): Throwable = {
-    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2256",
-      messageParameters = Map("protobufType" -> protobufType))
-  }
-
  def cannotConvertProtobufTypeToCatalystTypeError(
      protobufType: String,
-      sqlType: String,
+      sqlType: DataType,
      e1: Throwable): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2254",
+      errorClass = "PROTOBUF_TYPE_TO_CATALYST_TYPE_ERROR",
      messageParameters = Map(
        "protobufType" -> protobufType,
-        "toType" -> sqlType),
+        "toType" -> toSQLType(sqlType)),
      cause = Some(e1.getCause))
  }

  def cannotConvertSqlTypeToProtobufError(
      protobufType: String,
-      sqlType: String,
+      sqlType: DataType,
      e1: Throwable): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2255",
+      errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_TYPE",
      messageParameters = Map(
        "protobufType" -> protobufType,
-        "toType" -> sqlType),
+        "toType" -> toSQLType(sqlType)),
      cause = Some(e1.getCause))
  }

+  def protobufTypeUnsupportedYetError(protobufType: String): Throwable = {
+    new AnalysisException(
+      errorClass = "PROTOBUF_TYPE_NOT_SUPPORT_ERROR",
+      messageParameters = Map("protobufType" -> protobufType))
+  }
+
  def unknownProtobufMessageTypeError(
      descriptorName: String,
      containingType: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2257",
+      errorClass = "UNKNOWN_PROTOBUF_MESSAGE_TYPE",
      messageParameters = Map(
        "descriptorName" -> descriptorName,
        "containingType" -> containingType))
@@ -3296,13 +3296,13 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {

  def cannotFindCatalystTypeInProtobufSchemaError(catalystFieldPath: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2258",
+      errorClass = "NO_CATALYST_TYPE_IN_PROTOBUF_SCHEMA",
      messageParameters = Map("catalystFieldPath" -> catalystFieldPath))
  }

  def cannotFindProtobufFieldInCatalystError(field: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2259",
+      errorClass = "PROTOBUF_FIELD_MISSING_IN_CATALYST_SCHEMA",
      messageParameters = Map("field" -> field))
  }

@@ -3312,7 +3312,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
      matchSize: String,
      matches: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2260",
+      errorClass = "PROTOBUF_FIELD_MISSING_ERROR",
      messageParameters = Map(
        "field" -> field,
        "protobufSchema" -> protobufSchema,
@@ -3320,54 +3320,54 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
        "matches" -> matches))
  }

-  def unableToLocateProtobuMessageError(messageName: String): Throwable = {
+  def unableToLocateProtobufMessageError(messageName: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2261",
+      errorClass = "UNABLE_TO_LOCATE_PROTOBUF_MESSAGE_ERROR",
      messageParameters = Map("messageName" -> messageName))
  }

  def descrioptorParseError(e1: Throwable): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2262",
+      errorClass = "PROTOBUF_DESCRIPTOR_ERROR",
      messageParameters = Map("errorMessage" -> e1.getMessage()),
      cause = Some(e1.getCause))
  }

  def cannotFindDescriptorFileError(filePath: String, e1: Throwable): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2263",
+      errorClass = "CANNOT_FIND_PROTOBUF_DESCRIPTOR_FILE_ERROR",
      messageParameters = Map("filePath" -> filePath),
      cause = Some(e1.getCause))
  }

  def noProtobufMessageTypeReturnError(descriptorName: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2264",
+      errorClass = "NO_PROTOBUF_MESSAGE_TYPE_ERROR",
      messageParameters = Map("descriptorName" -> descriptorName))
  }

  def failedParsingDescriptorError(e1: Throwable): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2265",
+      errorClass = "PROTOBUF_DESCRIPTOR_PARSING_ERROR",
      messageParameters = Map("errorMessage" -> e1.getMessage()),
      cause = Some(e1.getCause))
  }

  def foundRecursionInProtobufSchema(fieldDescriptor: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2266",
+      errorClass = "PROTOBUF_RECURSION_ERROR",
      messageParameters = Map("fieldDescriptor" -> fieldDescriptor))
  }

  def protobufFieldTypeMismatchError(field: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2267",
+      errorClass = "PROTOBUF_FIELD_TYPE_MISMATCH",
      messageParameters = Map("field" -> field))
  }

  def protobufClassLoadError(protobufClassName: String, errorMessage: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2268",
+      errorClass = "PROTOBUF_CLASS_LOAD_ERROR",
      messageParameters = Map(
        "protobufClassName" -> protobufClassName,
        "errorMessage" -> errorMessage))
@@ -3375,12 +3375,12 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {

  def protobufMessageTypeError(protobufClassName: String): Throwable = {
    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_2269",
+      errorClass = "PROTOBUF_MESSAGE_TYPE_ERROR",
      messageParameters = Map("protobufClassName" -> protobufClassName))
  }

  def invalidByteStringFormatError(): Throwable = {
-    new AnalysisException(errorClass = "_LEGACY_ERROR_TEMP_2270", messageParameters = Map.empty)
+    new AnalysisException(errorClass = "INVALID_BYTE_STRING_ERROR", messageParameters = Map.empty)
  }

  def malformedRecordsDetectedInRecordParsingError(e1: Throwable): Throwable = {