Skip to content

Commit

Permalink
address comments
Browse files Browse the repository at this point in the history
  • Loading branch information
zhengruifeng committed Jan 8, 2025
1 parent a1c827d commit 93d0114
Show file tree
Hide file tree
Showing 4 changed files with 49 additions and 26 deletions.
44 changes: 44 additions & 0 deletions core/src/test/scala/org/apache/spark/SerializerHelper.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

/**
 * Test helper mixin providing Java-serialization round-trip utilities,
 * used to verify that objects survive serialize/deserialize cycles.
 */
trait SerializerHelper {

  /** Serializes `obj` to bytes and reads it back; returns the reconstructed object. */
  protected def roundtripSerialize[T](obj: T): T = {
    deserializeFromBytes(serializeToBytes(obj))
  }

  /**
   * Java-serializes `o` into a byte array.
   *
   * Note: `o` must be `java.io.Serializable`, otherwise `writeObject`
   * throws `NotSerializableException` (some callers test exactly that).
   */
  protected def serializeToBytes[T](o: T): Array[Byte] = {
    val baos = new ByteArrayOutputStream
    val oos = new ObjectOutputStream(baos)
    try {
      oos.writeObject(o)
      // Flush before snapshotting the buffer: ObjectOutputStream buffers
      // internally, and the try-block result is computed BEFORE the
      // finally-close runs, so without a flush the array can be truncated.
      oos.flush()
      baos.toByteArray
    } finally {
      oos.close()
    }
  }

  /** Deserializes an object previously produced by `serializeToBytes`. */
  protected def deserializeFromBytes[T](bytes: Array[Byte]): T = {
    val bais = new ByteArrayInputStream(bytes)
    val ois = new ObjectInputStream(bais)
    try {
      // Unchecked cast: caller asserts the expected type, matching the
      // serialize side's type parameter.
      ois.readObject().asInstanceOf[T]
    } finally {
      // Close for symmetry with serializeToBytes; harmless for
      // ByteArrayInputStream but keeps resource handling consistent.
      ois.close()
    }
  }
}
23 changes: 1 addition & 22 deletions core/src/test/scala/org/apache/spark/SparkFunSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

package org.apache.spark

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File, ObjectInputStream, ObjectOutputStream}
import java.io.File
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, Path}
import java.util.{Locale, TimeZone}
Expand Down Expand Up @@ -463,27 +463,6 @@ abstract class SparkFunSuite
condition = "TABLE_OR_VIEW_ALREADY_EXISTS",
parameters = Map("relationName" -> tableName))

protected def roundtripSerialize[T](obj: T): T = {
deserializeFromBytes(serializeToBytes(obj))
}

protected def serializeToBytes[T](o: T): Array[Byte] = {
val baos = new ByteArrayOutputStream
val oos = new ObjectOutputStream(baos)
try {
oos.writeObject(o)
baos.toByteArray
} finally {
oos.close()
}
}

protected def deserializeFromBytes[T](bytes: Array[Byte]): T = {
val bais = new ByteArrayInputStream(bytes)
val ois = new ObjectInputStream(bais)
ois.readObject().asInstanceOf[T]
}

case class ExpectedContext(
contextType: QueryContextType,
objectType: String,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,9 @@ package org.apache.spark.util

import java.io.NotSerializableException

import org.apache.spark.SparkFunSuite
import org.apache.spark.{SerializerHelper, SparkFunSuite}

class BestEffortLazyValSuite extends SparkFunSuite {
class BestEffortLazyValSuite extends SparkFunSuite with SerializerHelper {

test("BestEffortLazy works") {
var test: Option[Object] = None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@
*/
package org.apache.spark.util

import org.apache.spark.SparkFunSuite
import org.apache.spark.{SerializerHelper, SparkFunSuite}

class TransientBestEffortLazyValSuite extends SparkFunSuite {
class TransientBestEffortLazyValSuite extends SparkFunSuite with SerializerHelper {

test("TransientBestEffortLazyVal works") {
var test: Option[Object] = None
Expand Down

0 comments on commit 93d0114

Please sign in to comment.