Showing 6 changed files with 269 additions and 11 deletions.
refined/src/main/scala/frameless/refined/RefinedFieldEncoders.scala (79 additions, 0 deletions)
@@ -0,0 +1,79 @@
package frameless.refined

import scala.reflect.ClassTag

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.objects.{
  Invoke, NewInstance, UnwrapOption, WrapOption
}
import org.apache.spark.sql.types._

import eu.timepit.refined.api.RefType

import frameless.{ TypedEncoder, RecordFieldEncoder }

private[refined] trait RefinedFieldEncoders {
  /**
   * @tparam T the underlying type that is refined (e.g. `String`)
   */
  implicit def optionRefined[F[_, _], T, R](
    implicit
    i0: RefType[F],
    i1: TypedEncoder[T],
    i2: ClassTag[F[T, R]],
  ): RecordFieldEncoder[Option[F[T, R]]] =
    RecordFieldEncoder[Option[F[T, R]]](new TypedEncoder[Option[F[T, R]]] {
      def nullable = true

      // `Refined` is a value class:
      // https://github.com/fthomas/refined/blob/master/modules/core/shared/src/main/scala-3.0-/eu/timepit/refined/api/Refined.scala#L8
      def jvmRepr = ObjectType(classOf[Option[F[T, R]]])

      def catalystRepr: DataType = i1.catalystRepr

      val innerJvmRepr = ObjectType(i2.runtimeClass)

      def fromCatalyst(path: Expression): Expression = {
        val javaValue = i1.fromCatalyst(path)
        val value = NewInstance(i2.runtimeClass, Seq(javaValue), innerJvmRepr)

        WrapOption(value, innerJvmRepr)
      }

      @inline def toCatalyst(path: Expression): Expression = {
        val value = UnwrapOption(innerJvmRepr, path)

        val javaValue = Invoke(value, "value", i1.jvmRepr, Nil)

        i1.toCatalyst(javaValue)
      }

      override def toString = s"optionRefined[${i2.runtimeClass.getName}]"
    })

  /**
   * @tparam T the underlying type that is refined (e.g. `String`)
   */
  implicit def refined[F[_, _], T, R](
    implicit
    i0: RefType[F],
    i1: TypedEncoder[T],
    i2: ClassTag[F[T, R]],
  ): RecordFieldEncoder[F[T, R]] =
    RecordFieldEncoder[F[T, R]](new TypedEncoder[F[T, R]] {
      def nullable = i1.nullable

      // `Refined` is a value class:
      // https://github.com/fthomas/refined/blob/master/modules/core/shared/src/main/scala-3.0-/eu/timepit/refined/api/Refined.scala#L8
      def jvmRepr = i1.jvmRepr

      def catalystRepr: DataType = i1.catalystRepr

      def fromCatalyst(path: Expression): Expression =
        i1.fromCatalyst(path)

      @inline def toCatalyst(path: Expression): Expression =
        i1.toCatalyst(path)

      override def toString = s"refined[${i2.runtimeClass.getName}]"
    })
}
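The two `RecordFieldEncoder` instances above are what let a refined type appear as a field of a case class whose `TypedEncoder` is derived: a mandatory field reuses the underlying encoder directly, while an optional field wraps and unwraps the `Refined` value class around it. A minimal sketch of that usage follows (the case class and field names are illustrative, not part of this commit):

```scala
import eu.timepit.refined.types.string.NonEmptyString

import frameless.TypedEncoder
import frameless.refined._ // brings `refined` and `optionRefined` into scope

// Hypothetical record mixing a required and an optional refined field
case class User(name: NonEmptyString, nickname: Option[NonEmptyString])

// Derivation picks up the field encoders above for both refined fields
implicit val userEncoder: TypedEncoder[User] = TypedEncoder.usingDerivation
```

This mirrors what the tests added in this commit do for `A` and `B` below.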
@@ -0,0 +1,33 @@
package frameless

import scala.reflect.ClassTag

import eu.timepit.refined.api.{ RefType, Validate }

package object refined extends RefinedFieldEncoders {
  implicit def refinedInjection[F[_, _], T, R](
    implicit
    refType: RefType[F],
    validate: Validate[T, R]
  ): Injection[F[T, R], T] = Injection(
    refType.unwrap,
    { value =>
      refType.refine[R](value) match {
        case Left(errMsg) =>
          throw new IllegalArgumentException(
            s"Value $value does not satisfy refinement predicate: $errMsg")

        case Right(res) => res
      }
    })

  implicit def refinedEncoder[F[_, _], T, R](
    implicit
    i0: RefType[F],
    i1: Validate[T, R],
    i2: TypedEncoder[T],
    i3: ClassTag[F[T, R]]
  ): TypedEncoder[F[T, R]] = TypedEncoder.usingInjection(
    i3, refinedInjection, i2)
}
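The injection spells out the two directions of the refinement: unwrapping always succeeds, while re-refining a raw value re-runs the predicate and throws the `IllegalArgumentException` above when it fails. A small sketch of that round trip (a hedged example, not part of this commit; it assumes the implicit `Injection` resolves for the `NonEmptyString` alias):

```scala
import eu.timepit.refined.types.string.NonEmptyString

import frameless.Injection
import frameless.refined._ // brings `refinedInjection` into scope

// Resolve the injection for NonEmptyString (an alias for String Refined NonEmpty)
val inj = implicitly[Injection[NonEmptyString, String]]

val nes = NonEmptyString.unsafeFrom("hello")

inj(nes)            // "hello": unwrapping never fails
inj.invert("hello") // re-refines the raw value
// inj.invert("") would throw IllegalArgumentException, since the predicate fails on ""
```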
refined/src/test/scala/frameless/RefinedFieldEncoderTests.scala (120 additions, 0 deletions)
@@ -0,0 +1,120 @@
package frameless

import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{
  IntegerType, ObjectType, StringType, StructField, StructType
}

import org.scalatest.matchers.should.Matchers

class RefinedFieldEncoderTests extends TypedDatasetSuite with Matchers {
  test("Encode a bare refined type") {
    import eu.timepit.refined.auto._
    import eu.timepit.refined.types.string.NonEmptyString

    val encoder: TypedEncoder[NonEmptyString] = {
      import frameless.refined.refinedEncoder
      TypedEncoder[NonEmptyString]
    }

    val ss = session
    import ss.implicits._

    encoder.catalystRepr shouldBe StringType

    val nes: NonEmptyString = "Non Empty String"

    val unsafeDs = TypedDataset.createUnsafe(
      sc.parallelize(Seq(nes.value)).toDF())(encoder)

    val expected = Seq(nes)

    unsafeDs.collect.run() shouldBe expected
  }

  test("Encode case class with a refined field") {
    import RefinedTypesTests._

    // Check jvmRepr
    import org.apache.spark.sql.types.ObjectType

    encoderA.jvmRepr shouldBe ObjectType(classOf[A])

    // Check catalystRepr
    val expectedAStructType = StructType(Seq(
      StructField("a", IntegerType, false),
      StructField("s", StringType, false)))

    encoderA.catalystRepr shouldBe expectedAStructType

    // Check unsafe
    val unsafeDs: TypedDataset[A] = {
      val rdd = sc.parallelize(Seq(Row(as.a, as.s.toString)))
      val df = session.createDataFrame(rdd, expectedAStructType)

      TypedDataset.createUnsafe(df)(encoderA)
    }

    val expected = Seq(as)

    unsafeDs.collect.run() shouldBe expected

    // Check safe
    val safeDs = TypedDataset.create(expected)

    safeDs.collect.run() shouldBe expected
  }

  test("Encode case class with a refined optional field") {
    import RefinedTypesTests._

    // Check jvmRepr
    encoderB.jvmRepr shouldBe ObjectType(classOf[B])

    // Check catalystRepr
    val expectedBStructType = StructType(Seq(
      StructField("a", IntegerType, false),
      StructField("s", StringType, true)))

    encoderB.catalystRepr shouldBe expectedBStructType

    // Check unsafe
    val unsafeDs: TypedDataset[B] = {
      val rdd = sc.parallelize(Seq(
        Row(bs.a, bs.s.mkString),
        Row(2, null.asInstanceOf[String])))

      val df = session.createDataFrame(rdd, expectedBStructType)

      TypedDataset.createUnsafe(df)(encoderB)
    }

    val expected = Seq(bs, B(2, None))

    unsafeDs.collect.run() shouldBe expected

    // Check safe
    val safeDs = TypedDataset.create(expected)

    safeDs.collect.run() shouldBe expected
  }
}

object RefinedTypesTests {
  import eu.timepit.refined.auto._
  import eu.timepit.refined.types.string.NonEmptyString

  case class A(a: Int, s: NonEmptyString)
  case class B(a: Int, s: Option[NonEmptyString])

  val nes: NonEmptyString = "Non Empty String"

  val as = A(-42, nes)
  val bs = B(-42, Option(nes))

  import frameless.refined._ // implicit instances for refined

  implicit val encoderA: TypedEncoder[A] = TypedEncoder.usingDerivation
  implicit val encoderB: TypedEncoder[B] = TypedEncoder.usingDerivation
}