Fix Spark cross publish (#580)
* Fix spark cross publish

* Refactor the way CI launches tests

* Make cross spark tests actually work
pomadchin authored Nov 10, 2021
1 parent 2f228f1 commit e850090
Showing 2 changed files with 23 additions and 2 deletions.
6 changes: 6 additions & 0 deletions .github/workflows/ci.yml
@@ -36,6 +36,12 @@ jobs:
 - name: Test & Compute Coverage
   run: sbt ++${{ matrix.scala }} coverage frameless-test
 
+- name: Test & Compute Coverage Spark 3.1.x
+  run: sbt ++${{ matrix.scala }} coverage frameless-test-spark31
+
+- name: Test & Compute Coverage Spark 3.0.x
+  run: sbt ++${{ matrix.scala }} coverage frameless-test-spark30
+
 - name: Upload Codecov Results
   run: codecov -F ${{ matrix.scala }}
19 changes: 17 additions & 2 deletions build.sbt
@@ -16,7 +16,9 @@ val Scala213 = "2.13.7"
 val previousVersion = "0.10.1"
 
 /** A list of projects that can be safely compiled across Scala versions. */
-val projectsCrossVersion = "core" :: "dataset" :: "refined" :: "ml" :: Nil
+val projectsCrossVersion = "core" :: "cats" :: "dataset" :: "refined" :: "ml" :: Nil
+val projectsSpark31 = projectsCrossVersion.head :: projectsCrossVersion.tail.map(_ + "-spark31")
+val projectsSpark30 = projectsCrossVersion.head :: projectsCrossVersion.tail.map(_ + "-spark30")
 
 ThisBuild / versionScheme := Some("semver-spec")
 
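For reference, the two new lists keep core as-is (it is the only project in the list without a Spark-pinned twin in the diff below) and suffix every other project name, so they evaluate to:

  projectsCrossVersion // List("core", "cats", "dataset", "refined", "ml")
  projectsSpark31      // List("core", "cats-spark31", "dataset-spark31", "refined-spark31", "ml-spark31")
  projectsSpark30      // List("core", "cats-spark30", "dataset-spark30", "refined-spark30", "ml-spark30")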
@@ -48,13 +50,18 @@ lazy val root = Project("frameless", file("." + "frameless")).in(file("."))
   .settings(
     /** Not all Spark versions support Scala 2.13. These commands are launched for the supported subset of projects only. */
     commands ++= Seq(
-      commandCrossVersion("frameless-test")(projectsCrossVersion.map(_ + "/test") ::: projectsCrossVersion.map(_ + "/test/coverageReport"), "test" :: "coverageReport" :: Nil).value,
+      // run tests separately for different Spark versions to reduce pressure on CI
+      command("frameless-test")(projectsCrossVersion.map(_ + "/test") ::: projectsCrossVersion.map(_ + "/test/coverageReport")).value,
+      command212("frameless-test-spark31")(projectsSpark31.map(_ + "/test") ::: projectsSpark31.map(_ + "/test/coverageReport")).value,
+      command212("frameless-test-spark30")(projectsSpark30.map(_ + "/test") ::: projectsSpark30.map(_ + "/test/coverageReport")).value,
       commandCrossVersion("frameless-mimaReportBinaryIssues")(projectsCrossVersion.map(_ + "/mimaReportBinaryIssues"), "mimaReportBinaryIssues" :: Nil).value,
       commandCrossVersion("frameless-publish")(projectsCrossVersion.map(_ + "/publish"), "publish" :: Nil).value,
       commandCrossVersion("frameless-publishSigned")(projectsCrossVersion.map(_ + "/publishSigned"), "publishSigned" :: Nil).value,
     )
   )
 
+def command(name: String)(commands: List[String]) = commandCrossVersion(name)(commands, commands)
+def command212(name: String)(commands212: List[String]) = commandCrossVersion(name)(Nil, commands212)
 def commandCrossVersion(name: String)(commands213: List[String], commands212: List[String]) = Def.setting { Command.command(name) { currentState =>
   CrossVersion.partialVersion(scalaVersion.value) match {
     case Some((2, 13)) => commands213 ::: currentState
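The hunk is cut off here, but the surrounding lines pin down the dispatch mechanism. A minimal sketch of how the full helper plausibly reads, assuming the 2.12 branch mirrors the 2.13 branch shown above (a hypothetical reconstruction, not the verbatim repository code):

  def commandCrossVersion(name: String)(commands213: List[String], commands212: List[String]) =
    Def.setting {
      Command.command(name) { currentState =>
        CrossVersion.partialVersion(scalaVersion.value) match {
          case Some((2, 13)) => commands213 ::: currentState // prepend the 2.13 command set to the state
          case Some((2, 12)) => commands212 ::: currentState // prepend the 2.12 command set to the state
          case _             => currentState                 // any other Scala version: leave the state untouched
        }
      }
    }

Read this way, command212 passes Nil as the 2.13 command set, so frameless-test-spark31 and frameless-test-spark30 prepend nothing on Scala 2.13 and are effectively no-ops there; only Scala 2.12 runs the Spark 3.1.x/3.0.x suites, matching the comment about the supported subset of projects.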
@@ -74,12 +81,14 @@ lazy val cats = project
 
 lazy val `cats-spark31` = project
   .settings(name := "frameless-cats-spark31")
+  .settings(sourceDirectory := (cats / sourceDirectory).value)
   .settings(catsSettings)
   .settings(mimaPreviousArtifacts := Set.empty)
   .dependsOn(`dataset-spark31` % "test->test;compile->compile;provided->provided")
 
 lazy val `cats-spark30` = project
   .settings(name := "frameless-cats-spark30")
+  .settings(sourceDirectory := (cats / sourceDirectory).value)
   .settings(catsSettings)
   .settings(mimaPreviousArtifacts := Set.empty)
   .dependsOn(`dataset-spark30` % "test->test;compile->compile;provided->provided")
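The recurring sourceDirectory override here and in the hunks below is the substance of the fix: each Spark-pinned project compiles the base module's source tree against its own Spark version instead of carrying a duplicate copy. A generic sketch of the pattern, with base and base-sparkXY as hypothetical placeholder names:

  // the base project owns the source tree
  lazy val base = project
    .settings(name := "frameless-base")

  // the Spark-pinned twin points its sourceDirectory at the base project, so both
  // artifacts are compiled from identical sources against different Spark dependencies
  lazy val `base-sparkXY` = project
    .settings(name := "frameless-base-sparkXY")
    .settings(sourceDirectory := (base / sourceDirectory).value)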
@@ -92,13 +101,15 @@ lazy val dataset = project
 
 lazy val `dataset-spark31` = project
   .settings(name := "frameless-dataset-spark31")
+  .settings(sourceDirectory := (dataset / sourceDirectory).value)
   .settings(datasetSettings)
   .settings(sparkDependencies(spark31Version))
   .settings(mimaPreviousArtifacts := Set.empty)
   .dependsOn(core % "test->test;compile->compile")
 
 lazy val `dataset-spark30` = project
   .settings(name := "frameless-dataset-spark30")
+  .settings(sourceDirectory := (dataset / sourceDirectory).value)
   .settings(datasetSettings)
   .settings(sparkDependencies(spark30Version))
   .settings(mimaPreviousArtifacts := Set.empty)
@@ -111,11 +122,13 @@ lazy val refined = project
 
 lazy val `refined-spark31` = project
   .settings(name := "frameless-refined-spark31")
+  .settings(sourceDirectory := (refined / sourceDirectory).value)
   .settings(refinedSettings)
   .dependsOn(`dataset-spark31` % "test->test;compile->compile;provided->provided")
 
 lazy val `refined-spark30` = project
   .settings(name := "frameless-refined-spark30")
+  .settings(sourceDirectory := (refined / sourceDirectory).value)
   .settings(refinedSettings)
   .dependsOn(`dataset-spark30` % "test->test;compile->compile;provided->provided")
 
@@ -130,6 +143,7 @@ lazy val ml = project
 
 lazy val `ml-spark31` = project
   .settings(name := "frameless-ml-spark31")
+  .settings(sourceDirectory := (ml / sourceDirectory).value)
   .settings(mlSettings)
   .settings(sparkMlDependencies(spark31Version))
   .settings(mimaPreviousArtifacts := Set.empty)
@@ -140,6 +154,7 @@ lazy val `ml-spark31` = project
 
 lazy val `ml-spark30` = project
   .settings(name := "frameless-ml-spark30")
+  .settings(sourceDirectory := (ml / sourceDirectory).value)
   .settings(mlSettings)
   .settings(sparkMlDependencies(spark30Version))
   .settings(mimaPreviousArtifacts := Set.empty)
