diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fd949184c..9199c8fe2 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -36,12 +36,6 @@ jobs:
       - name: Test & Compute Coverage
         run: sbt ++${{ matrix.scala }} coverage frameless-test
 
-      - name: Test & Compute Coverage Spark 3.1.x
-        run: sbt ++${{ matrix.scala }} coverage frameless-test-spark31
-
-      - name: Test & Compute Coverage Spark 3.0.x
-        run: sbt ++${{ matrix.scala }} coverage frameless-test-spark30
-
       - name: Upload Codecov Results
         run: codecov -F ${{ matrix.scala }}
 
diff --git a/build.sbt b/build.sbt
index 2d2fc8bdc..1f8ce91da 100644
--- a/build.sbt
+++ b/build.sbt
@@ -17,8 +17,6 @@ val previousVersion = "0.10.1"
 
 /** A list of projects that can be safely compiled across Scala versions. */
 val projectsCrossVersion = "core" :: "cats" :: "dataset" :: "refined" :: "ml" :: Nil
-val projectsSpark31 = projectsCrossVersion.head :: projectsCrossVersion.tail.map(_ + "-spark31")
-val projectsSpark30 = projectsCrossVersion.head :: projectsCrossVersion.tail.map(_ + "-spark30")
 
 ThisBuild / versionScheme := Some("semver-spec")
 
@@ -51,17 +49,13 @@ lazy val root = Project("frameless", file("." + "frameless")).in(file("."))
   /** Not all Spark versions support Scala 2.13. These commands are launched for the supported subset of projects only.
     */
   commands ++= Seq(
     // run tests separately for different Spark versions to reduce pressure on CI
-    command("frameless-test")(projectsCrossVersion.map(_ + "/test") ::: projectsCrossVersion.map(_ + "/test/coverageReport")).value,
-    command212("frameless-test-spark31")(projectsSpark31.map(_ + "/test") ::: projectsSpark31.map(_ + "/test/coverageReport")).value,
-    command212("frameless-test-spark30")(projectsSpark30.map(_ + "/test") ::: projectsSpark30.map(_ + "/test/coverageReport")).value,
+    commandCrossVersion("frameless-test")(projectsCrossVersion.map(_ + "/test") ::: projectsCrossVersion.map(_ + "/test/coverageReport"), "test" :: "coverageReport" :: Nil).value,
     commandCrossVersion("frameless-mimaReportBinaryIssues")(projectsCrossVersion.map(_ + "/mimaReportBinaryIssues"), "mimaReportBinaryIssues" :: Nil).value,
     commandCrossVersion("frameless-publish")(projectsCrossVersion.map(_ + "/publish"), "publish" :: Nil).value,
     commandCrossVersion("frameless-publishSigned")(projectsCrossVersion.map(_ + "/publishSigned"), "publishSigned" :: Nil).value,
   )
 )
-def command(name: String)(commands: List[String]) = commandCrossVersion(name)(commands, commands)
-def command212(name: String)(commands212: List[String]) = commandCrossVersion(name)(Nil, commands212)
 def commandCrossVersion(name: String)(commands213: List[String], commands212: List[String]) = Def.setting { Command.command(name) { currentState =>
   CrossVersion.partialVersion(scalaVersion.value) match {
     case Some((2, 13)) => commands213 ::: currentState
@@ -79,16 +73,16 @@ lazy val cats = project
   .settings(catsSettings)
   .dependsOn(dataset % "test->test;compile->compile;provided->provided")
 
-lazy val `cats-spark31` = (project in file("cats"))
+lazy val `cats-spark31` = project
   .settings(name := "frameless-cats-spark31")
-  .settings(target := file("cats-spark31/target"))
+  .settings(sourceDirectory := (cats / sourceDirectory).value)
   .settings(catsSettings)
   .settings(mimaPreviousArtifacts := Set.empty)
   .dependsOn(`dataset-spark31` % "test->test;compile->compile;provided->provided")
 
-lazy val `cats-spark30` = (project in file("cats"))
+lazy val `cats-spark30` = project
   .settings(name := "frameless-cats-spark30")
-  .settings(target := file("cats-spark30/target"))
+  .settings(sourceDirectory := (cats / sourceDirectory).value)
   .settings(catsSettings)
   .settings(mimaPreviousArtifacts := Set.empty)
   .dependsOn(`dataset-spark30` % "test->test;compile->compile;provided->provided")
@@ -99,17 +93,17 @@ lazy val dataset = project
   .settings(datasetSettings)
   .settings(sparkDependencies(sparkVersion))
   .dependsOn(core % "test->test;compile->compile")
 
-lazy val `dataset-spark31` = (project in file("dataset"))
+lazy val `dataset-spark31` = project
   .settings(name := "frameless-dataset-spark31")
-  .settings(target := file("dataset-spark31/target"))
+  .settings(sourceDirectory := (dataset / sourceDirectory).value)
   .settings(datasetSettings)
   .settings(sparkDependencies(spark31Version))
   .settings(mimaPreviousArtifacts := Set.empty)
   .dependsOn(core % "test->test;compile->compile")
 
-lazy val `dataset-spark30` = (project in file("dataset"))
+lazy val `dataset-spark30` = project
   .settings(name := "frameless-dataset-spark30")
-  .settings(target := file("dataset-spark30/target"))
+  .settings(sourceDirectory := (dataset / sourceDirectory).value)
   .settings(datasetSettings)
   .settings(sparkDependencies(spark30Version))
   .settings(mimaPreviousArtifacts := Set.empty)
@@ -120,15 +114,15 @@ lazy val refined = project
   .settings(refinedSettings)
   .dependsOn(dataset % "test->test;compile->compile;provided->provided")
 
-lazy val `refined-spark31` = (project in file("refined"))
+lazy val `refined-spark31` = project
   .settings(name := "frameless-refined-spark31")
-  .settings(target := file("refined-spark31/target"))
+  .settings(sourceDirectory := (refined / sourceDirectory).value)
   .settings(refinedSettings)
   .dependsOn(`dataset-spark31` % "test->test;compile->compile;provided->provided")
 
-lazy val `refined-spark30` = (project in file("refined"))
+lazy val `refined-spark30` = project
   .settings(name := "frameless-refined-spark30")
-  .settings(target := file("refined-spark30/target"))
+  .settings(sourceDirectory := (refined / sourceDirectory).value)
   .settings(refinedSettings)
   .dependsOn(`dataset-spark30` % "test->test;compile->compile;provided->provided")
@@ -141,9 +135,9 @@ lazy val ml = project
     dataset % "test->test;compile->compile;provided->provided"
   )
 
-lazy val `ml-spark31` = (project in file("ml"))
+lazy val `ml-spark31` = project
   .settings(name := "frameless-ml-spark31")
-  .settings(target := file("ml-spark31/target"))
+  .settings(sourceDirectory := (ml / sourceDirectory).value)
   .settings(mlSettings)
   .settings(sparkMlDependencies(spark31Version))
   .settings(mimaPreviousArtifacts := Set.empty)
@@ -152,9 +146,9 @@ lazy val `ml-spark31` = (project in file("ml"))
     `dataset-spark31` % "test->test;compile->compile;provided->provided"
   )
 
-lazy val `ml-spark30` = (project in file("ml"))
+lazy val `ml-spark30` = project
   .settings(name := "frameless-ml-spark30")
-  .settings(target := file("ml-spark30/target"))
+  .settings(sourceDirectory := (ml / sourceDirectory).value)
   .settings(mlSettings)
   .settings(sparkMlDependencies(spark30Version))
   .settings(mimaPreviousArtifacts := Set.empty)