diff --git a/build.sbt b/build.sbt
index acde8144..687a6a1e 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,6 +1,8 @@
 // Your sbt build file. Guides on how to write one can be found at
 // http://www.scala-sbt.org/0.13/docs/index.html
 
+import ReleaseTransformations._
+
 val sparkVer = sys.props.getOrElse("spark.version", "2.3.0")
 val sparkBranch = sparkVer.substring(0, 3)
 val defaultScalaVer = sparkBranch match {
@@ -20,7 +22,7 @@ scalaVersion := scalaVer
 spName := "databricks/spark-deep-learning"
 
 // Don't forget to set the version
-version := s"1.0.0-spark$sparkBranch"
+version := (version in ThisBuild).value + s"-spark$sparkBranch"
 
 // All Spark Packages need a license
 licenses := Seq("Apache-2.0" -> url("http://opensource.org/licenses/Apache-2.0"))
@@ -83,3 +85,13 @@ concurrentRestrictions in Global := Seq(
 autoAPIMappings := true
 
 coverageHighlighting := false
+
+// We only use sbt-release to update version numbers for now.
+releaseProcess := Seq[ReleaseStep](
+  inquireVersions,
+  setReleaseVersion,
+  commitReleaseVersion,
+  tagRelease,
+  setNextVersion,
+  commitNextVersion
+)
diff --git a/project/plugins.sbt b/project/plugins.sbt
index e5cd848d..89f028f9 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -3,3 +3,5 @@ resolvers += "Spark Packages repo" at "https://dl.bintray.com/spark-packages/mav
 addSbtPlugin("org.spark-packages" %% "sbt-spark-package" % "0.2.5")
 // scalacOptions in (Compile,doc) := Seq("-groups", "-implicits")
 addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.0")
+
+addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.8")
diff --git a/version.sbt b/version.sbt
new file mode 100644
index 00000000..dc11afef
--- /dev/null
+++ b/version.sbt
@@ -0,0 +1 @@
+version in ThisBuild := "1.0.1-SNAPSHOT"