From a520cd65619f6d7d525d930b291e17266d807c4b Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 10 Aug 2020 21:23:55 +0100 Subject: [PATCH 1/5] Finish upgrading to protobuf 3.11.4 Because it's bundled inside protoc-jar whereas 3.9.0 is not and fails to download from Maven. --- build.sbt | 5 ++++- project/plugins.sbt | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index 0120cdd368..397f0e5629 100644 --- a/build.sbt +++ b/build.sbt @@ -297,7 +297,10 @@ lazy val zincPersistCore = (project in internalPath / "zinc-persist-core") crossPaths := false, autoScalaLibrary := false, exportJars := true, - ProtobufConfig / protobufRunProtoc := (args => Protoc.runProtoc("-v390" +: args.toArray)), + ProtobufConfig / version := "3.11.4", // sync version w/ plugins.sbt + ProtobufConfig / protobufRunProtoc := { args => + Protoc.runProtoc(s"-v${(ProtobufConfig / version).value}" +: args.toArray) + }, publish / skip := true, assembly / assemblyShadeRules := Seq( ShadeRule diff --git a/project/plugins.sbt b/project/plugins.sbt index 48466ab880..5c67a24dd6 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 +6,7 @@ addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.4.6") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "3.0.2") addSbtPlugin("com.github.gseitz" % "sbt-protobuf" % "0.6.5") -libraryDependencies += "com.github.os72" % "protoc-jar" % "3.11.4" +libraryDependencies += "com.github.os72" % "protoc-jar" % "3.11.4" // sync w/ ProtobufConfig / version addSbtPlugin("com.lightbend" % "sbt-whitesource" % "0.1.18") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") From cdb7ab530ef046240fe0936ac2cbdfd213b9d634 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 10 Aug 2020 21:25:01 +0100 Subject: [PATCH 2/5] Compute source changes using id not file --- .../sbt/internal/inc/IncrementalCommon.scala | 
16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala index 7561f46335..1299a973a5 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala @@ -382,20 +382,22 @@ private[inc] abstract class IncrementalCommon( log.debug(s"current source = $sources") new UnderlyingChanges[VirtualFileRef] { + val sourceIds = sources.map(_.id) + val previousSourceIds = previousSources.map(_.id) + val added0 = java.util.concurrent.ConcurrentHashMap.newKeySet[VirtualFileRef] val changed0 = java.util.concurrent.ConcurrentHashMap.newKeySet[VirtualFileRef] val removed0 = java.util.concurrent.ConcurrentHashMap.newKeySet[VirtualFileRef] val unmodified0 = java.util.concurrent.ConcurrentHashMap.newKeySet[VirtualFileRef] - new ParVector(sources.toVector).foreach { - case f: VirtualFileRef if previousSources.contains(f) => + + new ParVector(sources.toVector).foreach { f => + if (previousSourceIds.contains(f.id)) { if (equivS.equiv(previous.source(f), stamps.source(f))) unmodified0.add(f) else changed0.add(f) - case f: VirtualFileRef => added0.add(f) - } - previousSources.foreach { - case f: VirtualFile => if (!sources.contains(f)) removed0.add(f) - case _ => + } else added0.add(f) } + previousSources.foreach(f => if (!sourceIds.contains(f.id)) removed0.add(f)) + val added = new WrappedSet(added0) val changed = new WrappedSet(changed0) val removed = new WrappedSet(removed0) From 2b2f8b96fc278d2eb609ae8e1983fa323596e641 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 10 Aug 2020 21:26:21 +0100 Subject: [PATCH 3/5] Move hasAnyMacro to Lookup.shouldDoEarlyOutput --- .../scala/sbt/internal/inc/Incremental.scala | 103 +++++++++--------- .../main/scala/sbt/internal/inc/Lookup.scala | 15 +++ 
.../scala/sbt/internal/inc/PickleJar.scala | 18 ++- 3 files changed, 74 insertions(+), 62 deletions(-) diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala index 43284502dc..3d1782c5e8 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala @@ -44,9 +44,6 @@ import xsbti.compile.analysis.{ ReadStamps, Stamp => XStamp } * compatible with the [[sbt.internal.inc.Incremental]] class. */ object Incremental { - def hasAnyMacro(a: Analysis): Boolean = - a.apis.internal.values.exists(p => p.hasMacro) - class PrefixingLogger(val prefix: String)(orig: Logger) extends Logger { def trace(t: => Throwable): Unit = orig.trace(t) def success(message: => String): Unit = orig.success(message) @@ -360,16 +357,6 @@ object Incremental { "All initially invalidated sources:" + initialInvSources + "\n" ) } - def notifyEarlyArtifact(): Unit = - if (options.pipelining) - for { - earlyO <- earlyOutput - pickleJarPath <- jo2o(earlyO.getSingleOutputAsPath()) - p <- progress - } { - PickleJar.touch(pickleJarPath) - p.afterEarlyOutput(!hasAnyMacro(previous)) - } else () val hasModified = initialInvClasses.nonEmpty || initialInvSources.nonEmpty val analysis = withClassfileManager(options, converter, output, outputJarContent) { classfileManager => @@ -388,7 +375,9 @@ object Incremental { 1 ) else { - notifyEarlyArtifact(); previous + if (options.pipelining) + writeEarlyOut(lookup, progress, earlyOutput, previous, new java.util.HashSet) + previous } } (hasModified, analysis) @@ -465,6 +454,22 @@ object Incremental { classfileManager.complete(true) result } + + private[inc] def writeEarlyOut( + lookup: Lookup, + progress: Option[CompileProgress], + earlyOutput: Option[Output], + analysis: Analysis, + knownProducts: java.util.Set[String] + ) = { + for { + earlyO <- earlyOutput + pickleJar <- 
jo2o(earlyO.getSingleOutputAsPath) + } { + PickleJar.write(pickleJar, knownProducts) + progress.foreach(_.afterEarlyOutput(!lookup.shouldDoEarlyOutput(analysis))) + } + } } private object AnalysisCallback { @@ -546,7 +551,7 @@ private final class AnalysisCallback( incHandlerOpt: Option[Incremental.IncrementalCallback], log: Logger ) extends xsbti.AnalysisCallback { - import Incremental.{ hasAnyMacro, CompileCycleResult } + import Incremental.CompileCycleResult // This must have a unique value per AnalysisCallback private[this] val compileStartTime: Long = System.currentTimeMillis() @@ -835,7 +840,7 @@ private final class AnalysisCallback( invalidationResults match { case None => val early = incHandler.previousAnalysisPruned - if (!hasAnyMacro(early)) writeEarlyArtifacts(early) + if (!lookup.shouldDoEarlyOutput(early)) writeEarlyArtifacts(early) else notifyEarlyArifactFailure() case Some(CompileCycleResult(false, _, _)) => notifyEarlyArifactFailure() case _ => () @@ -977,7 +982,7 @@ private final class AnalysisCallback( val a = getAnalysis val CompileCycleResult(continue, invalidations, merged) = incHandler.mergeAndInvalidate(a, false) - if (!hasAnyMacro(merged)) { + if (!lookup.shouldDoEarlyOutput(merged)) { assert( !continue && invalidations.isEmpty, "everything was supposed to be invalidated already" @@ -998,7 +1003,7 @@ private final class AnalysisCallback( // Store invalidations and continuation decision; the analysis will be computed again after Analyze phase. 
invalidationResults = Some(CompileCycleResult(continue, invalidations, Analysis.empty)) // If there will be no more compilation cycles, store the early analysis file and update the pickle jar - if (options.pipelining && !continue && !hasAnyMacro(merged)) { + if (options.pipelining && !continue && !lookup.shouldDoEarlyOutput(merged)) { writeEarlyArtifacts(merged) } } @@ -1010,44 +1015,38 @@ private final class AnalysisCallback( } private[this] var writtenEarlyArtifacts: Boolean = false + private def writeEarlyArtifacts(merged: Analysis): Unit = { writtenEarlyArtifacts = true - // log.info(s"writeEarlyArtifacts to $earlyOutput") - earlyAnalysisStore map { store => - store.set(AnalysisContents.create(merged, currentSetup)) - } - for { - earlyO <- earlyOutput - pickleJarPath <- jo2o(earlyO.getSingleOutputAsPath()) - } { - // List classes defined in the files that were compiled in this run. - val ps = java.util.concurrent.ConcurrentHashMap.newKeySet[String] - val knownProducts: ParVector[VirtualFileRef] = - new ParVector(merged.relations.allSources.toVector) - .flatMap(merged.relations.products) - // extract product paths in parallel - jo2o(output.getSingleOutputAsPath) match { - case Some(so) if so.getFileName.toString.endsWith(".jar") => - knownProducts foreach { product => - new JarUtils.ClassInJar(product.id).toClassFilePath foreach { path => - ps.add(path.replace('\\', '/')) - } + earlyAnalysisStore.foreach(_.set(AnalysisContents.create(merged, currentSetup))) + Incremental.writeEarlyOut(lookup, progress, earlyOutput, merged, knownProducts(merged)) + } + + private def knownProducts(merged: Analysis) = { + // List classes defined in the files that were compiled in this run. 
+ val ps = java.util.concurrent.ConcurrentHashMap.newKeySet[String] + val knownProducts: ParVector[VirtualFileRef] = + new ParVector(merged.relations.allSources.toVector) + .flatMap(merged.relations.products) + // extract product paths in parallel + jo2o(output.getSingleOutputAsPath) match { + case Some(so) if so.getFileName.toString.endsWith(".jar") => + knownProducts foreach { product => + new JarUtils.ClassInJar(product.id).toClassFilePath foreach { path => + ps.add(path.replace('\\', '/')) } - case Some(so) => - knownProducts foreach { product => - val productPath = converter.toPath(product) - try { - ps.add(so.relativize(productPath).toString.replace('\\', '/')) - } catch { - case NonFatal(_) => ps.add(product.id) - } + } + case Some(so) => + knownProducts foreach { product => + val productPath = converter.toPath(product) + try { + ps.add(so.relativize(productPath).toString.replace('\\', '/')) + } catch { + case NonFatal(_) => ps.add(product.id) } - case _ => sys.error(s"unsupported output $output") - } - PickleJar.write(pickleJarPath, ps) - progress foreach { p => - p.afterEarlyOutput(true) - } + } + case _ => sys.error(s"unsupported output $output") } + ps } } diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala index 97b49438a8..21c64d1f76 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala @@ -129,6 +129,21 @@ trait ExternalLookup extends ExternalHooks.Lookup { import scala.collection.JavaConverters._ shouldDoIncrementalCompilation(changedClasses.iterator().asScala.toSet, previousAnalysis) } + + /** + * Used to override whether we should proceed with making an early output. 
+ * + * By default we do not make an early output in the presence of any macros + * because macro expansion (in a downstream subproject) requires the macro implementation + * to be present in bytecode form, rather than just a pickle-containing JAR. + * + * If you're carefully micromanaging the separation of macro implementations + * (e.g. `def impl(c: Context) = ...`) from macro definitions + * (e.g. `def foo: Unit = macro Foo.impl`) you can safely override this. + */ + def shouldDoEarlyOutput(compileAnalysis: CompileAnalysis): Boolean = { + compileAnalysis.asInstanceOf[Analysis].apis.internal.values.exists(_.hasMacro) + } } trait NoopExternalLookup extends ExternalLookup { diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/PickleJar.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/PickleJar.scala index 68c5f367a3..d5b9030c20 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/PickleJar.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/PickleJar.scala @@ -19,20 +19,18 @@ import java.nio.file.attribute.BasicFileAttributes import scala.reflect.io.RootPath object PickleJar { - // create an empty JAR file in case the subproject has no classes. 
- def touch(pickleOut: Path): Unit = { + def write(pickleOut: Path, knownProducts: java.util.Set[String]): Unit = { if (!Files.exists(pickleOut)) { Files.createDirectories(pickleOut.getParent) Files.createFile(pickleOut) } - () - } - def write(pickleOut: Path, knownProducts: java.util.Set[String]): Path = { - val pj = RootPath(pickleOut, writable = false) // so it doesn't delete the file - val result = try Files.walkFileTree(pj.root, deleteUnknowns(knownProducts)) - finally pj.close() - touch(pickleOut) - result + + if (!knownProducts.isEmpty) { + val pj = RootPath(pickleOut, writable = false) // so it doesn't delete the file + try Files.walkFileTree(pj.root, deleteUnknowns(knownProducts)) + finally pj.close() + () + } } def deleteUnknowns(knownProducts: java.util.Set[String]) = new SimpleFileVisitor[Path] { From e085744d9c03a05ee7f4809711cd0515c608caa9 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 10 Aug 2020 21:28:43 +0100 Subject: [PATCH 4/5] Fix extraction of source <-> class mappings for Java This was in the original, to-upstream source, but it got lost somewhere. 
--- .../compiler-bridge/src/main/scala/xsbt/API.scala | 2 +- .../subproject-pipelining-mixed/build.json | 15 +++++++++++++++ .../subproject-pipelining-mixed/changes/B2.scala | 6 ++++++ .../changes/Other.scala | 5 +++++ .../subproject-pipelining-mixed/dep/A.java | 5 +++++ .../dep/incOptions.properties | 1 + .../subproject-pipelining-mixed/test | 9 +++++++++ .../subproject-pipelining-mixed/use/B.scala | 6 ++++++ .../use/incOptions.properties | 1 + 9 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/build.json create mode 100644 zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/changes/B2.scala create mode 100644 zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/changes/Other.scala create mode 100644 zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/dep/A.java create mode 100644 zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/dep/incOptions.properties create mode 100644 zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/test create mode 100644 zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/use/B.scala create mode 100644 zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/use/incOptions.properties diff --git a/internal/compiler-bridge/src/main/scala/xsbt/API.scala b/internal/compiler-bridge/src/main/scala/xsbt/API.scala index e43f1e9dc4..0f3bca882b 100644 --- a/internal/compiler-bridge/src/main/scala/xsbt/API.scala +++ b/internal/compiler-bridge/src/main/scala/xsbt/API.scala @@ -210,7 +210,7 @@ final class API(val global: CallbackGlobal) extends Compat with GlobalHelpers wi !ignoredSymbol(sym) && sym.isStatic && !sym.isImplClass && - !sym.hasFlag(Flags.JAVA) && + (!sym.hasFlag(Flags.JAVA) || global.callback.isPickleJava) && !sym.isNestedClass } } diff --git a/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/build.json 
b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/build.json new file mode 100644 index 0000000000..14886d982e --- /dev/null +++ b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/build.json @@ -0,0 +1,15 @@ +{ + "projects": [ + { + "name": "use", + "dependsOn": [ + "dep" + ], + "scalaVersion": "2.13.3" + }, + { + "name": "dep", + "scalaVersion": "2.13.3" + } + ] +} diff --git a/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/changes/B2.scala b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/changes/B2.scala new file mode 100644 index 0000000000..4fc2c47067 --- /dev/null +++ b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/changes/B2.scala @@ -0,0 +1,6 @@ +package example + +object B { + val y = A.x + 1 + def main(args: Array[String]): Unit = println(y) +} diff --git a/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/changes/Other.scala b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/changes/Other.scala new file mode 100644 index 0000000000..e53184cec5 --- /dev/null +++ b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/changes/Other.scala @@ -0,0 +1,5 @@ +package example + +object Other { + def z = 1 +} diff --git a/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/dep/A.java b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/dep/A.java new file mode 100644 index 0000000000..5bec24925b --- /dev/null +++ b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/dep/A.java @@ -0,0 +1,5 @@ +package example; + +public class A { + public static int x() { return 3; } +} diff --git a/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/dep/incOptions.properties b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/dep/incOptions.properties new file mode 100644 index 0000000000..5260dbd945 --- /dev/null +++ 
b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/dep/incOptions.properties @@ -0,0 +1 @@ +pipelining = true diff --git a/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/test b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/test new file mode 100644 index 0000000000..8bec397f2b --- /dev/null +++ b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/test @@ -0,0 +1,9 @@ +# base-line +> use/compile +$ sleep 1000 + +# 1. make dep recompile something other than A +# 2. make use recompile, needing a pickle for A +$ copy-file changes/Other.scala dep/Other.scala +$ copy-file changes/B2.scala use/B.scala +> use/compile diff --git a/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/use/B.scala b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/use/B.scala new file mode 100644 index 0000000000..20682768f2 --- /dev/null +++ b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/use/B.scala @@ -0,0 +1,6 @@ +package example + +object B { + val y = A.x + def main(args: Array[String]): Unit = println(y) +} diff --git a/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/use/incOptions.properties b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/use/incOptions.properties new file mode 100644 index 0000000000..5260dbd945 --- /dev/null +++ b/zinc/src/sbt-test/source-dependencies/subproject-pipelining-mixed/use/incOptions.properties @@ -0,0 +1 @@ +pipelining = true From 105fcaa5c54c59bd6be819672f7e6b34e71bc852 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 10 Aug 2020 21:29:45 +0100 Subject: [PATCH 5/5] Demote some warnings to echos to avoid failing with -Xfatal-warnings --- internal/compiler-bridge/src/main/scala/xsbt/Dependency.scala | 2 +- internal/compiler-bridge/src/main/scala/xsbt/ExtractAPI.scala | 3 +++ .../compiler-bridge/src/main/scala/xsbt/ExtractUsedNames.scala | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) 
diff --git a/internal/compiler-bridge/src/main/scala/xsbt/Dependency.scala b/internal/compiler-bridge/src/main/scala/xsbt/Dependency.scala index a8a3e87cd5..e8c2404172 100644 --- a/internal/compiler-bridge/src/main/scala/xsbt/Dependency.scala +++ b/internal/compiler-bridge/src/main/scala/xsbt/Dependency.scala @@ -95,7 +95,7 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile with case Some(classOrModuleDef) => memberRef(ClassDependency(classOrModuleDef, dep)) case None => - reporter.warning(unit.position(0), Feedback.OrphanTopLevelImports) + reporter.echo(unit.position(0), Feedback.OrphanTopLevelImports) // package-info.java & empty scala files orphanImportsReported = true } } diff --git a/internal/compiler-bridge/src/main/scala/xsbt/ExtractAPI.scala b/internal/compiler-bridge/src/main/scala/xsbt/ExtractAPI.scala index 4170be7ad0..8cf469a648 100644 --- a/internal/compiler-bridge/src/main/scala/xsbt/ExtractAPI.scala +++ b/internal/compiler-bridge/src/main/scala/xsbt/ExtractAPI.scala @@ -598,6 +598,9 @@ class ExtractAPI[GlobalType <: Global]( "sbt-api: Unexpected nullary method type " + in + " in " + in.owner ) Constants.emptyType + case MethodType(_, _) => + reporter.echo(NoPosition, s"sbt-api: Unhandled method type $in in ${in.owner}") + Constants.emptyType case _ => reporter.warning(NoPosition, "sbt-api: Unhandled type " + t.getClass + " : " + t) Constants.emptyType diff --git a/internal/compiler-bridge/src/main/scala/xsbt/ExtractUsedNames.scala b/internal/compiler-bridge/src/main/scala/xsbt/ExtractUsedNames.scala index e80fe487c2..e2224ce34d 100644 --- a/internal/compiler-bridge/src/main/scala/xsbt/ExtractUsedNames.scala +++ b/internal/compiler-bridge/src/main/scala/xsbt/ExtractUsedNames.scala @@ -118,7 +118,7 @@ class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType) } case None => - reporter.warning(unit.position(0), Feedback.OrphanNames) + reporter.echo(unit.position(0), Feedback.OrphanNames) } }