Merge pull request #874 from dwijnand/trailing-changes
dwijnand authored Aug 11, 2020
2 parents 217af07 + 105fcaa commit e226ba9
Showing 18 changed files with 142 additions and 74 deletions.
5 changes: 4 additions & 1 deletion build.sbt
@@ -297,7 +297,10 @@ lazy val zincPersistCore = (project in internalPath / "zinc-persist-core")
crossPaths := false,
autoScalaLibrary := false,
exportJars := true,
ProtobufConfig / protobufRunProtoc := (args => Protoc.runProtoc("-v390" +: args.toArray)),
ProtobufConfig / version := "3.11.4", // sync version w/ plugins.sbt
ProtobufConfig / protobufRunProtoc := { args =>
Protoc.runProtoc(s"-v${(ProtobufConfig / version).value}" +: args.toArray)
},
publish / skip := true,
assembly / assemblyShadeRules := Seq(
ShadeRule
2 changes: 1 addition & 1 deletion internal/compiler-bridge/src/main/scala/xsbt/API.scala
@@ -210,7 +210,7 @@ final class API(val global: CallbackGlobal) extends Compat with GlobalHelpers wi
!ignoredSymbol(sym) &&
sym.isStatic &&
!sym.isImplClass &&
!sym.hasFlag(Flags.JAVA) &&
(!sym.hasFlag(Flags.JAVA) || global.callback.isPickleJava) &&
!sym.isNestedClass
}
}
2 changes: 1 addition & 1 deletion internal/compiler-bridge/src/main/scala/xsbt/Dependency.scala
@@ -95,7 +95,7 @@ final class Dependency(val global: CallbackGlobal) extends LocateClassFile with
case Some(classOrModuleDef) =>
memberRef(ClassDependency(classOrModuleDef, dep))
case None =>
reporter.warning(unit.position(0), Feedback.OrphanTopLevelImports)
reporter.echo(unit.position(0), Feedback.OrphanTopLevelImports) // package-info.java & empty scala files
orphanImportsReported = true
}
}
3 changes: 3 additions & 0 deletions internal/compiler-bridge/src/main/scala/xsbt/ExtractAPI.scala
@@ -598,6 +598,9 @@ class ExtractAPI[GlobalType <: Global](
"sbt-api: Unexpected nullary method type " + in + " in " + in.owner
)
Constants.emptyType
case MethodType(_, _) =>
reporter.echo(NoPosition, s"sbt-api: Unhandled method type $in in ${in.owner}")
Constants.emptyType
case _ =>
reporter.warning(NoPosition, "sbt-api: Unhandled type " + t.getClass + " : " + t)
Constants.emptyType
2 changes: 1 addition & 1 deletion internal/compiler-bridge/src/main/scala/xsbt/ExtractUsedNames.scala
@@ -118,7 +118,7 @@ class ExtractUsedNames[GlobalType <: CallbackGlobal](val global: GlobalType)
}

case None =>
reporter.warning(unit.position(0), Feedback.OrphanNames)
reporter.echo(unit.position(0), Feedback.OrphanNames)
}
}

103 changes: 51 additions & 52 deletions internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala
@@ -44,9 +44,6 @@ import xsbti.compile.analysis.{ ReadStamps, Stamp => XStamp }
* compatible with the [[sbt.internal.inc.Incremental]] class.
*/
object Incremental {
def hasAnyMacro(a: Analysis): Boolean =
a.apis.internal.values.exists(p => p.hasMacro)

class PrefixingLogger(val prefix: String)(orig: Logger) extends Logger {
def trace(t: => Throwable): Unit = orig.trace(t)
def success(message: => String): Unit = orig.success(message)
@@ -360,16 +357,6 @@ object Incremental {
"All initially invalidated sources:" + initialInvSources + "\n"
)
}
def notifyEarlyArtifact(): Unit =
if (options.pipelining)
for {
earlyO <- earlyOutput
pickleJarPath <- jo2o(earlyO.getSingleOutputAsPath())
p <- progress
} {
PickleJar.touch(pickleJarPath)
p.afterEarlyOutput(!hasAnyMacro(previous))
} else ()
val hasModified = initialInvClasses.nonEmpty || initialInvSources.nonEmpty
val analysis = withClassfileManager(options, converter, output, outputJarContent) {
classfileManager =>
@@ -388,7 +375,9 @@
1
)
else {
notifyEarlyArtifact(); previous
if (options.pipelining)
writeEarlyOut(lookup, progress, earlyOutput, previous, new java.util.HashSet)
previous
}
}
(hasModified, analysis)
@@ -465,6 +454,22 @@
classfileManager.complete(true)
result
}

private[inc] def writeEarlyOut(
lookup: Lookup,
progress: Option[CompileProgress],
earlyOutput: Option[Output],
analysis: Analysis,
knownProducts: java.util.Set[String]
) = {
for {
earlyO <- earlyOutput
pickleJar <- jo2o(earlyO.getSingleOutputAsPath)
} {
PickleJar.write(pickleJar, knownProducts)
progress.foreach(_.afterEarlyOutput(!lookup.shouldDoEarlyOutput(analysis)))
}
}
}

private object AnalysisCallback {
@@ -546,7 +551,7 @@ private final class AnalysisCallback(
incHandlerOpt: Option[Incremental.IncrementalCallback],
log: Logger
) extends xsbti.AnalysisCallback {
import Incremental.{ hasAnyMacro, CompileCycleResult }
import Incremental.CompileCycleResult

// This must have a unique value per AnalysisCallback
private[this] val compileStartTime: Long = System.currentTimeMillis()
@@ -835,7 +840,7 @@
invalidationResults match {
case None =>
val early = incHandler.previousAnalysisPruned
if (!hasAnyMacro(early)) writeEarlyArtifacts(early)
if (!lookup.shouldDoEarlyOutput(early)) writeEarlyArtifacts(early)
else notifyEarlyArifactFailure()
case Some(CompileCycleResult(false, _, _)) => notifyEarlyArifactFailure()
case _ => ()
@@ -977,7 +982,7 @@
val a = getAnalysis
val CompileCycleResult(continue, invalidations, merged) =
incHandler.mergeAndInvalidate(a, false)
if (!hasAnyMacro(merged)) {
if (!lookup.shouldDoEarlyOutput(merged)) {
assert(
!continue && invalidations.isEmpty,
"everything was supposed to be invalidated already"
@@ -998,7 +1003,7 @@
// Store invalidations and continuation decision; the analysis will be computed again after Analyze phase.
invalidationResults = Some(CompileCycleResult(continue, invalidations, Analysis.empty))
// If there will be no more compilation cycles, store the early analysis file and update the pickle jar
if (options.pipelining && !continue && !hasAnyMacro(merged)) {
if (options.pipelining && !continue && !lookup.shouldDoEarlyOutput(merged)) {
writeEarlyArtifacts(merged)
}
}
@@ -1010,44 +1015,38 @@
}

private[this] var writtenEarlyArtifacts: Boolean = false

private def writeEarlyArtifacts(merged: Analysis): Unit = {
writtenEarlyArtifacts = true
// log.info(s"writeEarlyArtifacts to $earlyOutput")
earlyAnalysisStore map { store =>
store.set(AnalysisContents.create(merged, currentSetup))
}
for {
earlyO <- earlyOutput
pickleJarPath <- jo2o(earlyO.getSingleOutputAsPath())
} {
// List classes defined in the files that were compiled in this run.
val ps = java.util.concurrent.ConcurrentHashMap.newKeySet[String]
val knownProducts: ParVector[VirtualFileRef] =
new ParVector(merged.relations.allSources.toVector)
.flatMap(merged.relations.products)
// extract product paths in parallel
jo2o(output.getSingleOutputAsPath) match {
case Some(so) if so.getFileName.toString.endsWith(".jar") =>
knownProducts foreach { product =>
new JarUtils.ClassInJar(product.id).toClassFilePath foreach { path =>
ps.add(path.replace('\\', '/'))
}
earlyAnalysisStore.foreach(_.set(AnalysisContents.create(merged, currentSetup)))
Incremental.writeEarlyOut(lookup, progress, earlyOutput, merged, knownProducts(merged))
}

private def knownProducts(merged: Analysis) = {
// List classes defined in the files that were compiled in this run.
val ps = java.util.concurrent.ConcurrentHashMap.newKeySet[String]
val knownProducts: ParVector[VirtualFileRef] =
new ParVector(merged.relations.allSources.toVector)
.flatMap(merged.relations.products)
// extract product paths in parallel
jo2o(output.getSingleOutputAsPath) match {
case Some(so) if so.getFileName.toString.endsWith(".jar") =>
knownProducts foreach { product =>
new JarUtils.ClassInJar(product.id).toClassFilePath foreach { path =>
ps.add(path.replace('\\', '/'))
}
case Some(so) =>
knownProducts foreach { product =>
val productPath = converter.toPath(product)
try {
ps.add(so.relativize(productPath).toString.replace('\\', '/'))
} catch {
case NonFatal(_) => ps.add(product.id)
}
}
case Some(so) =>
knownProducts foreach { product =>
val productPath = converter.toPath(product)
try {
ps.add(so.relativize(productPath).toString.replace('\\', '/'))
} catch {
case NonFatal(_) => ps.add(product.id)
}
case _ => sys.error(s"unsupported output $output")
}
PickleJar.write(pickleJarPath, ps)
progress foreach { p =>
p.afterEarlyOutput(true)
}
}
case _ => sys.error(s"unsupported output $output")
}
ps
}
}
internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala
@@ -382,20 +382,22 @@ private[inc] abstract class IncrementalCommon(
log.debug(s"current source = $sources")

new UnderlyingChanges[VirtualFileRef] {
val sourceIds = sources.map(_.id)
val previousSourceIds = previousSources.map(_.id)

val added0 = java.util.concurrent.ConcurrentHashMap.newKeySet[VirtualFileRef]
val changed0 = java.util.concurrent.ConcurrentHashMap.newKeySet[VirtualFileRef]
val removed0 = java.util.concurrent.ConcurrentHashMap.newKeySet[VirtualFileRef]
val unmodified0 = java.util.concurrent.ConcurrentHashMap.newKeySet[VirtualFileRef]
new ParVector(sources.toVector).foreach {
case f: VirtualFileRef if previousSources.contains(f) =>

new ParVector(sources.toVector).foreach { f =>
if (previousSourceIds.contains(f.id)) {
if (equivS.equiv(previous.source(f), stamps.source(f))) unmodified0.add(f)
else changed0.add(f)
case f: VirtualFileRef => added0.add(f)
}
previousSources.foreach {
case f: VirtualFile => if (!sources.contains(f)) removed0.add(f)
case _ =>
} else added0.add(f)
}
previousSources.foreach(f => if (!sourceIds.contains(f.id)) removed0.add(f))

val added = new WrappedSet(added0)
val changed = new WrappedSet(changed0)
val removed = new WrappedSet(removed0)
15 changes: 15 additions & 0 deletions internal/zinc-core/src/main/scala/sbt/internal/inc/Lookup.scala
@@ -129,6 +129,21 @@ trait ExternalLookup extends ExternalHooks.Lookup {
import scala.collection.JavaConverters._
shouldDoIncrementalCompilation(changedClasses.iterator().asScala.toSet, previousAnalysis)
}

/**
* Used to override whether we should proceed with making an early output.
*
* By default we do not make an early output in the presence of any macros
* because macro expansion (in a downstream subproject) requires the macro implementation
   * to be present in bytecode form, rather than just a pickle-containing JAR.
*
   * If you're careful in micromanaging the separation of macro implementations
* (e.g. `def impl(c: Context) = ...`) from macro definitions
* (e.g. `def foo: Unit = macro Foo.impl`) you can safely override this.
*/
def shouldDoEarlyOutput(compileAnalysis: CompileAnalysis): Boolean = {
compileAnalysis.asInstanceOf[Analysis].apis.internal.values.exists(_.hasMacro)
}
}

trait NoopExternalLookup extends ExternalLookup {
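
The hook added above can be supplied from the build through a custom ExternalLookup. As a hedged sketch (not part of this commit; the trait name is illustrative), and noting that the call sites in Incremental.scala above treat a false return as the go-ahead to write the early pickle JAR:

import sbt.internal.inc.ExternalLookup
import xsbti.compile.CompileAnalysis

// Hypothetical lookup for a build that keeps macro implementations strictly in
// upstream, already-compiled subprojects. Returning false declares "no macro
// concerns here", so Incremental proceeds with the early output even when the
// analysis contains macro-defining classes.
trait MacroSafePipeliningLookup extends ExternalLookup {
  override def shouldDoEarlyOutput(compileAnalysis: CompileAnalysis): Boolean = false
}
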
18 changes: 8 additions & 10 deletions internal/zinc-core/src/main/scala/sbt/internal/inc/PickleJar.scala
@@ -19,20 +19,18 @@ import java.nio.file.attribute.BasicFileAttributes
import scala.reflect.io.RootPath

object PickleJar {
// create an empty JAR file in case the subproject has no classes.
def touch(pickleOut: Path): Unit = {
def write(pickleOut: Path, knownProducts: java.util.Set[String]): Unit = {
if (!Files.exists(pickleOut)) {
Files.createDirectories(pickleOut.getParent)
Files.createFile(pickleOut)
}
()
}
def write(pickleOut: Path, knownProducts: java.util.Set[String]): Path = {
val pj = RootPath(pickleOut, writable = false) // so it doesn't delete the file
val result = try Files.walkFileTree(pj.root, deleteUnknowns(knownProducts))
finally pj.close()
touch(pickleOut)
result

if (!knownProducts.isEmpty) {
val pj = RootPath(pickleOut, writable = false) // so it doesn't delete the file
try Files.walkFileTree(pj.root, deleteUnknowns(knownProducts))
finally pj.close()
()
}
}

def deleteUnknowns(knownProducts: java.util.Set[String]) = new SimpleFileVisitor[Path] {
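
The deleteUnknowns visitor body is truncated in the hunk above. Purely for orientation, a sketch of the shape such a visitor can take (an assumption, not the code from this commit): walk the pickle JAR mounted through RootPath and delete every class file whose jar-relative path is not among the known products collected in Incremental.scala.

import java.nio.file.{ FileVisitResult, Files, Path, SimpleFileVisitor }
import java.nio.file.attribute.BasicFileAttributes

// Illustrative visitor: keep only entries listed in knownProducts.
def deleteUnknowns(knownProducts: java.util.Set[String]) = new SimpleFileVisitor[Path] {
  override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
    // RootPath mounts the jar at "/", so drop the leading slash before comparing
    // against the "pkg/Name.class"-style paths stored as known products.
    val entry = file.toString.stripPrefix("/")
    if (entry.endsWith(".class") && !knownProducts.contains(entry)) Files.delete(file)
    FileVisitResult.CONTINUE
  }
}
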
2 changes: 1 addition & 1 deletion project/plugins.sbt
@@ -6,7 +6,7 @@ addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.4.6")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "3.0.2")
addSbtPlugin("com.github.gseitz" % "sbt-protobuf" % "0.6.5")
libraryDependencies += "com.github.os72" % "protoc-jar" % "3.11.4"
libraryDependencies += "com.github.os72" % "protoc-jar" % "3.11.4" // sync w/ ProtobufConfig / version
addSbtPlugin("com.lightbend" % "sbt-whitesource" % "0.1.18")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1")
@@ -0,0 +1,15 @@
{
"projects": [
{
"name": "use",
"dependsOn": [
"dep"
],
"scalaVersion": "2.13.3"
},
{
"name": "dep",
"scalaVersion": "2.13.3"
}
]
}
@@ -0,0 +1,6 @@
package example

object B {
val y = A.x + 1
def main(args: Array[String]): Unit = println(y)
}
@@ -0,0 +1,5 @@
package example

object Other {
def z = 1
}
@@ -0,0 +1,5 @@
package example;

public class A {
public static int x() { return 3; }
}
@@ -0,0 +1 @@
pipelining = true
@@ -0,0 +1,9 @@
# base-line
> use/compile
$ sleep 1000

# 1. make dep recompile something other than A
# 2. make use recompile, needing a pickle for A
$ copy-file changes/Other.scala dep/Other.scala
$ copy-file changes/B2.scala use/B.scala
> use/compile
@@ -0,0 +1,6 @@
package example

object B {
val y = A.x
def main(args: Array[String]): Unit = println(y)
}
@@ -0,0 +1 @@
pipelining = true
