[query] fix remaining scala warnings (#14188)
patrick-schultz authored and chrisvittal committed Jul 11, 2024
1 parent 2c94c1c commit e583d70
Showing 100 changed files with 126 additions and 373 deletions.
3 changes: 2 additions & 1 deletion hail/build.gradle
@@ -70,7 +70,8 @@ tasks.withType(ScalaCompile) {
"-deprecation",
"-unchecked",
"-Ywarn-unused:_,-explicits,-implicits",
"-Wconf:cat=unused-locals:w,cat=unused:info,any:w",
// "-Wconf:cat=unused-locals:w,cat=unused:info,any:w",
"-Wconf:any:e",
"-Ypartial-unification",
]

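
Note on the flag change above: "-Wconf:any:e" tells scalac to escalate every remaining warning to an error, replacing the older per-category policy that merely downgraded some categories. With "-Ywarn-unused:_" still enabled, any unused binding anywhere in the build now fails compilation, which is what the per-file cleanups below respond to. A minimal sketch (hypothetical code, not from this commit) of what stops compiling under the new policy:

object WconfDemo {
  def f(): Int = {
    // With -Ywarn-unused:_ this emits an unused-locals warning;
    // under -Wconf:any:e that warning becomes a compile error.
    val unused = 42
    1
  }
}

One fix is to delete the binding; the other, used throughout this commit, is a targeted @nowarn where the binding is intentional.
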
2 changes: 2 additions & 0 deletions hail/src/main/scala/is/hail/backend/BackendUtils.scala
@@ -10,6 +10,7 @@ import is.hail.io.fs._
import is.hail.services._
import is.hail.utils._

import scala.annotation.nowarn
import scala.util.Try

object BackendUtils {
@@ -93,6 +94,7 @@ class BackendUtils(

results
case Some(cachedResults) =>
@nowarn("cat=unused-pat-vars&msg=pattern var c")
val remainingContexts =
for {
c @ (_, k) <- contexts.zipWithIndex
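
The @nowarn added above uses a conjunction filter, "cat=unused-pat-vars&msg=pattern var c", so it suppresses only the one diagnostic about the pattern variable c and nothing else in the definition. A sketch of the idiom with hypothetical data (note that if the warning never fires, -Wunused:nowarn will flag the annotation itself as unnecessary):

import scala.annotation.nowarn

object NowarnDemo {
  // For-comprehension desugaring can report a bound-and-used pattern var
  // as unused; the filter below silences exactly that message.
  @nowarn("cat=unused-pat-vars&msg=pattern var c")
  def evens(contexts: IndexedSeq[String]): IndexedSeq[(String, Int)] =
    for {
      c @ (_, k) <- contexts.zipWithIndex
      if k % 2 == 0
    } yield c
}
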
2 changes: 0 additions & 2 deletions hail/src/main/scala/is/hail/backend/ExecuteContext.scala
@@ -136,8 +136,6 @@ class ExecuteContext(

private val cleanupFunctions = mutable.ArrayBuffer[() => Unit]()

private[this] val broadcasts = mutable.ArrayBuffer.empty[BroadcastValue[_]]

val memo: mutable.Map[Any, Any] = new mutable.HashMap[Any, Any]()

val taskContext: HailTaskContext = new LocalTaskContext(0, 0)
2 changes: 1 addition & 1 deletion hail/src/main/scala/is/hail/backend/local/LocalBackend.scala
@@ -208,7 +208,7 @@ class LocalBackend(
throw new LowererUnsupportedOperation(s"lowered to uncompilable IR: ${Pretty(ctx, ir)}")

if (ir.typ == TVoid) {
val (pt, f) = ctx.timer.time("Compile") {
val (_, f) = ctx.timer.time("Compile") {
Compile[AsmFunction1RegionUnit](
ctx,
FastSeq(),
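
The change above is the commit's most common pattern: when destructuring a tuple whose first component (pt, the compiled physical type) is never read, binding that slot to _ removes the unused-local warning without renaming anything. A minimal sketch with hypothetical values:

object UnderscoreDemo {
  def main(args: Array[String]): Unit = {
    // Only the function component is needed; `_` discards the other
    // slot instead of introducing an unused local binding.
    val (_, f) = ("unused type info", (x: Int) => x + 1)
    println(f(41)) // prints 42
  }
}
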
18 changes: 5 additions & 13 deletions hail/src/main/scala/is/hail/backend/service/ServiceBackend.scala
@@ -154,7 +154,6 @@ class ServiceBackend(
fs: FS,
collection: Array[Array[Byte]],
stageIdentifier: String,
dependency: Option[TableStageDependency] = None,
f: (Array[Byte], HailTaskContext, HailClassLoader, FS) => Array[Byte],
): (String, String, Int) = {
val backendContext = _backendContext.asInstanceOf[ServiceBackendContext]
@@ -291,7 +290,7 @@ class ServiceBackend(
f: (Array[Byte], HailTaskContext, HailClassLoader, FS) => Array[Byte]
): Array[Array[Byte]] = {
val (token, root, n) =
submitAndWaitForBatch(_backendContext, fs, collection, stageIdentifier, dependency, f)
submitAndWaitForBatch(_backendContext, fs, collection, stageIdentifier, f)

log.info(s"parallelizeAndComputeWithIndex: $token: reading results")
val startTime = System.nanoTime()
@@ -321,14 +320,8 @@
)(
f: (Array[Byte], HailTaskContext, HailClassLoader, FS) => Array[Byte]
): (Option[Throwable], IndexedSeq[(Array[Byte], Int)]) = {
val (token, root, n) = submitAndWaitForBatch(
_backendContext,
fs,
collection.map(_._1).toArray,
stageIdentifier,
dependency,
f,
)
val (token, root, _) =
submitAndWaitForBatch(_backendContext, fs, collection.map(_._1).toArray, stageIdentifier, f)
log.info(s"parallelizeAndComputeWithIndex: $token: reading results")
val startTime = System.nanoTime()
val r @ (_, results) = runAllKeepFirstError(executor) {
@@ -372,7 +365,6 @@ class ServiceBackend(
MakeTuple.ordered(FastSeq(x)),
optimize = true,
)
val retPType = pt.asInstanceOf[PBaseStruct]
val elementType = pt.fields(0).typ
val off = ctx.scopedExecution((hcl, fs, htc, r) => f(hcl, fs, htc, r).apply(r))
val codec = TypedCodecSpec(
@@ -455,7 +447,7 @@ object ServiceBackendAPI {
assert(argv.length == 7, argv.toFastSeq)

val scratchDir = argv(0)
val logFile = argv(1)
// val logFile = argv(1)
val jarLocation = argv(2)
val kind = argv(3)
assert(kind == Main.DRIVER)
@@ -473,7 +465,7 @@ object ServiceBackendAPI {
val batchClient = new BatchClient(s"$scratchDir/secrets/gsa-key/key.json")
log.info("BatchClient allocated.")

var batchId =
val batchId =
BatchConfig.fromConfigFile(s"$scratchDir/batch-config/batch-config.json").map(_.batchId)
log.info("BatchConfig parsed.")

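
Several warnings in this file came from genuinely dead code: the dependency parameter of submitAndWaitForBatch was never read, so it is deleted and both call sites shrink; logFile and retPType were write-only locals; and batchId was a var that was never reassigned. A sketch of the dead-parameter removal with hypothetical names:

object DropParamDemo {
  // Before (sketch): def submit(work: Seq[Int], stage: String, dep: Option[String]): Int
  // `dep` was threaded through but never used, so it can simply be dropped,
  // and each caller stops passing an argument that was never needed.
  def submit(work: Seq[Int], stage: String): Int = work.sum

  def main(args: Array[String]): Unit =
    println(submit(Seq(1, 2, 3), "stage0")) // prints 6
}
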
4 changes: 2 additions & 2 deletions hail/src/main/scala/is/hail/backend/service/Worker.scala
@@ -104,8 +104,8 @@ object Worker {
throw new IllegalArgumentException(s"expected seven arguments, not: ${argv.length}")
}
val scratchDir = argv(0)
val logFile = argv(1)
var jarLocation = argv(2)
// val logFile = argv(1)
// var jarLocation = argv(2)
val kind = argv(3)
assert(kind == Main.WORKER)
val root = argv(4)
2 changes: 1 addition & 1 deletion hail/src/main/scala/is/hail/experimental/package.scala
@@ -30,7 +30,7 @@ package object experimental {
if (ac <= 1 || an == 0) // FAF should not be calculated on singletons
0.0
else {
var f = (af: Double) => ac.toDouble - 1 - qpois(ci, an.toDouble * af)
val f = (af: Double) => ac.toDouble - 1 - qpois(ci, an.toDouble * af)
val root = uniroot(f, lower, upper, tol)
val rounder = 1d / (precision / 100d)
var max_af = math.round(root.getOrElse(0.0) * rounder) / rounder
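
Here f was declared as a var but never reassigned, which scalac's lint reports; declaring it val is the fix, and under -Wconf:any:e the warning would otherwise fail the build. A minimal sketch with hypothetical numbers:

object ValOverVarDemo {
  def main(args: Array[String]): Unit = {
    // Never mutated after initialization, so `val` is the correct binder.
    val f = (af: Double) => 1.0 - af
    println(f(0.25)) // prints 0.75
  }
}
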
@@ -75,7 +75,6 @@ object RelationalSpec

def read(fs: FS, path: String): RelationalSpec = {
val jv = readMetadata(fs, path)
val references = readReferences(fs, path, jv)

(jv \ "name").extract[String] match {
case "TableSpec" => TableSpec.fromJValue(fs, path, jv)
2 changes: 0 additions & 2 deletions hail/src/main/scala/is/hail/expr/ir/BinaryOp.scala
@@ -118,8 +118,6 @@ object BinaryOp {
case _ => incompatible(lt, rt, op)
}
case (TBoolean, TBoolean) =>
val ll = coerce[Boolean](l)
val rr = coerce[Boolean](r)
op match {
case _ => incompatible(lt, rt, op)
}
9 changes: 0 additions & 9 deletions hail/src/main/scala/is/hail/expr/ir/BinarySearch.scala
@@ -286,15 +286,6 @@ object BinarySearch {
}
}

private def runSearchUnit(
cb: EmitCodeBuilder,
haystack: SIndexableValue,
compare: Comparator,
found: (Value[Int], Value[Int], Value[Int]) => Unit,
notFound: Value[Int] => Unit,
): Unit =
runSearchBoundedUnit(cb, haystack, compare, 0, haystack.loadLength(), found, notFound)

private def runSearchBounded[T: TypeInfo](
cb: EmitCodeBuilder,
haystack: SIndexableValue,
20 changes: 4 additions & 16 deletions hail/src/main/scala/is/hail/expr/ir/Emit.scala
@@ -13,7 +13,7 @@ import is.hail.expr.ir.streams.{EmitStream, StreamProducer, StreamUtils}
import is.hail.io.{BufferSpec, InputBuffer, OutputBuffer, TypedCodecSpec}
import is.hail.io.fs.FS
import is.hail.linalg.{BLAS, LAPACK, LinalgCodeUtils}
import is.hail.types.{tcoerce, TypeWithRequiredness, VirtualTypeWithReq}
import is.hail.types.{TypeWithRequiredness, VirtualTypeWithReq, tcoerce}
import is.hail.types.physical._
import is.hail.types.physical.stypes._
import is.hail.types.physical.stypes.concrete._
@@ -25,8 +25,8 @@ import is.hail.variant.ReferenceGenome

import scala.collection.mutable
import scala.language.existentials

import java.io._
import scala.annotation.nowarn

// class for holding all information computed ahead-of-time that we need in the emitter
object EmitContext {
@@ -769,6 +769,7 @@

val mb: EmitMethodBuilder[C] = cb.emb.asInstanceOf[EmitMethodBuilder[C]]

@nowarn("cat=unused-locals&msg=local default argument")
def emit(
ir: IR,
mb: EmitMethodBuilder[C] = mb,
@@ -2792,7 +2793,7 @@ class Emit[C](
}

case ResultOp(idx, sig) =>
val AggContainer(aggs, sc, _) = container.get
val AggContainer(_, sc, _) = container.get

val rvAgg = agg.Extract.getAgg(sig)
rvAgg.result(cb, sc.states(idx), region)
@@ -3534,16 +3535,6 @@
): IEmitCode =
this.emitI(ir, cb, region, env, container, loopEnv)

def emitVoid(
ir: IR,
env: EmitEnv = env,
container: Option[AggContainer] = container,
loopEnv: Option[Env[LoopRef]] = loopEnv,
): Code[Unit] =
EmitCodeBuilder.scopedVoid(mb) { cb =>
this.emitVoid(cb, ir, region, env, container, loopEnv)
}

def emitStream(ir: IR, outerRegion: Value[Region], env: EmitEnv = env): EmitCode =
EmitCode.fromI(mb)(cb =>
EmitStream.produce(this, ir, cb, cb.emb, outerRegion, env, container)
@@ -3677,7 +3668,6 @@
)

sort.emitWithBuilder[Boolean] { cb =>
val region = sort.getCodeParam[Region](1)
val leftEC = cb.memoize(
EmitCode.present(sort, elemSCT.loadToSValue(cb, sort.getCodeParam(2)(elemSCT.ti))),
"sort_leftEC",
@@ -3774,8 +3764,6 @@ object NDArrayEmitter {
rightShape: IndexedSeq[Value[Long]],
errorID: Int,
): IndexedSeq[Value[Long]] = {
val mb = cb.emb

assert(leftShape.nonEmpty)
assert(rightShape.nonEmpty)

7 changes: 0 additions & 7 deletions hail/src/main/scala/is/hail/expr/ir/EmitClassBuilder.scala
@@ -384,18 +384,11 @@ final class EmitClassBuilder[C](val emodb: EmitModuleBuilder, val cb: ClassBuild
newPField(name, st),
)

private[this] val typMap: mutable.Map[Type, Value[_ <: Type]] =
mutable.Map()

private[this] val pTypeMap: mutable.Map[PType, Value[_ <: PType]] = mutable.Map()

private[this] type CompareMapKey = (SType, SType)

private[this] val memoizedComparisons: mutable.Map[CompareMapKey, CodeOrdering] =
mutable.Map[CompareMapKey, CodeOrdering]()

def numTypes: Int = typMap.size

private[this] val decodedLiteralsField = genFieldThisRef[Array[Long]]("decoded_lits")

def literalsArray(): Value[Array[Long]] = decodedLiteralsField
19 changes: 0 additions & 19 deletions hail/src/main/scala/is/hail/expr/ir/ExtractIntervalFilters.scala
@@ -168,7 +168,6 @@ class KeySetLattice(ctx: ExecuteContext, keyType: TStruct) extends Lattice {
if (v.isEmpty) return top

val builder = mutable.ArrayBuilder.make[Interval]()
var i = 0
if (v.head.left != IntervalEndpoint(Row(), -1)) {
builder += Interval(IntervalEndpoint(Row(), -1), v.head.left)
}
@@ -751,24 +750,6 @@ class ExtractIntervalFilters(ctx: ExecuteContext, keyType: TStruct) {
private def literalSizeOkay(lit: Any): Boolean = lit.asInstanceOf[Iterable[_]].size <=
MAX_LITERAL_SIZE

private def wrapInRow(intervals: IndexedSeq[Interval]): IndexedSeq[Interval] = intervals
.map { interval =>
Interval(
IntervalEndpoint(Row(interval.left.point), interval.left.sign),
IntervalEndpoint(Row(interval.right.point), interval.right.sign),
)
}

private def intervalFromComparison(v: Any, op: ComparisonOp[_]): Interval = {
(op: @unchecked) match {
case _: EQ => Interval(endpoint(v, -1), endpoint(v, 1))
case GT(_, _) => Interval(negInf, endpoint(v, -1)) // value > key
case GTEQ(_, _) => Interval(negInf, endpoint(v, 1)) // value >= key
case LT(_, _) => Interval(endpoint(v, 1), posInf) // value < key
case LTEQ(_, _) => Interval(endpoint(v, -1), posInf) // value <= key
}
}

private def posInf: IntervalEndpoint = IntervalEndpoint(Row(), 1)

private def negInf: IntervalEndpoint = IntervalEndpoint(Row(), -1)
3 changes: 1 addition & 2 deletions hail/src/main/scala/is/hail/expr/ir/GenericLines.scala
@@ -67,7 +67,7 @@ object GenericLines {
private var eof = false
private var closed = false

private var buf = new Array[Byte](64 * 1024)
private val buf = new Array[Byte](64 * 1024)
private var bufOffset = 0L
private var bufMark = 0
private var bufPos = 0
@@ -339,7 +339,6 @@
}
val body: (FS, Any) => CloseableIterator[GenericLine] = { (fs: FS, context: Any) =>
val contextRow = context.asInstanceOf[Row]
val index = contextRow.getAs[Int](0)
val file = contextRow.getAs[String](1)
val chrom = contextRow.getAs[String](2)
val start = contextRow.getAs[Int](3)
1 change: 0 additions & 1 deletion hail/src/main/scala/is/hail/expr/ir/LowerMatrixIR.scala
@@ -1035,7 +1035,6 @@ object LowerMatrixIR {
.aggregate(makeTuple(applyAggOp(Count(), FastSeq(), FastSeq()), 'global(colsField).len))
case MatrixAggregate(child, query) =>
val lc = lower(ctx, child, ab)
val idx = Symbol(genUID())
TableAggregate(
lc,
aggExplodeIR(
1 change: 0 additions & 1 deletion hail/src/main/scala/is/hail/expr/ir/MatrixWriter.scala
@@ -1011,7 +1011,6 @@ case class VCFPartitionWriter(
_writeB(cb, v.toBytes(cb).loadBytes(cb))
case v: SCallValue =>
val ploidy = v.ploidy(cb)
val phased = v.isPhased(cb)
cb.if_(ploidy.ceq(0), cb._fatal("VCF spec does not support 0-ploid calls."))
cb.if_(ploidy.ceq(1), cb._fatal("VCF spec does not support phased haploid calls."))
val c = v.canonicalCall(cb)
5 changes: 2 additions & 3 deletions hail/src/main/scala/is/hail/expr/ir/NativeReaderOptions.scala
@@ -21,12 +21,11 @@ class NativeReaderOptionsSerializer() extends CustomSerializer[NativeReaderOptio
NativeReaderOptions(intervals, intervalPointType, filterIntervals)
},
{ case opts: NativeReaderOptions =>
implicit val fmt = format
val ty = TArray(TInterval(opts.intervalPointType))
(("name" -> opts.getClass.getSimpleName) ~
("name" -> opts.getClass.getSimpleName) ~
("intervals" -> JSONAnnotationImpex.exportAnnotation(opts.intervals, ty)) ~
("intervalPointType" -> opts.intervalPointType.parsableString()) ~
("filterIntervals" -> opts.filterIntervals))
("filterIntervals" -> opts.filterIntervals)
},
)
)
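
Two fixes above: the unused implicit val fmt = format is deleted, and the parentheses wrapping the whole json4s ~ chain are dropped, since ~ already combines the pairs into a single object. A sketch of the chaining with hypothetical values, assuming json4s on the classpath:

import org.json4s._
import org.json4s.JsonDSL._

object JsonDslDemo {
  def main(args: Array[String]): Unit = {
    // `~` associates left-to-right, so an outer set of parentheses around
    // the whole chain is redundant; the result is one JObject either way.
    val json: JValue =
      ("name" -> "NativeReaderOptions") ~
        ("intervalPointType" -> "interval") ~
        ("filterIntervals" -> true)
    println(json)
  }
}
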
3 changes: 3 additions & 0 deletions hail/src/main/scala/is/hail/expr/ir/NormalizeNames.scala
@@ -3,6 +3,8 @@ package is.hail.expr.ir
import is.hail.backend.ExecuteContext
import is.hail.utils.StackSafe._

import scala.annotation.nowarn

class NormalizeNames(normFunction: Int => String, allowFreeVariables: Boolean = false) {
var count: Int = 0

@@ -23,6 +25,7 @@ class NormalizeNames(normFunction: Int => String, allowFreeVariables: Boolean =
private def normalizeIR(ir: BaseIR, env: BindingEnv[String], context: Array[String] = Array())
: StackFrame[BaseIR] = {

@nowarn("cat=unused-locals&msg=default argument")
def normalizeBaseIR(next: BaseIR, env: BindingEnv[String] = env): StackFrame[BaseIR] =
call(normalizeIR(next, env, context :+ ir.getClass().getName()))

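
The annotation added above targets a subtle diagnostic: normalizeBaseIR keeps a default argument (env = env) that callers may always override, and scalac's unused lint can then flag the default. The commit keeps the default for readability and silences just that message. A sketch with a hypothetical helper:

import scala.annotation.nowarn

object DefaultArgDemo {
  def run(xs: Seq[Int]): Seq[Int] = {
    // If every call supplies `scale` explicitly, the unused lint can flag
    // the default argument; the filter suppresses only that diagnostic.
    @nowarn("cat=unused-locals&msg=default argument")
    def rescale(x: Int, scale: Int = 1): Int = x * scale
    xs.map(rescale(_, 2))
  }

  def main(args: Array[String]): Unit =
    println(run(Seq(1, 2, 3))) // prints List(2, 4, 6)
}
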
4 changes: 0 additions & 4 deletions hail/src/main/scala/is/hail/expr/ir/PruneDeadFields.scala
@@ -1109,7 +1109,6 @@ object PruneDeadFields {
memoizeMatrixIR(ctx, child, dep, memo)
case MatrixColsTail(child, _) => memoizeMatrixIR(ctx, child, requestedType, memo)
case CastTableToMatrix(child, entriesFieldName, colsFieldName, _) =>
val m = Map(MatrixType.entriesIdentifier -> entriesFieldName)
val childDep = child.typ.copy(
key = requestedType.rowKey,
globalType = unify(
@@ -1679,7 +1678,6 @@
memoizeValueIR(ctx, aggIR, requestedType.asInstanceOf[TDict].valueType, memo),
)
case AggArrayPerElement(a, elementName, indexName, aggBody, knownLength, isScan) =>
val aType = a.typ.asInstanceOf[TArray]
val bodyEnv = memoizeValueIR(ctx, aggBody, TIterable.elementType(requestedType), memo)
if (isScan) {
val valueType =
@@ -1778,7 +1776,6 @@
val sType = requestedType.asInstanceOf[TStruct]
val insFieldNames = fields.map(_._1).toSet
val rightDep = sType.filter(f => insFieldNames.contains(f.name))._1
val rightDepFields = rightDep.fieldNames.toSet
val leftDep = TStruct(
old.typ.asInstanceOf[TStruct]
.fields
@@ -1815,7 +1812,6 @@
}
)
case GetTupleElement(o, idx) =>
val childTupleType = o.typ.asInstanceOf[TTuple]
val tupleDep = TTuple(FastSeq(TupleField(idx, requestedType)))
memoizeValueIR(ctx, o, tupleDep, memo)
case ConsoleLog(message, result) =>
(Remaining changed files not shown.)