Using the Scala collections
The compiler was not able to differentiate between the Scala and Java APIs.
nthanvi committed Aug 22, 2016
1 parent 40cf9c7 commit c2ab0c5
Showing 5 changed files with 8 additions and 7 deletions.
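
For context, here is a minimal sketch of the kind of ambiguity this commit works around. The package names are hypothetical and not from the Spark sources: Scala resolves package names relative to enclosing scopes, so a bare collection.Map can stop meaning scala.collection.Map as soon as another package named collection is visible. Fully qualifying the references, as every hunk below does, removes the ambiguity.

// Hypothetical example, not part of the Spark sources: demonstrates why a
// bare `collection.Map` reference can become ambiguous.
package com.example

// A sibling package that happens to be named `collection`.
package collection {
  class Map
}

package use {
  object Demo {
    // Inside com.example.use, the bare name `collection` resolves to
    // com.example.collection (declared in the same file, so it shadows the
    // implicit `scala._` import); this line would therefore not compile:
    // val bad: collection.Map[String, Int] = Map("a" -> 1)

    // Fully qualified, the reference is unambiguous:
    val good: scala.collection.Map[String, Int] = Map("a" -> 1)
  }
}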
core/src/main/scala/org/apache/spark/api/java/JavaUtils.scala (7 changes: 4 additions & 3 deletions)
@@ -31,14 +31,15 @@ private[spark] object JavaUtils {
   }
 
   // Workaround for SPARK-3926 / SI-8911
-  def mapAsSerializableJavaMap[A, B](underlying: collection.Map[A, B]): SerializableMapWrapper[A, B]
-    = new SerializableMapWrapper(underlying)
+  def mapAsSerializableJavaMap[A, B](underlying: scala.collection.Map[A, B]):
+      SerializableMapWrapper[A, B]
+    = new SerializableMapWrapper(underlying)
 
   // Implementation is copied from scala.collection.convert.Wrappers.MapWrapper,
   // but implements java.io.Serializable. It can't just be subclassed to make it
   // Serializable since the MapWrapper class has no no-arg constructor. This class
   // doesn't need a no-arg constructor though.
-  class SerializableMapWrapper[A, B](underlying: collection.Map[A, B])
+  class SerializableMapWrapper[A, B](underlying: scala.collection.Map[A, B])
     extends ju.AbstractMap[A, B] with java.io.Serializable { self =>
 
     override def size: Int = underlying.size
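
As the comments in the hunk above explain, SerializableMapWrapper exists so that a Scala map exposed through the Java API survives Java serialization (SPARK-3926 / SI-8911). A usage sketch under stated assumptions: JavaUtils is private[spark], so the demo compilation unit is placed inside the org.apache.spark package, and the object name SerializableWrapperDemo is illustrative only.

// Illustrative sketch, not from the Spark sources. JavaUtils is
// private[spark], so this compilation unit sits inside that package.
package org.apache.spark

import java.io.{ByteArrayOutputStream, ObjectOutputStream}

import org.apache.spark.api.java.JavaUtils

object SerializableWrapperDemo {
  def main(args: Array[String]): Unit = {
    // Wrap a Scala map as a java.util.Map that is also java.io.Serializable.
    val wrapped = JavaUtils.mapAsSerializableJavaMap(scala.collection.Map("a" -> 1))

    // Plain Java serialization succeeds; per the comments in the diff, the
    // standard MapWrapper from scala.collection.convert would not be
    // serializable here.
    val out = new ObjectOutputStream(new ByteArrayOutputStream())
    out.writeObject(wrapped)
    out.close()

    println(wrapped.get("a")) // 1 -- behaves like an ordinary java.util.Map
  }
}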
@@ -461,7 +461,7 @@ private[spark] object PythonRDD extends Logging {
       JavaRDD[Array[Byte]] = {
     val file = new DataInputStream(new FileInputStream(filename))
     try {
-      val objs = new collection.mutable.ArrayBuffer[Array[Byte]]
+      val objs = new scala.collection.mutable.ArrayBuffer[Array[Byte]]
       try {
         while (true) {
           val length = file.readInt()
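
The hunk above is truncated by the diff view. As a rough reconstruction of the pattern it belongs to, here is a self-contained sketch of a length-prefixed read loop over a DataInputStream; the object and method names are hypothetical, and the EOFException-based termination is an assumption about the elided code.

// Sketch of the surrounding pattern: read length-prefixed byte records
// from a stream until end of file. Reconstructed, not copied verbatim.
import java.io.{DataInputStream, EOFException, FileInputStream}

object LengthPrefixedReader {
  def readRecords(filename: String): Seq[Array[Byte]] = {
    val file = new DataInputStream(new FileInputStream(filename))
    val objs = new scala.collection.mutable.ArrayBuffer[Array[Byte]]
    try {
      while (true) {
        val length = file.readInt()        // 4-byte length prefix
        val obj = new Array[Byte](length)  // then exactly `length` payload bytes
        file.readFully(obj)
        objs += obj
      }
    } catch {
      case _: EOFException => // end of stream terminates the loop
    } finally {
      file.close()
    }
    objs.toSeq
  }
}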
@@ -114,7 +114,7 @@ private[ui] class DriverPage(parent: MesosClusterUI) extends WebUIPage("driver")
     }.getOrElse(Seq[Node]())
   }
 
-  private def propertiesRow(properties: collection.Map[String, String]): Seq[Node] = {
+  private def propertiesRow(properties: scala.collection.Map[String, String]): Seq[Node] = {
     properties.map { case (k, v) =>
       <tr>
         <td>{k}</td><td>{v}</td>
@@ -17,7 +17,7 @@
 
 package org.apache.spark.scheduler
 
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.annotation.DeveloperApi
 
@@ -509,7 +509,7 @@ private[spark] class BlockManagerInfo(
   def blocks: JHashMap[BlockId, BlockStatus] = _blocks
 
   // This does not include broadcast blocks.
-  def cachedBlocks: collection.Set[BlockId] = _cachedBlocks
+  def cachedBlocks: scala.collection.Set[BlockId] = _cachedBlocks
 
   override def toString: String = "BlockManagerInfo " + timeMs + " " + _remainingMem
 
