From 8b0632ca78492f80e26d4b3493296b3b04b55866 Mon Sep 17 00:00:00 2001
From: Matt Massie
Date: Tue, 23 Jun 2015 16:55:11 -0700
Subject: [PATCH] Minor Scala style fixes

---
 .../spark/shuffle/hash/HashShuffleReaderSuite.scala | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/shuffle/hash/HashShuffleReaderSuite.scala b/core/src/test/scala/org/apache/spark/shuffle/hash/HashShuffleReaderSuite.scala
index 0add85c6377dc..28ca68698e3dc 100644
--- a/core/src/test/scala/org/apache/spark/shuffle/hash/HashShuffleReaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/shuffle/hash/HashShuffleReaderSuite.scala
@@ -41,10 +41,10 @@ class RecordingManagedBuffer(underlyingBuffer: NioManagedBuffer) extends Managed
   var callsToRetain = 0
   var callsToRelease = 0
 
-  override def size() = underlyingBuffer.size()
-  override def nioByteBuffer() = underlyingBuffer.nioByteBuffer()
-  override def createInputStream() = underlyingBuffer.createInputStream()
-  override def convertToNetty() = underlyingBuffer.convertToNetty()
+  override def size(): Long = underlyingBuffer.size()
+  override def nioByteBuffer(): ByteBuffer = underlyingBuffer.nioByteBuffer()
+  override def createInputStream(): InputStream = underlyingBuffer.createInputStream()
+  override def convertToNetty(): AnyRef = underlyingBuffer.convertToNetty()
 
   override def retain(): ManagedBuffer = {
     callsToRetain += 1
@@ -81,7 +81,7 @@ class HashShuffleReaderSuite extends SparkFunSuite with LocalSparkContext {
     // Create a return function to use for the mocked wrapForCompression method that just returns
     // the original input stream.
     val dummyCompressionFunction = new Answer[InputStream] {
-      override def answer(invocation: InvocationOnMock) =
+      override def answer(invocation: InvocationOnMock): InputStream =
         invocation.getArguments()(1).asInstanceOf[InputStream]
     }
 
@@ -118,7 +118,7 @@ class HashShuffleReaderSuite extends SparkFunSuite with LocalSparkContext {
     // Test a scenario where all data is local, just to avoid creating a bunch of additional mocks
     // for the code to read data over the network.
     val statuses: Array[(BlockManagerId, Long)] =
-      Array.fill(numMaps)((localBlockManagerId, byteOutputStream.size()))
+      Array.fill(numMaps)((localBlockManagerId, byteOutputStream.size().toLong))
     when(mapOutputTracker.getServerStatuses(shuffleId, reduceId)).thenReturn(statuses)
 
     // Create a mocked shuffle handle to pass into HashShuffleReader.