From 0727d35a153da9f27516e1d3834046e5ed6a6088 Mon Sep 17 00:00:00 2001
From: zsxwing
Date: Wed, 29 Apr 2015 07:39:20 -0700
Subject: [PATCH] Change BatchUIData to a case class

---
 .../spark/streaming/ui/BatchUIData.scala        | 27 +++----------------
 .../StreamingJobProgressListenerSuite.scala     |  1 -
 2 files changed, 3 insertions(+), 25 deletions(-)

diff --git a/streaming/src/main/scala/org/apache/spark/streaming/ui/BatchUIData.scala b/streaming/src/main/scala/org/apache/spark/streaming/ui/BatchUIData.scala
index c2ee869e5e5a7..f45c291b7c0fe 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/ui/BatchUIData.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/ui/BatchUIData.scala
@@ -24,14 +24,13 @@ import org.apache.spark.streaming.ui.StreamingJobProgressListener._
 
 private[ui] case class OutputOpIdAndSparkJobId(outputOpId: OutputOpId, sparkJobId: SparkJobId)
 
-private[ui] class BatchUIData(
+private[ui] case class BatchUIData(
     val batchTime: Time,
     val receiverNumRecords: Map[Int, Long],
     val submissionTime: Long,
     val processingStartTime: Option[Long],
-    val processingEndTime: Option[Long]) {
-
-  var outputOpIdSparkJobIdPairs: Seq[OutputOpIdAndSparkJobId] = Seq.empty
+    val processingEndTime: Option[Long],
+    var outputOpIdSparkJobIdPairs: Seq[OutputOpIdAndSparkJobId] = Seq.empty) {
 
   /**
    * Time taken for the first job of this batch to start processing from the time this batch
@@ -60,26 +59,6 @@ private[ui] class BatchUIData(
    * The number of recorders received by the receivers in this batch.
    */
   def numRecords: Long = receiverNumRecords.map(_._2).sum
-
-  def canEqual(other: Any): Boolean = other.isInstanceOf[BatchUIData]
-
-  override def equals(other: Any): Boolean = other match {
-    case that: BatchUIData =>
-      (that canEqual this) &&
-        outputOpIdSparkJobIdPairs == that.outputOpIdSparkJobIdPairs &&
-        batchTime == that.batchTime &&
-        receiverNumRecords == that.receiverNumRecords &&
-        submissionTime == that.submissionTime &&
-        processingStartTime == that.processingStartTime &&
-        processingEndTime == that.processingEndTime
-    case _ => false
-  }
-
-  override def hashCode(): Int = {
-    val state = Seq(outputOpIdSparkJobIdPairs, batchTime, receiverNumRecords, submissionTime,
-      processingStartTime, processingEndTime)
-    state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
-  }
 }
 
 private[ui] object BatchUIData {
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/ui/StreamingJobProgressListenerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/ui/StreamingJobProgressListenerSuite.scala
index 1a85b3ebff73d..fa89536de4054 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/ui/StreamingJobProgressListenerSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/ui/StreamingJobProgressListenerSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.streaming.ui
 
 import java.util.Properties
 
-import org.apache.spark.streaming.ui.OutputOpIdAndSparkJobId
 import org.scalatest.Matchers
 
 import org.apache.spark.scheduler.SparkListenerJobStart
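
The patch relies on the Scala compiler deriving equals, hashCode, and copy for a case class from its constructor parameters, which is why the hand-written canEqual/equals/hashCode bodies can be deleted and the test no longer needs the same-package import. A minimal, self-contained sketch of that behaviour, using a hypothetical BatchLike stand-in rather than the real BatchUIData:

// Sketch of compiler-generated case-class equality; BatchLike and its fields
// are illustrative names, not Spark code.
object CaseClassEqualitySketch {
  // A few fields mirroring BatchUIData's shape: plain values plus a
  // defaulted sequence, like outputOpIdSparkJobIdPairs above.
  case class BatchLike(
      batchTimeMs: Long,
      receiverNumRecords: Map[Int, Long],
      jobIds: Seq[Int] = Seq.empty)

  def main(args: Array[String]): Unit = {
    val a = BatchLike(1000L, Map(0 -> 5L))
    val b = BatchLike(1000L, Map(0 -> 5L))

    // The generated equals and hashCode give structural equality, the same
    // behaviour the removed hand-written methods implemented.
    assert(a == b)
    assert(a.hashCode == b.hashCode)

    // Any differing constructor field breaks equality, as before.
    val c = b.copy(jobIds = Seq(1, 2))
    assert(a != c)
  }
}

Every parameter in the case class's first parameter list participates in the generated equality, including the defaulted var outputOpIdSparkJobIdPairs, so the derived methods compare the same six fields the removed equals and hashCode did.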