From e1dd86ffdee4bd15c1a2d8c9c70b7eacf29e9bdb Mon Sep 17 00:00:00 2001
From: Manish Amde
Date: Mon, 24 Mar 2014 19:23:44 -0700
Subject: [PATCH] implementing code style suggestions

---
 .../scala/org/apache/spark/mllib/tree/DecisionTree.scala   | 2 +-
 .../org/apache/spark/mllib/tree/impurity/Entropy.scala     | 4 ++--
 .../scala/org/apache/spark/mllib/tree/impurity/Gini.scala  | 8 ++++----
 .../org/apache/spark/mllib/tree/impurity/Variance.scala    | 7 +++----
 .../scala/org/apache/spark/mllib/tree/model/Node.scala     | 1 -
 .../org/apache/spark/mllib/tree/DecisionTreeSuite.scala    | 7 -------
 6 files changed, 10 insertions(+), 19 deletions(-)

diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/DecisionTree.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/DecisionTree.scala
index 3ab644e74df1b..5e8fc70bd3c04 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/tree/DecisionTree.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/DecisionTree.scala
@@ -245,7 +245,7 @@ object DecisionTree extends Serializable with Logging {
     new DecisionTree(strategy).train(input: RDD[LabeledPoint])
   }
 
-  val InvalidBinIndex = -1
+  private val InvalidBinIndex = -1
 
   /**
    * Returns an array of optimal splits for all nodes at a given level
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala
index 9018821abc875..8832d7a6929a9 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.mllib.tree.impurity
 
-import javax.naming.OperationNotSupportedException
+import java.lang.UnsupportedOperationException
 
 /**
  * Class for calculating [[http://en.wikipedia.org/wiki/Binary_entropy_function entropy]] during
@@ -45,5 +45,5 @@ object Entropy extends Impurity {
   }
 
   def calculate(count: Double, sum: Double, sumSquares: Double): Double =
-    throw new OperationNotSupportedException("Entropy.calculate")
+    throw new UnsupportedOperationException("Entropy.calculate")
 }
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala
index 20af8f6c1c2cd..3f043125a6aba 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala
@@ -17,11 +17,11 @@
 
 package org.apache.spark.mllib.tree.impurity
 
-import javax.naming.OperationNotSupportedException
+import java.lang.UnsupportedOperationException
 
 /**
- * Class for calculating the [[http://en.wikipedia.org/wiki/Gini_coefficient Gini
- * coefficent]] during binary classification
+ * Class for calculating the [[http://en.wikipedia
+ * .org/wiki/Decision_tree_learning#Gini_impurity]] during binary classification
  */
 object Gini extends Impurity {
 
@@ -43,6 +43,6 @@ object Gini extends Impurity {
   }
 
   def calculate(count: Double, sum: Double, sumSquares: Double): Double =
-    throw new OperationNotSupportedException("Gini.calculate")
+    throw new UnsupportedOperationException("Gini.calculate")
 
 }
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Variance.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Variance.scala
index 85b7be560fecb..35b1c4e5c3727 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Variance.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Variance.scala
@@ -17,15 +17,14 @@
 
 package org.apache.spark.mllib.tree.impurity
 
-import javax.naming.OperationNotSupportedException
-import org.apache.spark.Logging
+import java.lang.UnsupportedOperationException
 
 /**
  * Class for calculating variance during regression
  */
-object Variance extends Impurity with Logging {
+object Variance extends Impurity {
   def calculate(c0: Double, c1: Double): Double
-    = throw new OperationNotSupportedException("Variance.calculate")
+    = throw new UnsupportedOperationException("Variance.calculate")
 
   /**
    * variance calculation
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
index 4a2c876a51b54..c3e5c00c8d53c 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.mllib.tree.model
 
 import org.apache.spark.Logging
-import org.apache.spark.mllib.regression.LabeledPoint
 import org.apache.spark.mllib.tree.configuration.FeatureType._
 
 /**
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
index f8914e03bd12f..2dfcdd857b504 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
@@ -17,22 +17,15 @@
 
 package org.apache.spark.mllib.tree
 
-import scala.util.Random
-
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
 import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-
-import org.jblas._
-import org.apache.spark.rdd.RDD
 import org.apache.spark.mllib.regression.LabeledPoint
 import org.apache.spark.mllib.tree.impurity.{Entropy, Gini, Variance}
 import org.apache.spark.mllib.tree.model.Filter
 import org.apache.spark.mllib.tree.configuration.Strategy
 import org.apache.spark.mllib.tree.configuration.Algo._
-import scala.collection.mutable
 import org.apache.spark.mllib.tree.configuration.FeatureType._
 
 class DecisionTreeSuite extends FunSuite with BeforeAndAfterAll {
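For context, not part of the patch: the recurring change above replaces javax.naming.OperationNotSupportedException, a checked exception from the JNDI API, with java.lang.UnsupportedOperationException, the standard unchecked exception for operations a type does not support. A minimal Scala sketch of the pattern, using a simplified stand-in trait (SimpleImpurity/SimpleGini are illustrative names, not the actual MLlib Impurity interface):

// Simplified stand-in for an impurity measure with two overloads:
// one for binary classification counts, one for regression statistics.
trait SimpleImpurity {
  def calculate(c0: Double, c1: Double): Double
  def calculate(count: Double, sum: Double, sumSquares: Double): Double
}

object SimpleGini extends SimpleImpurity {
  // Gini impurity for two classes: 1 - p0^2 - p1^2.
  def calculate(c0: Double, c1: Double): Double = {
    if (c0 == 0 || c1 == 0) {
      0.0
    } else {
      val total = c0 + c1
      val f0 = c0 / total
      val f1 = c1 / total
      1 - f0 * f0 - f1 * f1
    }
  }

  // Gini impurity is classification-only, so the regression overload throws
  // java.lang.UnsupportedOperationException (unchecked, available without an
  // explicit import) rather than javax.naming.OperationNotSupportedException
  // (a checked JNDI exception), matching the style applied in the patch.
  def calculate(count: Double, sum: Double, sumSquares: Double): Double =
    throw new UnsupportedOperationException("SimpleGini.calculate")
}

For example, SimpleGini.calculate(30.0, 70.0) returns about 0.42, while the three-argument overload throws UnsupportedOperationException at runtime.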