Merge remote-tracking branch 'apache/master' into SPARK-5991
mengxr committed Feb 27, 2015
2 parents 282ec8d + 5f7f3b9 commit 06140a4
Showing 8 changed files with 62 additions and 46 deletions.
6 changes: 3 additions & 3 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1389,17 +1389,17 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
stopped = true
env.metricsSystem.report()
metadataCleaner.cancel()
env.actorSystem.stop(heartbeatReceiver)
cleaner.foreach(_.stop())
dagScheduler.stop()
dagScheduler = null
listenerBus.stop()
eventLogger.foreach(_.stop())
env.actorSystem.stop(heartbeatReceiver)
progressBar.foreach(_.stop())
taskScheduler = null
// TODO: Cache.stop()?
env.stop()
SparkEnv.set(null)
listenerBus.stop()
eventLogger.foreach(_.stop())
logInfo("Successfully stopped SparkContext")
SparkContext.clearActiveContext()
} else {
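The hunk above reorders `SparkContext.stop()`: the heartbeat receiver is stopped before `dagScheduler` is stopped and nulled out, and `listenerBus`/`eventLogger` are now flushed before `env.stop()` rather than after. A minimal sketch (not Spark's code; all names are illustrative) of the shutdown-ordering principle this appears to follow: stop message producers before their consumers, and flush listeners before tearing down the services they rely on.

```scala
// Illustrative components only; the point is the order of the stop() calls.
class Component(name: String) {
  def stop(): Unit = println(s"stopped $name")
}

class App {
  private val heartbeatSource = new Component("heartbeats")     // produces messages
  private var scheduler: Component = new Component("scheduler") // consumes them
  private val listenerBus = new Component("listenerBus")
  private val env = new Component("env")

  def stop(): Unit = {
    heartbeatSource.stop() // 1. stop the producer first, so no more messages arrive
    scheduler.stop()       // 2. the consumer can now be stopped and nulled out safely
    scheduler = null
    listenerBus.stop()     // 3. flush listeners while the environment is still alive
    env.stop()             // 4. finally tear down shared services
  }
}
```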
20 changes: 12 additions & 8 deletions docs/mllib-decision-tree.md
@@ -223,8 +223,9 @@ val testErr = labelAndPreds.filter(r => r._1 != r._2).count.toDouble / testData.
println("Test Error = " + testErr)
println("Learned classification tree model:\n" + model.toDebugString)

model.save("myModelPath")
val sameModel = DecisionTreeModel.load("myModelPath")
// Save and load model
model.save(sc, "myModelPath")
val sameModel = DecisionTreeModel.load(sc, "myModelPath")
{% endhighlight %}
</div>

@@ -284,8 +285,9 @@ Double testErr =
System.out.println("Test Error: " + testErr);
System.out.println("Learned classification tree model:\n" + model.toDebugString());

model.save("myModelPath");
DecisionTreeModel sameModel = DecisionTreeModel.load("myModelPath");
// Save and load model
model.save(sc.sc(), "myModelPath");
DecisionTreeModel sameModel = DecisionTreeModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>

@@ -362,8 +364,9 @@ val testMSE = labelsAndPredictions.map{ case(v, p) => math.pow((v - p), 2)}.mean
println("Test Mean Squared Error = " + testMSE)
println("Learned regression tree model:\n" + model.toDebugString)

model.save("myModelPath")
val sameModel = DecisionTreeModel.load("myModelPath")
// Save and load model
model.save(sc, "myModelPath")
val sameModel = DecisionTreeModel.load(sc, "myModelPath")
{% endhighlight %}
</div>

@@ -429,8 +432,9 @@ Double testMSE =
System.out.println("Test Mean Squared Error: " + testMSE);
System.out.println("Learned regression tree model:\n" + model.toDebugString());

model.save("myModelPath");
DecisionTreeModel sameModel = DecisionTreeModel.load("myModelPath");
// Save and load model
model.save(sc.sc(), "myModelPath");
DecisionTreeModel sameModel = DecisionTreeModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>

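Every MLlib doc hunk in this commit makes the same API change: `save` and `load` now take the `SparkContext` as their first argument (the Java snippets pass `sc.sc()` to unwrap the `JavaSparkContext`). Below is a minimal end-to-end Scala sketch of the updated pattern, assuming Spark's bundled `sample_libsvm_data.txt` and the docs' placeholder path `myModelPath`; the same signature change applies to the `RandomForestModel`, `GradientBoostedTreesModel`, `SVMModel`, `LinearRegressionModel`, and `NaiveBayesModel` hunks further down.

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.tree.DecisionTree
import org.apache.spark.mllib.tree.model.DecisionTreeModel
import org.apache.spark.mllib.util.MLUtils

val sc = new SparkContext(new SparkConf().setAppName("save-load-sketch").setMaster("local[2]"))
val data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt")

// Train a small classifier: 2 classes, no categorical features, gini impurity, depth 5, 32 bins.
val model = DecisionTree.trainClassifier(data, 2, Map[Int, Int](), "gini", 5, 32)

// Updated API: the SparkContext is passed explicitly to both save and load.
model.save(sc, "myModelPath")
val sameModel = DecisionTreeModel.load(sc, "myModelPath")

sc.stop()
```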
40 changes: 24 additions & 16 deletions docs/mllib-ensembles.md
@@ -129,8 +129,9 @@ val testErr = labelAndPreds.filter(r => r._1 != r._2).count.toDouble / testData.
println("Test Error = " + testErr)
println("Learned classification forest model:\n" + model.toDebugString)

model.save("myModelPath")
val sameModel = RandomForestModel.load("myModelPath")
// Save and load model
model.save(sc, "myModelPath")
val sameModel = RandomForestModel.load(sc, "myModelPath")
{% endhighlight %}
</div>

@@ -193,8 +194,9 @@ Double testErr =
System.out.println("Test Error: " + testErr);
System.out.println("Learned classification forest model:\n" + model.toDebugString());

model.save("myModelPath");
RandomForestModel sameModel = RandomForestModel.load("myModelPath");
// Save and load model
model.save(sc.sc(), "myModelPath");
RandomForestModel sameModel = RandomForestModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>

@@ -276,8 +278,9 @@ val testMSE = labelsAndPredictions.map{ case(v, p) => math.pow((v - p), 2)}.mean
println("Test Mean Squared Error = " + testMSE)
println("Learned regression forest model:\n" + model.toDebugString)

model.save("myModelPath")
val sameModel = RandomForestModel.load("myModelPath")
// Save and load model
model.save(sc, "myModelPath")
val sameModel = RandomForestModel.load(sc, "myModelPath")
{% endhighlight %}
</div>

@@ -343,8 +346,9 @@ Double testMSE =
System.out.println("Test Mean Squared Error: " + testMSE);
System.out.println("Learned regression forest model:\n" + model.toDebugString());

model.save("myModelPath");
RandomForestModel sameModel = RandomForestModel.load("myModelPath");
// Save and load model
model.save(sc.sc(), "myModelPath");
RandomForestModel sameModel = RandomForestModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>

@@ -504,8 +508,9 @@ val testErr = labelAndPreds.filter(r => r._1 != r._2).count.toDouble / testData.
println("Test Error = " + testErr)
println("Learned classification GBT model:\n" + model.toDebugString)

model.save("myModelPath")
val sameModel = GradientBoostedTreesModel.load("myModelPath")
// Save and load model
model.save(sc, "myModelPath")
val sameModel = GradientBoostedTreesModel.load(sc, "myModelPath")
{% endhighlight %}
</div>

@@ -568,8 +573,9 @@ Double testErr =
System.out.println("Test Error: " + testErr);
System.out.println("Learned classification GBT model:\n" + model.toDebugString());

model.save("myModelPath");
GradientBoostedTreesModel sameModel = GradientBoostedTreesModel.load("myModelPath");
// Save and load model
model.save(sc.sc(), "myModelPath");
GradientBoostedTreesModel sameModel = GradientBoostedTreesModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>

@@ -647,8 +653,9 @@ val testMSE = labelsAndPredictions.map{ case(v, p) => math.pow((v - p), 2)}.mean
println("Test Mean Squared Error = " + testMSE)
println("Learned regression GBT model:\n" + model.toDebugString)

model.save("myModelPath")
val sameModel = GradientBoostedTreesModel.load("myModelPath")
// Save and load model
model.save(sc, "myModelPath")
val sameModel = GradientBoostedTreesModel.load(sc, "myModelPath")
{% endhighlight %}
</div>

@@ -717,8 +724,9 @@ Double testMSE =
System.out.println("Test Mean Squared Error: " + testMSE);
System.out.println("Learned regression GBT model:\n" + model.toDebugString());

model.save("myModelPath");
GradientBoostedTreesModel sameModel = GradientBoostedTreesModel.load("myModelPath");
// Save and load model
model.save(sc.sc(), "myModelPath");
GradientBoostedTreesModel sameModel = GradientBoostedTreesModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>

20 changes: 12 additions & 8 deletions docs/mllib-linear-methods.md
@@ -223,8 +223,9 @@ val auROC = metrics.areaUnderROC()

println("Area under ROC = " + auROC)

model.save("myModelPath")
val sameModel = SVMModel.load("myModelPath")
// Save and load model
model.save(sc, "myModelPath")
val sameModel = SVMModel.load(sc, "myModelPath")
{% endhighlight %}

The `SVMWithSGD.train()` method by default performs L2 regularization with the
@@ -308,8 +309,9 @@ public class SVMClassifier {

System.out.println("Area under ROC = " + auROC);

model.save("myModelPath");
SVMModel sameModel = SVMModel.load("myModelPath");
// Save and load model
model.save(sc.sc(), "myModelPath");
SVMModel sameModel = SVMModel.load(sc.sc(), "myModelPath");
}
}
{% endhighlight %}
@@ -423,8 +425,9 @@ val valuesAndPreds = parsedData.map { point =>
val MSE = valuesAndPreds.map{case(v, p) => math.pow((v - p), 2)}.mean()
println("training Mean Squared Error = " + MSE)

model.save("myModelPath")
val sameModel = LinearRegressionModel.load("myModelPath")
// Save and load model
model.save(sc, "myModelPath")
val sameModel = LinearRegressionModel.load(sc, "myModelPath")
{% endhighlight %}

[`RidgeRegressionWithSGD`](api/scala/index.html#org.apache.spark.mllib.regression.RidgeRegressionWithSGD)
@@ -496,8 +499,9 @@ public class LinearRegression {
).rdd()).mean();
System.out.println("training Mean Squared Error = " + MSE);

model.save("myModelPath");
LinearRegressionModel sameModel = LinearRegressionModel.load("myModelPath");
// Save and load model
model.save(sc.sc(), "myModelPath");
LinearRegressionModel sameModel = LinearRegressionModel.load(sc.sc(), "myModelPath");
}
}
{% endhighlight %}
10 changes: 6 additions & 4 deletions docs/mllib-naive-bayes.md
@@ -56,8 +56,9 @@ val model = NaiveBayes.train(training, lambda = 1.0)
val predictionAndLabel = test.map(p => (model.predict(p.features), p.label))
val accuracy = 1.0 * predictionAndLabel.filter(x => x._1 == x._2).count() / test.count()

model.save("myModelPath")
val sameModel = NaiveBayesModel.load("myModelPath")
// Save and load model
model.save(sc, "myModelPath")
val sameModel = NaiveBayesModel.load(sc, "myModelPath")
{% endhighlight %}
</div>

@@ -97,8 +98,9 @@ double accuracy = predictionAndLabel.filter(new Function<Tuple2<Double, Double>,
}
}).count() / (double) test.count();

model.save("myModelPath");
NaiveBayesModel sameModel = NaiveBayesModel.load("myModelPath");
// Save and load model
model.save(sc.sc(), "myModelPath");
NaiveBayesModel sameModel = NaiveBayesModel.load(sc.sc(), "myModelPath");
{% endhighlight %}
</div>

3 changes: 1 addition & 2 deletions docs/spark-standalone.md
@@ -222,8 +222,7 @@ SPARK_WORKER_OPTS supports the following system properties:
<td>false</td>
<td>
Enable periodic cleanup of worker / application directories. Note that this only affects standalone
mode, as YARN works differently. Applications directories are cleaned up regardless of whether
the application is still running.
mode, as YARN works differently. Only the directories of stopped applications are cleaned up.
</td>
</tr>
<tr>
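The row above tightens the description of the standalone worker cleanup setting: only directories of stopped applications are removed, not those of running ones. The property name sits outside the hunk, but the description matches `spark.worker.cleanup.enabled`; a minimal sketch of enabling it via `SPARK_WORKER_OPTS` follows (property names and values are assumptions to verify against spark-standalone.md; the interval and TTL are in seconds).

```sh
# conf/spark-env.sh on each standalone worker
SPARK_WORKER_OPTS="-Dspark.worker.cleanup.enabled=true \
  -Dspark.worker.cleanup.interval=1800 \
  -Dspark.worker.cleanup.appDataTtl=604800"
```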
@@ -512,7 +512,7 @@ object KafkaUtils {
* @param topics Names of the topics to consume
*/
@Experimental
def createDirectStream[K, V, KD <: Decoder[K], VD <: Decoder[V], R](
def createDirectStream[K, V, KD <: Decoder[K], VD <: Decoder[V]](
jssc: JavaStreamingContext,
keyClass: Class[K],
valueClass: Class[V],
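The hunk above removes the type parameter `R` from the Java-facing `createDirectStream` overload (the one taking a `JavaStreamingContext` and explicit key/value classes). For context, a minimal sketch of how the Scala counterpart of this experimental API is called, assuming a broker at `localhost:9092` and a topic named `myTopic` (both illustrative):

```scala
import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils

val conf = new SparkConf().setAppName("direct-kafka-sketch").setMaster("local[2]")
val ssc = new StreamingContext(conf, Seconds(2))

val kafkaParams = Map("metadata.broker.list" -> "localhost:9092") // illustrative broker list
val topics = Set("myTopic")                                       // illustrative topic name

// Type parameters: key type, value type, key decoder, value decoder.
val stream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
  ssc, kafkaParams, topics)

stream.map(_._2).print() // print message values
ssc.start()
ssc.awaitTermination()
```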
@@ -68,8 +68,8 @@ private[spark] class ApplicationMaster(
@volatile private var finalMsg: String = ""
@volatile private var userClassThread: Thread = _

private var reporterThread: Thread = _
private var allocator: YarnAllocator = _
@volatile private var reporterThread: Thread = _
@volatile private var allocator: YarnAllocator = _

// Fields used in client mode.
private var actorSystem: ActorSystem = null
@@ -486,11 +486,10 @@ private[spark] class ApplicationMaster(
case _: InterruptedException =>
// Reporter thread can interrupt to stop user class
case cause: Throwable =>
logError("User class threw exception: " + cause.getMessage, cause)
finish(FinalApplicationStatus.FAILED,
ApplicationMaster.EXIT_EXCEPTION_USER_CLASS,
"User class threw exception: " + cause.getMessage)
// re-throw to get it logged
throw cause
}
}
}
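Two changes land in `ApplicationMaster` above: `reporterThread` and `allocator` gain `@volatile`, and the catch block appears to log the user-class exception (with its stack trace) before calling `finish(...)` instead of re-throwing it. A minimal sketch (not Spark's code) of why `@volatile` matters for a field written by one thread and read by another:

```scala
// Illustrative only: a reference published by the starting thread and polled by a monitor thread.
class Coordinator {
  @volatile private var worker: Thread = _ // @volatile makes the write visible across threads

  def start(): Unit = {
    worker = new Thread(new Runnable {
      override def run(): Unit = Thread.sleep(1000)
    })
    worker.start()
  }

  def isRunning: Boolean = {
    val w = worker // read the volatile reference once
    w != null && w.isAlive
  }
}
```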
