diff --git a/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala b/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala
index b5412f20844b1..c2ed43a5397d6 100644
--- a/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala
@@ -19,10 +19,10 @@ package org.apache.spark.deploy
 
 import java.net.URI
 import java.io.File
 
-import scala.util.Try
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.JavaConversions._
+import scala.util.Try
 
 import org.apache.spark.api.python.PythonUtils
 import org.apache.spark.util.{RedirectThread, Utils}
@@ -98,7 +98,7 @@ object PythonRunner {
 
     // In Windows, the drive should not be prefixed with "/"
     // For instance, python does not understand "/C:/path/to/sheep.py"
-    if (formattedPath.matches("/[a-zA-Z]:/.*")) {
+    if (Utils.isWindows && formattedPath.matches("/[a-zA-Z]:/.*")) {
       formattedPath = formattedPath.stripPrefix("/")
     }
     formattedPath
diff --git a/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
index 40000ee7101b9..80f2cc02516fe 100644
--- a/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
@@ -18,6 +18,7 @@ package org.apache.spark.deploy
 
 import org.scalatest.FunSuite
+
 import org.apache.spark.util.Utils
 
 class PythonRunnerSuite extends FunSuite {
 
@@ -29,9 +30,9 @@ class PythonRunnerSuite extends FunSuite {
     assert(PythonRunner.formatPath("file:///spark.py") === "/spark.py")
     assert(PythonRunner.formatPath("local:/spark.py") === "/spark.py")
     assert(PythonRunner.formatPath("local:///spark.py") === "/spark.py")
-    assert(PythonRunner.formatPath("file:/C:/a/b/spark.py", testWindows = true) ===
-      "C:/a/b/spark.py")
     if (Utils.isWindows) {
+      assert(PythonRunner.formatPath("file:/C:/a/b/spark.py", testWindows = true) ===
+        "C:/a/b/spark.py")
       assert(PythonRunner.formatPath("C:\\a\\b\\spark.py", testWindows = true) ===
         "C:/a/b/spark.py")
       assert(PythonRunner.formatPath("C:\\a b\\spark.py", testWindows = true) ===
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 3443f1a5b89d0..c806e9c82d067 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -207,7 +207,7 @@ class SparkILoop(
       SparkILoop.getAddedJars.map { jar => new URI(jar).getPath.stripPrefix("/") }
     } else {
       // We need new URI(jar).getPath here for the case that `jar` includes encoded white space (%20).
-      SparkILoop.getAddedJars.map { jar => new URI(jar).getPath}
+      SparkILoop.getAddedJars.map { jar => new URI(jar).getPath }
     }
     // work around for Scala bug
     val totalClassPath = addedJars.foldLeft(