Skip to content

Commit ceaffe6

Browse files
committed
fix spark-shell NPE without explicit -usejavacp
1 parent 6bba551 commit ceaffe6

2 files changed

Lines changed: 34 additions & 1 deletion

File tree

repl/src/main/scala/org/apache/spark/repl/Main.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,13 +65,15 @@ object Main extends Logging {
6565
// Visible for testing
6666
private[repl] def doMain(args: Array[String], _interp: SparkILoop): Unit = {
6767
interp = _interp
68-
val jars = Utils
68+
val userJars = Utils
6969
.getLocalUserJarsForShell(conf)
7070
// Remove file:///, file:// or file:/ scheme if exists for each jar
7171
.map { x =>
7272
if (x.startsWith("file:")) new File(new URI(x)).getPath else x
7373
}
7474
.mkString(File.pathSeparator)
75+
val jvmClasspath = sys.props.getOrElse("java.class.path", "")
76+
val jars = if (userJars.nonEmpty) jvmClasspath + File.pathSeparator + userJars else jvmClasspath
7577
val interpArguments = List(
7678
"-Yrepl-class-based",
7779
"-Yrepl-outdir",

repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,37 @@ class ReplSuite extends SparkFunSuite {
7272
def runInterpreterInPasteMode(master: String, input: String): String =
7373
runInterpreter(master, ":paste\n" + input + 4.toChar) // 4 is the ascii code of CTRL + D
7474

75+
test("SPARK-56447: spark-shell REPL initializes without explicit -classpath argument") {
76+
// Regression test for SPARK-56447: doMain must include java.class.path in the REPL
77+
// classpath even when the caller does not pass -classpath explicitly. Before the fix,
78+
// the Scala compiler mirror failed to find `object scala` because the JVM classpath
79+
// was not propagated to the REPL settings.
80+
val CONF_EXECUTOR_CLASSPATH = "spark.executor.extraClassPath"
81+
val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
82+
val classpath = System.getProperty("java.class.path")
83+
System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
84+
85+
Main.sparkContext = null
86+
Main.sparkSession = null
87+
Main.conf.set("spark.master", "local")
88+
89+
val in = new BufferedReader(new StringReader("spark.version\n"))
90+
val out = new StringWriter()
91+
// No -classpath arg: doMain must derive the classpath from java.class.path itself.
92+
Main.doMain(Array.empty, new SparkILoop(in, new PrintWriter(out)))
93+
94+
if (oldExecutorClasspath != null) {
95+
System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
96+
} else {
97+
System.clearProperty(CONF_EXECUTOR_CLASSPATH)
98+
}
99+
100+
val output = out.toString
101+
assertDoesNotContain("object scala in compiler mirror not found", output)
102+
assertDoesNotContain("Failed to initialize compiler", output)
103+
assertContains("res0: String =", output)
104+
}
105+
75106
def assertContains(message: String, output: String): Unit = {
76107
val isContain = output.contains(message)
77108
assert(isContain,

0 commit comments

Comments
 (0)