diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index b038dd6dbad..c875e8557b8 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -244,9 +244,12 @@ public SparkContext createSparkContext() {
         new SparkConf()
             .setMaster(getProperty("master"))
             .setAppName(getProperty("spark.app.name"))
-            .setJars(jars)
             .set("spark.repl.class.uri", classServerUri);
 
+    if (jars.length > 0) {
+      conf.setJars(jars);
+    }
+
     if (execUri != null) {
       conf.set("spark.executor.uri", execUri);
     }
@@ -259,17 +262,19 @@ public SparkContext createSparkContext() {
 
     for (Object k : intpProperty.keySet()) {
       String key = (String) k;
-      Object value = intpProperty.get(key);
-      logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, value));
-      conf.set(key, (String) value);
+      String val = toString(intpProperty.get(key));
+      if (!key.startsWith("spark.") || !val.trim().isEmpty()) {
+        logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
+        conf.set(key, val);
+      }
     }
 
     SparkContext sparkContext = new SparkContext(conf);
     return sparkContext;
   }
 
-  public static boolean isEmptyString(Object val) {
-    return val instanceof String && ((String) val).trim().isEmpty();
+  static final String toString(Object o) {
+    return (o instanceof String) ? (String) o : "";
   }
 
   public static String getSystemDefault(
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index a5e0fe22760..87df793bb54 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -25,6 +25,7 @@
 import java.util.LinkedList;
 import java.util.Properties;
 
+import org.apache.spark.SparkConf;
 import org.apache.zeppelin.display.AngularObjectRegistry;
 import org.apache.zeppelin.display.GUI;
 import org.apache.zeppelin.interpreter.InterpreterContext;
@@ -138,4 +139,18 @@ public void testZContextDependencyLoading() {
     repl.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
     assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
   }
+
+  @Test
+  public void emptyConfigurationVariablesOnlyForNonSparkProperties() {
+    Properties intpProperty = repl.getProperty();
+    SparkConf sparkConf = repl.getSparkContext().getConf();
+    for (Object oKey : intpProperty.keySet()) {
+      String key = (String) oKey;
+      String value = (String) intpProperty.get(key);
+      repl.logger.debug(String.format("[%s]: [%s]", key, value));
+      if (key.startsWith("spark.") && value.isEmpty()) {
+        assertTrue(String.format("configuration starting from 'spark.' should not be empty. [%s]", key), !sparkConf.contains(key) || !sparkConf.get(key).isEmpty());
+      }
+    }
+  }
 }