ZEPPELIN-46: set only non-empty values for spark.* properties
https://issues.apache.org/jira/browse/ZEPPELIN-46

Author: Jongyoul Lee <[email protected]>
Author: Alexander Bezzubov <[email protected]>
Author: Alexander Bezzubov <[email protected]>
Author: Alexander <[email protected]>

Closes apache#38 from bzz/fix-non-empty-spark-conf-paraments and squashes the following commits:

56a5ce1 [Alexander Bezzubov] ZEPPELIN-46: fix style convention
50632f1 [Alexander Bezzubov] Merge branch 'master' into fix-non-empty-spark-conf-paraments
922b523 [Alexander] Merge pull request apache#1 from jongyoul/ZEPPELIN-46
cd0b4d4 [Jongyoul Lee] [ZEPPELIN-46] Some spark env must have a valid value - Fixed unused imports
f259a5a [Jongyoul Lee] [ZEPPELIN-46] Some spark env must have a valid value - Fixed styles
aec31d3 [Jongyoul Lee] [ZEPPELIN-46] Some spark env must have a valid value - Fixed styles - Fixed some test cases
5e22509 [Jongyoul Lee] Resolve conflicts
9d94910 [Alexander Bezzubov] ZEPPELIN-46: make tests pass on local Spark
3ff8460 [Alexander Bezzubov] ZEPPELIN-46 adding tests, by @jongyoul
00e4676 [Alexander Bezzubov] ZEPPELIN-46 fixing a typo
0ae83f4 [Alexander Bezzubov] ZEPPELIN-46: set only non-empty properties for spark.*
jongyoul authored and bzz committed Apr 20, 2015
1 parent f86a4e9 commit cdd343b
Showing 2 changed files with 26 additions and 6 deletions.
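
For context on why the change matters (an illustration added here, not part of the commit): Spark accepts an empty string for any spark.* key, but its typed getters then fail when parsing it, so an interpreter setting left blank is worse than no setting at all. A minimal sketch, assuming Spark 1.x SparkConf behaviour; the class name below is made up for the example:

import org.apache.spark.SparkConf;

// Illustration only: copying a blank interpreter property such as
// "spark.cores.max" onto the SparkConf breaks typed reads of that key,
// whereas leaving the key unset simply falls back to the default.
public class EmptySparkPropertyExample {
  public static void main(String[] args) {
    SparkConf polluted = new SparkConf(false).set("spark.cores.max", "");
    try {
      polluted.getInt("spark.cores.max", 2);
    } catch (NumberFormatException e) {
      // Spark 1.x parses the raw string, so "" cannot be read as an int
      System.out.println("blank spark.* value breaks typed reads: " + e);
    }

    SparkConf filtered = new SparkConf(false);              // key skipped, as after this commit
    System.out.println(filtered.getInt("spark.cores.max", 2));  // prints 2 (the default)
  }
}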
@@ -244,9 +244,12 @@ public SparkContext createSparkContext() {
         new SparkConf()
             .setMaster(getProperty("master"))
             .setAppName(getProperty("spark.app.name"))
-            .setJars(jars)
             .set("spark.repl.class.uri", classServerUri);
 
+    if (jars.length > 0) {
+      conf.setJars(jars);
+    }
+
     if (execUri != null) {
       conf.set("spark.executor.uri", execUri);
     }
@@ -259,17 +262,19 @@ public SparkContext createSparkContext() {
 
     for (Object k : intpProperty.keySet()) {
       String key = (String) k;
-      Object value = intpProperty.get(key);
-      logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, value));
-      conf.set(key, (String) value);
+      String val = toString(intpProperty.get(key));
+      if (!key.startsWith("spark.") || !val.trim().isEmpty()) {
+        logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
+        conf.set(key, val);
+      }
     }
 
     SparkContext sparkContext = new SparkContext(conf);
     return sparkContext;
   }
 
-  public static boolean isEmptyString(Object val) {
-    return val instanceof String && ((String) val).trim().isEmpty();
+  static final String toString(Object o) {
+    return (o instanceof String) ? (String) o : "";
   }
 
   public static String getSystemDefault(
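Read on its own, the patched loop above says: coerce each interpreter property to a string, then copy it onto the SparkConf unless it is a spark.* key whose value is blank (non-spark.* keys are always copied). A standalone sketch of that rule, with hypothetical class and method names that are not part of the patch:

import java.util.Properties;

import org.apache.spark.SparkConf;

// Hypothetical helper mirroring the patched loop in createSparkContext():
// blank spark.* values are dropped so they never reach the SparkConf.
public class NonEmptySparkConfSketch {

  static String asString(Object o) {
    return (o instanceof String) ? (String) o : "";
  }

  static SparkConf copyInterpreterProperties(Properties intpProperty, SparkConf conf) {
    for (Object k : intpProperty.keySet()) {
      String key = (String) k;
      String val = asString(intpProperty.get(key));
      if (!key.startsWith("spark.") || !val.trim().isEmpty()) {
        conf.set(key, val);
      }
    }
    return conf;
  }

  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty("spark.executor.memory", "");          // skipped: blank spark.* value
    props.setProperty("spark.cores.max", "2");               // kept: non-empty spark.* value
    props.setProperty("zeppelin.spark.useHiveContext", "");  // kept: not a spark.* key

    SparkConf conf = copyInterpreterProperties(props, new SparkConf(false));
    System.out.println(conf.contains("spark.executor.memory"));  // false
    System.out.println(conf.get("spark.cores.max"));             // 2
  }
}

The remaining hunks are in the interpreter's test class, which adds a regression test pinning down exactly this behaviour.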
@@ -25,6 +25,7 @@
 import java.util.LinkedList;
 import java.util.Properties;
 
+import org.apache.spark.SparkConf;
 import org.apache.zeppelin.display.AngularObjectRegistry;
 import org.apache.zeppelin.display.GUI;
 import org.apache.zeppelin.interpreter.InterpreterContext;
@@ -138,4 +139,18 @@ public void testZContextDependencyLoading() {
     repl.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
     assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
   }
+
+  @Test
+  public void emptyConfigurationVariablesOnlyForNonSparkProperties() {
+    Properties intpProperty = repl.getProperty();
+    SparkConf sparkConf = repl.getSparkContext().getConf();
+    for (Object oKey : intpProperty.keySet()) {
+      String key = (String) oKey;
+      String value = (String) intpProperty.get(key);
+      repl.logger.debug(String.format("[%s]: [%s]", key, value));
+      if (key.startsWith("spark.") && value.isEmpty()) {
+        assertTrue(String.format("configuration starting from 'spark.' should not be empty. [%s]", key), !sparkConf.contains(key) || !sparkConf.get(key).isEmpty());
+      }
+    }
+  }
 }
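
The added test walks every interpreter property and, for each spark.* key whose configured value is empty, asserts that the key either never reached the SparkConf or carries a non-empty value there, so blank spark.* settings can no longer leak into the Spark context.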
