Skip to content

Commit

Permalink
HIVE-9477: No error thrown when global limit optimization failed to find enough number of rows [Spark Branch] (Rui via Xuefu)
Browse files Browse the repository at this point in the history

git-svn-id: https://svn.apache.org/repos/asf/hive/branches/spark@1655468 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information
Xuefu Zhang committed Jan 28, 2015
1 parent 12d769d commit c0d1e54
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -233,6 +233,7 @@ private JobConf cloneJobConf(BaseWork work) throws Exception {
throw new IllegalArgumentException(msg, e);
}
if (work instanceof MapWork) {
cloned.setBoolean("mapred.task.is.map", true);
List<Path> inputPaths = Utilities.getInputPaths(cloned, (MapWork) work,
scratchDir, context, false);
Utilities.setInputPaths(cloned, inputPaths);
Expand All @@ -250,6 +251,7 @@ private JobConf cloneJobConf(BaseWork work) throws Exception {
// remember the JobConf cloned for each MapWork, so we won't clone for it again
workToJobConf.put(work, cloned);
} else if (work instanceof ReduceWork) {
cloned.setBoolean("mapred.task.is.map", false);
Utilities.setReduceWork(cloned, (ReduceWork) work, scratchDir, false);
Utilities.createTmpDirs(cloned, (ReduceWork) work);
cloned.set(Utilities.MAPRED_REDUCER_CLASS, ExecReducer.class.getName());
Expand Down
5 changes: 5 additions & 0 deletions ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Hive;
Expand Down Expand Up @@ -280,6 +281,10 @@ public void compile(final ParseContext pCtx, final List<Task<? extends Serializa
for (ExecDriver tsk : mrTasks) {
tsk.setRetryCmdWhenFail(true);
}
List<SparkTask> sparkTasks = Utilities.getSparkTasks(rootTasks);
for (SparkTask sparkTask : sparkTasks) {
sparkTask.setRetryCmdWhenFail(true);
}
}

Interner<TableDesc> interner = Interners.newStrongInterner();
Expand Down

0 comments on commit c0d1e54

Please sign in to comment.