Skip to content

Commit

Permalink
HIVE-8788: UT: fix partition test case [Spark Branch] (Chinna via Xuefu)
Browse files Browse the repository at this point in the history
git-svn-id: https://svn.apache.org/repos/asf/hive/branches/spark@1641640 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information
Xuefu Zhang committed Nov 25, 2014
1 parent 70fef74 commit 85b566e
Show file tree
Hide file tree
Showing 3 changed files with 631 additions and 0 deletions.
16 changes: 16 additions & 0 deletions ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,8 @@
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.SparkWork;
import org.apache.hadoop.hive.ql.plan.StatsWork;
import org.apache.hadoop.hive.ql.plan.UnionWork;
Expand Down Expand Up @@ -214,6 +216,20 @@ public Collection<MapWork> getMapWork() {
return result;
}

@Override
public Operator<? extends OperatorDesc> getReducer(MapWork mapWork) {
  // A reducer is only well-defined when the given map work feeds exactly one
  // downstream work and that work is a ReduceWork; otherwise report none.
  List<BaseWork> children = getWork().getChildren(mapWork);
  if (children.size() == 1 && children.get(0) instanceof ReduceWork) {
    ReduceWork reduceWork = (ReduceWork) children.get(0);
    return reduceWork.getReducer();
  }
  return null;
}

/** Returns the Spark counters associated with this task. */
public SparkCounters getSparkCounters() {
  return this.sparkCounters;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@
import org.apache.hadoop.hive.ql.lib.TypeRule;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.optimizer.physical.CrossProductCheck;
import org.apache.hadoop.hive.ql.optimizer.physical.MetadataOnlyOptimizer;
import org.apache.hadoop.hive.ql.optimizer.physical.NullScanOptimizer;
import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalContext;
import org.apache.hadoop.hive.ql.optimizer.physical.SparkMapJoinResolver;
Expand Down Expand Up @@ -278,6 +279,12 @@ protected void optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, Pa
LOG.debug("Skipping null scan query optimization");
}

if (conf.getBoolVar(HiveConf.ConfVars.HIVEMETADATAONLYQUERIES)) {
physicalCtx = new MetadataOnlyOptimizer().resolve(physicalCtx);
} else {
LOG.debug("Skipping metadata only query optimization");
}

if (conf.getBoolVar(HiveConf.ConfVars.HIVE_CHECK_CROSS_PRODUCT)) {
physicalCtx = new CrossProductCheck().resolve(physicalCtx);
} else {
Expand Down
Loading

0 comments on commit 85b566e

Please sign in to comment.