Skip to content

Commit

Permalink
HIVE-22491 : Use Collections emptyList (David Mollitor via Ashutosh Chauhan)
Browse files Browse the repository at this point in the history

Signed-off-by: Ashutosh Chauhan <[email protected]>
  • Loading branch information
belugabehr authored and ashutoshc committed May 10, 2020
1 parent d12a308 commit 0bf24c8
Show file tree
Hide file tree
Showing 22 changed files with 48 additions and 44 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -66,7 +67,7 @@ public static List<FileStatus> getFileStatusRecurse(Path path, int level, FileS
// does not exist. But getFileStatus() throw IOException. To mimic the
// similar behavior we will return empty array on exception. For external
// tables, the path of the table will not exists during table creation
return new ArrayList<>(0);
return Collections.emptyList();
}
return result;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -1007,7 +1008,7 @@ public HCatNotificationEvent apply(@Nullable NotificationEvent notificationEvent
}
});
} else {
return new ArrayList<HCatNotificationEvent>();
return Collections.emptyList();
}
} catch (TException e) {
throw new ConnectionFailureException("TException while getting notifications", e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
package org.apache.hive.hcatalog.templeton.tool;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Date;
Expand Down Expand Up @@ -138,7 +137,7 @@ public List<String> getChildList(CuratorFramework zk) {
} catch (IOException e) {
LOG.info("No jobs to check.");
}
return new ArrayList<String>();
return Collections.emptyList();
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hive.common.ZooKeeperHiveHelper;
Expand Down Expand Up @@ -280,7 +281,7 @@ public List<String> getAllForType(Type type) {
try {
return zk.getChildren().forPath(getPath(type));
} catch (Exception e) {
return new ArrayList<String>();
return Collections.emptyList();
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ private static IndexPredicateAnalyzer newAnalyzer(Schema schema) {
public static List<KuduPredicate> getPredicates(Configuration conf, Schema schema) {
SearchArgument sarg = ConvertAstToSearchArg.createFromConf(conf);
if (sarg == null) {
return new ArrayList<>();
return Collections.emptyList();
}
return toKuduPredicates(sarg, schema);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,8 @@
import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
Expand Down Expand Up @@ -694,7 +694,7 @@ public static MapredWork clonePlan(MapredWork plan) {
*/
public static List<Operator<?>> cloneOperatorTree(List<Operator<?>> roots) {
if (roots.isEmpty()) {
return new ArrayList<>();
return Collections.emptyList();
}
ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
CompilationOpContext ctx = roots.get(0).getCompilationOpContext();
Expand Down
2 changes: 1 addition & 1 deletion ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
Original file line number Diff line number Diff line change
Expand Up @@ -4141,7 +4141,7 @@ public static Collection<Class<?>> getClassNamesFromConfig(HiveConf hiveConf, Co
String[] classNames = org.apache.hadoop.util.StringUtils.getStrings(HiveConf.getVar(hiveConf,
confVar));
if (classNames == null) {
return new ArrayList<>(0);
return Collections.emptyList();
}
Collection<Class<?>> classList = new ArrayList<Class<?>>(classNames.length);
for (String className : classNames) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
Expand All @@ -31,7 +31,6 @@
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.ExplainTask;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.parse.ExplainConfiguration;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
import org.apache.hadoop.mapred.JobConf;
Expand Down Expand Up @@ -226,15 +225,15 @@ public void connect(SparkTran parent, SparkTran child) {

public List<SparkTran> getParents(SparkTran tran) {
if (!invertedTransGraph.containsKey(tran)) {
return new ArrayList<SparkTran>();
return Collections.emptyList();
}

return invertedTransGraph.get(tran);
}

public List<SparkTran> getChildren(SparkTran tran) {
if (!transGraph.containsKey(tran)) {
return new ArrayList<SparkTran>();
return Collections.emptyList();
}

return transGraph.get(tran);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
Expand Down Expand Up @@ -481,7 +482,7 @@ public static List<Operator<? extends OperatorDesc>> doGetWorksFromPath(Map<Path
**/
public static List<String> doGetAliasesFromPath(Map<Path, List<String>> pathToAliases, Path dir) {
if (pathToAliases == null) {
return new ArrayList<String>();
return Collections.emptyList();
}
Path path = getMatchingPath(pathToAliases, dir);
return pathToAliases.get(path);
Expand Down
8 changes: 3 additions & 5 deletions ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
Original file line number Diff line number Diff line change
Expand Up @@ -1625,8 +1625,7 @@ public List<RelOptMaterialization> getPreprocessedMaterializedViewsFromRegistry(
List<RelOptMaterialization> materializedViews =
HiveMaterializedViewsRegistry.get().getRewritingMaterializedViews();
if (materializedViews.isEmpty()) {
// Bail out: empty list
return new ArrayList<>();
return Collections.emptyList();
}
// Add to final result
return filterAugmentMaterializedViews(materializedViews, tablesUsed, txnMgr);
Expand Down Expand Up @@ -1772,8 +1771,7 @@ public List<RelOptMaterialization> getPreprocessedMaterializedViews(
List<Table> materializedViewTables =
getAllMaterializedViewObjectsForRewriting();
if (materializedViewTables.isEmpty()) {
// Bail out: empty list
return new ArrayList<>();
return Collections.emptyList();
}
// Return final result
return getValidMaterializedViews(materializedViewTables, tablesUsed, false, txnMgr);
Expand Down Expand Up @@ -3819,7 +3817,7 @@ public List<Partition> getPartitionsByFilter(Table tbl, String filter)
private static List<Partition> convertFromMetastore(Table tbl,
List<org.apache.hadoop.hive.metastore.api.Partition> partitions) throws HiveException {
if (partitions == null) {
return new ArrayList<Partition>();
return Collections.emptyList();
}

List<Partition> results = new ArrayList<Partition>(partitions.size());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
Expand Down Expand Up @@ -511,7 +512,7 @@ private List<FieldSchema> getColsInternal(boolean forMs) {
tPartition.getSd().getSerdeInfo().getSerializationLib(), e);
}

return new ArrayList<FieldSchema>();
return Collections.emptyList();
}

public String getLocation() {
Expand Down
3 changes: 2 additions & 1 deletion ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
Expand Down Expand Up @@ -714,7 +715,7 @@ private List<FieldSchema> getColsInternal(boolean forMs) {
} catch (Exception e) {
LOG.error("Unable to get field from serde: " + serializationLib, e);
}
return new ArrayList<FieldSchema>();
return Collections.emptyList();
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.optimizer.calcite.rules;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
Expand Down Expand Up @@ -204,13 +205,13 @@ private static List<RexNode> extractCommonOperands(RexBuilder rexBuilder, RelNod
for (RexNode conjunction : conjunctions) {
// We do not know what it is, we bail out for safety
if (!(conjunction instanceof RexCall) || !HiveCalciteUtil.isDeterministic(conjunction)) {
return new ArrayList<>();
return Collections.emptyList();
}
RexCall conjCall = (RexCall) conjunction;
Set<Integer> refs = HiveCalciteUtil.getInputRefs(conjCall);
if (refs.size() != 1) {
// We do not know what it is, we bail out for safety
return new ArrayList<>();
return Collections.emptyList();
}
RexNode ref = rexBuilder.makeInputRef(input, refs.iterator().next());
String stringRef = ref.toString();
Expand All @@ -227,7 +228,7 @@ private static List<RexNode> extractCommonOperands(RexBuilder rexBuilder, RelNod
// If we did not add any factor or there are no common factors, we can
// bail out
if (refsInAllOperands.isEmpty()) {
return new ArrayList<>();
return Collections.emptyList();
}
}

Expand Down
2 changes: 1 addition & 1 deletion ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -660,7 +660,7 @@ public static List<FieldSchema> getFieldSchemasFromRowSchema(RowSchema row,
public static List<FieldSchema> getFieldSchemasFromColumnInfo(
List<ColumnInfo> cols, String fieldPrefix) {
if ((cols == null) || (cols.size() == 0)) {
return new ArrayList<FieldSchema>();
return Collections.emptyList();
}

List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.LinkedHashSet;
import java.util.List;
Expand Down Expand Up @@ -575,7 +576,7 @@ public List<String> getKeyColumns() {
int[] keyColumnMap = vectorReduceSinkInfo.getReduceSinkKeyColumnMap();
if (keyColumnMap == null) {
// Always show an array.
return new ArrayList<String>();
return Collections.emptyList();
}
return outputColumnsAndTypesToStringList(
vectorReduceSinkInfo.getReduceSinkKeyColumnMap(),
Expand All @@ -591,7 +592,7 @@ public List<String> getValueColumns() {
int[] valueColumnMap = vectorReduceSinkInfo.getReduceSinkValueColumnMap();
if (valueColumnMap == null) {
// Always show an array.
return new ArrayList<String>();
return Collections.emptyList();
}
return outputColumnsAndTypesToStringList(
vectorReduceSinkInfo.getReduceSinkValueColumnMap(),
Expand Down
4 changes: 2 additions & 2 deletions ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -1059,8 +1059,8 @@ public static List<ColStatistics> getTableColumnStats(
}

private static List<ColStatistics> convertColStats(List<ColumnStatisticsObj> colStats, String tabName) {
if (colStats==null) {
return new ArrayList<ColStatistics>();
if (colStats == null) {
return Collections.emptyList();
}
List<ColStatistics> stats = new ArrayList<ColStatistics>(colStats.size());
for (ColumnStatisticsObj statObj : colStats) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hive.serde2;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

Expand Down Expand Up @@ -107,7 +107,7 @@ public StructField getStructFieldRef(String fieldName) {

@Override
public List<NullStructField> getAllStructFieldRefs() {
return new ArrayList<NullStructField>();
return Collections.emptyList();
}

@Override
Expand All @@ -117,7 +117,7 @@ public Object getStructFieldData(Object data, StructField fieldRef) {

@Override
public List<Object> getStructFieldsDataAsList(Object data) {
return new ArrayList<Object>();
return Collections.emptyList();
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,7 @@ public static List<FileStatus> getFileStatusRecurse(Path base, FileSystem fs) {
// does not exist. But getFileStatus() throw IOException. To mimic the
// similar behavior we will return empty array on exception. For external
// tables, the path of the table will not exists during table creation
return new ArrayList<>(0);
return Collections.emptyList();
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6224,12 +6224,10 @@ public List<String> get_partition_names_ps(final String db_name,
@Override
public List<String> partition_name_to_vals(String part_name) throws TException {
if (part_name.length() == 0) {
return new ArrayList<>();
return Collections.emptyList();
}
LinkedHashMap<String, String> map = Warehouse.makeSpecFromName(part_name);
List<String> part_vals = new ArrayList<>();
part_vals.addAll(map.values());
return part_vals;
return new ArrayList<>(map.values());
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -706,17 +707,17 @@ private List<String> diff(final List<String> list1, final List<String> list2) {
return list1;

if (list1 == null || list1.size() == 0)
return new ArrayList<String>();
return Collections.emptyList();

if (list2.containsAll(list1))
return new ArrayList<String>();
return Collections.emptyList();

diffList.addAll(list2);
LOG.debug("diffList=" + Arrays.toString(diffList.toArray()) + ",master list=" + Arrays.toString(list1.toArray()));
if (diffList.retainAll(list1)) {
LOG.debug("diffList=" + Arrays.toString(diffList.toArray()));
if (diffList.size() == list1.size()) { // lists match
return new ArrayList<String>(); // return empty list indicating no missing elements
return Collections.emptyList(); // return empty list indicating no missing elements
} else {
list1.removeAll(diffList);
LOG.debug("list1.size():" + list1.size());
Expand Down
Loading

0 comments on commit 0bf24c8

Please sign in to comment.