HIVE-23394: Fix Flaky TestJdbcGenericUDTFGetSplits{2}#testGenericUDTFOrderBySplitCount1 (Simhadri Govindappa, reviewed by Attila Turoczy, Ayush Saxena, Denys Kuzmenko)

Closes apache#4249
simhadri-g authored and tarak271 committed Dec 19, 2023
1 parent cfa5e67 commit 8e267dd
Showing 3 changed files with 19 additions and 56 deletions.
File: AbstractTestJdbcGenericUDTFGetSplits.java
@@ -76,9 +76,7 @@ public static void beforeTest() throws Exception {
     conf.setVar(HiveConf.ConfVars.LLAP_IO_MEMORY_MODE, "none");
     conf.setVar(HiveConf.ConfVars.LLAP_EXTERNAL_SPLITS_TEMP_TABLE_STORAGE_FORMAT, "text");
 
-
-    conf.addResource(new URL("file://" + new File(confDir).toURI().getPath()
-        + "/tez-site.xml"));
+    conf.addResource(new URL("file://" + new File(confDir).toURI().getPath() + "/tez-site.xml"));
 
     miniHS2 = new MiniHS2(conf, MiniHS2.MiniClusterType.LLAP);
     dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", "");
@@ -107,8 +105,7 @@ public void tearDown() throws Exception {
     hs2Conn.close();
   }
 
-  protected void runQuery(final String query, final List<String> setCmds,
-      final int numRows) throws Exception {
+  protected void runQuery(final String query, final List<String> setCmds, final int numRows) throws Exception {
 
     Connection con = hs2Conn;
     BaseJdbcWithMiniLlap.createTestTable(con, null, tableName, kvDataFilePath.toString());
@@ -179,15 +176,9 @@ protected void testGenericUDTFOrderBySplitCount1(String udtfName, int[] expectedCounts)
     query = "select " + udtfName + "(" + "'select value from " + tableName + " where value is not null limit 2', 5)";
     runQuery(query, getConfigs(), expectedCounts[5]);
 
-    query = "select " + udtfName + "(" +
-        "'select `value` from (select value from " + tableName + " where value is not null order by value) as t', 5)";
+    query = "select " + udtfName + "(" + "'select `value` from (select value from " + tableName +
+        " where value is not null order by value) as t', 5)";
     runQuery(query, getConfigs(), expectedCounts[6]);
-
-    List<String> setCmds = getConfigs();
-    setCmds.add("set hive.llap.external.splits.order.by.force.single.split=false");
-    query = "select " + udtfName + "(" +
-        "'select `value` from (select value from " + tableName + " where value is not null order by value) as t', 10)";
-    runQuery(query, setCmds, expectedCounts[7]);
   }
 
   protected void testGenericUDTFOrderBySplitCount1OnPartitionedTable(String udtfName, int[] expectedCounts)
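
For readers skimming the diff: each expectedCounts entry above is compared against the number of rows that the get_splits/get_llap_splits query returns over JDBC. The snippet below is only a hedged sketch of that runQuery pattern, with assumed class and method names and an assumed JUnit assertion; it is not the actual Hive implementation in this file.

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import java.util.List;

    import static org.junit.Assert.assertEquals;

    // Hedged sketch only; the real runQuery helper in the abstract test class may differ in details.
    public class RunQuerySketch {

      static void runQuerySketch(Connection con, String query, List<String> setCmds, int expectedRows)
          throws Exception {
        try (Statement stmt = con.createStatement()) {
          for (String cmd : setCmds) {
            stmt.execute(cmd);   // session overrides, e.g. "set hive.llap.external.splits.order.by.force.single.split=false"
          }
          int rows = 0;
          try (ResultSet rs = stmt.executeQuery(query)) {
            while (rs.next()) {
              rows++;            // each returned row corresponds to one split produced by the UDTF
            }
          }
          assertEquals(expectedRows, rows);   // compare against the expectedCounts[i] entry for this query
        }
      }
    }
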
File: TestJdbcGenericUDTFGetSplits.java
@@ -38,16 +38,26 @@
 public class TestJdbcGenericUDTFGetSplits extends AbstractTestJdbcGenericUDTFGetSplits {
 
   @Test(timeout = 200000)
-  @Ignore("HIVE-23394")
-  public void testGenericUDTFOrderBySplitCount1() throws Exception {
-    super.testGenericUDTFOrderBySplitCount1("get_splits", new int[]{10, 1, 0, 2, 2, 2, 1, 10});
+  public void testGetSplitsOrderBySplitCount1() throws Exception {
+    testGenericUDTFOrderBySplitCount1("get_splits", new int[] { 10, 5, 0, 2, 2, 2, 5 });
   }
 
   @Test(timeout = 200000)
-  public void testGenericUDTFOrderBySplitCount1OnPartitionedTable() throws Exception {
-    super.testGenericUDTFOrderBySplitCount1OnPartitionedTable("get_splits", new int[]{5, 5, 1, 1, 1});
+  public void testGetLlapSplitsOrderBySplitCount1() throws Exception {
+    testGenericUDTFOrderBySplitCount1("get_llap_splits", new int[] { 12, 7, 1, 4, 4, 4, 7 });
   }
 
+  @Test(timeout = 200000)
+  public void testGetSplitsOrderBySplitCount1OnPartitionedTable() throws Exception {
+    testGenericUDTFOrderBySplitCount1OnPartitionedTable("get_splits", new int[]{5, 5, 1, 1, 1});
+  }
+
+  @Test(timeout = 200000)
+  public void testGetLlapSplitsOrderBySplitCount1OnPartitionedTable() throws Exception {
+    testGenericUDTFOrderBySplitCount1OnPartitionedTable("get_llap_splits", new int[]{7, 7, 3, 3, 3});
+  }
+
+
 
   @Test
   public void testDecimalPrecisionAndScale() throws Exception {
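
The renamed tests now exercise the same scenarios for both get_splits and get_llap_splits, with the get_llap_splits expected-count arrays consistently higher than their get_splits counterparts. As a rough illustration of how either UDTF is invoked over JDBC, here is a minimal standalone sketch; the JDBC URL, table name, and split-count argument are placeholders, not values from this commit.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class GetSplitsInvocationSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder HiveServer2 URL and table name; the tests in this commit connect through MiniHS2 instead.
        try (Connection con = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = con.createStatement();
             ResultSet rs = stmt.executeQuery(
                 "select get_llap_splits('select value from testData where value is not null', 5)")) {
          int rows = 0;
          while (rs.next()) {
            rows++;   // count the returned rows; the tests assert this count against an expected value
          }
          System.out.println("split rows returned: " + rows);
        }
      }
    }
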

The third changed file was deleted by this commit (contents not shown).
