Commit 0050fa5

MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

aajisaka committed Mar 29, 2016
1 parent 8018280 commit 0050fa5
Showing 80 changed files with 841 additions and 573 deletions.
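The change repeated across these files is the standard JUnit 3 to JUnit 4 migration: stop extending junit.framework.TestCase, mark test methods with @Test, replace the protected setUp()/tearDown() overrides with public @Before/@After methods, and statically import from org.junit.Assert the assertions that TestCase used to provide through inheritance. A minimal before/after sketch of the pattern (FooTest is a hypothetical class, not one from this commit):

// Before - JUnit 3: discovery by convention. The runner reflects on the
// TestCase subclass and executes every public method whose name starts with "test".
import junit.framework.TestCase;

public class FooTest extends TestCase {
  protected void setUp() throws Exception {
    super.setUp();            // lifecycle chained manually
  }

  public void testAddition() {
    assertEquals(4, 2 + 2);   // assertEquals inherited from TestCase
  }
}

// After - JUnit 4: discovery by annotation, no superclass required.
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class FooTest {
  @Before
  public void setUp() throws Exception {
    // no super.setUp(); the runner invokes all @Before methods itself
  }

  @Test
  public void testAddition() {
    assertEquals(4, 2 + 2);   // now a static import from org.junit.Assert
  }
}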
DFSCIOTest.java

@@ -28,8 +28,6 @@
 import java.util.Date;
 import java.util.StringTokenizer;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -39,8 +37,9 @@
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
 import org.junit.Ignore;
+import org.junit.Test;
 
-/**
+/**
  * Distributed i/o benchmark.
  * <p>
  * This test writes into or reads from a specified number of files.
@@ -68,7 +67,7 @@
  * </ul>
  */
 @Ignore
-public class DFSCIOTest extends TestCase {
+public class DFSCIOTest {
   // Constants
   private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
   private static final int TEST_TYPE_READ = 0;
@@ -98,6 +97,7 @@ public class DFSCIOTest extends TestCase {
    *
    * @throws Exception
    */
+  @Test
   public void testIOs() throws Exception {
     testIOs(10, 10);
   }
TestFileSystem.java

@@ -34,8 +34,6 @@
 import java.net.InetSocketAddress;
 import java.net.URI;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -50,8 +48,15 @@
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
+
 
-public class TestFileSystem extends TestCase {
+public class TestFileSystem {
   private static final Log LOG = FileSystem.LOG;
 
   private static Configuration conf = new Configuration();
@@ -66,6 +71,7 @@ public class TestFileSystem extends TestCase {
   private static Path READ_DIR = new Path(ROOT, "fs_read");
   private static Path DATA_DIR = new Path(ROOT, "fs_data");
 
+  @Test
   public void testFs() throws Exception {
     testFs(10 * MEGA, 100, 0);
   }
@@ -90,7 +96,8 @@ public static void testFs(long megaBytes, int numFiles, long seed)
     fs.delete(READ_DIR, true);
   }
 
-  public static void testCommandFormat() throws Exception {
+  @Test
+  public void testCommandFormat() throws Exception {
     // This should go to TestFsShell.java when it is added.
     CommandFormat cf;
     cf= new CommandFormat("copyToLocal", 2,2,"crc","ignoreCrc");
@@ -488,6 +495,7 @@ public static void main(String[] args) throws Exception {
     }
   }
 
+  @Test
   public void testFsCache() throws Exception {
     {
       long now = System.currentTimeMillis();
@@ -561,6 +569,7 @@ static void checkPath(MiniDFSCluster cluster, FileSystem fileSys) throws IOException
         + StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
   }
 
+  @Test
   public void testFsClose() throws Exception {
     {
       Configuration conf = new Configuration();
@@ -569,6 +578,7 @@ public void testFsClose() throws Exception {
     }
   }
 
+  @Test
   public void testFsShutdownHook() throws Exception {
     final Set<FileSystem> closed = Collections.synchronizedSet(new HashSet<FileSystem>());
     Configuration conf = new Configuration();
@@ -600,7 +610,7 @@ public void testFsShutdownHook() throws Exception {
     assertTrue(closed.contains(fsWithoutAuto));
   }
 
-
+  @Test
   public void testCacheKeysAreCaseInsensitive()
       throws Exception
   {
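Note the testCommandFormat change above: besides gaining @Test, the method loses its static modifier, because JUnit 4's default runner only accepts @Test methods that are public instance methods returning void and taking no arguments. A small sketch of the constraint (SignatureExample is hypothetical, not part of this commit):

import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class SignatureExample {
  @Test
  public void validTest() {                // public, non-static, void, no args: accepted
    assertEquals(2, 1 + 1);
  }

  // @Test
  // public static void invalidTest() {}   // if uncommented, the runner rejects it
  //                                       // with an initialization error
}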
TestJHLA.java

@@ -23,19 +23,18 @@
 import java.io.OutputStreamWriter;
 import java.io.File;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Test;
 
 /**
  * Test Job History Log Analyzer.
  *
  * @see JHLogAnalyzer
  */
-public class TestJHLA extends TestCase {
+public class TestJHLA {
   private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
   private String historyLog = System.getProperty("test.build.data",
       "build/test/data") + "/history/test.log";
@@ -133,6 +132,7 @@ public void tearDown() throws Exception {
   /**
    * Run log analyzer in test mode for file test.log.
    */
+  @Test
   public void testJHLA() {
     String[] args = {"-test", historyLog, "-jobDelimiter", ".!!FILE=.*!!"};
     JHLogAnalyzer.main(args);
TestSequenceFileMergeProgress.java

@@ -32,21 +32,25 @@
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.mapred.*;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.*;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
-public class TestSequenceFileMergeProgress extends TestCase {
+public class TestSequenceFileMergeProgress {
   private static final Log LOG = FileInputFormat.LOG;
   private static final int RECORDS = 10000;
-
+
+  @Test
   public void testMergeProgressWithNoCompression() throws IOException {
     runTest(SequenceFile.CompressionType.NONE);
   }
 
+  @Test
   public void testMergeProgressWithRecordCompression() throws IOException {
     runTest(SequenceFile.CompressionType.RECORD);
   }
 
+  @Test
   public void testMergeProgressWithBlockCompression() throws IOException {
     runTest(SequenceFile.CompressionType.BLOCK);
   }
@@ -92,7 +96,7 @@ public void runTest(CompressionType compressionType) throws IOException {
       count++;
     }
     assertEquals(RECORDS, count);
-    assertEquals(1.0f, rIter.getProgress().get());
+    assertEquals(1.0f, rIter.getProgress().get(), 0.0000);
   }
 
 }
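The trailing 0.0000 added to the last assertion is required by JUnit 4, where assertEquals on floating-point values takes an explicit tolerance (the two-argument form without a delta is deprecated): the assertion passes when |expected - actual| <= delta, so a delta of zero preserves the old exact-match behaviour. A brief sketch (DeltaExample is hypothetical):

import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class DeltaExample {
  @Test
  public void floatComparisons() {
    assertEquals(1.0f, 1.0f, 0.0f);       // delta 0: exact match required
    assertEquals(1.0f, 0.9995f, 0.001f);  // passes: |1.0 - 0.9995| <= 0.001
  }
}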
ClusterMapReduceTestCase.java

@@ -17,10 +17,11 @@
  */
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.After;
+import org.junit.Before;
 
 import java.io.IOException;
 import java.util.Map;
@@ -41,7 +42,7 @@
  * <p/>
  * The DFS filesystem is formated before the testcase starts and after it ends.
  */
-public abstract class ClusterMapReduceTestCase extends TestCase {
+public abstract class ClusterMapReduceTestCase {
   private MiniDFSCluster dfsCluster = null;
   private MiniMRCluster mrCluster = null;
 
@@ -50,9 +51,8 @@ public abstract class ClusterMapReduceTestCase extends TestCase {
    *
    * @throws Exception
    */
-  protected void setUp() throws Exception {
-    super.setUp();
-
+  @Before
+  public void setUp() throws Exception {
    startCluster(true, null);
   }
 
@@ -139,9 +139,9 @@ protected void stopCluster() throws Exception {
    *
    * @throws Exception
    */
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     stopCluster();
-    super.tearDown();
   }
 
   /**
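Dropping super.setUp()/super.tearDown() is safe because JUnit 4 chains the lifecycle itself: @Before methods declared in a superclass run before those of the subclass, and @After methods run in the reverse order. That matters here, since ClusterMapReduceTestCase is an abstract base that concrete tests such as TestBadRecords extend. A sketch of the inherited ordering, using hypothetical classes (each public class in its own file):

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

// BaseExample.java
public abstract class BaseExample {
  @Before
  public void baseSetUp() { System.out.println("1: base @Before"); }

  @After
  public void baseTearDown() { System.out.println("4: base @After"); }
}

// ChildExample.java
public class ChildExample extends BaseExample {
  @Before
  public void childSetUp() { System.out.println("2: child @Before"); }

  @Test
  public void test() { System.out.println("3: test body"); }
}

One caveat: the base and child lifecycle methods must have different names; if the subclass overrides a @Before method, only the override runs.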
TestAuditLogger.java

@@ -28,13 +28,13 @@
 import org.apache.hadoop.ipc.TestRPC.TestProtocol;
 import org.apache.hadoop.mapred.AuditLogger.Keys;
 import org.apache.hadoop.net.NetUtils;
-
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 /**
  * Tests {@link AuditLogger}.
  */
-public class TestAuditLogger extends TestCase {
+public class TestAuditLogger {
   private static final String USER = "test";
   private static final String OPERATION = "oper";
   private static final String TARGET = "tgt";
@@ -44,6 +44,7 @@ public class TestAuditLogger extends TestCase {
   /**
    * Test the AuditLog format with key-val pair.
    */
+  @Test
   public void testKeyValLogFormat() {
     StringBuilder actLog = new StringBuilder();
     StringBuilder expLog = new StringBuilder();
@@ -114,6 +115,7 @@ private void testFailureLogFormat(boolean checkIP) {
   /**
    * Test {@link AuditLogger} without IP set.
    */
+  @Test
   public void testAuditLoggerWithoutIP() throws Exception {
     // test without ip
     testSuccessLogFormat(false);
@@ -137,6 +139,7 @@ public void ping() {
   /**
    * Test {@link AuditLogger} with IP set.
    */
+  @Test
   public void testAuditLoggerWithIP() throws Exception {
     Configuration conf = new Configuration();
     // start the IPC server
TestBadRecords.java

@@ -40,6 +40,11 @@
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.junit.Ignore;
+import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNotNull;
+
 @Ignore
 public class TestBadRecords extends ClusterMapReduceTestCase {
@@ -206,7 +211,8 @@ private List<String> getProcessed(List<String> inputs, List<String> badRecs) {
     }
     return processed;
   }
-
+
+  @Test
   public void testBadMapRed() throws Exception {
     JobConf conf = createJobConf();
     conf.setMapperClass(BadMapper.class);
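TestBadRecords keeps its class-level @Ignore while its methods gain @Test: in JUnit 4, @Ignore on the class makes the runner skip the whole class and report it as ignored rather than execute its tests, so the migration can proceed without re-enabling a known-problematic suite. For example (hypothetical class):

import org.junit.Ignore;
import org.junit.Test;

@Ignore  // the runner reports this class as ignored and executes nothing in it
public class IgnoredSuiteExample {
  @Test
  public void neverRuns() {
    throw new AssertionError("not reached");
  }
}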
TestClusterMapReduceTestCase.java

@@ -29,6 +29,12 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertFalse;
 public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
   public void _testMapReduce(boolean restart) throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
@@ -85,14 +91,17 @@ public void _testMapReduce(boolean restart) throws Exception {
 
   }
 
+  @Test
   public void testMapReduce() throws Exception {
     _testMapReduce(false);
   }
 
+  @Test
   public void testMapReduceRestarting() throws Exception {
     _testMapReduce(true);
   }
 
+  @Test
   public void testDFSRestart() throws Exception {
     Path file = new Path(getInputDir(), "text.txt");
     OutputStream os = getFileSystem().create(file);
@@ -109,6 +118,7 @@ public void testDFSRestart() throws Exception {
 
   }
 
+  @Test
   public void testMRConfig() throws Exception {
     JobConf conf = createJobConf();
     assertNull(conf.get("xyz"));
TestCollect.java

@@ -21,15 +21,15 @@
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.UtilsForTests.RandomInputFormat;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.junit.Test;
 
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 
 /**
  * TestCollect checks if the collect can handle simultaneous invocations.
  */
-public class TestCollect extends TestCase
+public class TestCollect
 {
   final static Path OUTPUT_DIR = new Path("build/test/test.collect.output");
   static final int NUM_FEEDERS = 10;
@@ -127,7 +127,7 @@ public void configure(JobConf conf) throws IOException {
     conf.setNumMapTasks(1);
     conf.setNumReduceTasks(1);
   }
-
+  @Test
   public void testCollect() throws IOException {
     JobConf conf = new JobConf();
     configure(conf);
@@ -144,9 +144,5 @@ public void testCollect() throws IOException {
       fs.delete(OUTPUT_DIR, true);
     }
   }
-
-  public static void main(String[] args) throws IOException {
-    new TestCollect().testCollect();
-  }
 }

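The deleted main() launcher at the end of TestCollect is idiomatic cleanup: JUnit 4 tests are started through a runner rather than executed directly, e.g. `java org.junit.runner.JUnitCore org.apache.hadoop.mapred.TestCollect` from the command line. A programmatic equivalent, as a sketch (RunTests is hypothetical):

import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;

public class RunTests {
  public static void main(String[] args) {
    // Runs all @Test methods in TestCollect and summarizes the outcome.
    Result result = JUnitCore.runClasses(TestCollect.class);
    for (Failure failure : result.getFailures()) {
      System.out.println(failure.toString());
    }
    System.out.println("Ran " + result.getRunCount() + " tests, "
        + result.getFailureCount() + " failed");
  }
}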
[Diff truncated: the remainder of the 80 changed files was not loaded.]