Skip to content

Commit

Permalink
HBASE-20859 Backup and incremental load could fail in secure clusters.
Browse files Browse the repository at this point in the history
Signed-off-by: tedyu <[email protected]>
  • Loading branch information
jojochuang authored and tedyu committed Jul 11, 2018
1 parent 0d33caa commit 6ee0fed
Show file tree
Hide file tree
Showing 5 changed files with 140 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -695,7 +695,7 @@ public static Path getBulkOutputDir(String tableName, Configuration conf, boolea
throws IOException {
FileSystem fs = FileSystem.get(conf);
String tmp = conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY,
HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);
fs.getHomeDirectory() + "/hbase-staging");
Path path =
new Path(tmp + Path.SEPARATOR + "bulk_output-" + tableName + "-"
+ EnvironmentEdgeManager.currentTime());
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.backup;

import java.io.IOException;
import java.security.PrivilegedAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category(SmallTests.class)
public class TestBackupUtils {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestBackupUtils.class);
  private static final Logger LOG = LoggerFactory.getLogger(TestBackupUtils.class);

  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  protected static Configuration conf = TEST_UTIL.getConfiguration();

  /**
   * Verifies that {@code BackupUtils.getBulkOutputDir} resolves the bulk output path under the
   * home directory of the user the call runs as (via {@code doAs}), not the JVM process user's.
   * Regression test for HBASE-20859, where secure clusters picked the wrong staging path.
   */
  @Test
  public void testGetBulkOutputDir() {
    // Create a user who is not the current process user.
    String fooUserName = "foo1234";
    String fooGroupName = "group1";
    UserGroupInformation ugi =
        UserGroupInformation.createUserForTesting(fooUserName, new String[] { fooGroupName });
    // Resolve foo's home directory while running as foo.
    Path fooHomeDirectory = ugi.doAs(new PrivilegedAction<Path>() {
      @Override
      public Path run() {
        try (FileSystem fs = FileSystem.get(conf)) {
          return fs.getHomeDirectory();
        } catch (IOException ioe) {
          LOG.error("Failed to get foo's home directory", ioe);
        }
        return null;
      }
    });
    // Fail with a clear message instead of an NPE at the final assertion.
    Assert.assertNotNull("Could not determine foo's home directory", fooHomeDirectory);

    Path bulkOutputDir = ugi.doAs(new PrivilegedAction<Path>() {
      @Override
      public Path run() {
        try {
          return BackupUtils.getBulkOutputDir("test", conf, false);
        } catch (IOException ioe) {
          LOG.error("Failed to get bulk output dir path", ioe);
        }
        return null;
      }
    });
    Assert.assertNotNull("Could not compute the bulk output dir", bulkOutputDir);
    // Make sure the directory is in foo1234's home directory.
    Assert.assertTrue(bulkOutputDir.toString().startsWith(fooHomeDirectory.toString()));
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -1352,6 +1352,10 @@ public enum OperationStatusCode {

/** Config key for hbase temporary directory in hdfs */
public static final String TEMPORARY_FS_DIRECTORY_KEY = "hbase.fs.tmp.dir";

/** Don't use it! This'll get you the wrong path in a secure cluster.
* Use FileSystem.getHomeDirectory() or
* "/user/" + UserGroupInformation.getCurrentUser().getShortUserName() */
public static final String DEFAULT_TEMPORARY_HDFS_DIRECTORY = "/user/"
+ System.getProperty("user.name") + "/hbase-staging";

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -820,7 +820,7 @@ static void configurePartitioner(Job job, List<ImmutableBytesWritable> splitPoin
FileSystem fs = FileSystem.get(conf);
String hbaseTmpFsDir =
conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY,
HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);
fs.getHomeDirectory() + "/hbase-staging");
Path partitionsPath = new Path(hbaseTmpFsDir, "partitions_" + UUID.randomUUID());
fs.makeQualified(partitionsPath);
writePartitions(conf, partitionsPath, splitPoints, writeMultipleTables);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,15 @@
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.verify;

import java.io.IOException;
import java.lang.reflect.Field;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
Expand Down Expand Up @@ -100,6 +103,9 @@
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
Expand Down Expand Up @@ -1494,5 +1500,53 @@ private String getStoragePolicyNameForOldHDFSVersion(FileSystem fs, Path path) {

return null;
}

/**
 * Verifies that {@code HFileOutputFormat2.configurePartitioner} selects
 * {@code TotalOrderPartitioner}, writes its partition file, and places that file under the home
 * directory of the user the call runs as (via {@code doAs}), not the JVM process user's.
 * Regression test for HBASE-20859.
 */
@Test
public void testConfigurePartitioner() throws IOException {
  Configuration conf = util.getConfiguration();
  // Create a user who is not the current process user.
  String fooUserName = "foo1234";
  String fooGroupName = "group1";
  UserGroupInformation ugi =
      UserGroupInformation.createUserForTesting(fooUserName, new String[] { fooGroupName });
  // Resolve foo's home directory while running as foo.
  Path fooHomeDirectory = ugi.doAs(new PrivilegedAction<Path>() {
    @Override
    public Path run() {
      try (FileSystem fs = FileSystem.get(conf)) {
        return fs.makeQualified(fs.getHomeDirectory());
      } catch (IOException ioe) {
        LOG.error("Failed to get foo's home directory", ioe);
      }
      return null;
    }
  });
  // Fail with a clear message instead of an NPE at the path-prefix assertion below.
  Assert.assertNotNull("Could not determine foo's home directory", fooHomeDirectory);

  Job job = Mockito.mock(Job.class);
  Mockito.doReturn(conf).when(job).getConfiguration();
  ImmutableBytesWritable writable = new ImmutableBytesWritable();
  List<ImmutableBytesWritable> splitPoints = new LinkedList<ImmutableBytesWritable>();
  splitPoints.add(writable);

  ugi.doAs(new PrivilegedAction<Void>() {
    @Override
    public Void run() {
      try {
        HFileOutputFormat2.configurePartitioner(job, splitPoints, false);
      } catch (IOException ioe) {
        LOG.error("Failed to configure partitioner", ioe);
      }
      return null;
    }
  });
  FileSystem fs = FileSystem.get(conf);
  // Verify that the job uses TotalOrderPartitioner.
  verify(job).setPartitionerClass(TotalOrderPartitioner.class);
  // Verify that TotalOrderPartitioner.setPartitionFile() was called.
  String partitionPathString = conf.get("mapreduce.totalorderpartitioner.path");
  Assert.assertNotNull(partitionPathString);
  // Make sure the partition file is in foo1234's home directory, and that the file exists.
  Assert.assertTrue(partitionPathString.startsWith(fooHomeDirectory.toString()));
  Assert.assertTrue(fs.exists(new Path(partitionPathString)));
}
}

0 comments on commit 6ee0fed

Please sign in to comment.