HIVE-8828 : Remove hadoop 20 shims (Ashutosh Chauhan via Thejas Nair & Brock Noland)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1641980 13f79535-47bb-0310-9956-ffa450edef68
ashutoshc committed Nov 27, 2014
1 parent 3888d2f commit 2e93b0c
Showing 123 changed files with 1,243 additions and 8,634 deletions.
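
The same mechanical change repeats across the files below: with the Hadoop 0.20 shims removed, calls routed through ShimLoader.getHadoopShims() either move to the static helpers in org.apache.hadoop.hive.shims.Utils (getUGIForConf, setTokenStr, getTokenStrForm) or call Hadoop's UserGroupInformation directly. A minimal sketch of the before/after pattern (the class and method names here are illustrative, not part of the commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;

public class ShimMigrationSketch {
  // Before (hadoop 20 shims):
  //   UserGroupInformation ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
  //   String user = ShimLoader.getHadoopShims().getShortUserName(ugi);
  // After: the shims Utils helper and UGI itself are called directly.
  public static String currentShortUserName(Configuration conf) throws Exception {
    UserGroupInformation ugi = Utils.getUGIForConf(conf);
    return ugi.getShortUserName();
  }
}
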
3 changes: 2 additions & 1 deletion beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
@@ -31,6 +31,7 @@
import org.apache.hive.jdbc.HiveConnection;
import org.apache.hive.beeline.BeeLine;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hive.service.auth.HiveAuthFactory;

/**
@@ -201,7 +202,7 @@ public static void main(String[] args) throws Exception {
}

private static void storeTokenInJobConf(String tokenStr) throws Exception {
-ShimLoader.getHadoopShims().setTokenStr(ShimLoader.getHadoopShims().getUGIForConf(new Configuration()),
+Utils.setTokenStr(Utils.getUGIForConf(new Configuration()),
tokenStr, HiveAuthFactory.HS2_CLIENT_TOKEN);
System.out.println("Stored token " + tokenStr);
}
11 changes: 7 additions & 4 deletions common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -30,6 +30,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.DefaultFileAccess;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -41,6 +42,7 @@
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatus;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;

@@ -373,8 +375,8 @@ public static Path getPathOrParentThatExists(FileSystem fs, Path path) throws IO
public static void checkFileAccessWithImpersonation(final FileSystem fs,
final FileStatus stat, final FsAction action, final String user)
throws IOException, AccessControlException, InterruptedException, Exception {
-UserGroupInformation ugi = ShimLoader.getHadoopShims().getUGIForConf(fs.getConf());
-String currentUser = ShimLoader.getHadoopShims().getShortUserName(ugi);
+UserGroupInformation ugi = Utils.getUGIForConf(fs.getConf());
+String currentUser = ugi.getShortUserName();

if (user == null || currentUser.equals(user)) {
// No need to impersonate user, do the checks as the currently configured user.
@@ -383,8 +385,9 @@ public static void checkFileAccessWithImpersonation(final FileSystem fs,
}

// Otherwise, try user impersonation. Current user must be configured to do user impersonation.
-UserGroupInformation proxyUser = ShimLoader.getHadoopShims().createProxyUser(user);
-ShimLoader.getHadoopShims().doAs(proxyUser, new PrivilegedExceptionAction<Object>() {
+UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(
+    user, UserGroupInformation.getLoginUser());
+proxyUser.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws Exception {
FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf());
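
The checkFileAccessWithImpersonation hunk above also swaps the shim's createProxyUser/doAs helpers for UserGroupInformation's own API. A standalone sketch of that impersonation pattern, assuming the login user is configured as a proxy user (the FileSystem work inside run() stands in for the real access check; class and method names are illustrative):

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxyUserSketch {
  public static void runAsUser(final FileSystem fs, String user) throws Exception {
    // Impersonate 'user' on top of the current login user.
    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(
        user, UserGroupInformation.getLoginUser());
    proxyUser.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        // A FileSystem obtained inside doAs is bound to the proxy user's UGI.
        FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf());
        // ... perform access checks with fsAsUser ...
        return null;
      }
    });
  }
}
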
4 changes: 2 additions & 2 deletions common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -48,6 +48,7 @@
import org.apache.hadoop.hive.conf.Validator.StringSet;
import org.apache.hadoop.hive.conf.Validator.TimeValidator;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
@@ -2760,8 +2761,7 @@ public static URL getHiveServer2SiteLocation() {
*/
public String getUser() throws IOException {
try {
-UserGroupInformation ugi = ShimLoader.getHadoopShims()
-    .getUGIForConf(this);
+UserGroupInformation ugi = Utils.getUGIForConf(this);
return ugi.getUserName();
} catch (LoginException le) {
throw new IOException(le);
(file name not shown)
@@ -36,6 +36,7 @@
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.thrift.TException;
import org.slf4j.Logger;
@@ -254,7 +255,7 @@ public static class HiveClientCacheKey {

private HiveClientCacheKey(HiveConf hiveConf, final int threadId) throws IOException, LoginException {
this.metaStoreURIs = hiveConf.getVar(HiveConf.ConfVars.METASTOREURIS);
-ugi = ShimLoader.getHadoopShims().getUGIForConf(hiveConf);
+ugi = Utils.getUGIForConf(hiveConf);
this.hiveConf = hiveConf;
this.threadId = threadId;
}
(file name not shown)
@@ -29,7 +29,6 @@
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
@@ -38,6 +37,7 @@
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.TokenSelector;
+import org.apache.hadoop.security.token.delegation.DelegationTokenSelector;
import org.apache.hive.hcatalog.common.HCatConstants;
import org.apache.hive.hcatalog.common.HCatUtil;
import org.apache.thrift.TException;
(file name not shown)
@@ -30,7 +30,7 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.GroupMappingServiceProvider;
import org.apache.hadoop.security.UserGroupInformation;
@@ -129,9 +129,9 @@ public void addUserPrincipal(String principal) throws Exception {
*/
public UserGroupInformation loginUser(String principal)
throws Exception {
-ShimLoader.getHadoopShims().loginUserFromKeytab(principal,
+UserGroupInformation.loginUserFromKeytab(principal,
getKeyTabFile(principal));
-return ShimLoader.getHadoopShims().getUGIForConf(conf);
+return Utils.getUGIForConf(conf);
}

public Properties getKdcConf() {
(file name not shown)
@@ -32,7 +32,7 @@

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.jdbc.HiveConnection;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
@@ -231,7 +231,7 @@ private void verifyProperty(String propertyName, String expectedValue) throws Ex
// Store the given token in the UGI
private void storeToken(String tokenStr, UserGroupInformation ugi)
throws Exception {
-ShimLoader.getHadoopShims().setTokenStr(ugi,
+Utils.setTokenStr(ugi,
tokenStr, HiveAuthFactory.HS2_CLIENT_TOKEN);
}

(file name not shown)
@@ -23,7 +23,7 @@
import java.io.File;

import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.After;
import org.junit.AfterClass;
@@ -61,9 +61,8 @@ public void testLogin() throws Exception {
String servicePrinc = miniHiveKdc.getHiveServicePrincipal();
assertNotNull(servicePrinc);
miniHiveKdc.loginUser(servicePrinc);
-assertTrue(ShimLoader.getHadoopShims().isLoginKeytabBased());
-UserGroupInformation ugi =
-    ShimLoader.getHadoopShims().getUGIForConf(hiveConf);
+assertTrue(UserGroupInformation.isLoginKeytabBased());
+UserGroupInformation ugi = Utils.getUGIForConf(hiveConf);
assertEquals(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL, ugi.getShortUserName());
}

(file name not shown)
@@ -19,6 +19,7 @@
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
import org.apache.hadoop.security.UserGroupInformation;

@@ -56,7 +57,7 @@ protected HiveConf createHiveConf() throws Exception {

// Hadoop FS ACLs do not work with LocalFileSystem, so set up MiniDFS.
HiveConf conf = super.createHiveConf();
-String currentUserName = ShimLoader.getHadoopShims().getUGIForConf(conf).getShortUserName();
+String currentUserName = Utils.getUGIForConf(conf).getShortUserName();
conf.set("dfs.namenode.acls.enabled", "true");
conf.set("hadoop.proxyuser." + currentUserName + ".groups", "*");
conf.set("hadoop.proxyuser." + currentUserName + ".hosts", "*");
(file name not shown)
@@ -67,15 +67,15 @@ public class TestHadoop20SAuthBridge extends TestCase {
*/
static volatile boolean isMetastoreTokenManagerInited;

-private static class MyHadoopThriftAuthBridge20S extends HadoopThriftAuthBridge20S {
+private static class MyHadoopThriftAuthBridge20S extends HadoopThriftAuthBridge {
@Override
public Server createServer(String keytabFile, String principalConf)
throws TTransportException {
//Create a Server that doesn't interpret any Kerberos stuff
return new Server();
}

-static class Server extends HadoopThriftAuthBridge20S.Server {
+static class Server extends HadoopThriftAuthBridge.Server {
public Server() throws TTransportException {
super();
}
@@ -312,9 +312,9 @@ private String getDelegationTokenStr(UserGroupInformation ownerUgi,

waitForMetastoreTokenInit();

-HadoopThriftAuthBridge20S.Server.authenticationMethod
+HadoopThriftAuthBridge.Server.authenticationMethod
.set(AuthenticationMethod.KERBEROS);
-HadoopThriftAuthBridge20S.Server.remoteAddress.set(InetAddress.getLocalHost());
+HadoopThriftAuthBridge.Server.remoteAddress.set(InetAddress.getLocalHost());
return
HiveMetaStore.getDelegationToken(ownerUgi.getShortUserName(),
realUgi.getShortUserName());
(file name not shown)
@@ -36,6 +36,7 @@
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.WindowsPathUtil;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
import org.junit.After;
@@ -90,7 +91,7 @@ public void setUp() throws Exception {
clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");

-ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf);
+ugi = Utils.getUGIForConf(clientHiveConf);

SessionState.start(new CliSessionState(clientHiveConf));
msc = new HiveMetaStoreClient(clientHiveConf, null);
(file name not shown)
@@ -34,6 +34,7 @@
import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;

/**
@@ -82,7 +83,7 @@ protected void setUp() throws Exception {
clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");

-ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf);
+ugi = Utils.getUGIForConf(clientHiveConf);

SessionState.start(new CliSessionState(clientHiveConf));
msc = new HiveMetaStoreClient(clientHiveConf, null);
(file name not shown)
@@ -45,6 +45,7 @@
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;

/**
@@ -109,7 +110,7 @@ protected void setUp() throws Exception {
clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");

-ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf);
+ugi = Utils.getUGIForConf(clientHiveConf);

SessionState.start(new CliSessionState(clientHiveConf));
msc = new HiveMetaStoreClient(clientHiveConf, null);
(file name not shown)
@@ -27,6 +27,7 @@
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
+import org.apache.hadoop.hive.shims.Utils;
import org.junit.Assert;
import org.junit.Test;

@@ -42,7 +43,7 @@ protected HiveConf createHiveConf() throws Exception {
// Hadoop FS ACLs do not work with LocalFileSystem, so set up MiniDFS.
HiveConf conf = super.createHiveConf();

-String currentUserName = ShimLoader.getHadoopShims().getUGIForConf(conf).getShortUserName();
+String currentUserName = Utils.getUGIForConf(conf).getShortUserName();
conf.set("hadoop.proxyuser." + currentUserName + ".groups", "*");
conf.set("hadoop.proxyuser." + currentUserName + ".hosts", "*");
dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
(file name not shown)
@@ -70,17 +70,17 @@ protected void tearDown() throws Exception {

private Configuration createConf(String zkPath) {
Configuration conf = new Configuration();
-conf.set(HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, "localhost:"
+conf.set(HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, "localhost:"
+ this.zkPort);
-conf.set(HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ZNODE, zkPath);
+conf.set(HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ZNODE, zkPath);
return conf;
}

public void testTokenStorage() throws Exception {
String ZK_PATH = "/zktokenstore-testTokenStorage";
ts = new ZooKeeperTokenStore();
Configuration conf = createConf(ZK_PATH);
-conf.set(HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL, "world:anyone:cdrwa");
+conf.set(HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ACL, "world:anyone:cdrwa");
ts.setConf(conf);
ts.init(null, ServerMode.METASTORE);

@@ -128,7 +128,7 @@ public void testAclNoAuth() throws Exception {
String ZK_PATH = "/zktokenstore-testAclNoAuth";
Configuration conf = createConf(ZK_PATH);
conf.set(
-    HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
+    HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
"ip:127.0.0.1:r");

ts = new ZooKeeperTokenStore();
@@ -146,7 +146,7 @@ public void testAclInvalid() throws Exception {
String aclString = "sasl:hive/[email protected]:cdrwa, fail-parse-ignored";
Configuration conf = createConf(ZK_PATH);
conf.set(
-    HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
+    HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
aclString);

List<ACL> aclList = ZooKeeperTokenStore.parseACLs(aclString);
@@ -166,7 +166,7 @@ public void testAclPositive() throws Exception {
String ZK_PATH = "/zktokenstore-testAcl";
Configuration conf = createConf(ZK_PATH);
conf.set(
-    HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
+    HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
"ip:127.0.0.1:cdrwa,world:anyone:cdrwa");
ts = new ZooKeeperTokenStore();
ts.setConf(conf);
15 changes: 0 additions & 15 deletions itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -125,12 +125,10 @@ public class QTestUtil {
protected HiveConf conf;
private Driver drv;
private BaseSemanticAnalyzer sem;
-private FileSystem fs;
protected final boolean overWrite;
private CliDriver cliDriver;
private HadoopShims.MiniMrShim mr = null;
private HadoopShims.MiniDFSShim dfs = null;
-private boolean miniMr = false;
private String hadoopVer = null;
private QTestSetup setup = null;
private boolean isSessionStateStarted = false;
@@ -309,7 +307,6 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType,
System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
}
conf = new HiveConf(Driver.class);
-this.miniMr = (clusterType == MiniClusterType.mr);
this.hadoopVer = getHadoopMainVersion(hadoopVer);
qMap = new TreeMap<String, String>();
qSkipSet = new HashSet<String>();
@@ -651,17 +648,6 @@ public void cleanUp() throws Exception {
FunctionRegistry.unregisterTemporaryUDF("test_error");
}

-  private void runLoadCmd(String loadCmd) throws Exception {
-    int ecode = 0;
-    ecode = drv.run(loadCmd).getResponseCode();
-    drv.close();
-    if (ecode != 0) {
-      throw new Exception("load command: " + loadCmd
-          + " failed with exit code= " + ecode);
-    }
-    return;
-  }

protected void runCreateTableCmd(String createTableCmd) throws Exception {
int ecode = 0;
ecode = drv.run(createTableCmd).getResponseCode();
@@ -712,7 +698,6 @@ public void init() throws Exception {
SessionState.start(conf);
conf.set("hive.execution.engine", execEngine);
db = Hive.get(conf);
-fs = FileSystem.get(conf);
drv = new Driver(conf);
drv.init();
pd = new ParseDriver();
3 changes: 1 addition & 2 deletions jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -414,8 +414,7 @@ private String getClientDelegationToken(Map<String, String> jdbcConnConf)
if (JdbcConnectionParams.AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(JdbcConnectionParams.AUTH_TYPE))) {
// check delegation token in job conf if any
try {
-tokenStr = ShimLoader.getHadoopShims().
-    getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
+tokenStr = org.apache.hadoop.hive.shims.Utils.getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
} catch (IOException e) {
throw new SQLException("Error reading token ", e);
}
(diff truncated; remaining changed files not shown)
