HADOOP-13597. Switch KMS from Tomcat to Jetty. Contributed by John Zhuge.
xiao-chen committed Jan 6, 2017
1 parent 8850c05 commit 5d18294
Showing 31 changed files with 1,143 additions and 1,152 deletions.
@@ -21,6 +21,14 @@
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<fileSets>
<!-- Jar file -->
<fileSet>
<directory>target</directory>
<outputDirectory>/share/hadoop/common</outputDirectory>
<includes>
<include>${project.artifactId}-${project.version}.jar</include>
</includes>
</fileSet>
<!-- Configuration files -->
<fileSet>
<directory>${basedir}/src/main/conf</directory>
@@ -41,7 +49,7 @@
<directory>${basedir}/src/main/libexec</directory>
<outputDirectory>/libexec</outputDirectory>
<includes>
- <include>*</include>
+ <include>**/*</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
@@ -51,4 +59,19 @@
<outputDirectory>/share/doc/hadoop/kms</outputDirectory>
</fileSet>
</fileSets>
<dependencySets>
<dependencySet>
<useProjectArtifact>false</useProjectArtifact>
<outputDirectory>/share/hadoop/common/lib</outputDirectory>
<!-- Exclude hadoop artifacts. They will be found via HADOOP* env -->
<excludes>
<exclude>org.apache.hadoop:hadoop-common</exclude>
<exclude>org.apache.hadoop:hadoop-hdfs</exclude>
<!-- use slf4j from common to avoid multiple binding warnings -->
<exclude>org.slf4j:slf4j-api</exclude>
<exclude>org.slf4j:slf4j-log4j12</exclude>
<exclude>org.hsqldb:hsqldb</exclude>
</excludes>
</dependencySet>
</dependencySets>
</assembly>
@@ -142,7 +142,7 @@ public class AuthenticationFilter implements Filter {
private String cookieDomain;
private String cookiePath;
private boolean isCookiePersistent;
- private boolean isInitializedByTomcat;
+ private boolean destroySecretProvider;

/**
* <p>Initializes the authentication filter and signer secret provider.</p>
@@ -209,7 +209,7 @@ protected void initializeSecretProvider(FilterConfig filterConfig)
secretProvider = constructSecretProvider(
filterConfig.getServletContext(),
config, false);
- isInitializedByTomcat = true;
+ destroySecretProvider = true;
} catch (Exception ex) {
throw new ServletException(ex);
}
@@ -356,7 +356,7 @@ public void destroy() {
authHandler.destroy();
authHandler = null;
}
- if (secretProvider != null && isInitializedByTomcat) {
+ if (secretProvider != null && destroySecretProvider) {
secretProvider.destroy();
secretProvider = null;
}
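The rename from isInitializedByTomcat to destroySecretProvider captures ownership rather than a specific container: the filter tears the SignerSecretProvider down only if it constructed the provider itself. Below is a minimal sketch of how an embedded server (such as the Jetty-based KMS this commit introduces) might hand the filter an externally owned provider instead; the listener class and its wiring are illustrative assumptions, not code from this change.

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;

import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;

/**
 * Hypothetical listener that publishes a server-owned SignerSecretProvider.
 * AuthenticationFilter looks this attribute up before constructing its own
 * provider, so destroySecretProvider stays false and the filter's destroy()
 * leaves the shared provider alone; the owner disposes of it here instead.
 */
public class SharedSecretProviderListener implements ServletContextListener {

  private final SignerSecretProvider provider; // owned by the server, not the filter

  public SharedSecretProviderListener(SignerSecretProvider provider) {
    this.provider = provider;
  }

  @Override
  public void contextInitialized(ServletContextEvent sce) {
    // Assumption: the attribute constant keeps its pre-existing name.
    sce.getServletContext().setAttribute(
        AuthenticationFilter.SIGNER_SECRET_PROVIDER_ATTRIBUTE, provider);
  }

  @Override
  public void contextDestroyed(ServletContextEvent sce) {
    provider.destroy(); // the owner, not AuthenticationFilter, tears it down
  }
}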
@@ -262,6 +262,39 @@ function hadoop_deprecate_envvar
fi
}

## @description Declare `var` being used and print its value.
## @audience public
## @stability stable
## @replaceable yes
## @param var
function hadoop_using_envvar
{
  local var=$1
  local val=${!var}

  if [[ -n "${val}" ]]; then
    hadoop_debug "${var} = ${val}"
  fi
}

## @description Create the directory 'dir'.
## @audience public
## @stability stable
## @replaceable yes
## @param dir
function hadoop_mkdir
{
  local dir=$1

  if [[ ! -w "${dir}" ]] && [[ ! -d "${dir}" ]]; then
    hadoop_error "WARNING: ${dir} does not exist. Creating."
    if ! mkdir -p "${dir}"; then
      hadoop_error "ERROR: Unable to create ${dir}. Aborting."
      exit 1
    fi
  fi
}

## @description Bootstraps the Hadoop shell environment
## @audience private
## @stability evolving
@@ -1396,14 +1429,7 @@ function hadoop_verify_piddir
hadoop_error "No pid directory defined."
exit 1
fi
if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
if [[ $? -gt 0 ]]; then
hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
exit 1
fi
fi
hadoop_mkdir "${HADOOP_PID_DIR}"
touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
if [[ $? -gt 0 ]]; then
hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
@@ -1421,14 +1447,7 @@ function hadoop_verify_logdir
hadoop_error "No log directory defined."
exit 1
fi
if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
if [[ $? -gt 0 ]]; then
hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
exit 1
fi
fi
hadoop_mkdir "${HADOOP_LOG_DIR}"
touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
if [[ $? -gt 0 ]]; then
hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."
@@ -0,0 +1,113 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.conf;

import org.apache.hadoop.classification.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Logs access to {@link Configuration}.
 * Sensitive data will be redacted.
 */
@InterfaceAudience.Private
public class ConfigurationWithLogging extends Configuration {
  private static final Logger LOG =
      LoggerFactory.getLogger(ConfigurationWithLogging.class);

  private final Logger log;
  private final ConfigRedactor redactor;

  public ConfigurationWithLogging(Configuration conf) {
    super(conf);
    log = LOG;
    redactor = new ConfigRedactor(conf);
  }

  /**
   * @see Configuration#get(String).
   */
  @Override
  public String get(String name) {
    String value = super.get(name);
    log.info("Got {} = '{}'", name, redactor.redact(name, value));
    return value;
  }

  /**
   * @see Configuration#get(String, String).
   */
  @Override
  public String get(String name, String defaultValue) {
    String value = super.get(name, defaultValue);
    log.info("Got {} = '{}' (default '{}')", name,
        redactor.redact(name, value), redactor.redact(name, defaultValue));
    return value;
  }

  /**
   * @see Configuration#getBoolean(String, boolean).
   */
  @Override
  public boolean getBoolean(String name, boolean defaultValue) {
    boolean value = super.getBoolean(name, defaultValue);
    log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
    return value;
  }

  /**
   * @see Configuration#getFloat(String, float).
   */
  @Override
  public float getFloat(String name, float defaultValue) {
    float value = super.getFloat(name, defaultValue);
    log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
    return value;
  }

  /**
   * @see Configuration#getInt(String, int).
   */
  @Override
  public int getInt(String name, int defaultValue) {
    int value = super.getInt(name, defaultValue);
    log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
    return value;
  }

  /**
   * @see Configuration#getLong(String, long).
   */
  @Override
  public long getLong(String name, long defaultValue) {
    long value = super.getLong(name, defaultValue);
    log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
    return value;
  }

  /**
   * @see Configuration#set(String, String, String).
   */
  @Override
  public void set(String name, String value, String source) {
    log.info("Set {} to '{}'{}", name, redactor.redact(name, value),
        source == null ? "" : " from " + source);
    super.set(name, value, source);
  }
}
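A short usage sketch for the new wrapper, assuming an illustrative configuration key (not a real Hadoop property). Each read through the wrapper emits an INFO log line; note that only the string get/set paths run values through ConfigRedactor, while the typed getters log primitive values directly.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.ConfigurationWithLogging;

public class ConfigurationWithLoggingExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // "hadoop.example.flag" is a made-up key used only for this sketch.
    conf.set("hadoop.example.flag", "true");

    // Wrap the configuration; the lookup below logs something like
    // "Got hadoop.example.flag = 'true' (default 'false')".
    Configuration logged = new ConfigurationWithLogging(conf);
    boolean flag = logged.getBoolean("hadoop.example.flag", false);
    System.out.println("flag = " + flag);
  }
}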