HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
oza committed Mar 3, 2015
1 parent b442aee commit d1c6acc
Showing 106 changed files with 407 additions and 224 deletions.
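The changes below all follow one pattern: String.toUpperCase() and String.toLowerCase() without an explicit Locale use the JVM's default locale, so on a machine running with, say, a Turkish locale, case conversion of config values, hostnames, and enum names can produce dotted/dotless-I variants that break equality checks and lookups. The following is a minimal, self-contained illustration of the pitfall, not part of the commit; the Turkish locale is just the usual example:

    import java.util.Locale;

    public class LocaleCaseDemo {
      public static void main(String[] args) {
        // Simulate a JVM whose default locale is Turkish.
        Locale.setDefault(new Locale("tr", "TR"));

        // Default-locale overloads apply Turkish casing rules:
        System.out.println("TITLE".toLowerCase());               // "tıtle" (dotless ı)
        System.out.println("title".toUpperCase());               // "TİTLE" (dotted İ)

        // Pinning the locale gives the ASCII behaviour the code expects:
        System.out.println("TITLE".toLowerCase(Locale.ENGLISH)); // "title"
        System.out.println("title".toUpperCase(Locale.ENGLISH)); // "TITLE"
      }
    }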
(changed file, path not shown)
@@ -21,14 +21,15 @@

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

class StabilityOptions {
public static final String STABLE_OPTION = "-stable";
public static final String EVOLVING_OPTION = "-evolving";
public static final String UNSTABLE_OPTION = "-unstable";

public static Integer optionLength(String option) {
String opt = option.toLowerCase();
String opt = option.toLowerCase(Locale.ENGLISH);
if (opt.equals(UNSTABLE_OPTION)) return 1;
if (opt.equals(EVOLVING_OPTION)) return 1;
if (opt.equals(STABLE_OPTION)) return 1;
@@ -38,7 +39,7 @@ public static Integer optionLength(String option) {
public static void validOptions(String[][] options,
DocErrorReporter reporter) {
for (int i = 0; i < options.length; i++) {
String opt = options[i][0].toLowerCase();
String opt = options[i][0].toLowerCase(Locale.ENGLISH);
if (opt.equals(UNSTABLE_OPTION)) {
RootDocProcessor.stability = UNSTABLE_OPTION;
} else if (opt.equals(EVOLVING_OPTION)) {
(changed file, path not shown)
@@ -14,6 +14,7 @@
package org.apache.hadoop.security.authentication.server;

import java.io.IOException;
import java.util.Locale;
import java.util.Properties;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
@@ -68,7 +69,8 @@ public void init(Properties config) throws ServletException {
NON_BROWSER_USER_AGENTS, NON_BROWSER_USER_AGENTS_DEFAULT)
.split("\\W*,\\W*");
for (int i = 0; i < nonBrowserUserAgents.length; i++) {
nonBrowserUserAgents[i] = nonBrowserUserAgents[i].toLowerCase();
nonBrowserUserAgents[i] =
nonBrowserUserAgents[i].toLowerCase(Locale.ENGLISH);
}
}

@@ -120,7 +122,7 @@ protected boolean isBrowser(String userAgent) {
if (userAgent == null) {
return false;
}
userAgent = userAgent.toLowerCase();
userAgent = userAgent.toLowerCase(Locale.ENGLISH);
boolean isBrowser = true;
for (String nonBrowserUserAgent : nonBrowserUserAgents) {
if (userAgent.contains(nonBrowserUserAgent)) {
(changed file, path not shown)
@@ -21,6 +21,7 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;

import org.apache.directory.server.kerberos.shared.keytab.Keytab;
@@ -58,24 +59,25 @@ public void testGetServerPrincipal() throws IOException {

// send null hostname
Assert.assertEquals("When no hostname is sent",
service + "/" + localHostname.toLowerCase(),
service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(service, null));
// send empty hostname
Assert.assertEquals("When empty hostname is sent",
service + "/" + localHostname.toLowerCase(),
service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(service, ""));
// send 0.0.0.0 hostname
Assert.assertEquals("When 0.0.0.0 hostname is sent",
service + "/" + localHostname.toLowerCase(),
service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
// send uppercase hostname
Assert.assertEquals("When uppercase hostname is sent",
service + "/" + testHost.toLowerCase(),
service + "/" + testHost.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(service, testHost));
// send lowercase hostname
Assert.assertEquals("When lowercase hostname is sent",
service + "/" + testHost.toLowerCase(),
KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
service + "/" + testHost.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(
service, testHost.toLowerCase(Locale.ENGLISH)));
}

@Test
2 changes: 2 additions & 0 deletions hadoop-common-project/hadoop-common/CHANGES.txt
@@ -409,6 +409,8 @@ Trunk (Unreleased)
HADOOP-10774. Update KerberosTestUtils for hadoop-auth tests when using
IBM Java (sangamesh via aw)

HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)

OPTIMIZATIONS

HADOOP-7761. Improve the performance of raw comparisons. (todd)
(changed file, path not shown)
@@ -1451,11 +1451,9 @@ public boolean getBoolean(String name, boolean defaultValue) {
return defaultValue;
}

valueString = valueString.toLowerCase();

if ("true".equals(valueString))
if (StringUtils.equalsIgnoreCase("true", valueString))
return true;
else if ("false".equals(valueString))
else if (StringUtils.equalsIgnoreCase("false", valueString))
return false;
else return defaultValue;
}
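The hunks above pass Locale.ENGLISH explicitly, while this hunk and the ones that follow delegate to helpers in org.apache.hadoop.util.StringUtils (toLowerCase, toUpperCase, equalsIgnoreCase). The StringUtils side of the patch is not shown in this excerpt; a plausible sketch of what those helpers assume, pinning Locale.ENGLISH in one place, is below. The class name CaseHelpers is only for this sketch, not the real class:

    import java.util.Locale;

    // Sketch only: the real helpers live in the patched
    // org.apache.hadoop.util.StringUtils, which is not part of this excerpt.
    public final class CaseHelpers {
      /** Lower-case a string independently of the JVM's default locale. */
      public static String toLowerCase(String str) {
        return str.toLowerCase(Locale.ENGLISH);
      }

      /** Upper-case a string independently of the JVM's default locale. */
      public static String toUpperCase(String str) {
        return str.toUpperCase(Locale.ENGLISH);
      }

      /** Null-safe case-insensitive comparison that ignores the default locale. */
      public static boolean equalsIgnoreCase(String a, String b) {
        // String.equalsIgnoreCase compares character by character and does
        // not consult the default locale, so delegating to it is safe here.
        return a == null ? b == null : a.equalsIgnoreCase(b);
      }

      private CaseHelpers() {}
    }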
(changed file, path not shown)
@@ -19,6 +19,7 @@
package org.apache.hadoop.crypto;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.util.StringUtils;

/**
* Defines properties of a CipherSuite. Modeled after the ciphers in
@@ -97,7 +98,7 @@ public String getConfigSuffix() {
String[] parts = name.split("/");
StringBuilder suffix = new StringBuilder();
for (String part : parts) {
suffix.append(".").append(part.toLowerCase());
suffix.append(".").append(StringUtils.toLowerCase(part));
}

return suffix.toString();
(changed file, path not shown)
@@ -28,6 +28,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.ProviderUtils;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -422,7 +423,7 @@ public Metadata getMetadata(String name) throws IOException {
@Override
public KeyVersion createKey(String name, byte[] material,
Options options) throws IOException {
Preconditions.checkArgument(name.equals(name.toLowerCase()),
Preconditions.checkArgument(name.equals(StringUtils.toLowerCase(name)),
"Uppercase key names are unsupported: %s", name);
writeLock.lock();
try {
(changed file, path not shown)
@@ -65,6 +65,7 @@
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.ShutdownHookManager;
import org.apache.hadoop.util.StringUtils;

import com.google.common.annotations.VisibleForTesting;

@@ -2795,8 +2796,10 @@ static class Key {
}

Key(URI uri, Configuration conf, long unique) throws IOException {
scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase();
authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase();
scheme = uri.getScheme()==null ?
"" : StringUtils.toLowerCase(uri.getScheme());
authority = uri.getAuthority()==null ?
"" : StringUtils.toLowerCase(uri.getAuthority());
this.unique = unique;

this.ugi = UserGroupInformation.getCurrentUser();
(changed file, path not shown)
@@ -24,6 +24,7 @@

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.StringUtils;

/**
* Defines the types of supported storage media. The default storage
@@ -78,7 +79,7 @@ public static StorageType parseStorageType(int i) {
}

public static StorageType parseStorageType(String s) {
return StorageType.valueOf(s.toUpperCase());
return StorageType.valueOf(StringUtils.toUpperCase(s));
}

private static List<StorageType> getNonTransientTypes() {
(changed file, path not shown)
@@ -106,7 +106,7 @@ public String toString() {
sb.append("default:");
}
if (type != null) {
sb.append(type.toString().toLowerCase());
sb.append(StringUtils.toLowerCase(type.toString()));
}
sb.append(':');
if (name != null) {
@@ -263,7 +263,8 @@ public static AclEntry parseAclEntry(String aclStr,

AclEntryType aclType = null;
try {
aclType = Enum.valueOf(AclEntryType.class, split[index].toUpperCase());
aclType = Enum.valueOf(
AclEntryType.class, StringUtils.toUpperCase(split[index]));
builder.setType(aclType);
index++;
} catch (IllegalArgumentException iae) {
(changed file, path not shown)
@@ -79,7 +79,7 @@ protected void processOptions(LinkedList<String> args) throws IOException {
String en = StringUtils.popOptionWithArgument("-e", args);
if (en != null) {
try {
encoding = enValueOfFunc.apply(en.toUpperCase(Locale.ENGLISH));
encoding = enValueOfFunc.apply(StringUtils.toUpperCase(en));
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException(
"Invalid/unsupported encoding option specified: " + en);
(changed file, path not shown)
@@ -22,6 +22,7 @@

import org.apache.hadoop.fs.GlobPattern;
import org.apache.hadoop.fs.shell.PathData;
import org.apache.hadoop.util.StringUtils;

/**
* Implements the -name expression for the
@@ -73,7 +74,7 @@ public void addArguments(Deque<String> args) {
public void prepare() throws IOException {
String argPattern = getArgument(1);
if (!caseSensitive) {
argPattern = argPattern.toLowerCase();
argPattern = StringUtils.toLowerCase(argPattern);
}
globPattern = new GlobPattern(argPattern);
}
@@ -82,7 +83,7 @@ public void prepare() throws IOException {
public Result apply(PathData item, int depth) throws IOException {
String name = getPath(item).getName();
if (!caseSensitive) {
name = name.toLowerCase();
name = StringUtils.toLowerCase(name);
}
if (globPattern.matches(name)) {
return Result.PASS;
(changed file, path not shown)
@@ -27,6 +27,7 @@
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;

/**
* A factory that will find the correct codec for a given filename.
@@ -66,10 +67,10 @@ private void addCodec(CompressionCodec codec) {
codecsByClassName.put(codec.getClass().getCanonicalName(), codec);

String codecName = codec.getClass().getSimpleName();
codecsByName.put(codecName.toLowerCase(), codec);
codecsByName.put(StringUtils.toLowerCase(codecName), codec);
if (codecName.endsWith("Codec")) {
codecName = codecName.substring(0, codecName.length() - "Codec".length());
codecsByName.put(codecName.toLowerCase(), codec);
codecsByName.put(StringUtils.toLowerCase(codecName), codec);
}
}

@@ -246,7 +247,7 @@ public CompressionCodec getCodecByName(String codecName) {
if (codec == null) {
// trying to get the codec by name in case the name was specified
// instead a class
codec = codecsByName.get(codecName.toLowerCase());
codec = codecsByName.get(StringUtils.toLowerCase(codecName));
}
return codec;
}
(changed file, path not shown)
@@ -44,6 +44,7 @@
import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsPlugin;
import org.apache.hadoop.metrics2.filter.GlobFilter;
import org.apache.hadoop.util.StringUtils;

/**
* Metrics configuration for MetricsSystemImpl
@@ -85,12 +86,12 @@ class MetricsConfig extends SubsetConfiguration {
private ClassLoader pluginLoader;

MetricsConfig(Configuration c, String prefix) {
super(c, prefix.toLowerCase(Locale.US), ".");
super(c, StringUtils.toLowerCase(prefix), ".");
}

static MetricsConfig create(String prefix) {
return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US)
+".properties", DEFAULT_FILE_NAME);
return loadFirst(prefix, "hadoop-metrics2-" +
StringUtils.toLowerCase(prefix) + ".properties", DEFAULT_FILE_NAME);
}

static MetricsConfig create(String prefix, String... fileNames) {
(changed file, path not shown)
@@ -61,6 +61,7 @@
import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder;
import org.apache.hadoop.metrics2.lib.MutableStat;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;

/**
@@ -616,7 +617,7 @@ private InitMode initMode() {
LOG.debug("from environment variable: "+ System.getenv(MS_INIT_MODE_KEY));
String m = System.getProperty(MS_INIT_MODE_KEY);
String m2 = m == null ? System.getenv(MS_INIT_MODE_KEY) : m;
return InitMode.valueOf((m2 == null ? InitMode.NORMAL.name() : m2)
.toUpperCase(Locale.US));
return InitMode.valueOf(
StringUtils.toUpperCase((m2 == null ? InitMode.NORMAL.name() : m2)));
}
}
(changed file, path not shown)
@@ -66,7 +66,8 @@ public void setConf(Configuration conf) {
CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION,
QualityOfProtection.AUTHENTICATION.toString());
for (int i=0; i < qop.length; i++) {
qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase(Locale.ENGLISH)).getSaslQop();
qop[i] = QualityOfProtection.valueOf(
StringUtils.toUpperCase(qop[i])).getSaslQop();
}
properties.put(Sasl.QOP, StringUtils.join(",", qop));
properties.put(Sasl.SERVER_AUTH, "true");
(changed file, path not shown)
@@ -27,7 +27,6 @@
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.ServiceLoader;

import javax.security.auth.kerberos.KerberosPrincipal;
@@ -44,6 +43,7 @@
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenInfo;
import org.apache.hadoop.util.StringUtils;


//this will need to be replaced someday when there is a suitable replacement
@@ -182,7 +182,8 @@ private static String replacePattern(String[] components, String hostname)
if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
fqdn = getLocalHostName();
}
return components[0] + "/" + fqdn.toLowerCase(Locale.US) + "@" + components[2];
return components[0] + "/" +
StringUtils.toLowerCase(fqdn) + "@" + components[2];
}

static String getLocalHostName() throws UnknownHostException {
@@ -379,7 +380,7 @@ public static Text buildTokenService(InetSocketAddress addr) {
}
host = addr.getAddress().getHostAddress();
} else {
host = addr.getHostName().toLowerCase();
host = StringUtils.toLowerCase(addr.getHostName());
}
return new Text(host + ":" + addr.getPort());
}
@@ -606,7 +607,8 @@ void setSearchDomains(String ... domains) {
public static AuthenticationMethod getAuthenticationMethod(Configuration conf) {
String value = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple");
try {
return Enum.valueOf(AuthenticationMethod.class, value.toUpperCase(Locale.ENGLISH));
return Enum.valueOf(AuthenticationMethod.class,
StringUtils.toUpperCase(value));
} catch (IllegalArgumentException iae) {
throw new IllegalArgumentException("Invalid attribute value for " +
HADOOP_SECURITY_AUTHENTICATION + " of " + value);
@@ -619,7 +621,7 @@ public static void setAuthenticationMethod(
authenticationMethod = AuthenticationMethod.SIMPLE;
}
conf.set(HADOOP_SECURITY_AUTHENTICATION,
authenticationMethod.toString().toLowerCase(Locale.ENGLISH));
StringUtils.toLowerCase(authenticationMethod.toString()));
}

/*
(changed file, path not shown)
@@ -138,7 +138,8 @@ static Map<String, String> getSaslProperties(Configuration conf) {
QualityOfProtection.PRIVACY.toString());

for (int i=0; i < qop.length; i++) {
qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop();
qop[i] = QualityOfProtection.valueOf(
StringUtils.toUpperCase(qop[i])).getSaslQop();
}

saslProps.put(Sasl.QOP, StringUtils.join(",", qop));
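The hunk above, like the earlier SASL and StorageType changes, shows where the default-locale behaviour hurts most: Enum.valueOf matches only the exact upper-case ASCII constant name, so a value such as "privacy" upper-cased under a Turkish default locale no longer resolves. A small stand-alone sketch with a stand-in enum (not Hadoop's QualityOfProtection) illustrates the failure and the fix:

    import java.util.Locale;

    public class EnumLocaleDemo {
      // Stand-in for enums such as SaslRpcServer.QualityOfProtection.
      enum Qop { AUTHENTICATION, INTEGRITY, PRIVACY }

      public static void main(String[] args) {
        Locale.setDefault(new Locale("tr", "TR"));

        try {
          // "privacy".toUpperCase() becomes "PRİVACY" (dotted İ) here,
          // so the lookup throws IllegalArgumentException.
          Qop.valueOf("privacy".toUpperCase());
          System.out.println("default-locale lookup succeeded");
        } catch (IllegalArgumentException e) {
          System.out.println("default-locale lookup failed: " + e.getMessage());
        }

        // With the locale pinned, the lookup is stable on every machine.
        System.out.println(Qop.valueOf("privacy".toUpperCase(Locale.ENGLISH)));
      }
    }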
(Diff truncated: the remaining changed files in this commit are not shown here.)