
Commit 97c5a6e

HADOOP-19041. Use StandardCharsets in more places (apache#6449)
1 parent 347521c

62 files changed: +166 -151 lines

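The change is mechanical across the files: charset lookups by name ("UTF-8", "utf-8", "UTF8") become references to java.nio.charset.StandardCharsets.UTF_8, which also lets dead catch blocks for UnsupportedEncodingException be deleted, since the Charset overloads declare no checked exception. A minimal sketch of the before/after pattern (class and method names here are illustrative, not from the patch):

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

public class CharsetPatternSketch {
  // Before: the charset is passed by name, forcing a checked exception
  // that can never fire for UTF-8, which every JVM must support.
  static byte[] before(String s) {
    try {
      return s.getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
      throw new AssertionError("UTF-8 is always supported", e); // won't happen
    }
  }

  // After: the Charset overload (Java 7+) does no alias lookup and
  // declares no checked exception, so the catch block disappears.
  static byte[] after(String s) {
    return s.getBytes(StandardCharsets.UTF_8);
  }

  public static void main(String[] args) {
    System.out.println(before("héllo").length + " == " + after("héllo").length);
  }
}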

hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java

+3 -8

@@ -21,12 +21,11 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.lang.reflect.InvocationTargetException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.nio.ByteBuffer;
-import java.nio.charset.IllegalCharsetNameException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -426,12 +425,8 @@ DER get(int... tags) {
     }
 
     String getAsString() {
-      try {
-        return new String(bb.array(), bb.arrayOffset() + bb.position(),
-            bb.remaining(), "UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        throw new IllegalCharsetNameException("UTF-8"); // won't happen.
-      }
+      return new String(bb.array(), bb.arrayOffset() + bb.position(),
+          bb.remaining(), StandardCharsets.UTF_8);
     }
 
     @Override

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

+1 -1

@@ -3565,7 +3565,7 @@ private void checkForOverride(Properties properties, String name, String attr, S
    * @throws IOException raised on errors performing I/O.
    */
   public void writeXml(OutputStream out) throws IOException {
-    writeXml(new OutputStreamWriter(out, "UTF-8"));
+    writeXml(new OutputStreamWriter(out, StandardCharsets.UTF_8));
   }
 
   public void writeXml(Writer out) throws IOException {

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java

+4 -3

@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Base64;
@@ -76,7 +77,7 @@ public static byte[] decodeValue(String value) throws IOException {
       String en = value.substring(0, 2);
       if (value.startsWith("\"") && value.endsWith("\"")) {
         value = value.substring(1, value.length()-1);
-        result = value.getBytes("utf-8");
+        result = value.getBytes(StandardCharsets.UTF_8);
       } else if (en.equalsIgnoreCase(HEX_PREFIX)) {
         value = value.substring(2, value.length());
         try {
@@ -90,7 +91,7 @@ public static byte[] decodeValue(String value) throws IOException {
         }
       }
       if (result == null) {
-        result = value.getBytes("utf-8");
+        result = value.getBytes(StandardCharsets.UTF_8);
       }
     }
     return result;
@@ -114,7 +115,7 @@ public static String encodeValue(byte[] value, XAttrCodec encoding)
     } else if (encoding == BASE64) {
       return BASE64_PREFIX + base64.encodeToString(value);
     } else {
-      return "\"" + new String(value, "utf-8") + "\"";
+      return "\"" + new String(value, StandardCharsets.UTF_8) + "\"";
     }
   }
 }

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java

+1 -1

@@ -387,7 +387,7 @@ public void testMultiByteCharacters() throws IOException {
     String name = "multi_byte_\u611b_name";
     String value = "multi_byte_\u0641_value";
     out = new BufferedWriter(new OutputStreamWriter(
-        new FileOutputStream(CONFIG_MULTI_BYTE), "UTF-8"));
+        new FileOutputStream(CONFIG_MULTI_BYTE), StandardCharsets.UTF_8));
     startConfig();
     declareProperty(name, value, value);
     endConfig();

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java

+2 -1

@@ -32,6 +32,7 @@
 
 import java.io.*;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
@@ -673,7 +674,7 @@ public void testFSOutputStreamBuilder() throws Exception {
     fileSys.createFile(path).recursive();
     FSDataOutputStream out = builder.build();
     String content = "Create with a generic type of createFile!";
-    byte[] contentOrigin = content.getBytes("UTF8");
+    byte[] contentOrigin = content.getBytes(StandardCharsets.UTF_8);
     out.write(contentOrigin);
     out.close();
 

hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java

+1 -1

@@ -106,7 +106,7 @@ static URL createURL(Path path, Map<String, String> params, Map<String,
     if (multiValuedParams != null) {
       for (Map.Entry<String, List<String>> multiValuedEntry :
           multiValuedParams.entrySet()) {
-        String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF8");
+        String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF-8");
         List<String> values = multiValuedEntry.getValue();
         for (String value : values) {
           sb.append(separator).append(name).append("=").
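Note that this file keeps a string charset name rather than the constant: URLEncoder.encode(String, String) takes the charset by name, and the Charset overload only exists from Java 10 onward, so the fix here is simply to use the canonical name "UTF-8" instead of the legacy alias "UTF8". A small illustrative sketch (the key value is hypothetical):

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class EncodeSketch {
  public static void main(String[] args) throws Exception {
    String key = "multi valued&param";
    // Java 8-compatible form: charset passed by its canonical registry
    // name. The alias "UTF8" also resolves at runtime, but "UTF-8" is
    // the canonical name, which is why the patch switches the literal.
    String byName = URLEncoder.encode(key, "UTF-8");
    // Java 10+ overload: skips the name lookup and declares no checked
    // UnsupportedEncodingException.
    String byCharset = URLEncoder.encode(key, StandardCharsets.UTF_8);
    System.out.println(byName.equals(byCharset)); // true
  }
}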

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java

+3 -2

@@ -26,6 +26,7 @@
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.Iterator;
@@ -148,7 +149,7 @@ TextWriter createWriter(Path file, CompressionCodec codec, String delim,
     }
     OutputStream tmp = fs.create(file);
     java.io.Writer out = new BufferedWriter(new OutputStreamWriter(
-        (null == codec) ? tmp : codec.createOutputStream(tmp), "UTF-8"));
+        (null == codec) ? tmp : codec.createOutputStream(tmp), StandardCharsets.UTF_8));
     return new TextWriter(out, delim);
   }
 
@@ -379,7 +380,7 @@ public Iterator<FileRegion> iterator() {
     FRIterator i = new FRIterator();
     try {
       BufferedReader r =
-          new BufferedReader(new InputStreamReader(createStream(), "UTF-8"));
+          new BufferedReader(new InputStreamReader(createStream(), StandardCharsets.UTF_8));
       iterators.put(i, r);
       i.pending = nextInternal(i);
     } catch (IOException e) {

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java

+2 -1

@@ -29,6 +29,7 @@
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayDeque;
 import java.util.Arrays;
 import java.util.Queue;
@@ -1063,7 +1064,7 @@ void receiveBlock(
       // send a special ack upstream.
       if (datanode.isRestarting() && isClient && !isTransfer) {
         try (Writer out = new OutputStreamWriter(
-            replicaInfo.createRestartMetaStream(), "UTF-8")) {
+            replicaInfo.createRestartMetaStream(), StandardCharsets.UTF_8)) {
           // write out the current time.
           out.write(Long.toString(Time.now() + restartBudget));
           out.flush();

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java

+2 -1

@@ -28,6 +28,7 @@
 import java.io.OutputStreamWriter;
 import java.io.RandomAccessFile;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -399,7 +400,7 @@ void saveDfsUsed() {
     try {
       long used = getDfsUsed();
       try (Writer out = new OutputStreamWriter(
-          Files.newOutputStream(outFile.toPath()), "UTF-8")) {
+          Files.newOutputStream(outFile.toPath()), StandardCharsets.UTF_8)) {
         // mtime is written last, so that truncated writes won't be valid.
         out.write(Long.toString(used) + " " + Long.toString(timer.now()));
         // This is only called as part of the volume shutdown.

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java

+2 -1

@@ -27,6 +27,7 @@
 import java.io.RandomAccessFile;
 import java.net.URI;
 import java.nio.channels.ClosedChannelException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
 import java.util.Collection;
@@ -929,7 +930,7 @@ public void save() throws IOException {
     boolean success = false;
     try (BufferedWriter writer = new BufferedWriter(
         new OutputStreamWriter(fileIoProvider.getFileOutputStream(
-            FsVolumeImpl.this, getTempSaveFile()), "UTF-8"))) {
+            FsVolumeImpl.this, getTempSaveFile()), StandardCharsets.UTF_8))) {
       WRITER.writeValue(writer, state);
       success = true;
     } finally {

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java

+2 -1

@@ -66,6 +66,7 @@
 import java.io.InputStreamReader;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.util.*;
 import java.util.concurrent.TimeUnit;
@@ -740,7 +741,7 @@ private static Options buildCliOptions() {
   private static String[] readPathFile(String file) throws IOException {
     List<String> list = Lists.newArrayList();
     BufferedReader reader = new BufferedReader(
-        new InputStreamReader(new FileInputStream(file), "UTF-8"));
+        new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8));
     try {
       String line;
       while ((line = reader.readLine()) != null) {

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java

+3 -2

@@ -26,6 +26,7 @@
 import java.net.URL;
 import java.net.URLConnection;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.concurrent.TimeUnit;
 
@@ -207,7 +208,7 @@ private Integer listCorruptFileBlocks(String dir, String baseUrl)
     }
     InputStream stream = connection.getInputStream();
     BufferedReader input = new BufferedReader(new InputStreamReader(
-        stream, "UTF-8"));
+        stream, StandardCharsets.UTF_8));
     try {
       String line = null;
       while ((line = input.readLine()) != null) {
@@ -376,7 +377,7 @@ else if (args[idx].equals("-replicaDetails")) {
     }
     InputStream stream = connection.getInputStream();
     BufferedReader input = new BufferedReader(new InputStreamReader(
-        stream, "UTF-8"));
+        stream, StandardCharsets.UTF_8));
     String line = null;
     String lastLine = NamenodeFsck.CORRUPT_STATUS;
     int errCode = -1;

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java

+7 -7

@@ -622,7 +622,7 @@ private INodeSection.INode.Builder processINodeXml(Node node)
     inodeBld.setId(id);
     String name = node.removeChildStr(SECTION_NAME);
     if (name != null) {
-      inodeBld.setName(ByteString.copyFrom(name, "UTF8"));
+      inodeBld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
     }
     switch (type) {
     case "FILE":
@@ -838,7 +838,7 @@ private void processSymlinkXml(Node node,
     }
     String target = node.removeChildStr(INODE_SECTION_TARGET);
     if (target != null) {
-      bld.setTarget(ByteString.copyFrom(target, "UTF8"));
+      bld.setTarget(ByteString.copyFrom(target, StandardCharsets.UTF_8));
     }
     Long lval = node.removeChildLong(INODE_SECTION_MTIME);
     if (lval != null) {
@@ -900,7 +900,7 @@ private INodeSection.XAttrFeatureProto.Builder xattrsXmlToProto(Node xattrs)
      }
      val = new HexBinaryAdapter().unmarshal(valHex);
    } else {
-      val = valStr.getBytes("UTF8");
+      val = valStr.getBytes(StandardCharsets.UTF_8);
    }
    b.setValue(ByteString.copyFrom(val));
 
@@ -1232,7 +1232,7 @@ public void process() throws IOException {
     }
     String name = inodeRef.removeChildStr("name");
     if (name != null) {
-      bld.setName(ByteString.copyFrom(name, "UTF8"));
+      bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
     }
     Integer dstSnapshotId = inodeRef.removeChildInt(
         INODE_REFERENCE_SECTION_DST_SNAPSHOT_ID);
@@ -1468,7 +1468,7 @@ private void processDirDiffEntry() throws IOException {
     bld.setChildrenSize(childrenSize);
     String name = dirDiff.removeChildStr(SECTION_NAME);
     if (name != null) {
-      bld.setName(ByteString.copyFrom(name, "UTF8"));
+      bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
     }
     Node snapshotCopy = dirDiff.removeChild(
         SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY);
@@ -1514,7 +1514,7 @@ private void processDirDiffEntry() throws IOException {
     }
     created.verifyNoRemainingKeys("created");
     FsImageProto.SnapshotDiffSection.CreatedListEntry.newBuilder().
-        setName(ByteString.copyFrom(cleName, "UTF8")).
+        setName(ByteString.copyFrom(cleName, StandardCharsets.UTF_8)).
         build().writeDelimitedTo(out);
     actualCreatedListSize++;
   }
@@ -1571,7 +1571,7 @@ private void processFileDiffEntry() throws IOException {
     }
     String name = fileDiff.removeChildStr(SECTION_NAME);
     if (name != null) {
-      bld.setName(ByteString.copyFrom(name, "UTF8"));
+      bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
     }
     Node snapshotCopy = fileDiff.removeChild(
         SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY);
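The recurring edit in this file swaps ByteString.copyFrom(String, String), which resolves the charset by name and declares UnsupportedEncodingException, for protobuf's Charset overload. A minimal sketch, assuming protobuf-java on the classpath; the literal is illustrative:

import com.google.protobuf.ByteString;
import java.nio.charset.StandardCharsets;

public class ByteStringSketch {
  public static void main(String[] args) throws Exception {
    // copyFrom(String, String) looks the charset up by name at runtime
    // and forces a checked exception on the caller.
    ByteString byName = ByteString.copyFrom("inode-name", "UTF8");
    // The Charset overload used by this commit does neither.
    ByteString byCharset = ByteString.copyFrom("inode-name", StandardCharsets.UTF_8);
    System.out.println(byName.equals(byCharset)); // true
  }
}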

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java

+2 -1

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.security.Permission;
 import java.security.PrivilegedExceptionAction;
 import java.text.SimpleDateFormat;
@@ -1917,7 +1918,7 @@ private static void corrupt(
     char c = content.charAt(0);
     sb.setCharAt(0, ++c);
     for(MaterializedReplica replica : replicas) {
-      replica.corruptData(sb.toString().getBytes("UTF8"));
+      replica.corruptData(sb.toString().getBytes(StandardCharsets.UTF_8));
     }
   }
 

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java

+2 -1

@@ -45,6 +45,7 @@
 import java.net.ServerSocket;
 import java.net.SocketTimeoutException;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -1885,7 +1886,7 @@ public void testDFSDataOutputStreamBuilderForCreation() throws Exception {
         .replication((short) 1)
         .blockSize(4096)
         .build()) {
-      byte[] contentOrigin = content.getBytes("UTF8");
+      byte[] contentOrigin = content.getBytes(StandardCharsets.UTF_8);
       out1.write(contentOrigin);
     }
 

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java

+4 -4

@@ -194,10 +194,10 @@ private static void writeFile(FileSystem fs, Path path, byte[] buffer,
    */
   public static Configuration initializeStorageStateConf(int numDirs,
                                                          Configuration conf) {
-    StringBuffer nameNodeDirs =
-        new StringBuffer(new File(TEST_ROOT_DIR, "name1").toString());
-    StringBuffer dataNodeDirs =
-        new StringBuffer(new File(TEST_ROOT_DIR, "data1").toString());
+    StringBuilder nameNodeDirs =
+        new StringBuilder(new File(TEST_ROOT_DIR, "name1").toString());
+    StringBuilder dataNodeDirs =
+        new StringBuilder(new File(TEST_ROOT_DIR, "data1").toString());
     for (int i = 2; i <= numDirs; i++) {
       nameNodeDirs.append("," + new File(TEST_ROOT_DIR, "name"+i));
       dataNodeDirs.append("," + new File(TEST_ROOT_DIR, "data"+i));
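Among the hunks shown, this is the one non-charset cleanup: StringBuffer synchronizes every call, which buys nothing for a method-local variable, so the unsynchronized StringBuilder is the idiomatic drop-in. A small sketch mirroring the pattern (the directory names are illustrative):

public class BuilderSketch {
  public static void main(String[] args) {
    // StringBuilder has the same API as StringBuffer but acquires no
    // monitor per append(); safe whenever the builder never escapes
    // the current thread, as with these local accumulator variables.
    StringBuilder dirs = new StringBuilder("name1");
    for (int i = 2; i <= 3; i++) {
      dirs.append(",").append("name").append(i);
    }
    System.out.println(dirs); // name1,name2,name3
  }
}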

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java

+2 -1

@@ -33,6 +33,7 @@
 import java.io.Writer;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -147,7 +148,7 @@ public FileRegion next() {
         newFile.getAbsolutePath());
     newFile.createNewFile();
     Writer writer = new OutputStreamWriter(
-        new FileOutputStream(newFile.getAbsolutePath()), "utf-8");
+        new FileOutputStream(newFile.getAbsolutePath()), StandardCharsets.UTF_8);
     for(int i=0; i< BLK_LEN/(Integer.SIZE/8); i++) {
       writer.write(currentCount);
     }

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java

+4 -3

@@ -39,6 +39,7 @@
 import static org.mockito.Mockito.mock;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.function.LongFunction;
 
@@ -433,7 +434,7 @@ private static INodeDirectory createINodeDirectory(INodeDirectory parent,
     PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group,
         FsPermission.createImmutable(perm));
     INodeDirectory inodeDirectory = new INodeDirectory(
-        HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes("UTF-8"), permStatus, 0L);
+        HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes(StandardCharsets.UTF_8), permStatus, 0L);
     parent.addChild(inodeDirectory);
     return inodeDirectory;
   }
@@ -443,8 +444,8 @@ private static INodeFile createINodeFile(INodeDirectory parent, String name,
     PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group,
         FsPermission.createImmutable(perm));
     INodeFile inodeFile = new INodeFile(HdfsConstants.GRANDFATHER_INODE_ID,
-        name.getBytes("UTF-8"), permStatus, 0L, 0L, null, REPLICATION,
-        PREFERRED_BLOCK_SIZE);
+        name.getBytes(StandardCharsets.UTF_8), permStatus, 0L, 0L, null,
+        REPLICATION, PREFERRED_BLOCK_SIZE);
     parent.addChild(inodeFile);
     return inodeFile;
   }
