Skip to content

Commit cf959f3

Browse files
committed
HADOOP-19435. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-fs2img.
1 parent 0984bbd commit cf959f3

File tree

5 files changed: +101 −78 lines changed

hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java

+31-19
Original file line numberDiff line numberDiff line change
@@ -90,10 +90,11 @@
9090
import org.apache.hadoop.ipc.RemoteException;
9191
import org.apache.hadoop.net.NetUtils;
9292
import org.apache.hadoop.net.NodeBase;
93-
import org.junit.After;
94-
import org.junit.Before;
93+
import org.junit.jupiter.api.AfterEach;
94+
import org.junit.jupiter.api.BeforeEach;
9595
import org.junit.Rule;
96-
import org.junit.Test;
96+
import org.junit.jupiter.api.Test;
97+
import org.junit.jupiter.api.Timeout;
9798
import org.junit.rules.TestName;
9899
import org.slf4j.Logger;
99100
import org.slf4j.LoggerFactory;
@@ -107,7 +108,7 @@
107108
import static org.apache.hadoop.hdfs.server.common.Util.fileAsURI;
108109
import static org.apache.hadoop.hdfs.server.common.blockaliasmap.impl.TextFileRegionAliasMap.fileNameFromBlockPoolID;
109110
import static org.apache.hadoop.net.NodeBase.PATH_SEPARATOR_STR;
110-
import static org.junit.Assert.*;
111+
import static org.junit.jupiter.api.Assertions.*;
111112

112113
/**
113114
* Integration tests for the Provided implementation.
@@ -136,7 +137,7 @@ public class ITestProvidedImplementation {
136137
private Configuration conf;
137138
private MiniDFSCluster cluster;
138139

139-
@Before
140+
@BeforeEach
140141
public void setSeed() throws Exception {
141142
if (fBASE.exists() && !FileUtil.fullyDelete(fBASE)) {
142143
throw new IOException("Could not fully delete " + fBASE);
@@ -196,7 +197,7 @@ public void setSeed() throws Exception {
196197
}
197198
}
198199

199-
@After
200+
@AfterEach
200201
public void shutdown() throws Exception {
201202
try {
202203
if (cluster != null) {
@@ -312,7 +313,8 @@ private static List<File> getProvidedNamenodeDirs(String baseDir,
312313
return nnDirs;
313314
}
314315

315-
@Test(timeout=20000)
316+
@Test
317+
@Timeout(value = 20)
316318
public void testLoadImage() throws Exception {
317319
final long seed = r.nextLong();
318320
LOG.info("providedPath: " + providedPath);
@@ -338,7 +340,8 @@ public void testLoadImage() throws Exception {
338340
}
339341
}
340342

341-
@Test(timeout=30000)
343+
@Test
344+
@Timeout(value = 30)
342345
public void testProvidedReporting() throws Exception {
343346
conf.setClass(ImageWriter.Options.UGI_CLASS,
344347
SingleUGIResolver.class, UGIResolver.class);
@@ -417,7 +420,8 @@ public void testProvidedReporting() throws Exception {
417420
}
418421
}
419422

420-
@Test(timeout=500000)
423+
@Test
424+
@Timeout(value = 500)
421425
public void testDefaultReplication() throws Exception {
422426
int targetReplication = 2;
423427
conf.setInt(FixedBlockMultiReplicaResolver.REPLICATION, targetReplication);
@@ -529,7 +533,8 @@ private BlockLocation[] createFile(Path path, short replication,
529533
return fs.getFileBlockLocations(path, 0, fileLen);
530534
}
531535

532-
@Test(timeout=30000)
536+
@Test
537+
@Timeout(value = 30)
533538
public void testClusterWithEmptyImage() throws IOException {
534539
// start a cluster with 2 datanodes without any provided storage
535540
startCluster(nnDirPath, 2, null,
@@ -567,8 +572,8 @@ private DatanodeInfo[] getAndCheckBlockLocations(DFSClient client,
567572
private void checkUniqueness(DatanodeInfo[] locations) {
568573
Set<String> set = new HashSet<>();
569574
for (DatanodeInfo info: locations) {
570-
assertFalse("All locations should be unique",
571-
set.contains(info.getDatanodeUuid()));
575+
assertFalse(
576+
set.contains(info.getDatanodeUuid()), "All locations should be unique");
572577
set.add(info.getDatanodeUuid());
573578
}
574579
}
@@ -577,7 +582,8 @@ private void checkUniqueness(DatanodeInfo[] locations) {
577582
* Tests setting replication of provided files.
578583
* @throws Exception
579584
*/
580-
@Test(timeout=50000)
585+
@Test
586+
@Timeout(value = 50)
581587
public void testSetReplicationForProvidedFiles() throws Exception {
582588
createImage(new FSTreeWalk(providedPath, conf), nnDirPath,
583589
FixedBlockResolver.class);
@@ -618,7 +624,8 @@ private void setAndUnsetReplication(String filename) throws Exception {
618624
defaultReplication);
619625
}
620626

621-
@Test(timeout=30000)
627+
@Test
628+
@Timeout(value = 30)
622629
public void testProvidedDatanodeFailures() throws Exception {
623630
createImage(new FSTreeWalk(providedPath, conf), nnDirPath,
624631
FixedBlockResolver.class);
@@ -689,7 +696,8 @@ public void testProvidedDatanodeFailures() throws Exception {
689696
}
690697
}
691698

692-
@Test(timeout=300000)
699+
@Test
700+
@Timeout(value = 300)
693701
public void testTransientDeadDatanodes() throws Exception {
694702
createImage(new FSTreeWalk(providedPath, conf), nnDirPath,
695703
FixedBlockResolver.class);
@@ -727,7 +735,8 @@ private DatanodeStorageInfo getProvidedDatanodeStorageInfo() {
727735
return providedStorageMap.getProvidedStorageInfo();
728736
}
729737

730-
@Test(timeout=30000)
738+
@Test
739+
@Timeout(value = 30)
731740
public void testNamenodeRestart() throws Exception {
732741
createImage(new FSTreeWalk(providedPath, conf), nnDirPath,
733742
FixedBlockResolver.class);
@@ -768,7 +777,8 @@ private void verifyFileLocation(int fileIndex, int replication)
768777
}
769778
}
770779

771-
@Test(timeout=30000)
780+
@Test
781+
@Timeout(value = 30)
772782
public void testSetClusterID() throws Exception {
773783
String clusterID = "PROVIDED-CLUSTER";
774784
createImage(new FSTreeWalk(providedPath, conf), nnDirPath,
@@ -783,7 +793,8 @@ public void testSetClusterID() throws Exception {
783793
assertEquals(clusterID, nn.getNamesystem().getClusterId());
784794
}
785795

786-
@Test(timeout=30000)
796+
@Test
797+
@Timeout(value = 30)
787798
public void testNumberOfProvidedLocations() throws Exception {
788799
// set default replication to 4
789800
conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 4);
@@ -814,7 +825,8 @@ public void testNumberOfProvidedLocations() throws Exception {
814825
}
815826
}
816827

817-
@Test(timeout=30000)
828+
@Test
829+
@Timeout(value = 30)
818830
public void testNumberOfProvidedLocationsManyBlocks() throws Exception {
819831
// increase number of blocks per file to at least 10 blocks per file
820832
conf.setLong(FixedBlockResolver.BLOCKSIZE, baseFileLen/10);

hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSTreeWalk.java

+34-29
Original file line numberDiff line numberDiff line change
@@ -23,17 +23,18 @@
2323
import org.apache.hadoop.fs.Path;
2424
import org.apache.hadoop.fs.permission.AclStatus;
2525
import org.apache.hadoop.hdfs.DFSConfigKeys;
26-
import org.junit.Test;
26+
import org.junit.jupiter.api.Test;
2727

2828
import java.util.HashMap;
2929
import java.util.Map;
3030

31-
import static org.junit.Assert.assertEquals;
32-
import static org.junit.Assert.assertNotNull;
33-
import static org.junit.Assert.fail;
31+
import static org.junit.jupiter.api.Assertions.assertEquals;
32+
import static org.junit.jupiter.api.Assertions.assertNotNull;
33+
import static org.junit.jupiter.api.Assertions.fail;
3434
import static org.mockito.ArgumentMatchers.any;
3535
import static org.mockito.Mockito.mock;
3636
import static org.mockito.Mockito.when;
37+
import org.junit.jupiter.api.Assertions;
3738

3839
/**
3940
* Validate FSTreeWalk specific behavior.
@@ -84,37 +85,41 @@ public void testImportAcl() throws Exception {
8485
* Verify ACL enabled TreeWalk iterator throws an error if the external file
8586
* system does not support ACLs.
8687
*/
87-
@Test(expected = UnsupportedOperationException.class)
88+
@Test
8889
public void testACLNotSupported() throws Exception {
89-
Configuration conf = new Configuration();
90-
conf.setBoolean(DFSConfigKeys.DFS_PROVIDED_ACLS_IMPORT_ENABLED, true);
91-
92-
FileSystem fs = mock(FileSystem.class);
93-
when(fs.getAclStatus(any())).thenThrow(new UnsupportedOperationException());
94-
Path root = mock(Path.class);
95-
when(root.getFileSystem(conf)).thenReturn(fs);
96-
FileStatus rootFileStatus = new FileStatus(0, true, 0, 0, 1, root);
97-
when(fs.getFileStatus(root)).thenReturn(rootFileStatus);
90+
Assertions.assertThrows(UnsupportedOperationException.class, () -> {
91+
Configuration conf = new Configuration();
92+
conf.setBoolean(DFSConfigKeys.DFS_PROVIDED_ACLS_IMPORT_ENABLED, true);
93+
FileSystem fs = mock(FileSystem.class);
94+
when(fs.getAclStatus(any())).thenThrow(new UnsupportedOperationException());
95+
Path root = mock(Path.class);
96+
when(root.getFileSystem(conf)).thenReturn(fs);
97+
FileStatus rootFileStatus = new FileStatus(0, true, 0, 0, 1, root);
98+
when(fs.getFileStatus(root)).thenReturn(rootFileStatus);
99+
FSTreeWalk fsTreeWalk = new FSTreeWalk(root, conf);
100+
TreeWalk.TreeIterator iter = fsTreeWalk.iterator();
101+
fail("Unexpected successful creation of iter: " + iter);
102+
});
103+
98104

99-
FSTreeWalk fsTreeWalk = new FSTreeWalk(root, conf);
100-
TreeWalk.TreeIterator iter = fsTreeWalk.iterator();
101-
fail("Unexpected successful creation of iter: " + iter);
102-
}
105+
}
103106

104107
/**
105108
* Verify creation of INode for ACL enabled TreePath throws an error.
106109
*/
107-
@Test(expected = UnsupportedOperationException.class)
110+
@Test
108111
public void testToINodeACLNotSupported() throws Exception {
109-
BlockResolver blockResolver = new FixedBlockResolver();
110-
Path root = new Path("/");
111-
FileStatus rootFileStatus = new FileStatus(0, false, 0, 0, 1, root);
112+
Assertions.assertThrows(UnsupportedOperationException.class, () -> {
113+
BlockResolver blockResolver = new FixedBlockResolver();
114+
Path root = new Path("/");
115+
FileStatus rootFileStatus = new FileStatus(0, false, 0, 0, 1, root);
116+
AclStatus acls = mock(AclStatus.class);
117+
TreePath treePath = new TreePath(rootFileStatus, 1, null, null, acls);
118+
UGIResolver ugiResolver = mock(UGIResolver.class);
119+
when(ugiResolver.getPermissionsProto(null, acls)).thenReturn(1L);
120+
treePath.toINode(ugiResolver, blockResolver, null);
121+
});
122+
112123

113-
AclStatus acls = mock(AclStatus.class);
114-
TreePath treePath = new TreePath(rootFileStatus, 1, null, null, acls);
115-
116-
UGIResolver ugiResolver = mock(UGIResolver.class);
117-
when(ugiResolver.getPermissionsProto(null, acls)).thenReturn(1L);
118-
treePath.toINode(ugiResolver, blockResolver, null);
119-
}
124+
}
120125
}

hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFixedBlockResolver.java

+4-4
Original file line numberDiff line numberDiff line change
@@ -25,11 +25,11 @@
2525
import org.apache.hadoop.fs.FileStatus;
2626
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
2727

28-
import org.junit.Before;
28+
import org.junit.jupiter.api.BeforeEach;
2929
import org.junit.Rule;
30-
import org.junit.Test;
30+
import org.junit.jupiter.api.Test;
3131
import org.junit.rules.TestName;
32-
import static org.junit.Assert.*;
32+
import static org.junit.jupiter.api.Assertions.*;
3333

3434
/**
3535
* Validate fixed-size block partitioning.
@@ -40,7 +40,7 @@ public class TestFixedBlockResolver {
4040

4141
private final FixedBlockResolver blockId = new FixedBlockResolver();
4242

43-
@Before
43+
@BeforeEach
4444
public void setup() {
4545
Configuration conf = new Configuration(false);
4646
conf.setLong(FixedBlockResolver.BLOCKSIZE, 512L * (1L << 20));

hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRandomTreeWalk.java

+4-4
Original file line numberDiff line numberDiff line change
@@ -26,11 +26,11 @@
2626
import org.apache.hadoop.fs.FileStatus;
2727
import org.apache.hadoop.fs.Path;
2828

29-
import org.junit.Before;
29+
import org.junit.jupiter.api.BeforeEach;
3030
import org.junit.Rule;
31-
import org.junit.Test;
31+
import org.junit.jupiter.api.Test;
3232
import org.junit.rules.TestName;
33-
import static org.junit.Assert.*;
33+
import static org.junit.jupiter.api.Assertions.*;
3434

3535
/**
3636
* Validate randomly generated hierarchies, including fork() support in
@@ -42,7 +42,7 @@ public class TestRandomTreeWalk {
4242

4343
private Random r = new Random();
4444

45-
@Before
45+
@BeforeEach
4646
public void setSeed() {
4747
long seed = r.nextLong();
4848
r.setSeed(seed);

hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSingleUGIResolver.java

+28-22
Original file line numberDiff line numberDiff line change
@@ -31,11 +31,11 @@
3131
import org.apache.hadoop.fs.permission.FsPermission;
3232
import org.apache.hadoop.security.UserGroupInformation;
3333

34-
import org.junit.Before;
34+
import org.junit.jupiter.api.BeforeEach;
3535
import org.junit.Rule;
36-
import org.junit.Test;
36+
import org.junit.jupiter.api.Test;
3737
import org.junit.rules.TestName;
38-
import static org.junit.Assert.*;
38+
import static org.junit.jupiter.api.Assertions.*;
3939

4040
/**
4141
* Validate resolver assigning all paths to a single owner/group.
@@ -51,7 +51,7 @@ public class TestSingleUGIResolver {
5151

5252
private SingleUGIResolver ugi = new SingleUGIResolver();
5353

54-
@Before
54+
@BeforeEach
5555
public void setup() {
5656
Configuration conf = new Configuration(false);
5757
conf.setInt(SingleUGIResolver.UID, TESTUID);
@@ -125,31 +125,37 @@ public void testAclResolution() {
125125
match(perm, p1);
126126
}
127127

128-
@Test(expected=IllegalArgumentException.class)
128+
@Test
129129
public void testInvalidUid() {
130-
Configuration conf = ugi.getConf();
131-
conf.setInt(SingleUGIResolver.UID, (1 << 24) + 1);
132-
ugi.setConf(conf);
133-
ugi.resolve(file(TESTUSER, TESTGROUP, new FsPermission((short)0777)));
130+
assertThrows(IllegalArgumentException.class, () -> {
131+
Configuration conf = ugi.getConf();
132+
conf.setInt(SingleUGIResolver.UID, (1 << 24) + 1);
133+
ugi.setConf(conf);
134+
ugi.resolve(file(TESTUSER, TESTGROUP, new FsPermission((short)0777)));
135+
});
134136
}
135137

136-
@Test(expected=IllegalArgumentException.class)
138+
@Test
137139
public void testInvalidGid() {
138-
Configuration conf = ugi.getConf();
139-
conf.setInt(SingleUGIResolver.GID, (1 << 24) + 1);
140-
ugi.setConf(conf);
141-
ugi.resolve(file(TESTUSER, TESTGROUP, new FsPermission((short)0777)));
140+
assertThrows(IllegalArgumentException.class, () -> {
141+
Configuration conf = ugi.getConf();
142+
conf.setInt(SingleUGIResolver.GID, (1 << 24) + 1);
143+
ugi.setConf(conf);
144+
ugi.resolve(file(TESTUSER, TESTGROUP, new FsPermission((short)0777)));
145+
});
142146
}
143147

144-
@Test(expected=IllegalStateException.class)
148+
@Test
145149
public void testDuplicateIds() {
146-
Configuration conf = new Configuration(false);
147-
conf.setInt(SingleUGIResolver.UID, 4344);
148-
conf.setInt(SingleUGIResolver.GID, 4344);
149-
conf.set(SingleUGIResolver.USER, TESTUSER);
150-
conf.set(SingleUGIResolver.GROUP, TESTGROUP);
151-
ugi.setConf(conf);
152-
ugi.ugiMap();
150+
assertThrows(IllegalStateException.class, () -> {
151+
Configuration conf = new Configuration(false);
152+
conf.setInt(SingleUGIResolver.UID, 4344);
153+
conf.setInt(SingleUGIResolver.GID, 4344);
154+
conf.set(SingleUGIResolver.USER, TESTUSER);
155+
conf.set(SingleUGIResolver.GROUP, TESTGROUP);
156+
ugi.setConf(conf);
157+
ugi.ugiMap();
158+
});
153159
}
154160

155161
static void match(long encoded, FsPermission p) {

0 commit comments — Comments (0)