Class org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSinkUnitTest
0% successful
Failed tests
setupMiniCluster
org.apache.hadoop.ipc.RemoteException(java.lang.NoClassDefFoundError): Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203)
    at app//org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584)
    at app//org.apache.hadoop.ipc.Client.call(Client.java:1529)
    at app//org.apache.hadoop.ipc.Client.call(Client.java:1426)
    at app//org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258)
    at app//org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139)
    at app/jdk.proxy3/jdk.proxy3.$Proxy65.getDatanodeReport(Unknown Source)
    at app//org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getDatanodeReport$28(ClientNamenodeProtocolTranslatorPB.java:713)
    at app//org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160)
    at app//org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getDatanodeReport(ClientNamenodeProtocolTranslatorPB.java:713)
    at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
    at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
    at app//org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437)
    at app//org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170)
    at app//org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162)
    at app//org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100)
    at app//org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366)
    at app/jdk.proxy3/jdk.proxy3.$Proxy66.getDatanodeReport(Unknown Source)
    at app//org.apache.hadoop.hdfs.DFSClient.datanodeReport(DFSClient.java:2134)
    at app//org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2869)
    at app//org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2920)
    at app//org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1848)
    at app//org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:995)
    at app//org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:594)
    at app//org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:533)
    at app//org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:30)
    at app//org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:38)
    at app//org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSinkUnitTest.setupMiniCluster(ReadsSparkSinkUnitTest.java:47)
    at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
    at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
    at app//org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
    at app//org.testng.internal.invokers.MethodInvocationHelper.invokeMethodConsideringTimeout(MethodInvocationHelper.java:69)
    at app//org.testng.internal.invokers.ConfigInvoker.invokeConfigurationMethod(ConfigInvoker.java:361)
    at app//org.testng.internal.invokers.ConfigInvoker.invokeConfigurations(ConfigInvoker.java:296)
    at app//org.testng.internal.invokers.TestMethodWorker.invokeBeforeClassMethods(TestMethodWorker.java:180)
    at app//org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:122)
    at java.base@17.0.6/java.util.ArrayList.forEach(ArrayList.java:1511)
    at app//org.testng.TestRunner.privateRun(TestRunner.java:829)
    at app//org.testng.TestRunner.run(TestRunner.java:602)
    at app//org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
    at app//org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
    at app//org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
    at app//org.testng.SuiteRunner.run(SuiteRunner.java:330)
    at app//org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
    at app//org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
    at app//org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
    at app//org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
    at app//org.testng.TestNG.runSuites(TestNG.java:1099)
    at app//org.testng.TestNG.run(TestNG.java:1067)
    at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
    at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
    at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
    at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
    at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
    at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
    at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
    at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
    at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
    at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
    at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
    at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
    at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
    at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
    at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
    at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
    at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
    at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
    at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
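The before-class setup fails inside MiniDFSCluster startup: MiniClusterUtils.getMiniCluster builds the cluster via MiniDFSCluster$Builder.build(), and the waitActive() call made during build dies on the getDatanodeReport RPC shown above. For reference, a minimal standalone sketch of that startup path; the configuration details below are assumptions (only the Builder.build()/waitActive() path appears in the trace), and it requires the hadoop-hdfs test artifacts (e.g. hadoop-minicluster) on the classpath:

```java
import java.io.File;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;

// Standalone sketch of the cluster startup performed by setupMiniCluster via
// MiniClusterUtils.getMiniCluster (see trace above). Base directory and datanode
// count are assumptions, not taken from the GATK test utilities.
public class MiniDfsClusterRepro {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Optional: keep the cluster's storage in a known temp directory.
        conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR,
                new File(System.getProperty("java.io.tmpdir"), "minicluster-repro").getAbsolutePath());

        // build() starts the NameNode and DataNode and then waits for the cluster to
        // become active; that wait issues the getDatanodeReport RPC that fails here.
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
                .numDataNodes(1)
                .build();
        try {
            System.out.println("MiniDFSCluster up at " + cluster.getURI());
        } finally {
            cluster.shutdown();
        }
    }
}
```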
Tests
Test | Duration | Result |
---|---|---|
readsSinkHDFSTest[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) | - | ignored |
readsSinkHDFSTest[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) | - | ignored |
readsSinkHDFSTest[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) | - | ignored |
readsSinkHDFSTest[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) | - | ignored |
readsSinkHDFSTest[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) | - | ignored |
readsSinkHDFSTest[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) | - | ignored |
readsSinkHDFSTest[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) | - | ignored |
readsSinkHDFSTest[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) | - | ignored |
readsSinkHDFSTest[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) | - | ignored |
readsSinkShardedTest[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) | - | ignored |
readsSinkShardedTest[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) | - | ignored |
readsSinkShardedTest[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) | - | ignored |
readsSinkShardedTest[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) | - | ignored |
readsSinkShardedTest[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) | - | ignored |
readsSinkShardedTest[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) | - | ignored |
readsSinkShardedTest[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) | - | ignored |
readsSinkShardedTest[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) | - | ignored |
readsSinkShardedTest[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) | - | ignored |
readsSinkTest[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) | - | ignored |
readsSinkTest[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) | - | ignored |
readsSinkTest[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) | - | ignored |
readsSinkTest[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) | - | ignored |
readsSinkTest[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) | - | ignored |
readsSinkTest[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) | - | ignored |
readsSinkTest[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) | - | ignored |
readsSinkTest[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) | - | ignored |
readsSinkTest[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) | - | ignored |
setupMiniCluster | 1.568s | failed |
testReadsSparkSinkNotSortingReadsToHeader | - | ignored |
testSpecifyPartsDir[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) | - | ignored |
testSpecifyPartsDir[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) | - | ignored |
testSpecifyPartsDir[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) | - | ignored |
testSpecifyPartsDir[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) | - | ignored |
testSpecifyPartsDir[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) | - | ignored |
testSpecifyPartsDir[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) | - | ignored |
testSpecifyPartsDir[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) | - | ignored |
testSpecifyPartsDir[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) | - | ignored |
testSpecifyPartsDir[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) | - | ignored |
testWritingToAnExistingFileHDFS[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 100) | - | ignored |
testWritingToAnExistingFileHDFS[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, true, 1) | - | ignored |
testWritingToAnExistingFileHDFS[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, true, false, 100) | - | ignored |
testWritingToAnExistingFileHDFS[3](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, true, 100) | - | ignored |
testWritingToAnExistingFileHDFS[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest1, null, .bam, false, false, 100) | - | ignored |
testWritingToAnExistingFileHDFS[5](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, ReadsSparkSinkUnitTest2, null, .bam, true, true, 100) | - | ignored |
testWritingToAnExistingFileHDFS[6](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam, ReadsSparkSinkUnitTest3, null, .bam, true, true, 100) | - | ignored |
testWritingToAnExistingFileHDFS[7](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram, ReadsSparkSinkUnitTest5, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta, .cram, true, true, 100) | - | ignored |
testWritingToAnExistingFileHDFS[8](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam, ReadsSparkSinkUnitTest6, null, .sam, true, true, 100) | - | ignored |
testWritingToFileURL | - | ignored |
Standard error
22:32:05.881 INFO MiniDFSCluster - starting cluster: numNameNodes=1, numDataNodes=1 22:32:06.124 INFO NameNode - Formatting using clusterid: testClusterID 22:32:06.136 INFO FSEditLog - Edit logging is async:true 22:32:06.155 INFO FSNamesystem - KeyProvider: null 22:32:06.157 INFO FSNamesystem - fsLock is fair: true 22:32:06.157 INFO FSNamesystem - Detailed lock hold time metrics enabled: false 22:32:06.157 INFO FSNamesystem - fsOwner = runner (auth:SIMPLE) 22:32:06.157 INFO FSNamesystem - supergroup = supergroup 22:32:06.157 INFO FSNamesystem - isPermissionEnabled = true 22:32:06.157 INFO FSNamesystem - isStoragePolicyEnabled = true 22:32:06.157 INFO FSNamesystem - HA Enabled: false 22:32:06.193 INFO Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling 22:32:06.195 INFO DatanodeManager - Slow peers collection thread shutdown 22:32:06.198 INFO deprecation - hadoop.configured.node.mapping is deprecated. Instead, use net.topology.configured.node.mapping 22:32:06.198 INFO DatanodeManager - dfs.block.invalidate.limit : configured=1000, counted=60, effected=1000 22:32:06.198 INFO DatanodeManager - dfs.namenode.datanode.registration.ip-hostname-check=true 22:32:06.200 INFO BlockManager - dfs.namenode.startup.delay.block.deletion.sec is set to 000:00:00:00.000 22:32:06.201 INFO BlockManager - The block deletion will start around 2025 Mar 28 22:32:06 22:32:06.202 INFO GSet - Computing capacity for map BlocksMap 22:32:06.202 INFO GSet - VM type = 64-bit 22:32:06.202 INFO GSet - 2.0% max memory 3.4 GB = 70 MB 22:32:06.202 INFO GSet - capacity = 2^23 = 8388608 entries 22:32:06.208 INFO BlockManager - Storage policy satisfier is disabled 22:32:06.208 INFO BlockManager - dfs.block.access.token.enable = false 22:32:06.212 INFO BlockManagerSafeMode - Using 1000 as SafeModeMonitor Interval 22:32:06.212 INFO BlockManagerSafeMode - dfs.namenode.safemode.threshold-pct = 0.999 22:32:06.212 INFO BlockManagerSafeMode - dfs.namenode.safemode.min.datanodes = 0 22:32:06.212 INFO BlockManagerSafeMode - dfs.namenode.safemode.extension = 0 22:32:06.213 INFO BlockManager - defaultReplication = 1 22:32:06.213 INFO BlockManager - maxReplication = 512 22:32:06.213 INFO BlockManager - minReplication = 1 22:32:06.213 INFO BlockManager - maxReplicationStreams = 2 22:32:06.213 INFO BlockManager - redundancyRecheckInterval = 3000ms 22:32:06.213 INFO BlockManager - encryptDataTransfer = false 22:32:06.213 INFO BlockManager - maxNumBlocksToLog = 1000 22:32:06.232 INFO FSDirectory - GLOBAL serial map: bits=29 maxEntries=536870911 22:32:06.232 INFO FSDirectory - USER serial map: bits=24 maxEntries=16777215 22:32:06.232 INFO FSDirectory - GROUP serial map: bits=24 maxEntries=16777215 22:32:06.232 INFO FSDirectory - XATTR serial map: bits=24 maxEntries=16777215 22:32:06.240 INFO GSet - Computing capacity for map INodeMap 22:32:06.240 INFO GSet - VM type = 64-bit 22:32:06.240 INFO GSet - 1.0% max memory 3.4 GB = 35 MB 22:32:06.240 INFO GSet - capacity = 2^22 = 4194304 entries 22:32:06.241 INFO FSDirectory - ACLs enabled? true 22:32:06.241 INFO FSDirectory - POSIX ACL inheritance enabled? true 22:32:06.241 INFO FSDirectory - XAttrs enabled? 
true 22:32:06.241 INFO NameNode - Caching file names occurring more than 10 times 22:32:06.245 INFO SnapshotManager - Loaded config captureOpenFiles: false, skipCaptureAccessTimeOnlyChange: false, snapshotDiffAllowSnapRootDescendant: true, maxSnapshotFSLimit: 65536, maxSnapshotLimit: 65536 22:32:06.245 INFO SnapshotManager - dfs.namenode.snapshot.deletion.ordered = false 22:32:06.246 INFO SnapshotManager - SkipList is disabled 22:32:06.249 INFO GSet - Computing capacity for map cachedBlocks 22:32:06.249 INFO GSet - VM type = 64-bit 22:32:06.250 INFO GSet - 0.25% max memory 3.4 GB = 8.8 MB 22:32:06.250 INFO GSet - capacity = 2^20 = 1048576 entries 22:32:06.255 INFO TopMetrics - NNTop conf: dfs.namenode.top.window.num.buckets = 10 22:32:06.255 INFO TopMetrics - NNTop conf: dfs.namenode.top.num.users = 10 22:32:06.255 INFO TopMetrics - NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25 22:32:06.256 INFO FSNamesystem - Retry cache on namenode is enabled 22:32:06.256 INFO FSNamesystem - Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis 22:32:06.258 INFO GSet - Computing capacity for map NameNodeRetryCache 22:32:06.258 INFO GSet - VM type = 64-bit 22:32:06.258 INFO GSet - 0.029999999329447746% max memory 3.4 GB = 1.0 MB 22:32:06.258 INFO GSet - capacity = 2^17 = 131072 entries 22:32:06.269 INFO FSImage - Allocated new BlockPoolId: BP-1662193480-10.1.0.27-1743201126265 22:32:06.277 INFO Storage - Storage directory /tmp/minicluster_storage1740573651713901440/name-0-1 has been successfully formatted. 22:32:06.280 INFO Storage - Storage directory /tmp/minicluster_storage1740573651713901440/name-0-2 has been successfully formatted. 22:32:06.296 INFO FSImageFormatProtobuf - Saving image file /tmp/minicluster_storage1740573651713901440/name-0-1/current/fsimage.ckpt_0000000000000000000 using no compression 22:32:06.296 INFO FSImageFormatProtobuf - Saving image file /tmp/minicluster_storage1740573651713901440/name-0-2/current/fsimage.ckpt_0000000000000000000 using no compression 22:32:06.362 INFO FSImageFormatProtobuf - Image file /tmp/minicluster_storage1740573651713901440/name-0-2/current/fsimage.ckpt_0000000000000000000 of size 401 bytes saved in 0 seconds . 22:32:06.362 INFO FSImageFormatProtobuf - Image file /tmp/minicluster_storage1740573651713901440/name-0-1/current/fsimage.ckpt_0000000000000000000 of size 401 bytes saved in 0 seconds . 22:32:06.378 INFO NNStorageRetentionManager - Going to retain 1 images with txid >= 0 22:32:06.380 INFO DatanodeManager - Slow peers collection thread shutdown 22:32:06.456 INFO FSNamesystem - Stopping services started for active state 22:32:06.457 INFO FSNamesystem - Stopping services started for standby state 22:32:06.458 INFO NameNode - createNameNode [] 22:32:06.495 WARN MetricsConfig - Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties 22:32:06.504 INFO MetricsSystemImpl - Scheduled Metric snapshot period at 10 second(s). 
22:32:06.504 INFO MetricsSystemImpl - NameNode metrics system started 22:32:06.508 INFO NameNodeUtils - fs.defaultFS is hdfs://127.0.0.1:0 22:32:06.534 INFO JvmPauseMonitor - Starting JVM pause monitor 22:32:06.546 INFO DFSUtil - Filter initializers set : org.apache.hadoop.http.lib.StaticUserWebFilter,org.apache.hadoop.hdfs.web.AuthFilterInitializer 22:32:06.551 INFO DFSUtil - Starting Web-server for hdfs at: http://localhost:0 22:32:06.562 INFO log - Logging initialized @28732ms to org.eclipse.jetty.util.log.Slf4jLog 22:32:06.640 WARN AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret 22:32:06.658 INFO HttpServer2 - Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter) 22:32:06.660 INFO HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context hdfs 22:32:06.660 INFO HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static 22:32:06.662 INFO HttpServer2 - Added filter AuthFilter (class=org.apache.hadoop.hdfs.web.AuthFilter) to context hdfs 22:32:06.662 INFO HttpServer2 - Added filter AuthFilter (class=org.apache.hadoop.hdfs.web.AuthFilter) to context static 22:32:06.664 INFO HttpServer2 - ASYNC_PROFILER_HOME environment variable and async.profiler.home system property not specified. Disabling /prof endpoint. 22:32:06.693 INFO HttpServer2 - addJerseyResourcePackage: packageName=org.apache.hadoop.hdfs.server.namenode.web.resources;org.apache.hadoop.hdfs.web.resources, pathSpec=/webhdfs/v1/* 22:32:06.697 INFO HttpServer2 - Jetty bound to port 33923 22:32:06.698 INFO Server - jetty-9.4.56.v20240826; built: 2024-08-26T17:15:05.868Z; git: ec6782ff5ead824dabdcf47fa98f90a4aedff401; jvm 17.0.6+10 22:32:06.724 INFO session - DefaultSessionIdManager workerName=node0 22:32:06.724 INFO session - No SessionScavenger set, using defaults 22:32:06.725 INFO session - node0 Scavenging every 600000ms 22:32:06.740 WARN AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret 22:32:06.742 INFO ContextHandler - Started o.e.j.s.ServletContextHandler@6814fe0e{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,AVAILABLE} 22:32:06.888 INFO ContextHandler - Started o.e.j.w.WebAppContext@7e8c7270{hdfs,/,file:///tmp/jetty-localhost-33923-hadoop-hdfs-3_4_0-tests_jar-_-any-8736814415618374753/webapp/,AVAILABLE}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/hdfs} 22:32:06.893 INFO AbstractConnector - Started ServerConnector@7a838036{HTTP/1.1, (http/1.1)}{localhost:33923} 22:32:06.893 INFO Server - Started @29064ms 22:32:06.899 INFO FSEditLog - Edit logging is async:true 22:32:06.910 INFO FSNamesystem - KeyProvider: null 22:32:06.910 INFO FSNamesystem - fsLock is fair: true 22:32:06.910 INFO FSNamesystem - Detailed lock hold time metrics enabled: false 22:32:06.910 INFO FSNamesystem - fsOwner = runner (auth:SIMPLE) 22:32:06.910 INFO FSNamesystem - supergroup = supergroup 22:32:06.910 INFO FSNamesystem - isPermissionEnabled = true 22:32:06.910 INFO FSNamesystem - isStoragePolicyEnabled = true 22:32:06.910 INFO FSNamesystem - HA Enabled: false 22:32:06.911 INFO Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling 22:32:06.911 INFO DatanodeManager - Slow peers collection thread shutdown 22:32:06.911 INFO DatanodeManager - dfs.block.invalidate.limit : configured=1000, counted=60, effected=1000 22:32:06.911 INFO DatanodeManager - dfs.namenode.datanode.registration.ip-hostname-check=true 22:32:06.911 INFO BlockManager - dfs.namenode.startup.delay.block.deletion.sec is set to 000:00:00:00.000 22:32:06.911 INFO BlockManager - The block deletion will start around 2025 Mar 28 22:32:06 22:32:06.911 INFO GSet - Computing capacity for map BlocksMap 22:32:06.911 INFO GSet - VM type = 64-bit 22:32:06.911 INFO GSet - 2.0% max memory 3.4 GB = 70 MB 22:32:06.911 INFO GSet - capacity = 2^23 = 8388608 entries 22:32:06.926 INFO BlockManager - Storage policy satisfier is disabled 22:32:06.926 INFO BlockManager - dfs.block.access.token.enable = false 22:32:06.927 INFO BlockManagerSafeMode - Using 1000 as SafeModeMonitor Interval 22:32:06.927 INFO BlockManagerSafeMode - dfs.namenode.safemode.threshold-pct = 0.999 22:32:06.927 INFO BlockManagerSafeMode - dfs.namenode.safemode.min.datanodes = 0 22:32:06.927 INFO BlockManagerSafeMode - dfs.namenode.safemode.extension = 0 22:32:06.927 INFO BlockManager - defaultReplication = 1 22:32:06.927 INFO BlockManager - maxReplication = 512 22:32:06.927 INFO BlockManager - minReplication = 1 22:32:06.927 INFO BlockManager - maxReplicationStreams = 2 22:32:06.927 INFO BlockManager - redundancyRecheckInterval = 3000ms 22:32:06.927 INFO BlockManager - encryptDataTransfer = false 22:32:06.927 INFO BlockManager - maxNumBlocksToLog = 1000 22:32:06.927 INFO GSet - Computing capacity for map INodeMap 22:32:06.927 INFO GSet - VM type = 64-bit 22:32:06.927 INFO GSet - 1.0% max memory 3.4 GB = 35 MB 22:32:06.927 INFO GSet - capacity = 2^22 = 4194304 entries 22:32:06.928 INFO FSDirectory - ACLs enabled? true 22:32:06.928 INFO FSDirectory - POSIX ACL inheritance enabled? true 22:32:06.928 INFO FSDirectory - XAttrs enabled? 
true 22:32:06.928 INFO NameNode - Caching file names occurring more than 10 times 22:32:06.928 INFO SnapshotManager - Loaded config captureOpenFiles: false, skipCaptureAccessTimeOnlyChange: false, snapshotDiffAllowSnapRootDescendant: true, maxSnapshotFSLimit: 65536, maxSnapshotLimit: 65536 22:32:06.928 INFO SnapshotManager - dfs.namenode.snapshot.deletion.ordered = false 22:32:06.928 INFO SnapshotManager - SkipList is disabled 22:32:06.929 INFO GSet - Computing capacity for map cachedBlocks 22:32:06.929 INFO GSet - VM type = 64-bit 22:32:06.929 INFO GSet - 0.25% max memory 3.4 GB = 8.8 MB 22:32:06.929 INFO GSet - capacity = 2^20 = 1048576 entries 22:32:06.929 INFO TopMetrics - NNTop conf: dfs.namenode.top.window.num.buckets = 10 22:32:06.929 INFO TopMetrics - NNTop conf: dfs.namenode.top.num.users = 10 22:32:06.929 INFO TopMetrics - NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25 22:32:06.929 INFO FSNamesystem - Retry cache on namenode is enabled 22:32:06.929 INFO FSNamesystem - Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis 22:32:06.929 INFO GSet - Computing capacity for map NameNodeRetryCache 22:32:06.929 INFO GSet - VM type = 64-bit 22:32:06.929 INFO GSet - 0.029999999329447746% max memory 3.4 GB = 1.0 MB 22:32:06.929 INFO GSet - capacity = 2^17 = 131072 entries 22:32:06.929 INFO BlockManagerInfo - Removed broadcast_25_piece0 on localhost:35275 in memory (size: 4.5 KiB, free: 1920.0 MiB) 22:32:06.933 INFO BlockManagerInfo - Removed broadcast_32_piece0 on localhost:35275 in memory (size: 3.8 KiB, free: 1920.0 MiB) 22:32:06.933 INFO Storage - Lock on /tmp/minicluster_storage1740573651713901440/name-0-1/in_use.lock acquired by nodename 2817@fv-az1720-291 22:32:06.936 INFO Storage - Lock on /tmp/minicluster_storage1740573651713901440/name-0-2/in_use.lock acquired by nodename 2817@fv-az1720-291 22:32:06.936 INFO BlockManager - Removing RDD 47 22:32:06.937 INFO FileJournalManager - Recovering unfinalized segments in /tmp/minicluster_storage1740573651713901440/name-0-1/current 22:32:06.937 INFO FileJournalManager - Recovering unfinalized segments in /tmp/minicluster_storage1740573651713901440/name-0-2/current 22:32:06.937 INFO FSImage - No edit log streams selected. 22:32:06.937 INFO FSImage - Planning to load image: FSImageFile(file=/tmp/minicluster_storage1740573651713901440/name-0-1/current/fsimage_0000000000000000000, cpktTxId=0000000000000000000) 22:32:06.942 INFO BlockManagerInfo - Removed broadcast_23_piece0 on localhost:35275 in memory (size: 465.0 B, free: 1920.0 MiB) 22:32:06.949 INFO BlockManagerInfo - Removed broadcast_33_piece0 on localhost:35275 in memory (size: 4.8 KiB, free: 1920.0 MiB) 22:32:06.953 INFO BlockManagerInfo - Removed broadcast_31_piece0 on localhost:35275 in memory (size: 320.0 B, free: 1920.0 MiB) 22:32:06.954 INFO BlockManagerInfo - Removed broadcast_30_piece0 on localhost:35275 in memory (size: 4.7 KiB, free: 1920.0 MiB) 22:32:06.956 INFO BlockManagerInfo - Removed broadcast_27_piece0 on localhost:35275 in memory (size: 5.1 KiB, free: 1920.0 MiB) 22:32:06.958 INFO BlockManagerInfo - Removed broadcast_24_piece0 on localhost:35275 in memory (size: 4.3 KiB, free: 1920.0 MiB) 22:32:06.958 INFO FSImageFormatPBINode - Loading 1 INodes. 
22:32:06.959 INFO BlockManagerInfo - Removed broadcast_26_piece0 on localhost:35275 in memory (size: 3.2 KiB, free: 1920.0 MiB) 22:32:06.960 INFO FSImageFormatPBINode - Successfully loaded 1 inodes 22:32:06.960 INFO BlockManagerInfo - Removed broadcast_29_piece0 on localhost:35275 in memory (size: 3.8 KiB, free: 1920.0 MiB) 22:32:06.963 INFO BlockManagerInfo - Removed broadcast_28_piece0 on localhost:35275 in memory (size: 320.0 B, free: 1920.0 MiB) 22:32:06.963 INFO FSImageFormatPBINode - Completed update blocks map and name cache, total waiting duration 0ms. 22:32:06.965 INFO FSImageFormatProtobuf - Loaded FSImage in 0 seconds. 22:32:06.965 INFO FSImage - Loaded image for txid 0 from /tmp/minicluster_storage1740573651713901440/name-0-1/current/fsimage_0000000000000000000 22:32:06.965 INFO BlockManagerInfo - Removed broadcast_22_piece0 on localhost:35275 in memory (size: 159.0 B, free: 1920.0 MiB) 22:32:06.968 INFO FSNamesystem - Need to save fs image? false (staleImage=false, haEnabled=false, isRollingUpgrade=false) 22:32:06.968 INFO FSEditLog - Starting log segment at 1 22:32:06.977 INFO NameCache - initialized with 0 entries 0 lookups 22:32:06.977 INFO FSNamesystem - Finished loading FSImage in 47 msecs 22:32:07.049 INFO NameNode - RPC server is binding to localhost:0 22:32:07.049 INFO NameNode - Enable NameNode state context:false 22:32:07.053 INFO CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue, queueCapacity: 1000, scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler, ipcBackoff: false, ipcFailOver: false. 22:32:07.063 INFO Server - Listener at localhost:34165 22:32:07.064 INFO Server - Starting Socket Reader #1 for port 0 22:32:07.090 INFO NameNode - Clients are to use localhost:34165 to access this namenode/service. 22:32:07.092 INFO FSNamesystem - Registered FSNamesystemState, ReplicatedBlocksState and ECBlockGroupsState MBeans. 22:32:07.107 INFO LeaseManager - Number of blocks under construction: 0 22:32:07.112 INFO DatanodeAdminDefaultMonitor - Initialized the Default Decommission and Maintenance monitor 22:32:07.113 INFO BlockManager - Start MarkedDeleteBlockScrubber thread 22:32:07.114 INFO BlockManager - initializing replication queues 22:32:07.114 INFO StateChange - STATE* Leaving safe mode after 0 secs 22:32:07.114 INFO StateChange - STATE* Network topology has 0 racks and 0 datanodes 22:32:07.114 INFO StateChange - STATE* UnderReplicatedBlocks has 0 blocks 22:32:07.118 INFO BlockManager - Total number of blocks = 0 22:32:07.119 INFO BlockManager - Number of invalid blocks = 0 22:32:07.119 INFO BlockManager - Number of under-replicated blocks = 0 22:32:07.119 INFO BlockManager - Number of over-replicated blocks = 0 22:32:07.119 INFO BlockManager - Number of blocks being written = 0 22:32:07.119 INFO StateChange - STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 4 msec 22:32:07.119 INFO BlockManager - Reconstruction queues initialisation progress: 0.0, total number of blocks processed: 0/0 22:32:07.135 INFO Server - IPC Server Responder: starting 22:32:07.135 INFO Server - IPC Server listener on 0: starting 22:32:07.137 INFO NameNode - NameNode RPC up at: localhost/127.0.0.1:34165. 
22:32:07.138 INFO FSNamesystem - Starting services required for active state 22:32:07.138 INFO FSDirectory - Initializing quota with 12 thread(s) 22:32:07.140 INFO FSDirectory - Quota initialization completed in 2 milliseconds name space=1 storage space=0 storage types=RAM_DISK=0, SSD=0, DISK=0, ARCHIVE=0, PROVIDED=0, NVDIMM=0 22:32:07.143 INFO CacheReplicationMonitor - Starting CacheReplicationMonitor with interval 30000 milliseconds 22:32:07.153 INFO MiniDFSCluster - Starting DataNode 0 with dfs.datanode.data.dir: [DISK]file:/tmp/minicluster_storage1740573651713901440/data/data1,[DISK]file:/tmp/minicluster_storage1740573651713901440/data/data2 22:32:07.164 INFO ThrottledAsyncChecker - Scheduling a check for [DISK]file:/tmp/minicluster_storage1740573651713901440/data/data1 22:32:07.171 INFO ThrottledAsyncChecker - Scheduling a check for [DISK]file:/tmp/minicluster_storage1740573651713901440/data/data2 22:32:07.182 INFO MetricsSystemImpl - DataNode metrics system started (again) 22:32:07.187 INFO Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling 22:32:07.190 INFO BlockScanner - Initialized block scanner with targetBytesPerSec 1048576 22:32:07.192 INFO DataNode - Configured hostname is 127.0.0.1 22:32:07.193 INFO Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling 22:32:07.193 INFO DataNode - Starting DataNode with maxLockedMemory = 0 22:32:07.196 INFO DataNode - Opened streaming server at /127.0.0.1:45301 22:32:07.197 INFO DataNode - Balancing bandwidth is 104857600 bytes/s 22:32:07.197 INFO DataNode - Number threads for balancing is 100 22:32:07.201 WARN AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret 22:32:07.203 INFO HttpServer2 - Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter) 22:32:07.204 INFO HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode 22:32:07.204 INFO HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static 22:32:07.204 INFO HttpServer2 - ASYNC_PROFILER_HOME environment variable and async.profiler.home system property not specified. Disabling /prof endpoint. 
22:32:07.205 INFO HttpServer2 - Jetty bound to port 38187
22:32:07.205 INFO Server - jetty-9.4.56.v20240826; built: 2024-08-26T17:15:05.868Z; git: ec6782ff5ead824dabdcf47fa98f90a4aedff401; jvm 17.0.6+10
22:32:07.206 INFO session - DefaultSessionIdManager workerName=node0
22:32:07.206 INFO session - No SessionScavenger set, using defaults
22:32:07.206 INFO session - node0 Scavenging every 660000ms
22:32:07.207 INFO ContextHandler - Started o.e.j.s.ServletContextHandler@d0e155c{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,AVAILABLE}
22:32:07.300 INFO ContextHandler - Started o.e.j.w.WebAppContext@2dfa0bff{datanode,/,file:///tmp/jetty-localhost-38187-hadoop-hdfs-3_4_0-tests_jar-_-any-12092527532144223898/webapp/,AVAILABLE}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/datanode}
22:32:07.301 INFO AbstractConnector - Started ServerConnector@3aa02b54{HTTP/1.1, (http/1.1)}{localhost:38187}
22:32:07.301 INFO Server - Started @29472ms
22:32:07.306 WARN DatanodeHttpServer - Got null for restCsrfPreventionFilter - will not do any filtering.
22:32:07.307 INFO DatanodeHttpServer - Listening HTTP traffic on /127.0.0.1:38351
22:32:07.308 INFO JvmPauseMonitor - Starting JVM pause monitor
22:32:07.309 INFO DataNode - dnUserName = runner
22:32:07.309 INFO DataNode - supergroup = supergroup
22:32:07.316 INFO CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue, queueCapacity: 1000, scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler, ipcBackoff: false, ipcFailOver: false.
22:32:07.317 INFO Server - Listener at localhost:44873
22:32:07.317 INFO Server - Starting Socket Reader #1 for port 0
22:32:07.321 INFO DataNode - Opened IPC server at /127.0.0.1:44873
22:32:07.338 INFO DataNode - Refresh request received for nameservices: null
22:32:07.338 INFO DataNode - Starting BPOfferServices for nameservices: <default>
22:32:07.346 INFO DataNode - Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:34165 starting to offer service
22:32:07.349 INFO Server - IPC Server Responder: starting
22:32:07.349 INFO Server - IPC Server listener on 0: starting
22:32:07.395 WARN Server - IPC Server handler 0 on default port 34165, call Call#0 Retry#0 org.apache.hadoop.hdfs.protocol.ClientProtocol.getDatanodeReport from localhost:54404 / 127.0.0.1:54404
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.security.proto.SecurityProtos
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.<clinit>(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:32:07.394 WARN Server - IPC Server handler 1 on default port 34165, call Call#1 Retry#0 org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol.versionRequest from localhost:54402 / 127.0.0.1:54402
java.lang.NoSuchMethodError: 'org.apache.hadoop.thirdparty.protobuf.Descriptors$FileDescriptor org.apache.hadoop.thirdparty.protobuf.Descriptors$FileDescriptor.internalBuildGeneratedFileFrom(java.lang.String[], org.apache.hadoop.thirdparty.protobuf.Descriptors$FileDescriptor[])'
    at org.apache.hadoop.security.proto.SecurityProtos.<clinit>(SecurityProtos.java:6581) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.<clinit>(HdfsProtos.java:62533) ~[hadoop-hdfs-client-3.4.0.jar:?]
    at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.<clinit>(DatanodeProtocolProtos.java:35783) ~[hadoop-hdfs-3.4.0.jar:?]
    at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService.getDescriptor(DatanodeProtocolProtos.java:34977) ~[hadoop-hdfs-3.4.0.jar:?]
    at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService$2.getDescriptorForType(DatanodeProtocolProtos.java:34750) ~[hadoop-hdfs-3.4.0.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:32:07.402 WARN DataNode - Problem connecting to server: localhost/127.0.0.1:34165
22:32:07.405 WARN Server - IPC Server handler 2 on default port 34165, call Call#2 Retry#0 org.apache.hadoop.hdfs.protocol.ClientProtocol.getDatanodeReport from localhost:54404 / 127.0.0.1:54404
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:32:07.407 WARN MiniDFSCluster - Tried waitActive() 2 time(s) and failed, giving up.
org.apache.hadoop.ipc.RemoteException(java.lang.NoClassDefFoundError): Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) at org.apache.hadoop.ipc.Client.call(Client.java:1529) at org.apache.hadoop.ipc.Client.call(Client.java:1426) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) at jdk.proxy3/jdk.proxy3.$Proxy65.getDatanodeReport(Unknown Source) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getDatanodeReport$28(ClientNamenodeProtocolTranslatorPB.java:713) at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getDatanodeReport(ClientNamenodeProtocolTranslatorPB.java:713) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.base/java.lang.reflect.Method.invoke(Method.java:568) at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) at jdk.proxy3/jdk.proxy3.$Proxy66.getDatanodeReport(Unknown Source) at org.apache.hadoop.hdfs.DFSClient.datanodeReport(DFSClient.java:2134) at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2869) at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2920) at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1848) at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:995) at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:594) at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:533) at 
org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:30) at org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:38) at org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSinkUnitTest.setupMiniCluster(ReadsSparkSinkUnitTest.java:47) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.base/java.lang.reflect.Method.invoke(Method.java:568) at org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139) at org.testng.internal.invokers.MethodInvocationHelper.invokeMethodConsideringTimeout(MethodInvocationHelper.java:69) at org.testng.internal.invokers.ConfigInvoker.invokeConfigurationMethod(ConfigInvoker.java:361) at org.testng.internal.invokers.ConfigInvoker.invokeConfigurations(ConfigInvoker.java:296) at org.testng.internal.invokers.TestMethodWorker.invokeBeforeClassMethods(TestMethodWorker.java:180) at org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:122) at java.base/java.util.ArrayList.forEach(ArrayList.java:1511) at org.testng.TestRunner.privateRun(TestRunner.java:829) at org.testng.TestRunner.run(TestRunner.java:602) at org.testng.SuiteRunner.runTest(SuiteRunner.java:437) at org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431) at org.testng.SuiteRunner.privateRun(SuiteRunner.java:391) at org.testng.SuiteRunner.run(SuiteRunner.java:330) at org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52) at org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95) at org.testng.TestNG.runSuitesSequentially(TestNG.java:1256) at org.testng.TestNG.runSuitesLocally(TestNG.java:1176) at org.testng.TestNG.runSuites(TestNG.java:1099) at org.testng.TestNG.run(TestNG.java:1067) at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153) at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95) at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.base/java.lang.reflect.Method.invoke(Method.java:568) at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36) at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33) at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92) at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source) at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200) at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132) at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103) at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63) at 
org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56) at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121) at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71) at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69) at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74) 22:32:07.407 INFO MiniDFSCluster - Shutting down the Mini HDFS Cluster 22:32:07.407 INFO MiniDFSCluster - Shutting down DataNode 0 22:32:07.407 INFO DataNode - Closing all peers. 22:32:07.412 INFO ContextHandler - Stopped o.e.j.w.WebAppContext@2dfa0bff{datanode,/,null,STOPPED}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/datanode} 22:32:07.414 INFO AbstractConnector - Stopped ServerConnector@3aa02b54{HTTP/1.1, (http/1.1)}{localhost:0} 22:32:07.414 INFO session - node0 Stopped scavenging 22:32:07.415 INFO ContextHandler - Stopped o.e.j.s.ServletContextHandler@d0e155c{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,STOPPED} 22:32:07.417 INFO DataNode - Waiting up to 30 seconds for transfer threads to complete 22:32:07.417 INFO Server - Stopping server on 44873 22:32:07.417 INFO Server - Stopping IPC Server listener on 0 22:32:07.417 INFO Server - Stopping IPC Server Responder 22:32:07.418 INFO DataNode - BPOfferService Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:34165 interrupted while requesting version info from NN 22:32:07.418 ERROR DataNode - Command processor encountered interrupt and exit. 22:32:07.418 WARN DataNode - Ending command processor service for: Thread[Command processor,5,main] 22:32:07.418 ERROR DataNode - Initialization failed for Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:34165. Exiting. java.io.IOException: DN shut down before block pool connected at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.retrieveNamespaceInfo(BPServiceActor.java:276) ~[hadoop-hdfs-3.4.0.jar:?] at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.connectToNNAndHandshake(BPServiceActor.java:306) ~[hadoop-hdfs-3.4.0.jar:?] at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:889) [hadoop-hdfs-3.4.0.jar:?] at java.base/java.lang.Thread.run(Thread.java:833) [?:?] 22:32:07.418 WARN DataNode - Ending block pool service for: Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:34165 22:32:07.419 INFO DataNode - Removed Block pool <registering> (Datanode Uuid unassigned) 22:32:07.419 INFO DataNode - Shutdown complete. 
22:32:07.419 WARN DataSetLockManager - not open lock leak check func 22:32:07.419 INFO MiniDFSCluster - Shutting down the namenode 22:32:07.419 INFO FSNamesystem - Stopping services started for active state 22:32:07.419 INFO FSEditLog - Ending log segment 1, 1 22:32:07.419 INFO FSNamesystem - NameNodeEditLogRoller was interrupted, exiting 22:32:07.419 INFO FSNamesystem - LazyPersistFileScrubber was interrupted, exiting 22:32:07.420 INFO FSEditLog - Number of transactions: 2 Total time for transactions(ms): 0 Number of transactions batched in Syncs: 0 Number of syncs: 3 SyncTimes(ms): 1 0 22:32:07.420 INFO FileJournalManager - Finalizing edits file /tmp/minicluster_storage1740573651713901440/name-0-1/current/edits_inprogress_0000000000000000001 -> /tmp/minicluster_storage1740573651713901440/name-0-1/current/edits_0000000000000000001-0000000000000000002 22:32:07.421 INFO FileJournalManager - Finalizing edits file /tmp/minicluster_storage1740573651713901440/name-0-2/current/edits_inprogress_0000000000000000001 -> /tmp/minicluster_storage1740573651713901440/name-0-2/current/edits_0000000000000000001-0000000000000000002 22:32:07.421 INFO FSEditLog - FSEditLogAsync was interrupted, exiting 22:32:07.421 INFO CacheReplicationMonitor - Shutting down CacheReplicationMonitor 22:32:07.421 INFO Server - Stopping server on 34165 22:32:07.422 INFO Server - Stopping IPC Server listener on 0 22:32:07.422 INFO Server - Stopping IPC Server Responder 22:32:07.422 INFO BlockManager - Stopping MarkedDeleteBlockScrubber. 22:32:07.422 INFO BlockManager - Stopping RedundancyMonitor. 22:32:07.422 INFO DatanodeManager - Slow peers collection thread shutdown 22:32:07.426 INFO FSNamesystem - Stopping services started for active state 22:32:07.426 INFO FSNamesystem - Stopping services started for standby state 22:32:07.427 INFO ContextHandler - Stopped o.e.j.w.WebAppContext@7e8c7270{hdfs,/,null,STOPPED}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/hdfs} 22:32:07.427 INFO AbstractConnector - Stopped ServerConnector@7a838036{HTTP/1.1, (http/1.1)}{localhost:0} 22:32:07.428 INFO session - node0 Stopped scavenging 22:32:07.428 INFO ContextHandler - Stopped o.e.j.s.ServletContextHandler@6814fe0e{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,STOPPED} 22:32:07.428 INFO MetricsSystemImpl - Stopping DataNode metrics system... 22:32:07.429 INFO MetricsSystemImpl - DataNode metrics system stopped. 22:32:07.429 INFO MetricsSystemImpl - DataNode metrics system shutdown complete.
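The earliest error in the log is the NoSuchMethodError for org.apache.hadoop.thirdparty.protobuf.Descriptors$FileDescriptor.internalBuildGeneratedFileFrom thrown from SecurityProtos.<clinit>; once that static initializer fails, ClientNamenodeProtocolProtos can never initialize and every subsequent RPC reports the NoClassDefFoundError seen above. This pattern typically indicates that the shaded protobuf (hadoop-thirdparty) resolved on the test classpath is older than the one Hadoop 3.4.0 was compiled against. A hypothetical diagnostic sketch (not part of the test suite; the class and method names are taken verbatim from the stack traces) that can be run on the same classpath to see which jar supplies the shaded protobuf and whether the expected method exists:

```java
import java.lang.reflect.Method;
import java.net.URL;
import java.security.CodeSource;

// Hypothetical classpath check for the NoSuchMethodError recorded in the log above.
public class ShadedProtobufCheck {
    public static void main(String[] args) throws Exception {
        Class<?> fileDescriptor =
                Class.forName("org.apache.hadoop.thirdparty.protobuf.Descriptors$FileDescriptor");

        // Report which jar the shaded protobuf class was actually loaded from.
        CodeSource source = fileDescriptor.getProtectionDomain().getCodeSource();
        URL location = (source == null) ? null : source.getLocation();
        System.out.println("Descriptors$FileDescriptor loaded from: " + location);

        // Look for the method SecurityProtos.<clinit> expects.
        boolean present = false;
        for (Method m : fileDescriptor.getDeclaredMethods()) {
            if ("internalBuildGeneratedFileFrom".equals(m.getName())) {
                present = true;
                System.out.println("Found: " + m);
            }
        }
        if (!present) {
            // Matches the failure mode in the log: without this method the generated
            // protocol classes cannot initialize, so the NameNode RPCs all fail.
            System.out.println("internalBuildGeneratedFileFrom is missing; an older "
                    + "hadoop-shaded-protobuf is likely shadowing the one Hadoop 3.4.0 expects.");
        }
    }
}
```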