22:41:38.427 WARN NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
22:41:38.974 WARN MetricsConfig - Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
22:41:39.165 WARN AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
22:41:39.259 WARN AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
22:41:39.798 WARN AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
22:41:39.971 WARN DatanodeHttpServer - Got null for restCsrfPreventionFilter - will not do any filtering.
22:41:40.132 WARN Server - IPC Server handler 0 on default port 41005, call Call#0 Retry#0 org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol.versionRequest from localhost:52856 / 127.0.0.1:52856
java.lang.NoSuchMethodError: 'org.apache.hadoop.thirdparty.protobuf.Descriptors$FileDescriptor org.apache.hadoop.thirdparty.protobuf.Descriptors$FileDescriptor.internalBuildGeneratedFileFrom(java.lang.String[], org.apache.hadoop.thirdparty.protobuf.Descriptors$FileDescriptor[])'
at org.apache.hadoop.security.proto.SecurityProtos.<clinit>(SecurityProtos.java:6581) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.<clinit>(HdfsProtos.java:62533) ~[hadoop-hdfs-client-3.4.0.jar:?]
at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.<clinit>(DatanodeProtocolProtos.java:35783) ~[hadoop-hdfs-3.4.0.jar:?]
at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService.getDescriptor(DatanodeProtocolProtos.java:34977) ~[hadoop-hdfs-3.4.0.jar:?]
at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService$2.getDescriptorForType(DatanodeProtocolProtos.java:34750) ~[hadoop-hdfs-3.4.0.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:41:40.142 WARN DataNode - Problem connecting to server: localhost/127.0.0.1:41005
22:41:40.300 WARN Server - IPC Server handler 1 on default port 41005, call Call#1 Retry#0 org.apache.hadoop.hdfs.protocol.ClientProtocol.getDatanodeReport from localhost:52860 / 127.0.0.1:52860
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.security.proto.SecurityProtos
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.<clinit>(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:41:40.305 WARN Server - IPC Server handler 2 on default port 41005, call Call#2 Retry#0 org.apache.hadoop.hdfs.protocol.ClientProtocol.getDatanodeReport from localhost:52860 / 127.0.0.1:52860
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:41:40.307 WARN MiniDFSCluster - Tried waitActive() 2 time(s) and failed, giving up. org.apache.hadoop.ipc.RemoteException(java.lang.NoClassDefFoundError): Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584)
at org.apache.hadoop.ipc.Client.call(Client.java:1529)
at org.apache.hadoop.ipc.Client.call(Client.java:1426)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139)
at jdk.proxy3/jdk.proxy3.$Proxy60.getDatanodeReport(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getDatanodeReport$28(ClientNamenodeProtocolTranslatorPB.java:713)
at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getDatanodeReport(ClientNamenodeProtocolTranslatorPB.java:713)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366)
at jdk.proxy3/jdk.proxy3.$Proxy61.getDatanodeReport(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.datanodeReport(DFSClient.java:2134)
at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2869)
at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2920)
at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1848)
at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:995)
at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:594)
at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:533)
at org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:30)
at org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:38)
at org.broadinstitute.hellbender.testutils.MiniClusterUtils.runOnIsolatedMiniCluster(MiniClusterUtils.java:74)
at org.broadinstitute.hellbender.utils.gcs.BucketUtilsUnitTest.testCopyAndDeleteHDFS(BucketUtilsUnitTest.java:208)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
at org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
at org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
at org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
at org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
at org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
at org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
at org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
at org.testng.TestRunner.privateRun(TestRunner.java:829)
at org.testng.TestRunner.run(TestRunner.java:602)
at org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
at org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
at org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
at org.testng.SuiteRunner.run(SuiteRunner.java:330)
at org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
at org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
at org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
at org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
at org.testng.TestNG.runSuites(TestNG.java:1099)
at org.testng.TestNG.run(TestNG.java:1067)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
22:41:40.325 ERROR DataNode - Initialization failed for Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:41005. Exiting.
java.io.IOException: DN shut down before block pool connected
at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.retrieveNamespaceInfo(BPServiceActor.java:276) ~[hadoop-hdfs-3.4.0.jar:?]
at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.connectToNNAndHandshake(BPServiceActor.java:306) ~[hadoop-hdfs-3.4.0.jar:?]
at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:889) [hadoop-hdfs-3.4.0.jar:?]
at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
22:41:40.326 WARN DataNode - Ending block pool service for: Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:41005
22:41:40.327 ERROR DataNode - Command processor encountered interrupt and exit.
22:41:40.327 WARN DataNode - Ending command processor service for: Thread[Command processor,5,main]
22:41:40.327 WARN DataSetLockManager - not open lock leak check func
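The root failure in this log is the `NoSuchMethodError` on `Descriptors$FileDescriptor.internalBuildGeneratedFileFrom(String[], FileDescriptor[])` thrown from `SecurityProtos.<clinit>`; the later `NoClassDefFoundError`s for `SecurityProtos` and `ClientNamenodeProtocolProtos` are fallout from that static initializer failing once. This pattern usually indicates that a shaded protobuf jar (the `org.apache.hadoop.thirdparty.protobuf` package) older than the one hadoop-common 3.4.0's generated classes expect is winning the test classpath. The probe below is a hypothetical sketch, not part of the test suite shown here: run on the same classpath as the failing test JVM, it reports which jar supplies the shaded classes and whether the missing overload exists there.

```java
// Hypothetical probe (not from the log above): reports which jar provides the
// shaded protobuf classes, and whether the overload named in the
// NoSuchMethodError is present. Run it with the same classpath as the test JVM.
public final class ShadedProtobufProbe {
    public static void main(String[] args) throws Exception {
        Class<?> descriptors =
                Class.forName("org.apache.hadoop.thirdparty.protobuf.Descriptors");
        // Jar that actually won the classpath race for the shaded package
        // (may be null for classes loaded outside a jar; fine for a sketch).
        System.out.println(descriptors.getProtectionDomain()
                .getCodeSource().getLocation());

        Class<?> fd = Class.forName(
                "org.apache.hadoop.thirdparty.protobuf.Descriptors$FileDescriptor");
        // Throws NoSuchMethodException if that jar predates the two-argument
        // overload that hadoop-common 3.4.0's generated SecurityProtos calls.
        System.out.println(fd.getMethod(
                "internalBuildGeneratedFileFrom", String[].class, fd.arrayType()));
    }
}
```

If the probe points at a stale shaded-protobuf jar, aligning the Hadoop thirdparty dependencies on the test classpath (so that only the version Hadoop 3.4.0 ships with is present) is the usual remedy; the exact coordinates to pin depend on the build and are not shown in this log.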