Class org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSourceUnitTest

Summary: 28 tests, 2 failures, 0 ignored — 4.843s duration, 92% successful

Failed tests

testCRAMReferenceFromHDFS

org.apache.hadoop.ipc.RemoteException(java.lang.NoClassDefFoundError): Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203)

	at app//org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584)
	at app//org.apache.hadoop.ipc.Client.call(Client.java:1529)
	at app//org.apache.hadoop.ipc.Client.call(Client.java:1426)
	at app//org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258)
	at app//org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139)
	at app/jdk.proxy3/jdk.proxy3.$Proxy65.getDatanodeReport(Unknown Source)
	at app//org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getDatanodeReport$28(ClientNamenodeProtocolTranslatorPB.java:713)
	at app//org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160)
	at app//org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getDatanodeReport(ClientNamenodeProtocolTranslatorPB.java:713)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366)
	at app/jdk.proxy3/jdk.proxy3.$Proxy66.getDatanodeReport(Unknown Source)
	at app//org.apache.hadoop.hdfs.DFSClient.datanodeReport(DFSClient.java:2134)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2869)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2920)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1848)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:995)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:594)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:533)
	at app//org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:30)
	at app//org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:38)
	at app//org.broadinstitute.hellbender.testutils.MiniClusterUtils.runOnIsolatedMiniCluster(MiniClusterUtils.java:74)
	at app//org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSourceUnitTest.testCRAMReferenceFromHDFS(ReadsSparkSourceUnitTest.java:231)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
	at app//org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
	at app//org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
	at app//org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
	at app//org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
	at app//org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
	at app//org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
	at app//org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
	at app//org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
	at java.base@17.0.6/java.util.ArrayList.forEach(ArrayList.java:1511)
	at app//org.testng.TestRunner.privateRun(TestRunner.java:829)
	at app//org.testng.TestRunner.run(TestRunner.java:602)
	at app//org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
	at app//org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
	at app//org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
	at app//org.testng.SuiteRunner.run(SuiteRunner.java:330)
	at app//org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
	at app//org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
	at app//org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
	at app//org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
	at app//org.testng.TestNG.runSuites(TestNG.java:1099)
	at app//org.testng.TestNG.run(TestNG.java:1067)
	at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
	at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
	at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
	at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
	at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
	at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
	at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
	at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)

testReadFromFileAndHDFS

org.apache.hadoop.ipc.RemoteException(java.lang.NoClassDefFoundError): Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203)

	at app//org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584)
	at app//org.apache.hadoop.ipc.Client.call(Client.java:1529)
	at app//org.apache.hadoop.ipc.Client.call(Client.java:1426)
	at app//org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258)
	at app//org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139)
	at app/jdk.proxy3/jdk.proxy3.$Proxy65.getDatanodeReport(Unknown Source)
	at app//org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getDatanodeReport$28(ClientNamenodeProtocolTranslatorPB.java:713)
	at app//org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160)
	at app//org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getDatanodeReport(ClientNamenodeProtocolTranslatorPB.java:713)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100)
	at app//org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366)
	at app/jdk.proxy3/jdk.proxy3.$Proxy66.getDatanodeReport(Unknown Source)
	at app//org.apache.hadoop.hdfs.DFSClient.datanodeReport(DFSClient.java:2134)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2869)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2920)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1848)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:995)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:594)
	at app//org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:533)
	at app//org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:30)
	at app//org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:38)
	at app//org.broadinstitute.hellbender.testutils.MiniClusterUtils.runOnIsolatedMiniCluster(MiniClusterUtils.java:74)
	at app//org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSourceUnitTest.testReadFromFileAndHDFS(ReadsSparkSourceUnitTest.java:203)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
	at app//org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
	at app//org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
	at app//org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
	at app//org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
	at app//org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
	at app//org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
	at app//org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
	at app//org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
	at java.base@17.0.6/java.util.ArrayList.forEach(ArrayList.java:1511)
	at app//org.testng.TestRunner.privateRun(TestRunner.java:829)
	at app//org.testng.TestRunner.run(TestRunner.java:602)
	at app//org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
	at app//org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
	at app//org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
	at app//org.testng.SuiteRunner.run(SuiteRunner.java:330)
	at app//org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
	at app//org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
	at app//org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
	at app//org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
	at app//org.testng.TestNG.runSuites(TestNG.java:1099)
	at app//org.testng.TestNG.run(TestNG.java:1067)
	at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
	at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
	at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
	at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
	at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
	at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
	at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
	at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)

Tests

Test Duration Result
loadReadsNonExistentReference 0.001s passed
readsSparkSourceTestLenient[0](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.bam, null) 0.234s passed
readsSparkSourceTestLenient[1](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.cram, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta) 0.213s passed
readsSparkSourceTestSilent[0](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.bam, null) 0.245s passed
readsSparkSourceTestSilent[1](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.cram, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta) 0.195s passed
readsSparkSourceTestStrict[0](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.bam, null) 0.042s passed
readsSparkSourceTestStrict[1](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.cram, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta) 0.050s passed
readsSparkSourceTest[0](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.bam, null) 0.512s passed
readsSparkSourceTest[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.alternate.bam, null) 0.342s passed
readsSparkSourceTest[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, null) 0.320s passed
readsSparkSourceTest[3](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.cram, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta) 0.215s passed
readsSparkSourceTest[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/valid.cram, /home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/valid.fasta) 0.240s passed
readsSparkSourceUnknownHostTest 0.009s passed
shardedReadsSparkSourceTest[0](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, /home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.sharded.bam, null) 0.282s passed
testCRAMReferenceFromHDFS 0.496s failed
testCRAMReferenceRequired 0.001s passed
testCheckCRAMReference 0s passed
testGetHeader[0](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.bam, null) 0.060s passed
testGetHeader[1](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.alternate.bam, null) 0.044s passed
testGetHeader[2](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam, null) 0.052s passed
testGetHeader[3](/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.cram, /home/runner/work/gatk/gatk/src/test/resources/human_g1k_v37.chr17_1Mb.fasta) 0.033s passed
testGetHeader[4](/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/valid.cram, /home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/valid.fasta) 0.032s passed
testHeadersAreStripped 0.258s passed
testIntervals 0.177s passed
testIntervalsWithUnmapped 0.186s passed
testPartitionSizing 0.117s passed
testReadFromFileAndHDFS 0.487s failed
testReject2BitCRAMReference 0s passed

Standard error

22:32:07.529 INFO  MemoryStore - Block broadcast_34 stored as values in memory (estimated size 325.2 KiB, free 1919.7 MiB)
22:32:07.562 INFO  MemoryStore - Block broadcast_34_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1919.6 MiB)
22:32:07.563 INFO  BlockManagerInfo - Added broadcast_34_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.9 MiB)
22:32:07.566 INFO  SparkContext - Created broadcast 34 from newAPIHadoopFile at PathSplitSource.java:96
22:32:07.653 INFO  MemoryStore - Block broadcast_35 stored as values in memory (estimated size 325.2 KiB, free 1919.3 MiB)
22:32:07.665 INFO  MemoryStore - Block broadcast_35_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1919.3 MiB)
22:32:07.665 INFO  BlockManagerInfo - Added broadcast_35_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.9 MiB)
22:32:07.665 INFO  SparkContext - Created broadcast 35 from newAPIHadoopFile at PathSplitSource.java:96
22:32:07.708 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:111
22:32:07.708 INFO  DAGScheduler - Got job 20 (collect at ReadsSparkSourceUnitTest.java:111) with 4 output partitions
22:32:07.709 INFO  DAGScheduler - Final stage: ResultStage 29 (collect at ReadsSparkSourceUnitTest.java:111)
22:32:07.709 INFO  DAGScheduler - Parents of final stage: List()
22:32:07.709 INFO  DAGScheduler - Missing parents: List()
22:32:07.709 INFO  DAGScheduler - Submitting ResultStage 29 (ParallelCollectionRDD[69] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:07.710 INFO  MemoryStore - Block broadcast_36 stored as values in memory (estimated size 3.0 KiB, free 1919.3 MiB)
22:32:07.711 INFO  MemoryStore - Block broadcast_36_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1919.3 MiB)
22:32:07.711 INFO  BlockManagerInfo - Added broadcast_36_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.9 MiB)
22:32:07.711 INFO  SparkContext - Created broadcast 36 from broadcast at DAGScheduler.scala:1580
22:32:07.712 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 29 (ParallelCollectionRDD[69] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:07.712 INFO  TaskSchedulerImpl - Adding task set 29.0 with 4 tasks resource profile 0
22:32:07.714 INFO  TaskSetManager - Starting task 0.0 in stage 29.0 (TID 67) (localhost, executor driver, partition 0, PROCESS_LOCAL, 47080 bytes) 
22:32:07.716 INFO  TaskSetManager - Starting task 1.0 in stage 29.0 (TID 68) (localhost, executor driver, partition 1, PROCESS_LOCAL, 47194 bytes) 
22:32:07.717 INFO  TaskSetManager - Starting task 2.0 in stage 29.0 (TID 69) (localhost, executor driver, partition 2, PROCESS_LOCAL, 47105 bytes) 
22:32:07.718 INFO  TaskSetManager - Starting task 3.0 in stage 29.0 (TID 70) (localhost, executor driver, partition 3, PROCESS_LOCAL, 47530 bytes) 
22:32:07.718 INFO  Executor - Running task 3.0 in stage 29.0 (TID 70)
22:32:07.718 INFO  Executor - Running task 0.0 in stage 29.0 (TID 67)
22:32:07.718 INFO  Executor - Running task 2.0 in stage 29.0 (TID 69)
22:32:07.719 INFO  Executor - Running task 1.0 in stage 29.0 (TID 68)
22:32:07.724 INFO  Executor - Finished task 3.0 in stage 29.0 (TID 70). 40805 bytes result sent to driver
22:32:07.725 INFO  Executor - Finished task 1.0 in stage 29.0 (TID 68). 40469 bytes result sent to driver
22:32:07.727 INFO  Executor - Finished task 2.0 in stage 29.0 (TID 69). 40380 bytes result sent to driver
22:32:07.728 INFO  Executor - Finished task 0.0 in stage 29.0 (TID 67). 40398 bytes result sent to driver
22:32:07.729 INFO  TaskSetManager - Finished task 3.0 in stage 29.0 (TID 70) in 12 ms on localhost (executor driver) (1/4)
22:32:07.730 INFO  TaskSetManager - Finished task 1.0 in stage 29.0 (TID 68) in 15 ms on localhost (executor driver) (2/4)
22:32:07.730 INFO  TaskSetManager - Finished task 0.0 in stage 29.0 (TID 67) in 17 ms on localhost (executor driver) (3/4)
22:32:07.731 INFO  TaskSetManager - Finished task 2.0 in stage 29.0 (TID 69) in 15 ms on localhost (executor driver) (4/4)
22:32:07.731 INFO  TaskSchedulerImpl - Removed TaskSet 29.0, whose tasks have all completed, from pool 
22:32:07.731 INFO  DAGScheduler - ResultStage 29 (collect at ReadsSparkSourceUnitTest.java:111) finished in 0.022 s
22:32:07.731 INFO  DAGScheduler - Job 20 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:07.731 INFO  TaskSchedulerImpl - Killing all running tasks in stage 29: Stage finished
22:32:07.731 INFO  DAGScheduler - Job 20 finished: collect at ReadsSparkSourceUnitTest.java:111, took 0.023353 s
22:32:07.750 INFO  FileInputFormat - Total input files to process : 1
22:32:07.812 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:112
22:32:07.812 INFO  DAGScheduler - Got job 21 (collect at ReadsSparkSourceUnitTest.java:112) with 1 output partitions
22:32:07.812 INFO  DAGScheduler - Final stage: ResultStage 30 (collect at ReadsSparkSourceUnitTest.java:112)
22:32:07.812 INFO  DAGScheduler - Parents of final stage: List()
22:32:07.813 INFO  DAGScheduler - Missing parents: List()
22:32:07.813 INFO  DAGScheduler - Submitting ResultStage 30 (MapPartitionsRDD[76] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:07.851 INFO  MemoryStore - Block broadcast_37 stored as values in memory (estimated size 470.5 KiB, free 1918.8 MiB)
22:32:07.853 INFO  MemoryStore - Block broadcast_37_piece0 stored as bytes in memory (estimated size 171.0 KiB, free 1918.6 MiB)
22:32:07.853 INFO  BlockManagerInfo - Added broadcast_37_piece0 in memory on localhost:35275 (size: 171.0 KiB, free: 1919.7 MiB)
22:32:07.854 INFO  SparkContext - Created broadcast 37 from broadcast at DAGScheduler.scala:1580
22:32:07.854 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 30 (MapPartitionsRDD[76] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:07.854 INFO  TaskSchedulerImpl - Adding task set 30.0 with 1 tasks resource profile 0
22:32:07.861 INFO  TaskSetManager - Starting task 0.0 in stage 30.0 (TID 71) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7838 bytes) 
22:32:07.861 INFO  Executor - Running task 0.0 in stage 30.0 (TID 71)
22:32:07.935 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.bam:0+57538
22:32:07.952 INFO  Executor - Finished task 0.0 in stage 30.0 (TID 71). 159615 bytes result sent to driver
22:32:07.954 INFO  TaskSetManager - Finished task 0.0 in stage 30.0 (TID 71) in 99 ms on localhost (executor driver) (1/1)
22:32:07.954 INFO  TaskSchedulerImpl - Removed TaskSet 30.0, whose tasks have all completed, from pool 
22:32:07.955 INFO  DAGScheduler - ResultStage 30 (collect at ReadsSparkSourceUnitTest.java:112) finished in 0.140 s
22:32:07.955 INFO  DAGScheduler - Job 21 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:07.955 INFO  TaskSchedulerImpl - Killing all running tasks in stage 30: Stage finished
22:32:07.955 INFO  DAGScheduler - Job 21 finished: collect at ReadsSparkSourceUnitTest.java:112, took 0.143400 s
22:32:07.965 INFO  MemoryStore - Block broadcast_38 stored as values in memory (estimated size 325.3 KiB, free 1918.3 MiB)
22:32:07.982 INFO  MemoryStore - Block broadcast_38_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.3 MiB)
22:32:07.983 INFO  BlockManagerInfo - Added broadcast_38_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:07.983 INFO  SparkContext - Created broadcast 38 from newAPIHadoopFile at PathSplitSource.java:96
22:32:08.033 INFO  MemoryStore - Block broadcast_39 stored as values in memory (estimated size 325.3 KiB, free 1917.9 MiB)
22:32:08.043 INFO  MemoryStore - Block broadcast_39_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1917.9 MiB)
22:32:08.043 INFO  BlockManagerInfo - Added broadcast_39_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.6 MiB)
22:32:08.043 INFO  SparkContext - Created broadcast 39 from newAPIHadoopFile at PathSplitSource.java:96
22:32:08.080 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:111
22:32:08.081 INFO  DAGScheduler - Got job 22 (collect at ReadsSparkSourceUnitTest.java:111) with 4 output partitions
22:32:08.081 INFO  DAGScheduler - Final stage: ResultStage 31 (collect at ReadsSparkSourceUnitTest.java:111)
22:32:08.081 INFO  DAGScheduler - Parents of final stage: List()
22:32:08.081 INFO  DAGScheduler - Missing parents: List()
22:32:08.081 INFO  DAGScheduler - Submitting ResultStage 31 (ParallelCollectionRDD[82] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:08.082 INFO  MemoryStore - Block broadcast_40 stored as values in memory (estimated size 3.0 KiB, free 1917.9 MiB)
22:32:08.083 INFO  MemoryStore - Block broadcast_40_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1917.9 MiB)
22:32:08.083 INFO  BlockManagerInfo - Added broadcast_40_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.6 MiB)
22:32:08.083 INFO  SparkContext - Created broadcast 40 from broadcast at DAGScheduler.scala:1580
22:32:08.084 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 31 (ParallelCollectionRDD[82] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:08.084 INFO  TaskSchedulerImpl - Adding task set 31.0 with 4 tasks resource profile 0
22:32:08.086 INFO  TaskSetManager - Starting task 0.0 in stage 31.0 (TID 72) (localhost, executor driver, partition 0, PROCESS_LOCAL, 169886 bytes) 
22:32:08.088 INFO  TaskSetManager - Starting task 1.0 in stage 31.0 (TID 73) (localhost, executor driver, partition 1, PROCESS_LOCAL, 170093 bytes) 
22:32:08.090 INFO  TaskSetManager - Starting task 2.0 in stage 31.0 (TID 74) (localhost, executor driver, partition 2, PROCESS_LOCAL, 170081 bytes) 
22:32:08.091 INFO  TaskSetManager - Starting task 3.0 in stage 31.0 (TID 75) (localhost, executor driver, partition 3, PROCESS_LOCAL, 169419 bytes) 
22:32:08.092 INFO  Executor - Running task 1.0 in stage 31.0 (TID 73)
22:32:08.092 INFO  Executor - Running task 0.0 in stage 31.0 (TID 72)
22:32:08.092 INFO  Executor - Running task 3.0 in stage 31.0 (TID 75)
22:32:08.092 INFO  Executor - Running task 2.0 in stage 31.0 (TID 74)
22:32:08.098 INFO  Executor - Finished task 2.0 in stage 31.0 (TID 74). 163399 bytes result sent to driver
22:32:08.099 INFO  Executor - Finished task 3.0 in stage 31.0 (TID 75). 162737 bytes result sent to driver
22:32:08.101 INFO  TaskSetManager - Finished task 3.0 in stage 31.0 (TID 75) in 11 ms on localhost (executor driver) (1/4)
22:32:08.102 INFO  Executor - Finished task 1.0 in stage 31.0 (TID 73). 163454 bytes result sent to driver
22:32:08.103 INFO  TaskSetManager - Finished task 2.0 in stage 31.0 (TID 74) in 15 ms on localhost (executor driver) (2/4)
22:32:08.105 INFO  TaskSetManager - Finished task 1.0 in stage 31.0 (TID 73) in 19 ms on localhost (executor driver) (3/4)
22:32:08.108 INFO  Executor - Finished task 0.0 in stage 31.0 (TID 72). 163204 bytes result sent to driver
22:32:08.109 INFO  TaskSetManager - Finished task 0.0 in stage 31.0 (TID 72) in 25 ms on localhost (executor driver) (4/4)
22:32:08.109 INFO  TaskSchedulerImpl - Removed TaskSet 31.0, whose tasks have all completed, from pool 
22:32:08.109 INFO  DAGScheduler - ResultStage 31 (collect at ReadsSparkSourceUnitTest.java:111) finished in 0.027 s
22:32:08.109 INFO  DAGScheduler - Job 22 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:08.109 INFO  TaskSchedulerImpl - Killing all running tasks in stage 31: Stage finished
22:32:08.109 INFO  DAGScheduler - Job 22 finished: collect at ReadsSparkSourceUnitTest.java:111, took 0.028678 s
22:32:08.112 INFO  FileInputFormat - Total input files to process : 1
22:32:08.168 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:112
22:32:08.169 INFO  DAGScheduler - Got job 23 (collect at ReadsSparkSourceUnitTest.java:112) with 1 output partitions
22:32:08.169 INFO  DAGScheduler - Final stage: ResultStage 32 (collect at ReadsSparkSourceUnitTest.java:112)
22:32:08.169 INFO  DAGScheduler - Parents of final stage: List()
22:32:08.169 INFO  DAGScheduler - Missing parents: List()
22:32:08.169 INFO  DAGScheduler - Submitting ResultStage 32 (MapPartitionsRDD[89] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:08.199 INFO  MemoryStore - Block broadcast_41 stored as values in memory (estimated size 470.5 KiB, free 1917.4 MiB)
22:32:08.200 INFO  MemoryStore - Block broadcast_41_piece0 stored as bytes in memory (estimated size 171.0 KiB, free 1917.3 MiB)
22:32:08.201 INFO  BlockManagerInfo - Added broadcast_41_piece0 in memory on localhost:35275 (size: 171.0 KiB, free: 1919.4 MiB)
22:32:08.201 INFO  SparkContext - Created broadcast 41 from broadcast at DAGScheduler.scala:1580
22:32:08.201 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 32 (MapPartitionsRDD[89] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:08.201 INFO  TaskSchedulerImpl - Adding task set 32.0 with 1 tasks resource profile 0
22:32:08.202 INFO  TaskSetManager - Starting task 0.0 in stage 32.0 (TID 76) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7883 bytes) 
22:32:08.203 INFO  Executor - Running task 0.0 in stage 32.0 (TID 76)
22:32:08.253 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.alternate.bam:0+221635
22:32:08.291 INFO  Executor - Finished task 0.0 in stage 32.0 (TID 76). 650184 bytes result sent to driver
22:32:08.296 INFO  TaskSetManager - Finished task 0.0 in stage 32.0 (TID 76) in 94 ms on localhost (executor driver) (1/1)
22:32:08.296 INFO  TaskSchedulerImpl - Removed TaskSet 32.0, whose tasks have all completed, from pool 
22:32:08.296 INFO  DAGScheduler - ResultStage 32 (collect at ReadsSparkSourceUnitTest.java:112) finished in 0.126 s
22:32:08.296 INFO  DAGScheduler - Job 23 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:08.297 INFO  TaskSchedulerImpl - Killing all running tasks in stage 32: Stage finished
22:32:08.297 INFO  DAGScheduler - Job 23 finished: collect at ReadsSparkSourceUnitTest.java:112, took 0.128892 s
22:32:08.306 INFO  MemoryStore - Block broadcast_42 stored as values in memory (estimated size 325.3 KiB, free 1916.9 MiB)
22:32:08.317 INFO  MemoryStore - Block broadcast_42_piece0 stored as bytes in memory (estimated size 55.7 KiB, free 1916.9 MiB)
22:32:08.318 INFO  BlockManagerInfo - Added broadcast_42_piece0 in memory on localhost:35275 (size: 55.7 KiB, free: 1919.4 MiB)
22:32:08.318 INFO  SparkContext - Created broadcast 42 from newAPIHadoopFile at PathSplitSource.java:96
22:32:08.371 INFO  BlockManagerInfo - Removed broadcast_36_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.4 MiB)
22:32:08.372 INFO  BlockManagerInfo - Removed broadcast_42_piece0 on localhost:35275 in memory (size: 55.7 KiB, free: 1919.4 MiB)
22:32:08.374 INFO  BlockManagerInfo - Removed broadcast_39_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.5 MiB)
22:32:08.376 INFO  BlockManagerInfo - Removed broadcast_40_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.5 MiB)
22:32:08.379 INFO  BlockManagerInfo - Removed broadcast_38_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.6 MiB)
22:32:08.381 INFO  BlockManagerInfo - Removed broadcast_35_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.6 MiB)
22:32:08.381 INFO  MemoryStore - Block broadcast_43 stored as values in memory (estimated size 325.3 KiB, free 1918.1 MiB)
22:32:08.382 INFO  BlockManagerInfo - Removed broadcast_37_piece0 on localhost:35275 in memory (size: 171.0 KiB, free: 1919.8 MiB)
22:32:08.384 INFO  BlockManagerInfo - Removed broadcast_34_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.8 MiB)
22:32:08.385 INFO  BlockManagerInfo - Removed broadcast_41_piece0 on localhost:35275 in memory (size: 171.0 KiB, free: 1920.0 MiB)
22:32:08.393 INFO  MemoryStore - Block broadcast_43_piece0 stored as bytes in memory (estimated size 55.7 KiB, free 1919.6 MiB)
22:32:08.393 INFO  BlockManagerInfo - Added broadcast_43_piece0 in memory on localhost:35275 (size: 55.7 KiB, free: 1919.9 MiB)
22:32:08.394 INFO  SparkContext - Created broadcast 43 from newAPIHadoopFile at PathSplitSource.java:96
22:32:08.434 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:111
22:32:08.435 INFO  DAGScheduler - Got job 24 (collect at ReadsSparkSourceUnitTest.java:111) with 4 output partitions
22:32:08.435 INFO  DAGScheduler - Final stage: ResultStage 33 (collect at ReadsSparkSourceUnitTest.java:111)
22:32:08.435 INFO  DAGScheduler - Parents of final stage: List()
22:32:08.435 INFO  DAGScheduler - Missing parents: List()
22:32:08.435 INFO  DAGScheduler - Submitting ResultStage 33 (ParallelCollectionRDD[95] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:08.437 INFO  MemoryStore - Block broadcast_44 stored as values in memory (estimated size 3.0 KiB, free 1919.6 MiB)
22:32:08.438 INFO  MemoryStore - Block broadcast_44_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1919.6 MiB)
22:32:08.438 INFO  BlockManagerInfo - Added broadcast_44_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.9 MiB)
22:32:08.438 INFO  SparkContext - Created broadcast 44 from broadcast at DAGScheduler.scala:1580
22:32:08.439 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 33 (ParallelCollectionRDD[95] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:08.439 INFO  TaskSchedulerImpl - Adding task set 33.0 with 4 tasks resource profile 0
22:32:08.440 INFO  TaskSetManager - Starting task 0.0 in stage 33.0 (TID 77) (localhost, executor driver, partition 0, PROCESS_LOCAL, 169886 bytes) 
22:32:08.441 INFO  TaskSetManager - Starting task 1.0 in stage 33.0 (TID 78) (localhost, executor driver, partition 1, PROCESS_LOCAL, 170093 bytes) 
22:32:08.442 INFO  TaskSetManager - Starting task 2.0 in stage 33.0 (TID 79) (localhost, executor driver, partition 2, PROCESS_LOCAL, 170081 bytes) 
22:32:08.443 INFO  TaskSetManager - Starting task 3.0 in stage 33.0 (TID 80) (localhost, executor driver, partition 3, PROCESS_LOCAL, 169419 bytes) 
22:32:08.443 INFO  Executor - Running task 0.0 in stage 33.0 (TID 77)
22:32:08.443 INFO  Executor - Running task 2.0 in stage 33.0 (TID 79)
22:32:08.443 INFO  Executor - Running task 1.0 in stage 33.0 (TID 78)
22:32:08.443 INFO  Executor - Running task 3.0 in stage 33.0 (TID 80)
22:32:08.450 INFO  Executor - Finished task 2.0 in stage 33.0 (TID 79). 163399 bytes result sent to driver
22:32:08.450 INFO  Executor - Finished task 3.0 in stage 33.0 (TID 80). 162737 bytes result sent to driver
22:32:08.450 INFO  Executor - Finished task 1.0 in stage 33.0 (TID 78). 163411 bytes result sent to driver
22:32:08.451 INFO  TaskSetManager - Finished task 2.0 in stage 33.0 (TID 79) in 10 ms on localhost (executor driver) (1/4)
22:32:08.453 INFO  Executor - Finished task 0.0 in stage 33.0 (TID 77). 163204 bytes result sent to driver
22:32:08.453 INFO  TaskSetManager - Finished task 1.0 in stage 33.0 (TID 78) in 13 ms on localhost (executor driver) (2/4)
22:32:08.454 INFO  TaskSetManager - Finished task 3.0 in stage 33.0 (TID 80) in 12 ms on localhost (executor driver) (3/4)
22:32:08.455 INFO  TaskSetManager - Finished task 0.0 in stage 33.0 (TID 77) in 16 ms on localhost (executor driver) (4/4)
22:32:08.455 INFO  TaskSchedulerImpl - Removed TaskSet 33.0, whose tasks have all completed, from pool 
22:32:08.455 INFO  DAGScheduler - ResultStage 33 (collect at ReadsSparkSourceUnitTest.java:111) finished in 0.019 s
22:32:08.455 INFO  DAGScheduler - Job 24 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:08.455 INFO  TaskSchedulerImpl - Killing all running tasks in stage 33: Stage finished
22:32:08.455 INFO  DAGScheduler - Job 24 finished: collect at ReadsSparkSourceUnitTest.java:111, took 0.020924 s
22:32:08.458 INFO  FileInputFormat - Total input files to process : 1
22:32:08.513 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:112
22:32:08.514 INFO  DAGScheduler - Got job 25 (collect at ReadsSparkSourceUnitTest.java:112) with 1 output partitions
22:32:08.514 INFO  DAGScheduler - Final stage: ResultStage 34 (collect at ReadsSparkSourceUnitTest.java:112)
22:32:08.514 INFO  DAGScheduler - Parents of final stage: List()
22:32:08.514 INFO  DAGScheduler - Missing parents: List()
22:32:08.514 INFO  DAGScheduler - Submitting ResultStage 34 (MapPartitionsRDD[102] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:08.536 INFO  MemoryStore - Block broadcast_45 stored as values in memory (estimated size 470.5 KiB, free 1919.2 MiB)
22:32:08.538 INFO  MemoryStore - Block broadcast_45_piece0 stored as bytes in memory (estimated size 171.0 KiB, free 1919.0 MiB)
22:32:08.538 INFO  BlockManagerInfo - Added broadcast_45_piece0 in memory on localhost:35275 (size: 171.0 KiB, free: 1919.8 MiB)
22:32:08.539 INFO  SparkContext - Created broadcast 45 from broadcast at DAGScheduler.scala:1580
22:32:08.539 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 34 (MapPartitionsRDD[102] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:08.539 INFO  TaskSchedulerImpl - Adding task set 34.0 with 1 tasks resource profile 0
22:32:08.540 INFO  TaskSetManager - Starting task 0.0 in stage 34.0 (TID 81) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7912 bytes) 
22:32:08.540 INFO  Executor - Running task 0.0 in stage 34.0 (TID 81)
22:32:08.594 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam:0+216896
22:32:08.614 INFO  Executor - Finished task 0.0 in stage 34.0 (TID 81). 650184 bytes result sent to driver
22:32:08.617 INFO  TaskSetManager - Finished task 0.0 in stage 34.0 (TID 81) in 77 ms on localhost (executor driver) (1/1)
22:32:08.617 INFO  TaskSchedulerImpl - Removed TaskSet 34.0, whose tasks have all completed, from pool 
22:32:08.617 INFO  DAGScheduler - ResultStage 34 (collect at ReadsSparkSourceUnitTest.java:112) finished in 0.102 s
22:32:08.617 INFO  DAGScheduler - Job 25 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:08.618 INFO  TaskSchedulerImpl - Killing all running tasks in stage 34: Stage finished
22:32:08.618 INFO  DAGScheduler - Job 25 finished: collect at ReadsSparkSourceUnitTest.java:112, took 0.104110 s
22:32:08.627 INFO  MemoryStore - Block broadcast_46 stored as values in memory (estimated size 536.0 B, free 1919.0 MiB)
22:32:08.628 INFO  MemoryStore - Block broadcast_46_piece0 stored as bytes in memory (estimated size 187.0 B, free 1919.0 MiB)
22:32:08.628 INFO  BlockManagerInfo - Added broadcast_46_piece0 in memory on localhost:35275 (size: 187.0 B, free: 1919.8 MiB)
22:32:08.628 INFO  SparkContext - Created broadcast 46 from broadcast at CramSource.java:114
22:32:08.632 INFO  MemoryStore - Block broadcast_47 stored as values in memory (estimated size 325.2 KiB, free 1918.7 MiB)
22:32:08.644 INFO  MemoryStore - Block broadcast_47_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.6 MiB)
22:32:08.645 INFO  BlockManagerInfo - Added broadcast_47_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:08.645 INFO  SparkContext - Created broadcast 47 from newAPIHadoopFile at PathSplitSource.java:96
22:32:08.674 INFO  MemoryStore - Block broadcast_48 stored as values in memory (estimated size 536.0 B, free 1918.6 MiB)
22:32:08.675 INFO  MemoryStore - Block broadcast_48_piece0 stored as bytes in memory (estimated size 187.0 B, free 1918.6 MiB)
22:32:08.676 INFO  BlockManagerInfo - Added broadcast_48_piece0 in memory on localhost:35275 (size: 187.0 B, free: 1919.7 MiB)
22:32:08.676 INFO  SparkContext - Created broadcast 48 from broadcast at CramSource.java:114
22:32:08.679 INFO  MemoryStore - Block broadcast_49 stored as values in memory (estimated size 325.2 KiB, free 1918.3 MiB)
22:32:08.686 INFO  MemoryStore - Block broadcast_49_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.3 MiB)
22:32:08.686 INFO  BlockManagerInfo - Added broadcast_49_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:08.686 INFO  SparkContext - Created broadcast 49 from newAPIHadoopFile at PathSplitSource.java:96
22:32:08.706 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:111
22:32:08.707 INFO  DAGScheduler - Got job 26 (collect at ReadsSparkSourceUnitTest.java:111) with 4 output partitions
22:32:08.707 INFO  DAGScheduler - Final stage: ResultStage 35 (collect at ReadsSparkSourceUnitTest.java:111)
22:32:08.707 INFO  DAGScheduler - Parents of final stage: List()
22:32:08.707 INFO  DAGScheduler - Missing parents: List()
22:32:08.707 INFO  DAGScheduler - Submitting ResultStage 35 (ParallelCollectionRDD[107] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:08.708 INFO  MemoryStore - Block broadcast_50 stored as values in memory (estimated size 3.0 KiB, free 1918.2 MiB)
22:32:08.709 INFO  MemoryStore - Block broadcast_50_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1918.2 MiB)
22:32:08.709 INFO  BlockManagerInfo - Added broadcast_50_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.7 MiB)
22:32:08.709 INFO  SparkContext - Created broadcast 50 from broadcast at DAGScheduler.scala:1580
22:32:08.710 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 35 (ParallelCollectionRDD[107] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:08.710 INFO  TaskSchedulerImpl - Adding task set 35.0 with 4 tasks resource profile 0
22:32:08.713 INFO  TaskSetManager - Starting task 0.0 in stage 35.0 (TID 82) (localhost, executor driver, partition 0, PROCESS_LOCAL, 45675 bytes) 
22:32:08.715 INFO  TaskSetManager - Starting task 1.0 in stage 35.0 (TID 83) (localhost, executor driver, partition 1, PROCESS_LOCAL, 45855 bytes) 
22:32:08.716 INFO  TaskSetManager - Starting task 2.0 in stage 35.0 (TID 84) (localhost, executor driver, partition 2, PROCESS_LOCAL, 45795 bytes) 
22:32:08.717 INFO  TaskSetManager - Starting task 3.0 in stage 35.0 (TID 85) (localhost, executor driver, partition 3, PROCESS_LOCAL, 46068 bytes) 
22:32:08.718 INFO  Executor - Running task 2.0 in stage 35.0 (TID 84)
22:32:08.718 INFO  Executor - Running task 1.0 in stage 35.0 (TID 83)
22:32:08.718 INFO  Executor - Running task 3.0 in stage 35.0 (TID 85)
22:32:08.718 INFO  Executor - Running task 0.0 in stage 35.0 (TID 82)
22:32:08.721 INFO  Executor - Finished task 1.0 in stage 35.0 (TID 83). 39130 bytes result sent to driver
22:32:08.721 INFO  Executor - Finished task 3.0 in stage 35.0 (TID 85). 39343 bytes result sent to driver
22:32:08.721 INFO  Executor - Finished task 2.0 in stage 35.0 (TID 84). 39070 bytes result sent to driver
22:32:08.722 INFO  TaskSetManager - Finished task 1.0 in stage 35.0 (TID 83) in 9 ms on localhost (executor driver) (1/4)
22:32:08.722 INFO  Executor - Finished task 0.0 in stage 35.0 (TID 82). 38950 bytes result sent to driver
22:32:08.723 INFO  TaskSetManager - Finished task 3.0 in stage 35.0 (TID 85) in 7 ms on localhost (executor driver) (2/4)
22:32:08.724 INFO  TaskSetManager - Finished task 2.0 in stage 35.0 (TID 84) in 9 ms on localhost (executor driver) (3/4)
22:32:08.724 INFO  TaskSetManager - Finished task 0.0 in stage 35.0 (TID 82) in 14 ms on localhost (executor driver) (4/4)
22:32:08.724 INFO  DAGScheduler - ResultStage 35 (collect at ReadsSparkSourceUnitTest.java:111) finished in 0.016 s
22:32:08.724 INFO  DAGScheduler - Job 26 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:08.725 INFO  TaskSchedulerImpl - Removed TaskSet 35.0, whose tasks have all completed, from pool 
22:32:08.725 INFO  TaskSchedulerImpl - Killing all running tasks in stage 35: Stage finished
22:32:08.725 INFO  DAGScheduler - Job 26 finished: collect at ReadsSparkSourceUnitTest.java:111, took 0.018497 s
22:32:08.727 INFO  FileInputFormat - Total input files to process : 1
22:32:08.756 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:112
22:32:08.756 INFO  DAGScheduler - Got job 27 (collect at ReadsSparkSourceUnitTest.java:112) with 1 output partitions
22:32:08.757 INFO  DAGScheduler - Final stage: ResultStage 36 (collect at ReadsSparkSourceUnitTest.java:112)
22:32:08.757 INFO  DAGScheduler - Parents of final stage: List()
22:32:08.757 INFO  DAGScheduler - Missing parents: List()
22:32:08.757 INFO  DAGScheduler - Submitting ResultStage 36 (MapPartitionsRDD[113] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:08.771 INFO  MemoryStore - Block broadcast_51 stored as values in memory (estimated size 316.4 KiB, free 1917.9 MiB)
22:32:08.773 INFO  MemoryStore - Block broadcast_51_piece0 stored as bytes in memory (estimated size 115.5 KiB, free 1917.8 MiB)
22:32:08.773 INFO  BlockManagerInfo - Added broadcast_51_piece0 in memory on localhost:35275 (size: 115.5 KiB, free: 1919.6 MiB)
22:32:08.774 INFO  SparkContext - Created broadcast 51 from broadcast at DAGScheduler.scala:1580
22:32:08.774 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 36 (MapPartitionsRDD[113] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:08.774 INFO  TaskSchedulerImpl - Adding task set 36.0 with 1 tasks resource profile 0
22:32:08.775 INFO  TaskSetManager - Starting task 0.0 in stage 36.0 (TID 86) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7839 bytes) 
22:32:08.775 INFO  Executor - Running task 0.0 in stage 36.0 (TID 86)
22:32:08.810 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.cram:0+50619
22:32:08.831 INFO  Executor - Finished task 0.0 in stage 36.0 (TID 86). 154101 bytes result sent to driver
22:32:08.833 INFO  TaskSetManager - Finished task 0.0 in stage 36.0 (TID 86) in 58 ms on localhost (executor driver) (1/1)
22:32:08.833 INFO  TaskSchedulerImpl - Removed TaskSet 36.0, whose tasks have all completed, from pool 
22:32:08.833 INFO  DAGScheduler - ResultStage 36 (collect at ReadsSparkSourceUnitTest.java:112) finished in 0.076 s
22:32:08.833 INFO  DAGScheduler - Job 27 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:08.833 INFO  TaskSchedulerImpl - Killing all running tasks in stage 36: Stage finished
22:32:08.833 INFO  DAGScheduler - Job 27 finished: collect at ReadsSparkSourceUnitTest.java:112, took 0.077387 s
22:32:08.841 INFO  MemoryStore - Block broadcast_52 stored as values in memory (estimated size 608.0 B, free 1917.8 MiB)
22:32:08.842 INFO  MemoryStore - Block broadcast_52_piece0 stored as bytes in memory (estimated size 206.0 B, free 1917.8 MiB)
22:32:08.842 INFO  BlockManagerInfo - Added broadcast_52_piece0 in memory on localhost:35275 (size: 206.0 B, free: 1919.6 MiB)
22:32:08.843 INFO  SparkContext - Created broadcast 52 from broadcast at CramSource.java:114
22:32:08.846 INFO  MemoryStore - Block broadcast_53 stored as values in memory (estimated size 325.3 KiB, free 1917.5 MiB)
22:32:08.854 INFO  MemoryStore - Block broadcast_53_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1917.5 MiB)
22:32:08.854 INFO  BlockManagerInfo - Added broadcast_53_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.5 MiB)
22:32:08.854 INFO  SparkContext - Created broadcast 53 from newAPIHadoopFile at PathSplitSource.java:96
22:32:08.892 INFO  BlockManagerInfo - Removed broadcast_44_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.5 MiB)
22:32:08.895 INFO  BlockManagerInfo - Removed broadcast_45_piece0 on localhost:35275 in memory (size: 171.0 KiB, free: 1919.7 MiB)
22:32:08.901 INFO  BlockManagerInfo - Removed broadcast_47_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.7 MiB)
22:32:08.902 INFO  BlockManagerInfo - Removed broadcast_43_piece0 on localhost:35275 in memory (size: 55.7 KiB, free: 1919.8 MiB)
22:32:08.903 INFO  BlockManagerInfo - Removed broadcast_51_piece0 on localhost:35275 in memory (size: 115.5 KiB, free: 1919.9 MiB)
22:32:08.907 INFO  BlockManagerInfo - Removed broadcast_48_piece0 on localhost:35275 in memory (size: 187.0 B, free: 1919.9 MiB)
22:32:08.908 INFO  BlockManagerInfo - Removed broadcast_50_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.9 MiB)
22:32:08.909 INFO  BlockManagerInfo - Removed broadcast_49_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.9 MiB)
22:32:08.909 INFO  MemoryStore - Block broadcast_54 stored as values in memory (estimated size 608.0 B, free 1919.6 MiB)
22:32:08.911 INFO  MemoryStore - Block broadcast_54_piece0 stored as bytes in memory (estimated size 206.0 B, free 1919.6 MiB)
22:32:08.911 INFO  BlockManagerInfo - Added broadcast_54_piece0 in memory on localhost:35275 (size: 206.0 B, free: 1919.9 MiB)
22:32:08.911 INFO  SparkContext - Created broadcast 54 from broadcast at CramSource.java:114
22:32:08.912 INFO  BlockManagerInfo - Removed broadcast_46_piece0 on localhost:35275 in memory (size: 187.0 B, free: 1919.9 MiB)
22:32:08.916 INFO  MemoryStore - Block broadcast_55 stored as values in memory (estimated size 325.3 KiB, free 1919.3 MiB)
22:32:08.927 INFO  MemoryStore - Block broadcast_55_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1919.3 MiB)
22:32:08.928 INFO  BlockManagerInfo - Added broadcast_55_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.9 MiB)
22:32:08.928 INFO  SparkContext - Created broadcast 55 from newAPIHadoopFile at PathSplitSource.java:96
22:32:08.971 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:111
22:32:08.972 INFO  DAGScheduler - Got job 28 (collect at ReadsSparkSourceUnitTest.java:111) with 4 output partitions
22:32:08.972 INFO  DAGScheduler - Final stage: ResultStage 37 (collect at ReadsSparkSourceUnitTest.java:111)
22:32:08.972 INFO  DAGScheduler - Parents of final stage: List()
22:32:08.972 INFO  DAGScheduler - Missing parents: List()
22:32:08.972 INFO  DAGScheduler - Submitting ResultStage 37 (ParallelCollectionRDD[118] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:08.973 INFO  MemoryStore - Block broadcast_56 stored as values in memory (estimated size 3.0 KiB, free 1919.3 MiB)
22:32:08.974 INFO  MemoryStore - Block broadcast_56_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1919.3 MiB)
22:32:08.974 INFO  BlockManagerInfo - Added broadcast_56_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.9 MiB)
22:32:08.974 INFO  SparkContext - Created broadcast 56 from broadcast at DAGScheduler.scala:1580
22:32:08.975 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 37 (ParallelCollectionRDD[118] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:08.975 INFO  TaskSchedulerImpl - Adding task set 37.0 with 4 tasks resource profile 0
22:32:08.976 INFO  TaskSetManager - Starting task 0.0 in stage 37.0 (TID 87) (localhost, executor driver, partition 0, PROCESS_LOCAL, 8303 bytes) 
22:32:08.976 INFO  TaskSetManager - Starting task 1.0 in stage 37.0 (TID 88) (localhost, executor driver, partition 1, PROCESS_LOCAL, 8292 bytes) 
22:32:08.977 INFO  TaskSetManager - Starting task 2.0 in stage 37.0 (TID 89) (localhost, executor driver, partition 2, PROCESS_LOCAL, 8292 bytes) 
22:32:08.977 INFO  TaskSetManager - Starting task 3.0 in stage 37.0 (TID 90) (localhost, executor driver, partition 3, PROCESS_LOCAL, 8184 bytes) 
22:32:08.977 INFO  Executor - Running task 0.0 in stage 37.0 (TID 87)
22:32:08.977 INFO  Executor - Running task 1.0 in stage 37.0 (TID 88)
22:32:08.978 INFO  Executor - Running task 2.0 in stage 37.0 (TID 89)
22:32:08.979 INFO  Executor - Running task 3.0 in stage 37.0 (TID 90)
22:32:08.981 INFO  Executor - Finished task 3.0 in stage 37.0 (TID 90). 1416 bytes result sent to driver
22:32:08.981 INFO  Executor - Finished task 1.0 in stage 37.0 (TID 88). 1567 bytes result sent to driver
22:32:08.981 INFO  TaskSetManager - Finished task 3.0 in stage 37.0 (TID 90) in 4 ms on localhost (executor driver) (1/4)
22:32:08.982 INFO  Executor - Finished task 0.0 in stage 37.0 (TID 87). 1578 bytes result sent to driver
22:32:08.983 INFO  TaskSetManager - Finished task 1.0 in stage 37.0 (TID 88) in 7 ms on localhost (executor driver) (2/4)
22:32:08.983 INFO  Executor - Finished task 2.0 in stage 37.0 (TID 89). 1567 bytes result sent to driver
22:32:08.983 INFO  TaskSetManager - Finished task 0.0 in stage 37.0 (TID 87) in 7 ms on localhost (executor driver) (3/4)
22:32:08.983 INFO  TaskSetManager - Finished task 2.0 in stage 37.0 (TID 89) in 6 ms on localhost (executor driver) (4/4)
22:32:08.983 INFO  TaskSchedulerImpl - Removed TaskSet 37.0, whose tasks have all completed, from pool 
22:32:08.983 INFO  DAGScheduler - ResultStage 37 (collect at ReadsSparkSourceUnitTest.java:111) finished in 0.010 s
22:32:08.984 INFO  DAGScheduler - Job 28 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:08.984 INFO  TaskSchedulerImpl - Killing all running tasks in stage 37: Stage finished
22:32:08.984 INFO  DAGScheduler - Job 28 finished: collect at ReadsSparkSourceUnitTest.java:111, took 0.012670 s
22:32:08.985 INFO  FileInputFormat - Total input files to process : 1
22:32:09.015 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:112
22:32:09.015 INFO  DAGScheduler - Got job 29 (collect at ReadsSparkSourceUnitTest.java:112) with 1 output partitions
22:32:09.016 INFO  DAGScheduler - Final stage: ResultStage 38 (collect at ReadsSparkSourceUnitTest.java:112)
22:32:09.016 INFO  DAGScheduler - Parents of final stage: List()
22:32:09.016 INFO  DAGScheduler - Missing parents: List()
22:32:09.016 INFO  DAGScheduler - Submitting ResultStage 38 (MapPartitionsRDD[124] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:09.030 INFO  MemoryStore - Block broadcast_57 stored as values in memory (estimated size 316.4 KiB, free 1918.9 MiB)
22:32:09.031 INFO  MemoryStore - Block broadcast_57_piece0 stored as bytes in memory (estimated size 115.4 KiB, free 1918.8 MiB)
22:32:09.032 INFO  BlockManagerInfo - Added broadcast_57_piece0 in memory on localhost:35275 (size: 115.4 KiB, free: 1919.8 MiB)
22:32:09.032 INFO  SparkContext - Created broadcast 57 from broadcast at DAGScheduler.scala:1580
22:32:09.032 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 38 (MapPartitionsRDD[124] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:09.032 INFO  TaskSchedulerImpl - Adding task set 38.0 with 1 tasks resource profile 0
22:32:09.033 INFO  TaskSetManager - Starting task 0.0 in stage 38.0 (TID 91) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7851 bytes) 
22:32:09.034 INFO  Executor - Running task 0.0 in stage 38.0 (TID 91)
22:32:09.067 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/valid.cram:0+13330
22:32:09.072 INFO  Executor - Finished task 0.0 in stage 38.0 (TID 91). 3736 bytes result sent to driver
22:32:09.073 INFO  TaskSetManager - Finished task 0.0 in stage 38.0 (TID 91) in 40 ms on localhost (executor driver) (1/1)
22:32:09.073 INFO  TaskSchedulerImpl - Removed TaskSet 38.0, whose tasks have all completed, from pool 
22:32:09.073 INFO  DAGScheduler - ResultStage 38 (collect at ReadsSparkSourceUnitTest.java:112) finished in 0.057 s
22:32:09.074 INFO  DAGScheduler - Job 29 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:09.074 INFO  TaskSchedulerImpl - Killing all running tasks in stage 38: Stage finished
22:32:09.074 INFO  DAGScheduler - Job 29 finished: collect at ReadsSparkSourceUnitTest.java:112, took 0.058852 s
22:32:09.079 INFO  MemoryStore - Block broadcast_58 stored as values in memory (estimated size 325.2 KiB, free 1918.5 MiB)
22:32:09.086 INFO  MemoryStore - Block broadcast_58_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.5 MiB)
22:32:09.087 INFO  BlockManagerInfo - Added broadcast_58_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:09.087 INFO  SparkContext - Created broadcast 58 from newAPIHadoopFile at PathSplitSource.java:96
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 130, Read name 809R9ABXX101220:5:6:17918:145992, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 130, Read name 809R9ABXX101220:5:6:17918:145992, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 130, Read name 809R9ABXX101220:5:6:17918:145992, The unaligned mate start position is 69400, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 275, Read name 809R9ABXX101220:5:8:7119:101555, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 275, Read name 809R9ABXX101220:5:8:7119:101555, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 275, Read name 809R9ABXX101220:5:8:7119:101555, The unaligned mate start position is 69471, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 419, Read name 809R9ABXX101220:5:48:20204:50350, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 419, Read name 809R9ABXX101220:5:48:20204:50350, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 419, Read name 809R9ABXX101220:5:48:20204:50350, The unaligned mate start position is 69569, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 440, Read name 809R9ABXX101220:5:44:13776:49360, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 440, Read name 809R9ABXX101220:5:44:13776:49360, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 440, Read name 809R9ABXX101220:5:44:13776:49360, The unaligned mate start position is 69592, should be 0
22:32:09.120 INFO  MemoryStore - Block broadcast_59 stored as values in memory (estimated size 325.2 KiB, free 1918.1 MiB)
22:32:09.132 INFO  MemoryStore - Block broadcast_59_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.1 MiB)
22:32:09.133 INFO  BlockManagerInfo - Added broadcast_59_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:09.133 INFO  SparkContext - Created broadcast 59 from newAPIHadoopFile at PathSplitSource.java:96
22:32:09.163 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:111
22:32:09.163 INFO  DAGScheduler - Got job 30 (collect at ReadsSparkSourceUnitTest.java:111) with 4 output partitions
22:32:09.163 INFO  DAGScheduler - Final stage: ResultStage 39 (collect at ReadsSparkSourceUnitTest.java:111)
22:32:09.163 INFO  DAGScheduler - Parents of final stage: List()
22:32:09.163 INFO  DAGScheduler - Missing parents: List()
22:32:09.163 INFO  DAGScheduler - Submitting ResultStage 39 (ParallelCollectionRDD[130] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:09.164 INFO  MemoryStore - Block broadcast_60 stored as values in memory (estimated size 3.0 KiB, free 1918.1 MiB)
22:32:09.165 INFO  MemoryStore - Block broadcast_60_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1918.1 MiB)
22:32:09.165 INFO  BlockManagerInfo - Added broadcast_60_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.7 MiB)
22:32:09.165 INFO  SparkContext - Created broadcast 60 from broadcast at DAGScheduler.scala:1580
22:32:09.166 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 39 (ParallelCollectionRDD[130] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:09.166 INFO  TaskSchedulerImpl - Adding task set 39.0 with 4 tasks resource profile 0
22:32:09.167 INFO  TaskSetManager - Starting task 0.0 in stage 39.0 (TID 92) (localhost, executor driver, partition 0, PROCESS_LOCAL, 47080 bytes) 
22:32:09.167 INFO  TaskSetManager - Starting task 1.0 in stage 39.0 (TID 93) (localhost, executor driver, partition 1, PROCESS_LOCAL, 47194 bytes) 
22:32:09.168 INFO  TaskSetManager - Starting task 2.0 in stage 39.0 (TID 94) (localhost, executor driver, partition 2, PROCESS_LOCAL, 47105 bytes) 
22:32:09.168 INFO  TaskSetManager - Starting task 3.0 in stage 39.0 (TID 95) (localhost, executor driver, partition 3, PROCESS_LOCAL, 47530 bytes) 
22:32:09.169 INFO  Executor - Running task 1.0 in stage 39.0 (TID 93)
22:32:09.170 INFO  Executor - Running task 0.0 in stage 39.0 (TID 92)
22:32:09.170 INFO  Executor - Running task 2.0 in stage 39.0 (TID 94)
22:32:09.171 INFO  Executor - Finished task 1.0 in stage 39.0 (TID 93). 40469 bytes result sent to driver
22:32:09.172 INFO  Executor - Finished task 0.0 in stage 39.0 (TID 92). 40355 bytes result sent to driver
22:32:09.172 INFO  Executor - Finished task 2.0 in stage 39.0 (TID 94). 40380 bytes result sent to driver
22:32:09.173 INFO  Executor - Running task 3.0 in stage 39.0 (TID 95)
22:32:09.174 INFO  TaskSetManager - Finished task 1.0 in stage 39.0 (TID 93) in 7 ms on localhost (executor driver) (1/4)
22:32:09.174 INFO  TaskSetManager - Finished task 0.0 in stage 39.0 (TID 92) in 8 ms on localhost (executor driver) (2/4)
22:32:09.175 INFO  TaskSetManager - Finished task 2.0 in stage 39.0 (TID 94) in 8 ms on localhost (executor driver) (3/4)
22:32:09.175 INFO  Executor - Finished task 3.0 in stage 39.0 (TID 95). 40805 bytes result sent to driver
22:32:09.176 INFO  TaskSetManager - Finished task 3.0 in stage 39.0 (TID 95) in 8 ms on localhost (executor driver) (4/4)
22:32:09.176 INFO  TaskSchedulerImpl - Removed TaskSet 39.0, whose tasks have all completed, from pool 
22:32:09.176 INFO  DAGScheduler - ResultStage 39 (collect at ReadsSparkSourceUnitTest.java:111) finished in 0.012 s
22:32:09.177 INFO  DAGScheduler - Job 30 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:09.177 INFO  TaskSchedulerImpl - Killing all running tasks in stage 39: Stage finished
22:32:09.177 INFO  DAGScheduler - Job 30 finished: collect at ReadsSparkSourceUnitTest.java:111, took 0.013990 s
22:32:09.178 INFO  FileInputFormat - Total input files to process : 1
22:32:09.221 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:112
22:32:09.221 INFO  DAGScheduler - Got job 31 (collect at ReadsSparkSourceUnitTest.java:112) with 1 output partitions
22:32:09.221 INFO  DAGScheduler - Final stage: ResultStage 40 (collect at ReadsSparkSourceUnitTest.java:112)
22:32:09.221 INFO  DAGScheduler - Parents of final stage: List()
22:32:09.222 INFO  DAGScheduler - Missing parents: List()
22:32:09.222 INFO  DAGScheduler - Submitting ResultStage 40 (MapPartitionsRDD[137] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:09.242 INFO  MemoryStore - Block broadcast_61 stored as values in memory (estimated size 470.5 KiB, free 1917.6 MiB)
22:32:09.244 INFO  MemoryStore - Block broadcast_61_piece0 stored as bytes in memory (estimated size 171.0 KiB, free 1917.5 MiB)
22:32:09.244 INFO  BlockManagerInfo - Added broadcast_61_piece0 in memory on localhost:35275 (size: 171.0 KiB, free: 1919.5 MiB)
22:32:09.245 INFO  SparkContext - Created broadcast 61 from broadcast at DAGScheduler.scala:1580
22:32:09.245 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 40 (MapPartitionsRDD[137] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:09.245 INFO  TaskSchedulerImpl - Adding task set 40.0 with 1 tasks resource profile 0
22:32:09.246 INFO  TaskSetManager - Starting task 0.0 in stage 40.0 (TID 96) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7838 bytes) 
22:32:09.246 INFO  Executor - Running task 0.0 in stage 40.0 (TID 96)
22:32:09.297 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.bam:0+57538
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 130, Read name 809R9ABXX101220:5:6:17918:145992, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 130, Read name 809R9ABXX101220:5:6:17918:145992, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 130, Read name 809R9ABXX101220:5:6:17918:145992, The unaligned mate start position is 69400, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 275, Read name 809R9ABXX101220:5:8:7119:101555, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 275, Read name 809R9ABXX101220:5:8:7119:101555, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 275, Read name 809R9ABXX101220:5:8:7119:101555, The unaligned mate start position is 69471, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 419, Read name 809R9ABXX101220:5:48:20204:50350, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 419, Read name 809R9ABXX101220:5:48:20204:50350, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 419, Read name 809R9ABXX101220:5:48:20204:50350, The unaligned mate start position is 69569, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 440, Read name 809R9ABXX101220:5:44:13776:49360, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 440, Read name 809R9ABXX101220:5:44:13776:49360, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 440, Read name 809R9ABXX101220:5:44:13776:49360, The unaligned mate start position is 69592, should be 0
22:32:09.306 INFO  Executor - Finished task 0.0 in stage 40.0 (TID 96). 159615 bytes result sent to driver
22:32:09.308 INFO  TaskSetManager - Finished task 0.0 in stage 40.0 (TID 96) in 62 ms on localhost (executor driver) (1/1)
22:32:09.308 INFO  TaskSchedulerImpl - Removed TaskSet 40.0, whose tasks have all completed, from pool 
22:32:09.308 INFO  DAGScheduler - ResultStage 40 (collect at ReadsSparkSourceUnitTest.java:112) finished in 0.086 s
22:32:09.308 INFO  DAGScheduler - Job 31 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:09.308 INFO  TaskSchedulerImpl - Killing all running tasks in stage 40: Stage finished
22:32:09.308 INFO  DAGScheduler - Job 31 finished: collect at ReadsSparkSourceUnitTest.java:112, took 0.087602 s
22:32:09.314 INFO  MemoryStore - Block broadcast_62 stored as values in memory (estimated size 536.0 B, free 1917.5 MiB)
22:32:09.315 INFO  MemoryStore - Block broadcast_62_piece0 stored as bytes in memory (estimated size 187.0 B, free 1917.5 MiB)
22:32:09.315 INFO  BlockManagerInfo - Added broadcast_62_piece0 in memory on localhost:35275 (size: 187.0 B, free: 1919.5 MiB)
22:32:09.315 INFO  SparkContext - Created broadcast 62 from broadcast at CramSource.java:114
22:32:09.318 INFO  MemoryStore - Block broadcast_63 stored as values in memory (estimated size 325.2 KiB, free 1917.1 MiB)
22:32:09.326 INFO  MemoryStore - Block broadcast_63_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1917.1 MiB)
22:32:09.326 INFO  BlockManagerInfo - Added broadcast_63_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.4 MiB)
22:32:09.326 INFO  SparkContext - Created broadcast 63 from newAPIHadoopFile at PathSplitSource.java:96
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 129, Read name 809R9ABXX101220:5:6:17918:145992, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 129, Read name 809R9ABXX101220:5:6:17918:145992, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 129, Read name 809R9ABXX101220:5:6:17918:145992, The unaligned mate start position is 69400, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 274, Read name 809R9ABXX101220:5:8:7119:101555, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 274, Read name 809R9ABXX101220:5:8:7119:101555, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 274, Read name 809R9ABXX101220:5:8:7119:101555, The unaligned mate start position is 69471, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 418, Read name 809R9ABXX101220:5:48:20204:50350, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 418, Read name 809R9ABXX101220:5:48:20204:50350, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 418, Read name 809R9ABXX101220:5:48:20204:50350, The unaligned mate start position is 69569, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 439, Read name 809R9ABXX101220:5:44:13776:49360, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 439, Read name 809R9ABXX101220:5:44:13776:49360, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 439, Read name 809R9ABXX101220:5:44:13776:49360, The unaligned mate start position is 69592, should be 0
22:32:09.355 INFO  MemoryStore - Block broadcast_64 stored as values in memory (estimated size 536.0 B, free 1917.1 MiB)
22:32:09.356 INFO  MemoryStore - Block broadcast_64_piece0 stored as bytes in memory (estimated size 187.0 B, free 1917.1 MiB)
22:32:09.356 INFO  BlockManagerInfo - Added broadcast_64_piece0 in memory on localhost:35275 (size: 187.0 B, free: 1919.4 MiB)
22:32:09.357 INFO  SparkContext - Created broadcast 64 from broadcast at CramSource.java:114
22:32:09.360 INFO  MemoryStore - Block broadcast_65 stored as values in memory (estimated size 325.2 KiB, free 1916.8 MiB)
22:32:09.367 INFO  MemoryStore - Block broadcast_65_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1916.7 MiB)
22:32:09.367 INFO  BlockManagerInfo - Added broadcast_65_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.4 MiB)
22:32:09.368 INFO  SparkContext - Created broadcast 65 from newAPIHadoopFile at PathSplitSource.java:96
22:32:09.389 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:111
22:32:09.389 INFO  DAGScheduler - Got job 32 (collect at ReadsSparkSourceUnitTest.java:111) with 4 output partitions
22:32:09.389 INFO  DAGScheduler - Final stage: ResultStage 41 (collect at ReadsSparkSourceUnitTest.java:111)
22:32:09.389 INFO  DAGScheduler - Parents of final stage: List()
22:32:09.389 INFO  DAGScheduler - Missing parents: List()
22:32:09.389 INFO  DAGScheduler - Submitting ResultStage 41 (ParallelCollectionRDD[142] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:09.391 INFO  MemoryStore - Block broadcast_66 stored as values in memory (estimated size 3.0 KiB, free 1916.7 MiB)
22:32:09.391 INFO  MemoryStore - Block broadcast_66_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1916.7 MiB)
22:32:09.392 INFO  BlockManagerInfo - Added broadcast_66_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.4 MiB)
22:32:09.392 INFO  SparkContext - Created broadcast 66 from broadcast at DAGScheduler.scala:1580
22:32:09.392 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 41 (ParallelCollectionRDD[142] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:09.392 INFO  TaskSchedulerImpl - Adding task set 41.0 with 4 tasks resource profile 0
22:32:09.394 INFO  TaskSetManager - Starting task 0.0 in stage 41.0 (TID 97) (localhost, executor driver, partition 0, PROCESS_LOCAL, 45675 bytes) 
22:32:09.395 INFO  TaskSetManager - Starting task 1.0 in stage 41.0 (TID 98) (localhost, executor driver, partition 1, PROCESS_LOCAL, 45855 bytes) 
22:32:09.396 INFO  TaskSetManager - Starting task 2.0 in stage 41.0 (TID 99) (localhost, executor driver, partition 2, PROCESS_LOCAL, 45795 bytes) 
22:32:09.397 INFO  TaskSetManager - Starting task 3.0 in stage 41.0 (TID 100) (localhost, executor driver, partition 3, PROCESS_LOCAL, 46068 bytes) 
22:32:09.397 INFO  Executor - Running task 0.0 in stage 41.0 (TID 97)
22:32:09.397 INFO  Executor - Running task 3.0 in stage 41.0 (TID 100)
22:32:09.397 INFO  Executor - Running task 2.0 in stage 41.0 (TID 99)
22:32:09.398 INFO  Executor - Running task 1.0 in stage 41.0 (TID 98)
22:32:09.401 INFO  Executor - Finished task 2.0 in stage 41.0 (TID 99). 39113 bytes result sent to driver
22:32:09.403 INFO  Executor - Finished task 3.0 in stage 41.0 (TID 100). 39386 bytes result sent to driver
22:32:09.404 INFO  Executor - Finished task 0.0 in stage 41.0 (TID 97). 38950 bytes result sent to driver
22:32:09.405 INFO  Executor - Finished task 1.0 in stage 41.0 (TID 98). 39173 bytes result sent to driver
22:32:09.405 INFO  TaskSetManager - Finished task 3.0 in stage 41.0 (TID 100) in 9 ms on localhost (executor driver) (1/4)
22:32:09.406 INFO  TaskSetManager - Finished task 0.0 in stage 41.0 (TID 97) in 13 ms on localhost (executor driver) (2/4)
22:32:09.406 INFO  TaskSetManager - Finished task 1.0 in stage 41.0 (TID 98) in 12 ms on localhost (executor driver) (3/4)
22:32:09.407 INFO  TaskSetManager - Finished task 2.0 in stage 41.0 (TID 99) in 12 ms on localhost (executor driver) (4/4)
22:32:09.407 INFO  TaskSchedulerImpl - Removed TaskSet 41.0, whose tasks have all completed, from pool 
22:32:09.408 INFO  DAGScheduler - ResultStage 41 (collect at ReadsSparkSourceUnitTest.java:111) finished in 0.018 s
22:32:09.408 INFO  DAGScheduler - Job 32 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:09.408 INFO  TaskSchedulerImpl - Killing all running tasks in stage 41: Stage finished
22:32:09.408 INFO  DAGScheduler - Job 32 finished: collect at ReadsSparkSourceUnitTest.java:111, took 0.019528 s
22:32:09.410 INFO  FileInputFormat - Total input files to process : 1
22:32:09.441 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:112
22:32:09.442 INFO  DAGScheduler - Got job 33 (collect at ReadsSparkSourceUnitTest.java:112) with 1 output partitions
22:32:09.442 INFO  DAGScheduler - Final stage: ResultStage 42 (collect at ReadsSparkSourceUnitTest.java:112)
22:32:09.442 INFO  DAGScheduler - Parents of final stage: List()
22:32:09.442 INFO  DAGScheduler - Missing parents: List()
22:32:09.443 INFO  DAGScheduler - Submitting ResultStage 42 (MapPartitionsRDD[148] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:09.458 INFO  MemoryStore - Block broadcast_67 stored as values in memory (estimated size 316.4 KiB, free 1916.4 MiB)
22:32:09.460 INFO  MemoryStore - Block broadcast_67_piece0 stored as bytes in memory (estimated size 115.5 KiB, free 1916.3 MiB)
22:32:09.460 INFO  BlockManagerInfo - Added broadcast_67_piece0 in memory on localhost:35275 (size: 115.5 KiB, free: 1919.3 MiB)
22:32:09.461 INFO  SparkContext - Created broadcast 67 from broadcast at DAGScheduler.scala:1580
22:32:09.461 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 42 (MapPartitionsRDD[148] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:09.461 INFO  TaskSchedulerImpl - Adding task set 42.0 with 1 tasks resource profile 0
22:32:09.462 INFO  TaskSetManager - Starting task 0.0 in stage 42.0 (TID 101) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7839 bytes) 
22:32:09.462 INFO  Executor - Running task 0.0 in stage 42.0 (TID 101)
22:32:09.498 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.cram:0+50619
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 129, Read name 809R9ABXX101220:5:6:17918:145992, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 129, Read name 809R9ABXX101220:5:6:17918:145992, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 129, Read name 809R9ABXX101220:5:6:17918:145992, The unaligned mate start position is 69400, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 274, Read name 809R9ABXX101220:5:8:7119:101555, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 274, Read name 809R9ABXX101220:5:8:7119:101555, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 274, Read name 809R9ABXX101220:5:8:7119:101555, The unaligned mate start position is 69471, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 418, Read name 809R9ABXX101220:5:48:20204:50350, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 418, Read name 809R9ABXX101220:5:48:20204:50350, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 418, Read name 809R9ABXX101220:5:48:20204:50350, The unaligned mate start position is 69569, should be 0
Ignoring SAM validation error: ERROR::INVALID_ALIGNMENT_START:Record 439, Read name 809R9ABXX101220:5:44:13776:49360, Mate Alignment start should be 0 because reference name = *.
Ignoring SAM validation error: ERROR::INVALID_FLAG_MATE_UNMAPPED:Record 439, Read name 809R9ABXX101220:5:44:13776:49360, Mapped mate should have mate reference name
Ignoring SAM validation error: ERROR::INVALID_UNALIGNED_MATE_START:Record 439, Read name 809R9ABXX101220:5:44:13776:49360, The unaligned mate start position is 69592, should be 0
22:32:09.518 INFO  Executor - Finished task 0.0 in stage 42.0 (TID 101). 154101 bytes result sent to driver
22:32:09.520 INFO  TaskSetManager - Finished task 0.0 in stage 42.0 (TID 101) in 58 ms on localhost (executor driver) (1/1)
22:32:09.520 INFO  TaskSchedulerImpl - Removed TaskSet 42.0, whose tasks have all completed, from pool 
22:32:09.521 INFO  DAGScheduler - ResultStage 42 (collect at ReadsSparkSourceUnitTest.java:112) finished in 0.078 s
22:32:09.521 INFO  DAGScheduler - Job 33 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:09.521 INFO  TaskSchedulerImpl - Killing all running tasks in stage 42: Stage finished
22:32:09.521 INFO  DAGScheduler - Job 33 finished: collect at ReadsSparkSourceUnitTest.java:112, took 0.079573 s
22:32:09.542 INFO  BlockManagerInfo - Removed broadcast_53_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.3 MiB)
22:32:09.543 INFO  MemoryStore - Block broadcast_68 stored as values in memory (estimated size 325.2 KiB, free 1916.3 MiB)
22:32:09.544 INFO  BlockManagerInfo - Removed broadcast_52_piece0 on localhost:35275 in memory (size: 206.0 B, free: 1919.3 MiB)
22:32:09.545 INFO  BlockManagerInfo - Removed broadcast_64_piece0 on localhost:35275 in memory (size: 187.0 B, free: 1919.3 MiB)
22:32:09.547 INFO  BlockManagerInfo - Removed broadcast_59_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.4 MiB)
22:32:09.549 INFO  BlockManagerInfo - Removed broadcast_57_piece0 on localhost:35275 in memory (size: 115.4 KiB, free: 1919.5 MiB)
22:32:09.550 INFO  BlockManagerInfo - Removed broadcast_66_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.5 MiB)
22:32:09.550 INFO  BlockManagerInfo - Removed broadcast_63_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.6 MiB)
22:32:09.551 INFO  BlockManagerInfo - Removed broadcast_56_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.6 MiB)
22:32:09.552 INFO  BlockManagerInfo - Removed broadcast_58_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.6 MiB)
22:32:09.553 INFO  BlockManagerInfo - Removed broadcast_67_piece0 on localhost:35275 in memory (size: 115.5 KiB, free: 1919.7 MiB)
22:32:09.554 INFO  BlockManagerInfo - Removed broadcast_61_piece0 on localhost:35275 in memory (size: 171.0 KiB, free: 1919.9 MiB)
22:32:09.555 INFO  BlockManagerInfo - Removed broadcast_55_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.9 MiB)
22:32:09.556 INFO  BlockManagerInfo - Removed broadcast_62_piece0 on localhost:35275 in memory (size: 187.0 B, free: 1919.9 MiB)
22:32:09.557 INFO  MemoryStore - Block broadcast_68_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1919.3 MiB)
22:32:09.557 INFO  BlockManagerInfo - Removed broadcast_60_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.9 MiB)
22:32:09.557 INFO  BlockManagerInfo - Added broadcast_68_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.9 MiB)
22:32:09.558 INFO  BlockManagerInfo - Removed broadcast_65_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.9 MiB)
22:32:09.558 INFO  SparkContext - Created broadcast 68 from newAPIHadoopFile at PathSplitSource.java:96
22:32:09.559 INFO  BlockManagerInfo - Removed broadcast_54_piece0 on localhost:35275 in memory (size: 206.0 B, free: 1919.9 MiB)
22:32:09.596 INFO  MemoryStore - Block broadcast_69 stored as values in memory (estimated size 325.2 KiB, free 1919.3 MiB)
22:32:09.604 INFO  MemoryStore - Block broadcast_69_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1919.3 MiB)
22:32:09.604 INFO  BlockManagerInfo - Added broadcast_69_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.9 MiB)
22:32:09.604 INFO  SparkContext - Created broadcast 69 from newAPIHadoopFile at PathSplitSource.java:96
22:32:09.632 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:111
22:32:09.632 INFO  DAGScheduler - Got job 34 (collect at ReadsSparkSourceUnitTest.java:111) with 4 output partitions
22:32:09.632 INFO  DAGScheduler - Final stage: ResultStage 43 (collect at ReadsSparkSourceUnitTest.java:111)
22:32:09.632 INFO  DAGScheduler - Parents of final stage: List()
22:32:09.633 INFO  DAGScheduler - Missing parents: List()
22:32:09.633 INFO  DAGScheduler - Submitting ResultStage 43 (ParallelCollectionRDD[154] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:09.634 INFO  MemoryStore - Block broadcast_70 stored as values in memory (estimated size 3.0 KiB, free 1919.3 MiB)
22:32:09.634 INFO  MemoryStore - Block broadcast_70_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1919.3 MiB)
22:32:09.635 INFO  BlockManagerInfo - Added broadcast_70_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.9 MiB)
22:32:09.635 INFO  SparkContext - Created broadcast 70 from broadcast at DAGScheduler.scala:1580
22:32:09.635 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 43 (ParallelCollectionRDD[154] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:09.635 INFO  TaskSchedulerImpl - Adding task set 43.0 with 4 tasks resource profile 0
22:32:09.636 INFO  TaskSetManager - Starting task 0.0 in stage 43.0 (TID 102) (localhost, executor driver, partition 0, PROCESS_LOCAL, 47080 bytes) 
22:32:09.637 INFO  TaskSetManager - Starting task 1.0 in stage 43.0 (TID 103) (localhost, executor driver, partition 1, PROCESS_LOCAL, 47194 bytes) 
22:32:09.637 INFO  TaskSetManager - Starting task 2.0 in stage 43.0 (TID 104) (localhost, executor driver, partition 2, PROCESS_LOCAL, 47105 bytes) 
22:32:09.638 INFO  TaskSetManager - Starting task 3.0 in stage 43.0 (TID 105) (localhost, executor driver, partition 3, PROCESS_LOCAL, 47530 bytes) 
22:32:09.639 INFO  Executor - Running task 0.0 in stage 43.0 (TID 102)
22:32:09.639 INFO  Executor - Running task 1.0 in stage 43.0 (TID 103)
22:32:09.639 INFO  Executor - Running task 3.0 in stage 43.0 (TID 105)
22:32:09.640 INFO  Executor - Running task 2.0 in stage 43.0 (TID 104)
22:32:09.642 INFO  Executor - Finished task 1.0 in stage 43.0 (TID 103). 40469 bytes result sent to driver
22:32:09.642 INFO  Executor - Finished task 0.0 in stage 43.0 (TID 102). 40355 bytes result sent to driver
22:32:09.642 INFO  Executor - Finished task 2.0 in stage 43.0 (TID 104). 40380 bytes result sent to driver
22:32:09.643 INFO  TaskSetManager - Finished task 1.0 in stage 43.0 (TID 103) in 6 ms on localhost (executor driver) (1/4)
22:32:09.643 INFO  TaskSetManager - Finished task 0.0 in stage 43.0 (TID 102) in 7 ms on localhost (executor driver) (2/4)
22:32:09.644 INFO  Executor - Finished task 3.0 in stage 43.0 (TID 105). 40805 bytes result sent to driver
22:32:09.645 INFO  TaskSetManager - Finished task 2.0 in stage 43.0 (TID 104) in 8 ms on localhost (executor driver) (3/4)
22:32:09.645 INFO  TaskSetManager - Finished task 3.0 in stage 43.0 (TID 105) in 7 ms on localhost (executor driver) (4/4)
22:32:09.645 INFO  TaskSchedulerImpl - Removed TaskSet 43.0, whose tasks have all completed, from pool 
22:32:09.645 INFO  DAGScheduler - ResultStage 43 (collect at ReadsSparkSourceUnitTest.java:111) finished in 0.012 s
22:32:09.645 INFO  DAGScheduler - Job 34 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:09.645 INFO  TaskSchedulerImpl - Killing all running tasks in stage 43: Stage finished
22:32:09.646 INFO  DAGScheduler - Job 34 finished: collect at ReadsSparkSourceUnitTest.java:111, took 0.013601 s
22:32:09.647 INFO  FileInputFormat - Total input files to process : 1
22:32:09.688 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:112
22:32:09.689 INFO  DAGScheduler - Got job 35 (collect at ReadsSparkSourceUnitTest.java:112) with 1 output partitions
22:32:09.689 INFO  DAGScheduler - Final stage: ResultStage 44 (collect at ReadsSparkSourceUnitTest.java:112)
22:32:09.689 INFO  DAGScheduler - Parents of final stage: List()
22:32:09.689 INFO  DAGScheduler - Missing parents: List()
22:32:09.690 INFO  DAGScheduler - Submitting ResultStage 44 (MapPartitionsRDD[161] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:09.710 INFO  MemoryStore - Block broadcast_71 stored as values in memory (estimated size 470.5 KiB, free 1918.8 MiB)
22:32:09.711 INFO  MemoryStore - Block broadcast_71_piece0 stored as bytes in memory (estimated size 171.0 KiB, free 1918.6 MiB)
22:32:09.712 INFO  BlockManagerInfo - Added broadcast_71_piece0 in memory on localhost:35275 (size: 171.0 KiB, free: 1919.7 MiB)
22:32:09.712 INFO  SparkContext - Created broadcast 71 from broadcast at DAGScheduler.scala:1580
22:32:09.712 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 44 (MapPartitionsRDD[161] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:09.712 INFO  TaskSchedulerImpl - Adding task set 44.0 with 1 tasks resource profile 0
22:32:09.713 INFO  TaskSetManager - Starting task 0.0 in stage 44.0 (TID 106) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7838 bytes) 
22:32:09.714 INFO  Executor - Running task 0.0 in stage 44.0 (TID 106)
22:32:09.758 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.bam:0+57538
22:32:09.764 INFO  Executor - Finished task 0.0 in stage 44.0 (TID 106). 159572 bytes result sent to driver
22:32:09.766 INFO  TaskSetManager - Finished task 0.0 in stage 44.0 (TID 106) in 53 ms on localhost (executor driver) (1/1)
22:32:09.766 INFO  TaskSchedulerImpl - Removed TaskSet 44.0, whose tasks have all completed, from pool 
22:32:09.766 INFO  DAGScheduler - ResultStage 44 (collect at ReadsSparkSourceUnitTest.java:112) finished in 0.076 s
22:32:09.766 INFO  DAGScheduler - Job 35 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:09.766 INFO  TaskSchedulerImpl - Killing all running tasks in stage 44: Stage finished
22:32:09.766 INFO  DAGScheduler - Job 35 finished: collect at ReadsSparkSourceUnitTest.java:112, took 0.077758 s
22:32:09.773 INFO  MemoryStore - Block broadcast_72 stored as values in memory (estimated size 536.0 B, free 1918.6 MiB)
22:32:09.774 INFO  MemoryStore - Block broadcast_72_piece0 stored as bytes in memory (estimated size 187.0 B, free 1918.6 MiB)
22:32:09.774 INFO  BlockManagerInfo - Added broadcast_72_piece0 in memory on localhost:35275 (size: 187.0 B, free: 1919.7 MiB)
22:32:09.775 INFO  SparkContext - Created broadcast 72 from broadcast at CramSource.java:114
22:32:09.778 INFO  MemoryStore - Block broadcast_73 stored as values in memory (estimated size 325.2 KiB, free 1918.3 MiB)
22:32:09.786 INFO  MemoryStore - Block broadcast_73_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.3 MiB)
22:32:09.787 INFO  BlockManagerInfo - Added broadcast_73_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:09.787 INFO  SparkContext - Created broadcast 73 from newAPIHadoopFile at PathSplitSource.java:96
22:32:09.814 INFO  MemoryStore - Block broadcast_74 stored as values in memory (estimated size 536.0 B, free 1918.3 MiB)
22:32:09.814 INFO  MemoryStore - Block broadcast_74_piece0 stored as bytes in memory (estimated size 187.0 B, free 1918.3 MiB)
22:32:09.815 INFO  BlockManagerInfo - Added broadcast_74_piece0 in memory on localhost:35275 (size: 187.0 B, free: 1919.7 MiB)
22:32:09.815 INFO  SparkContext - Created broadcast 74 from broadcast at CramSource.java:114
22:32:09.817 INFO  MemoryStore - Block broadcast_75 stored as values in memory (estimated size 325.2 KiB, free 1917.9 MiB)
22:32:09.825 INFO  MemoryStore - Block broadcast_75_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1917.9 MiB)
22:32:09.826 INFO  BlockManagerInfo - Added broadcast_75_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.6 MiB)
22:32:09.826 INFO  SparkContext - Created broadcast 75 from newAPIHadoopFile at PathSplitSource.java:96
22:32:09.847 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:111
22:32:09.848 INFO  DAGScheduler - Got job 36 (collect at ReadsSparkSourceUnitTest.java:111) with 4 output partitions
22:32:09.848 INFO  DAGScheduler - Final stage: ResultStage 45 (collect at ReadsSparkSourceUnitTest.java:111)
22:32:09.848 INFO  DAGScheduler - Parents of final stage: List()
22:32:09.848 INFO  DAGScheduler - Missing parents: List()
22:32:09.848 INFO  DAGScheduler - Submitting ResultStage 45 (ParallelCollectionRDD[166] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:09.849 INFO  MemoryStore - Block broadcast_76 stored as values in memory (estimated size 3.0 KiB, free 1917.9 MiB)
22:32:09.850 INFO  MemoryStore - Block broadcast_76_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1917.9 MiB)
22:32:09.850 INFO  BlockManagerInfo - Added broadcast_76_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.6 MiB)
22:32:09.850 INFO  SparkContext - Created broadcast 76 from broadcast at DAGScheduler.scala:1580
22:32:09.850 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 45 (ParallelCollectionRDD[166] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:09.850 INFO  TaskSchedulerImpl - Adding task set 45.0 with 4 tasks resource profile 0
22:32:09.852 INFO  TaskSetManager - Starting task 0.0 in stage 45.0 (TID 107) (localhost, executor driver, partition 0, PROCESS_LOCAL, 45675 bytes) 
22:32:09.852 INFO  TaskSetManager - Starting task 1.0 in stage 45.0 (TID 108) (localhost, executor driver, partition 1, PROCESS_LOCAL, 45855 bytes) 
22:32:09.853 INFO  TaskSetManager - Starting task 2.0 in stage 45.0 (TID 109) (localhost, executor driver, partition 2, PROCESS_LOCAL, 45795 bytes) 
22:32:09.854 INFO  TaskSetManager - Starting task 3.0 in stage 45.0 (TID 110) (localhost, executor driver, partition 3, PROCESS_LOCAL, 46068 bytes) 
22:32:09.855 INFO  Executor - Running task 3.0 in stage 45.0 (TID 110)
22:32:09.857 INFO  Executor - Running task 0.0 in stage 45.0 (TID 107)
22:32:09.857 INFO  Executor - Finished task 3.0 in stage 45.0 (TID 110). 39343 bytes result sent to driver
22:32:09.858 INFO  Executor - Running task 2.0 in stage 45.0 (TID 109)
22:32:09.858 INFO  Executor - Running task 1.0 in stage 45.0 (TID 108)
22:32:09.859 INFO  Executor - Finished task 0.0 in stage 45.0 (TID 107). 38950 bytes result sent to driver
22:32:09.860 INFO  TaskSetManager - Finished task 3.0 in stage 45.0 (TID 110) in 6 ms on localhost (executor driver) (1/4)
22:32:09.860 INFO  Executor - Finished task 2.0 in stage 45.0 (TID 109). 39070 bytes result sent to driver
22:32:09.860 INFO  TaskSetManager - Finished task 0.0 in stage 45.0 (TID 107) in 9 ms on localhost (executor driver) (2/4)
22:32:09.861 INFO  TaskSetManager - Finished task 2.0 in stage 45.0 (TID 109) in 9 ms on localhost (executor driver) (3/4)
22:32:09.862 INFO  Executor - Finished task 1.0 in stage 45.0 (TID 108). 39173 bytes result sent to driver
22:32:09.862 INFO  TaskSetManager - Finished task 1.0 in stage 45.0 (TID 108) in 10 ms on localhost (executor driver) (4/4)
22:32:09.862 INFO  TaskSchedulerImpl - Removed TaskSet 45.0, whose tasks have all completed, from pool 
22:32:09.863 INFO  DAGScheduler - ResultStage 45 (collect at ReadsSparkSourceUnitTest.java:111) finished in 0.015 s
22:32:09.863 INFO  DAGScheduler - Job 36 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:09.863 INFO  TaskSchedulerImpl - Killing all running tasks in stage 45: Stage finished
22:32:09.863 INFO  DAGScheduler - Job 36 finished: collect at ReadsSparkSourceUnitTest.java:111, took 0.015593 s
22:32:09.864 INFO  FileInputFormat - Total input files to process : 1
22:32:09.893 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:112
22:32:09.894 INFO  DAGScheduler - Got job 37 (collect at ReadsSparkSourceUnitTest.java:112) with 1 output partitions
22:32:09.894 INFO  DAGScheduler - Final stage: ResultStage 46 (collect at ReadsSparkSourceUnitTest.java:112)
22:32:09.894 INFO  DAGScheduler - Parents of final stage: List()
22:32:09.894 INFO  DAGScheduler - Missing parents: List()
22:32:09.894 INFO  DAGScheduler - Submitting ResultStage 46 (MapPartitionsRDD[172] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:09.909 INFO  MemoryStore - Block broadcast_77 stored as values in memory (estimated size 316.4 KiB, free 1917.6 MiB)
22:32:09.911 INFO  MemoryStore - Block broadcast_77_piece0 stored as bytes in memory (estimated size 115.5 KiB, free 1917.5 MiB)
22:32:09.911 INFO  BlockManagerInfo - Added broadcast_77_piece0 in memory on localhost:35275 (size: 115.5 KiB, free: 1919.5 MiB)
22:32:09.911 INFO  SparkContext - Created broadcast 77 from broadcast at DAGScheduler.scala:1580
22:32:09.911 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 46 (MapPartitionsRDD[172] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:09.911 INFO  TaskSchedulerImpl - Adding task set 46.0 with 1 tasks resource profile 0
22:32:09.912 INFO  TaskSetManager - Starting task 0.0 in stage 46.0 (TID 111) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7839 bytes) 
22:32:09.913 INFO  Executor - Running task 0.0 in stage 46.0 (TID 111)
22:32:09.949 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.cram:0+50619
22:32:09.960 INFO  Executor - Finished task 0.0 in stage 46.0 (TID 111). 154101 bytes result sent to driver
22:32:09.961 INFO  TaskSetManager - Finished task 0.0 in stage 46.0 (TID 111) in 49 ms on localhost (executor driver) (1/1)
22:32:09.961 INFO  TaskSchedulerImpl - Removed TaskSet 46.0, whose tasks have all completed, from pool 
22:32:09.962 INFO  DAGScheduler - ResultStage 46 (collect at ReadsSparkSourceUnitTest.java:112) finished in 0.067 s
22:32:09.962 INFO  DAGScheduler - Job 37 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:09.962 INFO  TaskSchedulerImpl - Killing all running tasks in stage 46: Stage finished
22:32:09.962 INFO  DAGScheduler - Job 37 finished: collect at ReadsSparkSourceUnitTest.java:112, took 0.068472 s
22:32:09.968 INFO  MemoryStore - Block broadcast_78 stored as values in memory (estimated size 325.2 KiB, free 1917.1 MiB)
22:32:09.979 INFO  MemoryStore - Block broadcast_78_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1917.1 MiB)
22:32:09.979 INFO  BlockManagerInfo - Added broadcast_78_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.4 MiB)
22:32:09.980 INFO  SparkContext - Created broadcast 78 from newAPIHadoopFile at PathSplitSource.java:96
22:32:10.008 INFO  MemoryStore - Block broadcast_79 stored as values in memory (estimated size 536.0 B, free 1917.1 MiB)
22:32:10.009 INFO  MemoryStore - Block broadcast_79_piece0 stored as bytes in memory (estimated size 187.0 B, free 1917.1 MiB)
22:32:10.009 INFO  BlockManagerInfo - Added broadcast_79_piece0 in memory on localhost:35275 (size: 187.0 B, free: 1919.4 MiB)
22:32:10.009 INFO  SparkContext - Created broadcast 79 from broadcast at CramSource.java:114
22:32:10.013 INFO  MemoryStore - Block broadcast_80 stored as values in memory (estimated size 325.2 KiB, free 1916.8 MiB)
22:32:10.024 INFO  MemoryStore - Block broadcast_80_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1916.7 MiB)
22:32:10.025 INFO  BlockManagerInfo - Added broadcast_80_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.4 MiB)
22:32:10.025 INFO  SparkContext - Created broadcast 80 from newAPIHadoopFile at PathSplitSource.java:96
22:32:10.063 WARN  FileSystem - Failed to initialize filesystem hdfs://bogus/path.bam: java.lang.IllegalArgumentException: java.net.UnknownHostException: bogus
22:32:10.070 INFO  MemoryStore - Block broadcast_81 stored as values in memory (estimated size 325.3 KiB, free 1916.4 MiB)
22:32:10.077 INFO  MemoryStore - Block broadcast_81_piece0 stored as bytes in memory (estimated size 55.7 KiB, free 1916.3 MiB)
22:32:10.078 INFO  BlockManagerInfo - Added broadcast_81_piece0 in memory on localhost:35275 (size: 55.7 KiB, free: 1919.3 MiB)
22:32:10.078 INFO  SparkContext - Created broadcast 81 from newAPIHadoopFile at PathSplitSource.java:96
22:32:10.114 INFO  MemoryStore - Block broadcast_82 stored as values in memory (estimated size 325.3 KiB, free 1916.0 MiB)
22:32:10.122 INFO  MemoryStore - Block broadcast_82_piece0 stored as bytes in memory (estimated size 55.7 KiB, free 1916.0 MiB)
22:32:10.122 INFO  BlockManagerInfo - Added broadcast_82_piece0 in memory on localhost:35275 (size: 55.7 KiB, free: 1919.3 MiB)
22:32:10.122 INFO  SparkContext - Created broadcast 82 from newAPIHadoopFile at PathSplitSource.java:96
22:32:10.144 INFO  BlockManagerInfo - Removed broadcast_81_piece0 on localhost:35275 in memory (size: 55.7 KiB, free: 1919.3 MiB)
22:32:10.145 INFO  BlockManagerInfo - Removed broadcast_76_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.3 MiB)
22:32:10.145 INFO  BlockManagerInfo - Removed broadcast_80_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.4 MiB)
22:32:10.146 INFO  BlockManagerInfo - Removed broadcast_74_piece0 on localhost:35275 in memory (size: 187.0 B, free: 1919.4 MiB)
22:32:10.147 INFO  BlockManagerInfo - Removed broadcast_70_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.4 MiB)
22:32:10.148 INFO  BlockManagerInfo - Removed broadcast_79_piece0 on localhost:35275 in memory (size: 187.0 B, free: 1919.4 MiB)
22:32:10.149 INFO  BlockManagerInfo - Removed broadcast_78_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.4 MiB)
22:32:10.149 INFO  BlockManagerInfo - Removed broadcast_73_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.5 MiB)
22:32:10.150 INFO  BlockManagerInfo - Removed broadcast_69_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.6 MiB)
22:32:10.151 INFO  BlockManagerInfo - Removed broadcast_75_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.6 MiB)
22:32:10.151 INFO  BlockManagerInfo - Removed broadcast_68_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.7 MiB)
22:32:10.152 INFO  BlockManagerInfo - Removed broadcast_77_piece0 on localhost:35275 in memory (size: 115.5 KiB, free: 1919.8 MiB)
22:32:10.153 INFO  BlockManagerInfo - Removed broadcast_71_piece0 on localhost:35275 in memory (size: 171.0 KiB, free: 1919.9 MiB)
22:32:10.153 INFO  BlockManagerInfo - Removed broadcast_72_piece0 on localhost:35275 in memory (size: 187.0 B, free: 1919.9 MiB)
22:32:10.165 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:154
22:32:10.165 INFO  DAGScheduler - Got job 38 (collect at ReadsSparkSourceUnitTest.java:154) with 4 output partitions
22:32:10.165 INFO  DAGScheduler - Final stage: ResultStage 47 (collect at ReadsSparkSourceUnitTest.java:154)
22:32:10.165 INFO  DAGScheduler - Parents of final stage: List()
22:32:10.166 INFO  DAGScheduler - Missing parents: List()
22:32:10.166 INFO  DAGScheduler - Submitting ResultStage 47 (ParallelCollectionRDD[187] at parallelize at ReadsSparkSourceUnitTest.java:310), which has no missing parents
22:32:10.166 INFO  MemoryStore - Block broadcast_83 stored as values in memory (estimated size 3.0 KiB, free 1919.6 MiB)
22:32:10.167 INFO  MemoryStore - Block broadcast_83_piece0 stored as bytes in memory (estimated size 1780.0 B, free 1919.6 MiB)
22:32:10.167 INFO  BlockManagerInfo - Added broadcast_83_piece0 in memory on localhost:35275 (size: 1780.0 B, free: 1919.9 MiB)
22:32:10.168 INFO  SparkContext - Created broadcast 83 from broadcast at DAGScheduler.scala:1580
22:32:10.168 INFO  DAGScheduler - Submitting 4 missing tasks from ResultStage 47 (ParallelCollectionRDD[187] at parallelize at ReadsSparkSourceUnitTest.java:310) (first 15 tasks are for partitions Vector(0, 1, 2, 3))
22:32:10.168 INFO  TaskSchedulerImpl - Adding task set 47.0 with 4 tasks resource profile 0
22:32:10.169 INFO  TaskSetManager - Starting task 0.0 in stage 47.0 (TID 112) (localhost, executor driver, partition 0, PROCESS_LOCAL, 169886 bytes) 
22:32:10.170 INFO  TaskSetManager - Starting task 1.0 in stage 47.0 (TID 113) (localhost, executor driver, partition 1, PROCESS_LOCAL, 170093 bytes) 
22:32:10.170 INFO  TaskSetManager - Starting task 2.0 in stage 47.0 (TID 114) (localhost, executor driver, partition 2, PROCESS_LOCAL, 170081 bytes) 
22:32:10.171 INFO  TaskSetManager - Starting task 3.0 in stage 47.0 (TID 115) (localhost, executor driver, partition 3, PROCESS_LOCAL, 169419 bytes) 
22:32:10.171 INFO  Executor - Running task 0.0 in stage 47.0 (TID 112)
22:32:10.171 INFO  Executor - Running task 1.0 in stage 47.0 (TID 113)
22:32:10.171 INFO  Executor - Running task 2.0 in stage 47.0 (TID 114)
22:32:10.172 INFO  Executor - Running task 3.0 in stage 47.0 (TID 115)
22:32:10.175 INFO  Executor - Finished task 2.0 in stage 47.0 (TID 114). 163356 bytes result sent to driver
22:32:10.176 INFO  Executor - Finished task 3.0 in stage 47.0 (TID 115). 162694 bytes result sent to driver
22:32:10.176 INFO  Executor - Finished task 0.0 in stage 47.0 (TID 112). 163161 bytes result sent to driver
22:32:10.177 INFO  Executor - Finished task 1.0 in stage 47.0 (TID 113). 163411 bytes result sent to driver
22:32:10.177 INFO  TaskSetManager - Finished task 2.0 in stage 47.0 (TID 114) in 7 ms on localhost (executor driver) (1/4)
22:32:10.178 INFO  TaskSetManager - Finished task 3.0 in stage 47.0 (TID 115) in 8 ms on localhost (executor driver) (2/4)
22:32:10.178 INFO  TaskSetManager - Finished task 0.0 in stage 47.0 (TID 112) in 9 ms on localhost (executor driver) (3/4)
22:32:10.179 INFO  TaskSetManager - Finished task 1.0 in stage 47.0 (TID 113) in 10 ms on localhost (executor driver) (4/4)
22:32:10.179 INFO  TaskSchedulerImpl - Removed TaskSet 47.0, whose tasks have all completed, from pool 
22:32:10.179 INFO  DAGScheduler - ResultStage 47 (collect at ReadsSparkSourceUnitTest.java:154) finished in 0.013 s
22:32:10.179 INFO  DAGScheduler - Job 38 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:10.179 INFO  TaskSchedulerImpl - Killing all running tasks in stage 47: Stage finished
22:32:10.179 INFO  DAGScheduler - Job 38 finished: collect at ReadsSparkSourceUnitTest.java:154, took 0.013977 s
22:32:10.190 INFO  FileInputFormat - Total input files to process : 2
22:32:10.231 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:155
22:32:10.232 INFO  DAGScheduler - Got job 39 (collect at ReadsSparkSourceUnitTest.java:155) with 2 output partitions
22:32:10.232 INFO  DAGScheduler - Final stage: ResultStage 48 (collect at ReadsSparkSourceUnitTest.java:155)
22:32:10.232 INFO  DAGScheduler - Parents of final stage: List()
22:32:10.232 INFO  DAGScheduler - Missing parents: List()
22:32:10.232 INFO  DAGScheduler - Submitting ResultStage 48 (MapPartitionsRDD[194] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:10.252 INFO  MemoryStore - Block broadcast_84 stored as values in memory (estimated size 470.5 KiB, free 1919.2 MiB)
22:32:10.254 INFO  MemoryStore - Block broadcast_84_piece0 stored as bytes in memory (estimated size 171.0 KiB, free 1919.0 MiB)
22:32:10.254 INFO  BlockManagerInfo - Added broadcast_84_piece0 in memory on localhost:35275 (size: 171.0 KiB, free: 1919.8 MiB)
22:32:10.255 INFO  SparkContext - Created broadcast 84 from broadcast at DAGScheduler.scala:1580
22:32:10.255 INFO  DAGScheduler - Submitting 2 missing tasks from ResultStage 48 (MapPartitionsRDD[194] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0, 1))
22:32:10.255 INFO  TaskSchedulerImpl - Adding task set 48.0 with 2 tasks resource profile 0
22:32:10.256 INFO  TaskSetManager - Starting task 0.0 in stage 48.0 (TID 116) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7928 bytes) 
22:32:10.256 INFO  TaskSetManager - Starting task 1.0 in stage 48.0 (TID 117) (localhost, executor driver, partition 1, PROCESS_LOCAL, 7928 bytes) 
22:32:10.257 INFO  Executor - Running task 0.0 in stage 48.0 (TID 116)
22:32:10.257 INFO  Executor - Running task 1.0 in stage 48.0 (TID 117)
22:32:10.317 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.sharded.bam/part-r-00001.bam:0+131967
22:32:10.317 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.sharded.bam/part-r-00000.bam:0+131967
22:32:10.341 INFO  Executor - Finished task 0.0 in stage 48.0 (TID 116). 325633 bytes result sent to driver
22:32:10.343 INFO  Executor - Finished task 1.0 in stage 48.0 (TID 117). 325633 bytes result sent to driver
22:32:10.344 INFO  TaskSetManager - Finished task 0.0 in stage 48.0 (TID 116) in 88 ms on localhost (executor driver) (1/2)
22:32:10.346 INFO  TaskSetManager - Finished task 1.0 in stage 48.0 (TID 117) in 90 ms on localhost (executor driver) (2/2)
22:32:10.346 INFO  TaskSchedulerImpl - Removed TaskSet 48.0, whose tasks have all completed, from pool 
22:32:10.346 INFO  DAGScheduler - ResultStage 48 (collect at ReadsSparkSourceUnitTest.java:155) finished in 0.113 s
22:32:10.346 INFO  DAGScheduler - Job 39 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:10.346 INFO  TaskSchedulerImpl - Killing all running tasks in stage 48: Stage finished
22:32:10.347 INFO  DAGScheduler - Job 39 finished: collect at ReadsSparkSourceUnitTest.java:155, took 0.115262 s
22:32:10.359 INFO  MiniDFSCluster - starting cluster: numNameNodes=1, numDataNodes=1
22:32:10.360 INFO  NameNode - Formatting using clusterid: testClusterID
22:32:10.361 INFO  FSEditLog - Edit logging is async:true
22:32:10.372 INFO  FSNamesystem - KeyProvider: null
22:32:10.372 INFO  FSNamesystem - fsLock is fair: true
22:32:10.372 INFO  FSNamesystem - Detailed lock hold time metrics enabled: false
22:32:10.372 INFO  FSNamesystem - fsOwner                = runner (auth:SIMPLE)
22:32:10.372 INFO  FSNamesystem - supergroup             = supergroup
22:32:10.372 INFO  FSNamesystem - isPermissionEnabled    = true
22:32:10.372 INFO  FSNamesystem - isStoragePolicyEnabled = true
22:32:10.372 INFO  FSNamesystem - HA Enabled: false
22:32:10.373 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
22:32:10.373 INFO  DatanodeManager - Slow peers collection thread shutdown
22:32:10.373 INFO  DatanodeManager - dfs.block.invalidate.limit : configured=1000, counted=60, effected=1000
22:32:10.373 INFO  DatanodeManager - dfs.namenode.datanode.registration.ip-hostname-check=true
22:32:10.373 INFO  BlockManager - dfs.namenode.startup.delay.block.deletion.sec is set to 000:00:00:00.000
22:32:10.373 INFO  BlockManager - The block deletion will start around 2025 Mar 28 22:32:10
22:32:10.373 INFO  GSet - Computing capacity for map BlocksMap
22:32:10.373 INFO  GSet - VM type       = 64-bit
22:32:10.374 INFO  GSet - 2.0% max memory 3.4 GB = 70 MB
22:32:10.374 INFO  GSet - capacity      = 2^23 = 8388608 entries
22:32:10.375 INFO  BlockManager - Storage policy satisfier is disabled
22:32:10.375 INFO  BlockManager - dfs.block.access.token.enable = false
22:32:10.375 INFO  BlockManagerSafeMode - Using 1000 as SafeModeMonitor Interval
22:32:10.375 INFO  BlockManagerSafeMode - dfs.namenode.safemode.threshold-pct = 0.999
22:32:10.375 INFO  BlockManagerSafeMode - dfs.namenode.safemode.min.datanodes = 0
22:32:10.375 INFO  BlockManagerSafeMode - dfs.namenode.safemode.extension = 0
22:32:10.375 INFO  BlockManager - defaultReplication         = 1
22:32:10.375 INFO  BlockManager - maxReplication             = 512
22:32:10.375 INFO  BlockManager - minReplication             = 1
22:32:10.375 INFO  BlockManager - maxReplicationStreams      = 2
22:32:10.375 INFO  BlockManager - redundancyRecheckInterval  = 3000ms
22:32:10.376 INFO  BlockManager - encryptDataTransfer        = false
22:32:10.376 INFO  BlockManager - maxNumBlocksToLog          = 1000
22:32:10.376 INFO  GSet - Computing capacity for map INodeMap
22:32:10.376 INFO  GSet - VM type       = 64-bit
22:32:10.376 INFO  GSet - 1.0% max memory 3.4 GB = 35 MB
22:32:10.376 INFO  GSet - capacity      = 2^22 = 4194304 entries
22:32:10.377 INFO  FSDirectory - ACLs enabled? true
22:32:10.377 INFO  FSDirectory - POSIX ACL inheritance enabled? true
22:32:10.377 INFO  FSDirectory - XAttrs enabled? true
22:32:10.377 INFO  NameNode - Caching file names occurring more than 10 times
22:32:10.377 INFO  SnapshotManager - Loaded config captureOpenFiles: false, skipCaptureAccessTimeOnlyChange: false, snapshotDiffAllowSnapRootDescendant: true, maxSnapshotFSLimit: 65536, maxSnapshotLimit: 65536
22:32:10.377 INFO  SnapshotManager - dfs.namenode.snapshot.deletion.ordered = false
22:32:10.377 INFO  SnapshotManager - SkipList is disabled
22:32:10.377 INFO  GSet - Computing capacity for map cachedBlocks
22:32:10.377 INFO  GSet - VM type       = 64-bit
22:32:10.377 INFO  GSet - 0.25% max memory 3.4 GB = 8.8 MB
22:32:10.377 INFO  GSet - capacity      = 2^20 = 1048576 entries
22:32:10.377 INFO  TopMetrics - NNTop conf: dfs.namenode.top.window.num.buckets = 10
22:32:10.377 INFO  TopMetrics - NNTop conf: dfs.namenode.top.num.users = 10
22:32:10.377 INFO  TopMetrics - NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
22:32:10.378 INFO  FSNamesystem - Retry cache on namenode is enabled
22:32:10.378 INFO  FSNamesystem - Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
22:32:10.378 INFO  GSet - Computing capacity for map NameNodeRetryCache
22:32:10.378 INFO  GSet - VM type       = 64-bit
22:32:10.378 INFO  GSet - 0.029999999329447746% max memory 3.4 GB = 1.0 MB
22:32:10.378 INFO  GSet - capacity      = 2^17 = 131072 entries
22:32:10.378 INFO  FSImage - Allocated new BlockPoolId: BP-2019517390-10.1.0.27-1743201130378
22:32:10.381 INFO  Storage - Storage directory /tmp/minicluster_storage3855638396955912502/name-0-1 has been successfully formatted.
22:32:10.384 INFO  Storage - Storage directory /tmp/minicluster_storage3855638396955912502/name-0-2 has been successfully formatted.
22:32:10.399 INFO  FSImageFormatProtobuf - Saving image file /tmp/minicluster_storage3855638396955912502/name-0-2/current/fsimage.ckpt_0000000000000000000 using no compression
22:32:10.400 INFO  FSImageFormatProtobuf - Saving image file /tmp/minicluster_storage3855638396955912502/name-0-1/current/fsimage.ckpt_0000000000000000000 using no compression
22:32:10.404 INFO  FSImageFormatProtobuf - Image file /tmp/minicluster_storage3855638396955912502/name-0-1/current/fsimage.ckpt_0000000000000000000 of size 401 bytes saved in 0 seconds .
22:32:10.404 INFO  FSImageFormatProtobuf - Image file /tmp/minicluster_storage3855638396955912502/name-0-2/current/fsimage.ckpt_0000000000000000000 of size 401 bytes saved in 0 seconds .
22:32:10.407 INFO  NNStorageRetentionManager - Going to retain 1 images with txid >= 0
22:32:10.407 INFO  DatanodeManager - Slow peers collection thread shutdown
22:32:10.412 INFO  FSNamesystem - Stopping services started for active state
22:32:10.413 INFO  FSNamesystem - Stopping services started for standby state
22:32:10.414 INFO  NameNode - createNameNode []
22:32:10.415 WARN  MetricsConfig - Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
22:32:10.416 INFO  MetricsSystemImpl - Scheduled Metric snapshot period at 10 second(s).
22:32:10.416 INFO  MetricsSystemImpl - NameNode metrics system started
22:32:10.417 INFO  NameNodeUtils - fs.defaultFS is hdfs://127.0.0.1:0
22:32:10.434 INFO  JvmPauseMonitor - Starting JVM pause monitor
22:32:10.435 INFO  DFSUtil - Filter initializers set : org.apache.hadoop.http.lib.StaticUserWebFilter,org.apache.hadoop.hdfs.web.AuthFilterInitializer
22:32:10.435 INFO  DFSUtil - Starting Web-server for hdfs at: http://localhost:0
22:32:10.436 WARN  AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
22:32:10.438 INFO  HttpServer2 - Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
22:32:10.439 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context hdfs
22:32:10.439 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
22:32:10.441 INFO  HttpServer2 - Added filter AuthFilter (class=org.apache.hadoop.hdfs.web.AuthFilter) to context hdfs
22:32:10.441 INFO  HttpServer2 - Added filter AuthFilter (class=org.apache.hadoop.hdfs.web.AuthFilter) to context static
22:32:10.441 INFO  HttpServer2 - ASYNC_PROFILER_HOME environment variable and async.profiler.home system property not specified. Disabling /prof endpoint.
22:32:10.441 INFO  HttpServer2 - addJerseyResourcePackage: packageName=org.apache.hadoop.hdfs.server.namenode.web.resources;org.apache.hadoop.hdfs.web.resources, pathSpec=/webhdfs/v1/*
22:32:10.442 INFO  HttpServer2 - Jetty bound to port 40593
22:32:10.442 INFO  Server - jetty-9.4.56.v20240826; built: 2024-08-26T17:15:05.868Z; git: ec6782ff5ead824dabdcf47fa98f90a4aedff401; jvm 17.0.6+10
22:32:10.443 INFO  session - DefaultSessionIdManager workerName=node0
22:32:10.445 INFO  session - No SessionScavenger set, using defaults
22:32:10.445 INFO  session - node0 Scavenging every 600000ms
22:32:10.446 WARN  AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
22:32:10.446 INFO  ContextHandler - Started o.e.j.s.ServletContextHandler@1062338c{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,AVAILABLE}
22:32:10.572 INFO  ContextHandler - Started o.e.j.w.WebAppContext@5f17910c{hdfs,/,file:///tmp/jetty-localhost-40593-hadoop-hdfs-3_4_0-tests_jar-_-any-3453286786905559527/webapp/,AVAILABLE}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/hdfs}
22:32:10.572 INFO  AbstractConnector - Started ServerConnector@572a3331{HTTP/1.1, (http/1.1)}{localhost:40593}
22:32:10.573 INFO  Server - Started @32744ms
22:32:10.574 INFO  FSEditLog - Edit logging is async:true
22:32:10.589 INFO  FSNamesystem - KeyProvider: null
22:32:10.589 INFO  FSNamesystem - fsLock is fair: true
22:32:10.589 INFO  FSNamesystem - Detailed lock hold time metrics enabled: false
22:32:10.589 INFO  FSNamesystem - fsOwner                = runner (auth:SIMPLE)
22:32:10.589 INFO  FSNamesystem - supergroup             = supergroup
22:32:10.589 INFO  FSNamesystem - isPermissionEnabled    = true
22:32:10.589 INFO  FSNamesystem - isStoragePolicyEnabled = true
22:32:10.589 INFO  FSNamesystem - HA Enabled: false
22:32:10.589 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
22:32:10.589 INFO  DatanodeManager - Slow peers collection thread shutdown
22:32:10.589 INFO  DatanodeManager - dfs.block.invalidate.limit : configured=1000, counted=60, effected=1000
22:32:10.590 INFO  DatanodeManager - dfs.namenode.datanode.registration.ip-hostname-check=true
22:32:10.590 INFO  BlockManager - dfs.namenode.startup.delay.block.deletion.sec is set to 000:00:00:00.000
22:32:10.590 INFO  BlockManager - The block deletion will start around 2025 Mar 28 22:32:10
22:32:10.590 INFO  GSet - Computing capacity for map BlocksMap
22:32:10.590 INFO  GSet - VM type       = 64-bit
22:32:10.590 INFO  GSet - 2.0% max memory 3.4 GB = 70 MB
22:32:10.590 INFO  GSet - capacity      = 2^23 = 8388608 entries
22:32:10.592 INFO  BlockManager - Storage policy satisfier is disabled
22:32:10.592 INFO  BlockManager - dfs.block.access.token.enable = false
22:32:10.592 INFO  BlockManagerSafeMode - Using 1000 as SafeModeMonitor Interval
22:32:10.592 INFO  BlockManagerSafeMode - dfs.namenode.safemode.threshold-pct = 0.999
22:32:10.592 INFO  BlockManagerSafeMode - dfs.namenode.safemode.min.datanodes = 0
22:32:10.592 INFO  BlockManagerSafeMode - dfs.namenode.safemode.extension = 0
22:32:10.592 INFO  BlockManager - defaultReplication         = 1
22:32:10.592 INFO  BlockManager - maxReplication             = 512
22:32:10.592 INFO  BlockManager - minReplication             = 1
22:32:10.592 INFO  BlockManager - maxReplicationStreams      = 2
22:32:10.592 INFO  BlockManager - redundancyRecheckInterval  = 3000ms
22:32:10.592 INFO  BlockManager - encryptDataTransfer        = false
22:32:10.592 INFO  BlockManager - maxNumBlocksToLog          = 1000
22:32:10.592 INFO  GSet - Computing capacity for map INodeMap
22:32:10.592 INFO  GSet - VM type       = 64-bit
22:32:10.592 INFO  GSet - 1.0% max memory 3.4 GB = 35 MB
22:32:10.592 INFO  GSet - capacity      = 2^22 = 4194304 entries
22:32:10.593 INFO  FSDirectory - ACLs enabled? true
22:32:10.593 INFO  FSDirectory - POSIX ACL inheritance enabled? true
22:32:10.593 INFO  FSDirectory - XAttrs enabled? true
22:32:10.593 INFO  NameNode - Caching file names occurring more than 10 times
22:32:10.593 INFO  SnapshotManager - Loaded config captureOpenFiles: false, skipCaptureAccessTimeOnlyChange: false, snapshotDiffAllowSnapRootDescendant: true, maxSnapshotFSLimit: 65536, maxSnapshotLimit: 65536
22:32:10.593 INFO  SnapshotManager - dfs.namenode.snapshot.deletion.ordered = false
22:32:10.593 INFO  SnapshotManager - SkipList is disabled
22:32:10.593 INFO  GSet - Computing capacity for map cachedBlocks
22:32:10.593 INFO  GSet - VM type       = 64-bit
22:32:10.594 INFO  GSet - 0.25% max memory 3.4 GB = 8.8 MB
22:32:10.594 INFO  GSet - capacity      = 2^20 = 1048576 entries
22:32:10.594 INFO  TopMetrics - NNTop conf: dfs.namenode.top.window.num.buckets = 10
22:32:10.594 INFO  TopMetrics - NNTop conf: dfs.namenode.top.num.users = 10
22:32:10.594 INFO  TopMetrics - NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
22:32:10.594 INFO  FSNamesystem - Retry cache on namenode is enabled
22:32:10.594 INFO  FSNamesystem - Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
22:32:10.594 INFO  GSet - Computing capacity for map NameNodeRetryCache
22:32:10.594 INFO  GSet - VM type       = 64-bit
22:32:10.594 INFO  GSet - 0.029999999329447746% max memory 3.4 GB = 1.0 MB
22:32:10.594 INFO  GSet - capacity      = 2^17 = 131072 entries
22:32:10.596 INFO  Storage - Lock on /tmp/minicluster_storage3855638396955912502/name-0-1/in_use.lock acquired by nodename 2817@fv-az1720-291
22:32:10.598 INFO  Storage - Lock on /tmp/minicluster_storage3855638396955912502/name-0-2/in_use.lock acquired by nodename 2817@fv-az1720-291
22:32:10.599 INFO  FileJournalManager - Recovering unfinalized segments in /tmp/minicluster_storage3855638396955912502/name-0-1/current
22:32:10.599 INFO  FileJournalManager - Recovering unfinalized segments in /tmp/minicluster_storage3855638396955912502/name-0-2/current
22:32:10.599 INFO  FSImage - No edit log streams selected.
22:32:10.599 INFO  FSImage - Planning to load image: FSImageFile(file=/tmp/minicluster_storage3855638396955912502/name-0-1/current/fsimage_0000000000000000000, cpktTxId=0000000000000000000)
22:32:10.600 INFO  FSImageFormatPBINode - Loading 1 INodes.
22:32:10.601 INFO  FSImageFormatPBINode - Successfully loaded 1 inodes
22:32:10.601 INFO  FSImageFormatPBINode - Completed update blocks map and name cache, total waiting duration 0ms.
22:32:10.601 INFO  FSImageFormatProtobuf - Loaded FSImage in 0 seconds.
22:32:10.601 INFO  FSImage - Loaded image for txid 0 from /tmp/minicluster_storage3855638396955912502/name-0-1/current/fsimage_0000000000000000000
22:32:10.601 INFO  FSNamesystem - Need to save fs image? false (staleImage=false, haEnabled=false, isRollingUpgrade=false)
22:32:10.601 INFO  FSEditLog - Starting log segment at 1
22:32:10.607 INFO  NameCache - initialized with 0 entries 0 lookups
22:32:10.607 INFO  FSNamesystem - Finished loading FSImage in 12 msecs
22:32:10.607 INFO  NameNode - RPC server is binding to localhost:0
22:32:10.607 INFO  NameNode - Enable NameNode state context:false
22:32:10.607 INFO  CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue, queueCapacity: 1000, scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler, ipcBackoff: false, ipcFailOver: false.
22:32:10.607 INFO  Server - Listener at localhost:45651
22:32:10.608 INFO  Server - Starting Socket Reader #1 for port 0
22:32:10.610 INFO  NameNode - Clients are to use localhost:45651 to access this namenode/service.
22:32:10.611 INFO  FSNamesystem - Registered FSNamesystemState, ReplicatedBlocksState and ECBlockGroupsState MBeans.
22:32:10.627 INFO  LeaseManager - Number of blocks under construction: 0
22:32:10.628 INFO  DatanodeAdminDefaultMonitor - Initialized the Default Decommission and Maintenance monitor
22:32:10.629 INFO  BlockManager - Start MarkedDeleteBlockScrubber thread
22:32:10.629 INFO  BlockManager - initializing replication queues
22:32:10.630 INFO  StateChange - STATE* Leaving safe mode after 0 secs
22:32:10.631 INFO  StateChange - STATE* Network topology has 0 racks and 0 datanodes
22:32:10.631 INFO  StateChange - STATE* UnderReplicatedBlocks has 0 blocks
22:32:10.634 INFO  BlockManager - Total number of blocks            = 0
22:32:10.634 INFO  BlockManager - Number of invalid blocks          = 0
22:32:10.634 INFO  BlockManager - Number of under-replicated blocks = 0
22:32:10.634 INFO  BlockManager - Number of  over-replicated blocks = 0
22:32:10.634 INFO  BlockManager - Number of blocks being written    = 0
22:32:10.634 INFO  StateChange - STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 4 msec
22:32:10.634 INFO  BlockManager - Reconstruction queues initialisation progress: 0.0, total number of blocks processed: 0/0
22:32:10.635 INFO  Server - IPC Server Responder: starting
22:32:10.635 INFO  Server - IPC Server listener on 0: starting
22:32:10.637 INFO  NameNode - NameNode RPC up at: localhost/127.0.0.1:45651.
22:32:10.637 INFO  FSNamesystem - Starting services required for active state
22:32:10.637 INFO  FSDirectory - Initializing quota with 12 thread(s)
22:32:10.638 INFO  FSDirectory - Quota initialization completed in 0 milliseconds
name space=1
storage space=0
storage types=RAM_DISK=0, SSD=0, DISK=0, ARCHIVE=0, PROVIDED=0, NVDIMM=0
22:32:10.640 INFO  CacheReplicationMonitor - Starting CacheReplicationMonitor with interval 30000 milliseconds
22:32:10.640 INFO  MiniDFSCluster - Starting DataNode 0 with dfs.datanode.data.dir: [DISK]file:/tmp/minicluster_storage3855638396955912502/data/data1,[DISK]file:/tmp/minicluster_storage3855638396955912502/data/data2
22:32:10.641 INFO  ThrottledAsyncChecker - Scheduling a check for [DISK]file:/tmp/minicluster_storage3855638396955912502/data/data1
22:32:10.641 INFO  ThrottledAsyncChecker - Scheduling a check for [DISK]file:/tmp/minicluster_storage3855638396955912502/data/data2
22:32:10.655 INFO  MetricsSystemImpl - DataNode metrics system started (again)
22:32:10.655 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
22:32:10.655 INFO  BlockScanner - Initialized block scanner with targetBytesPerSec 1048576
22:32:10.655 INFO  DataNode - Configured hostname is 127.0.0.1
22:32:10.655 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
22:32:10.655 INFO  DataNode - Starting DataNode with maxLockedMemory = 0
22:32:10.656 INFO  DataNode - Opened streaming server at /127.0.0.1:35023
22:32:10.656 INFO  DataNode - Balancing bandwidth is 104857600 bytes/s
22:32:10.656 INFO  DataNode - Number threads for balancing is 100
22:32:10.659 WARN  AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
22:32:10.662 INFO  HttpServer2 - Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
22:32:10.662 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
22:32:10.663 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
22:32:10.663 INFO  HttpServer2 - ASYNC_PROFILER_HOME environment variable and async.profiler.home system property not specified. Disabling /prof endpoint.
22:32:10.663 INFO  HttpServer2 - Jetty bound to port 34819
22:32:10.663 INFO  Server - jetty-9.4.56.v20240826; built: 2024-08-26T17:15:05.868Z; git: ec6782ff5ead824dabdcf47fa98f90a4aedff401; jvm 17.0.6+10
22:32:10.664 INFO  session - DefaultSessionIdManager workerName=node0
22:32:10.664 INFO  session - No SessionScavenger set, using defaults
22:32:10.664 INFO  session - node0 Scavenging every 660000ms
22:32:10.665 INFO  ContextHandler - Started o.e.j.s.ServletContextHandler@7b7d5d8d{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,AVAILABLE}
22:32:10.783 INFO  ContextHandler - Started o.e.j.w.WebAppContext@2fe44d9b{datanode,/,file:///tmp/jetty-localhost-34819-hadoop-hdfs-3_4_0-tests_jar-_-any-18023359217913925248/webapp/,AVAILABLE}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/datanode}
22:32:10.783 INFO  AbstractConnector - Started ServerConnector@7b742ec1{HTTP/1.1, (http/1.1)}{localhost:34819}
22:32:10.783 INFO  Server - Started @32954ms
22:32:10.786 WARN  DatanodeHttpServer - Got null for restCsrfPreventionFilter - will not do any filtering.
22:32:10.787 INFO  DatanodeHttpServer - Listening HTTP traffic on /127.0.0.1:35453
22:32:10.787 INFO  DataNode - dnUserName = runner
22:32:10.787 INFO  DataNode - supergroup = supergroup
22:32:10.787 INFO  CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue, queueCapacity: 1000, scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler, ipcBackoff: false, ipcFailOver: false.
22:32:10.788 INFO  Server - Listener at localhost:41099
22:32:10.788 INFO  JvmPauseMonitor - Starting JVM pause monitor
22:32:10.788 INFO  Server - Starting Socket Reader #1 for port 0
22:32:10.790 INFO  DataNode - Opened IPC server at /127.0.0.1:41099
22:32:10.806 INFO  DataNode - Refresh request received for nameservices: null
22:32:10.807 INFO  DataNode - Starting BPOfferServices for nameservices: <default>
22:32:10.807 INFO  DataNode - Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:45651 starting to offer service
22:32:10.810 INFO  Server - IPC Server Responder: starting
22:32:10.810 INFO  Server - IPC Server listener on 0: starting
22:32:10.810 WARN  Server - IPC Server handler 0 on default port 45651, call Call#3 Retry#0 org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol.versionRequest from localhost:36056 / 127.0.0.1:36056
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService.getDescriptor(DatanodeProtocolProtos.java:34977) ~[hadoop-hdfs-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService$2.getDescriptorForType(DatanodeProtocolProtos.java:34750) ~[hadoop-hdfs-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:32:10.811 WARN  DataNode - Problem connecting to server: localhost/127.0.0.1:45651
22:32:10.814 WARN  Server - IPC Server handler 1 on default port 45651, call Call#4 Retry#0 org.apache.hadoop.hdfs.protocol.ClientProtocol.getDatanodeReport from localhost:36060 / 127.0.0.1:36060
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:32:10.816 WARN  Server - IPC Server handler 2 on default port 45651, call Call#5 Retry#0 org.apache.hadoop.hdfs.protocol.ClientProtocol.getDatanodeReport from localhost:36060 / 127.0.0.1:36060
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:32:10.817 WARN  MiniDFSCluster - Tried waitActive() 2 time(s) and failed, giving up.  org.apache.hadoop.ipc.RemoteException(java.lang.NoClassDefFoundError): Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203)

	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584)
	at org.apache.hadoop.ipc.Client.call(Client.java:1529)
	at org.apache.hadoop.ipc.Client.call(Client.java:1426)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139)
	at jdk.proxy3/jdk.proxy3.$Proxy65.getDatanodeReport(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getDatanodeReport$28(ClientNamenodeProtocolTranslatorPB.java:713)
	at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getDatanodeReport(ClientNamenodeProtocolTranslatorPB.java:713)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:568)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366)
	at jdk.proxy3/jdk.proxy3.$Proxy66.getDatanodeReport(Unknown Source)
	at org.apache.hadoop.hdfs.DFSClient.datanodeReport(DFSClient.java:2134)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2869)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2920)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1848)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:995)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:594)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:533)
	at org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:30)
	at org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:38)
	at org.broadinstitute.hellbender.testutils.MiniClusterUtils.runOnIsolatedMiniCluster(MiniClusterUtils.java:74)
	at org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSourceUnitTest.testCRAMReferenceFromHDFS(ReadsSparkSourceUnitTest.java:231)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:568)
	at org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
	at org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
	at org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
	at org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
	at org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
	at org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
	at org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
	at org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
	at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
	at org.testng.TestRunner.privateRun(TestRunner.java:829)
	at org.testng.TestRunner.run(TestRunner.java:602)
	at org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
	at org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
	at org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
	at org.testng.SuiteRunner.run(SuiteRunner.java:330)
	at org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
	at org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
	at org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
	at org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
	at org.testng.TestNG.runSuites(TestNG.java:1099)
	at org.testng.TestNG.run(TestNG.java:1067)
	at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
	at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:568)
	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
	at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
	at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
	at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
	at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
	at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
	at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)

22:32:10.817 INFO  MiniDFSCluster - Shutting down the Mini HDFS Cluster
22:32:10.817 INFO  MiniDFSCluster - Shutting down DataNode 0
22:32:10.817 INFO  DataNode - Closing all peers.
22:32:10.822 INFO  ContextHandler - Stopped o.e.j.w.WebAppContext@2fe44d9b{datanode,/,null,STOPPED}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/datanode}
22:32:10.823 INFO  AbstractConnector - Stopped ServerConnector@7b742ec1{HTTP/1.1, (http/1.1)}{localhost:0}
22:32:10.823 INFO  session - node0 Stopped scavenging
22:32:10.823 INFO  ContextHandler - Stopped o.e.j.s.ServletContextHandler@7b7d5d8d{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,STOPPED}
22:32:10.824 INFO  DataNode - Waiting up to 30 seconds for transfer threads to complete
22:32:10.824 INFO  Server - Stopping server on 41099
22:32:10.825 INFO  Server - Stopping IPC Server listener on 0
22:32:10.825 INFO  Server - Stopping IPC Server Responder
22:32:10.825 INFO  DataNode - BPOfferService Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:45651 interrupted while requesting version info from NN
22:32:10.825 ERROR DataNode - Initialization failed for Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:45651. Exiting. 
java.io.IOException: DN shut down before block pool connected
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.retrieveNamespaceInfo(BPServiceActor.java:276) ~[hadoop-hdfs-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.connectToNNAndHandshake(BPServiceActor.java:306) ~[hadoop-hdfs-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:889) [hadoop-hdfs-3.4.0.jar:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
22:32:10.826 WARN  DataNode - Ending block pool service for: Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:45651
22:32:10.826 INFO  DataNode - Removed Block pool <registering> (Datanode Uuid unassigned)
22:32:10.826 INFO  DataNode - Shutdown complete.
22:32:10.826 WARN  DataSetLockManager - not open lock leak check func
22:32:10.826 INFO  MiniDFSCluster - Shutting down the namenode
22:32:10.826 INFO  FSNamesystem - Stopping services started for active state
22:32:10.826 ERROR DataNode - Command processor encountered interrupt and exit.
22:32:10.826 WARN  DataNode - Ending command processor service for: Thread[Command processor,5,main]
22:32:10.827 INFO  FSNamesystem - NameNodeEditLogRoller was interrupted, exiting
22:32:10.827 INFO  FSEditLog - Ending log segment 1, 1
22:32:10.827 INFO  FSNamesystem - LazyPersistFileScrubber was interrupted, exiting
22:32:10.827 INFO  FSEditLog - Number of transactions: 2 Total time for transactions(ms): 0 Number of transactions batched in Syncs: 0 Number of syncs: 3 SyncTimes(ms): 1 1 
22:32:10.828 INFO  FileJournalManager - Finalizing edits file /tmp/minicluster_storage3855638396955912502/name-0-1/current/edits_inprogress_0000000000000000001 -> /tmp/minicluster_storage3855638396955912502/name-0-1/current/edits_0000000000000000001-0000000000000000002
22:32:10.828 INFO  FileJournalManager - Finalizing edits file /tmp/minicluster_storage3855638396955912502/name-0-2/current/edits_inprogress_0000000000000000001 -> /tmp/minicluster_storage3855638396955912502/name-0-2/current/edits_0000000000000000001-0000000000000000002
22:32:10.828 INFO  FSEditLog - FSEditLogAsync was interrupted, exiting
22:32:10.828 INFO  CacheReplicationMonitor - Shutting down CacheReplicationMonitor
22:32:10.828 INFO  Server - Stopping server on 45651
22:32:10.829 INFO  Server - Stopping IPC Server listener on 0
22:32:10.830 INFO  Server - Stopping IPC Server Responder
22:32:10.830 INFO  BlockManager - Stopping RedundancyMonitor.
22:32:10.830 INFO  BlockManager - Stopping MarkedDeleteBlockScrubber.
22:32:10.831 INFO  DatanodeManager - Slow peers collection thread shutdown
22:32:10.836 INFO  FSNamesystem - Stopping services started for active state
22:32:10.836 INFO  FSNamesystem - Stopping services started for standby state
22:32:10.838 INFO  ContextHandler - Stopped o.e.j.w.WebAppContext@5f17910c{hdfs,/,null,STOPPED}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/hdfs}
22:32:10.838 INFO  AbstractConnector - Stopped ServerConnector@572a3331{HTTP/1.1, (http/1.1)}{localhost:0}
22:32:10.838 INFO  session - node0 Stopped scavenging
22:32:10.838 INFO  ContextHandler - Stopped o.e.j.s.ServletContextHandler@1062338c{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,STOPPED}
22:32:10.840 INFO  MetricsSystemImpl - Stopping DataNode metrics system...
22:32:10.840 INFO  MetricsSystemImpl - DataNode metrics system stopped.
22:32:10.840 INFO  MetricsSystemImpl - DataNode metrics system shutdown complete.
22:32:10.862 INFO  MemoryStore - Block broadcast_85 stored as values in memory (estimated size 325.2 KiB, free 1918.7 MiB)
22:32:10.877 INFO  MemoryStore - Block broadcast_85_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.6 MiB)
22:32:10.877 INFO  BlockManagerInfo - Added broadcast_85_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:10.877 INFO  SparkContext - Created broadcast 85 from newAPIHadoopFile at PathSplitSource.java:96
22:32:10.910 INFO  MemoryStore - Block broadcast_86 stored as values in memory (estimated size 325.3 KiB, free 1918.3 MiB)
22:32:10.918 INFO  MemoryStore - Block broadcast_86_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.3 MiB)
22:32:10.918 INFO  BlockManagerInfo - Added broadcast_86_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:10.919 INFO  SparkContext - Created broadcast 86 from newAPIHadoopFile at PathSplitSource.java:96
22:32:10.955 INFO  MemoryStore - Block broadcast_87 stored as values in memory (estimated size 325.3 KiB, free 1917.9 MiB)
22:32:10.971 INFO  BlockManagerInfo - Removed broadcast_82_piece0 on localhost:35275 in memory (size: 55.7 KiB, free: 1919.7 MiB)
22:32:10.972 INFO  BlockManagerInfo - Removed broadcast_85_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.8 MiB)
22:32:10.975 INFO  MemoryStore - Block broadcast_87_piece0 stored as bytes in memory (estimated size 55.7 KiB, free 1918.6 MiB)
22:32:10.975 INFO  BlockManagerInfo - Added broadcast_87_piece0 in memory on localhost:35275 (size: 55.7 KiB, free: 1919.7 MiB)
22:32:10.976 INFO  SparkContext - Created broadcast 87 from newAPIHadoopFile at PathSplitSource.java:96
22:32:10.979 INFO  BlockManagerInfo - Removed broadcast_84_piece0 on localhost:35275 in memory (size: 171.0 KiB, free: 1919.9 MiB)
22:32:10.981 INFO  BlockManagerInfo - Removed broadcast_83_piece0 on localhost:35275 in memory (size: 1780.0 B, free: 1919.9 MiB)
22:32:10.982 INFO  BlockManagerInfo - Removed broadcast_86_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.9 MiB)
22:32:11.006 INFO  MemoryStore - Block broadcast_88 stored as values in memory (estimated size 536.0 B, free 1919.6 MiB)
22:32:11.007 INFO  MemoryStore - Block broadcast_88_piece0 stored as bytes in memory (estimated size 187.0 B, free 1919.6 MiB)
22:32:11.007 INFO  BlockManagerInfo - Added broadcast_88_piece0 in memory on localhost:35275 (size: 187.0 B, free: 1919.9 MiB)
22:32:11.007 INFO  SparkContext - Created broadcast 88 from broadcast at CramSource.java:114
22:32:11.009 INFO  MemoryStore - Block broadcast_89 stored as values in memory (estimated size 325.2 KiB, free 1919.3 MiB)
22:32:11.017 INFO  MemoryStore - Block broadcast_89_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1919.3 MiB)
22:32:11.017 INFO  BlockManagerInfo - Added broadcast_89_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.9 MiB)
22:32:11.017 INFO  SparkContext - Created broadcast 89 from newAPIHadoopFile at PathSplitSource.java:96
22:32:11.039 INFO  MemoryStore - Block broadcast_90 stored as values in memory (estimated size 608.0 B, free 1919.3 MiB)
22:32:11.039 INFO  MemoryStore - Block broadcast_90_piece0 stored as bytes in memory (estimated size 206.0 B, free 1919.3 MiB)
22:32:11.040 INFO  BlockManagerInfo - Added broadcast_90_piece0 in memory on localhost:35275 (size: 206.0 B, free: 1919.9 MiB)
22:32:11.040 INFO  SparkContext - Created broadcast 90 from broadcast at CramSource.java:114
22:32:11.042 INFO  MemoryStore - Block broadcast_91 stored as values in memory (estimated size 325.3 KiB, free 1918.9 MiB)
22:32:11.050 INFO  MemoryStore - Block broadcast_91_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.9 MiB)
22:32:11.050 INFO  BlockManagerInfo - Added broadcast_91_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.8 MiB)
22:32:11.050 INFO  SparkContext - Created broadcast 91 from newAPIHadoopFile at PathSplitSource.java:96
22:32:11.072 INFO  MemoryStore - Block broadcast_92 stored as values in memory (estimated size 325.3 KiB, free 1918.6 MiB)
22:32:11.080 INFO  MemoryStore - Block broadcast_92_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.5 MiB)
22:32:11.080 INFO  BlockManagerInfo - Added broadcast_92_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.8 MiB)
22:32:11.081 INFO  SparkContext - Created broadcast 92 from newAPIHadoopFile at PathSplitSource.java:96
22:32:11.115 INFO  FileInputFormat - Total input files to process : 1
22:32:11.186 INFO  SparkContext - Starting job: collect at ReadsSparkSourceUnitTest.java:163
22:32:11.186 INFO  DAGScheduler - Got job 40 (collect at ReadsSparkSourceUnitTest.java:163) with 1 output partitions
22:32:11.186 INFO  DAGScheduler - Final stage: ResultStage 49 (collect at ReadsSparkSourceUnitTest.java:163)
22:32:11.187 INFO  DAGScheduler - Parents of final stage: List()
22:32:11.187 INFO  DAGScheduler - Missing parents: List()
22:32:11.187 INFO  DAGScheduler - Submitting ResultStage 49 (MapPartitionsRDD[224] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:11.222 INFO  MemoryStore - Block broadcast_93 stored as values in memory (estimated size 470.5 KiB, free 1918.1 MiB)
22:32:11.233 INFO  BlockManagerInfo - Removed broadcast_88_piece0 on localhost:35275 in memory (size: 187.0 B, free: 1919.8 MiB)
22:32:11.234 INFO  BlockManagerInfo - Removed broadcast_91_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.8 MiB)
22:32:11.235 INFO  MemoryStore - Block broadcast_93_piece0 stored as bytes in memory (estimated size 171.0 KiB, free 1918.3 MiB)
22:32:11.235 INFO  BlockManagerInfo - Added broadcast_93_piece0 in memory on localhost:35275 (size: 171.0 KiB, free: 1919.7 MiB)
22:32:11.236 INFO  BlockManagerInfo - Removed broadcast_89_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.7 MiB)
22:32:11.236 INFO  BlockManagerInfo - Removed broadcast_90_piece0 on localhost:35275 in memory (size: 206.0 B, free: 1919.7 MiB)
22:32:11.237 INFO  SparkContext - Created broadcast 93 from broadcast at DAGScheduler.scala:1580
22:32:11.237 INFO  BlockManagerInfo - Removed broadcast_87_piece0 on localhost:35275 in memory (size: 55.7 KiB, free: 1919.8 MiB)
22:32:11.238 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 49 (MapPartitionsRDD[224] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:11.238 INFO  TaskSchedulerImpl - Adding task set 49.0 with 1 tasks resource profile 0
22:32:11.239 INFO  TaskSetManager - Starting task 0.0 in stage 49.0 (TID 118) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7883 bytes) 
22:32:11.239 INFO  Executor - Running task 0.0 in stage 49.0 (TID 118)
22:32:11.299 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/BQSR/HiSeq.1mb.1RG.2k_lines.alternate.bam:0+221635
22:32:11.319 INFO  Executor - Finished task 0.0 in stage 49.0 (TID 118). 650184 bytes result sent to driver
22:32:11.323 INFO  TaskSetManager - Finished task 0.0 in stage 49.0 (TID 118) in 84 ms on localhost (executor driver) (1/1)
22:32:11.323 INFO  TaskSchedulerImpl - Removed TaskSet 49.0, whose tasks have all completed, from pool 
22:32:11.324 INFO  DAGScheduler - ResultStage 49 (collect at ReadsSparkSourceUnitTest.java:163) finished in 0.136 s
22:32:11.324 INFO  DAGScheduler - Job 40 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:11.324 INFO  TaskSchedulerImpl - Killing all running tasks in stage 49: Stage finished
22:32:11.324 INFO  DAGScheduler - Job 40 finished: collect at ReadsSparkSourceUnitTest.java:163, took 0.138225 s
22:32:11.329 INFO  MemoryStore - Block broadcast_94 stored as values in memory (estimated size 208.0 B, free 1919.0 MiB)
22:32:11.333 INFO  MemoryStore - Block broadcast_94_piece0 stored as bytes in memory (estimated size 222.0 B, free 1919.0 MiB)
22:32:11.333 INFO  BlockManagerInfo - Added broadcast_94_piece0 in memory on localhost:35275 (size: 222.0 B, free: 1919.8 MiB)
22:32:11.333 INFO  SparkContext - Created broadcast 94 from broadcast at AbstractBinarySamSource.java:82
22:32:11.336 INFO  MemoryStore - Block broadcast_95 stored as values in memory (estimated size 325.2 KiB, free 1918.7 MiB)
22:32:11.348 INFO  MemoryStore - Block broadcast_95_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.6 MiB)
22:32:11.349 INFO  BlockManagerInfo - Added broadcast_95_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:11.349 INFO  SparkContext - Created broadcast 95 from newAPIHadoopFile at PathSplitSource.java:96
22:32:11.390 INFO  FileInputFormat - Total input files to process : 1
22:32:11.395 INFO  SparkContext - Starting job: count at ReadsSparkSourceUnitTest.java:265
22:32:11.396 INFO  DAGScheduler - Got job 41 (count at ReadsSparkSourceUnitTest.java:265) with 1 output partitions
22:32:11.396 INFO  DAGScheduler - Final stage: ResultStage 50 (count at ReadsSparkSourceUnitTest.java:265)
22:32:11.396 INFO  DAGScheduler - Parents of final stage: List()
22:32:11.396 INFO  DAGScheduler - Missing parents: List()
22:32:11.396 INFO  DAGScheduler - Submitting ResultStage 50 (MapPartitionsRDD[231] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:11.429 INFO  MemoryStore - Block broadcast_96 stored as values in memory (estimated size 470.5 KiB, free 1918.2 MiB)
22:32:11.431 INFO  MemoryStore - Block broadcast_96_piece0 stored as bytes in memory (estimated size 171.0 KiB, free 1918.0 MiB)
22:32:11.431 INFO  BlockManagerInfo - Added broadcast_96_piece0 in memory on localhost:35275 (size: 171.0 KiB, free: 1919.6 MiB)
22:32:11.432 INFO  SparkContext - Created broadcast 96 from broadcast at DAGScheduler.scala:1580
22:32:11.432 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 50 (MapPartitionsRDD[231] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:11.432 INFO  TaskSchedulerImpl - Adding task set 50.0 with 1 tasks resource profile 0
22:32:11.433 INFO  TaskSetManager - Starting task 0.0 in stage 50.0 (TID 119) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7838 bytes) 
22:32:11.433 INFO  Executor - Running task 0.0 in stage 50.0 (TID 119)
22:32:11.493 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/NA12878.chr17_69k_70k.dictFix.bam:0+57538
22:32:11.501 INFO  Executor - Finished task 0.0 in stage 50.0 (TID 119). 989 bytes result sent to driver
22:32:11.501 INFO  TaskSetManager - Finished task 0.0 in stage 50.0 (TID 119) in 68 ms on localhost (executor driver) (1/1)
22:32:11.501 INFO  TaskSchedulerImpl - Removed TaskSet 50.0, whose tasks have all completed, from pool 
22:32:11.502 INFO  DAGScheduler - ResultStage 50 (count at ReadsSparkSourceUnitTest.java:265) finished in 0.105 s
22:32:11.502 INFO  DAGScheduler - Job 41 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:11.502 INFO  TaskSchedulerImpl - Killing all running tasks in stage 50: Stage finished
22:32:11.502 INFO  DAGScheduler - Job 41 finished: count at ReadsSparkSourceUnitTest.java:265, took 0.106673 s
22:32:11.506 INFO  MemoryStore - Block broadcast_97 stored as values in memory (estimated size 160.0 B, free 1918.0 MiB)
22:32:11.506 INFO  MemoryStore - Block broadcast_97_piece0 stored as bytes in memory (estimated size 212.0 B, free 1918.0 MiB)
22:32:11.507 INFO  BlockManagerInfo - Added broadcast_97_piece0 in memory on localhost:35275 (size: 212.0 B, free: 1919.6 MiB)
22:32:11.507 INFO  SparkContext - Created broadcast 97 from broadcast at AbstractBinarySamSource.java:82
22:32:11.509 INFO  MemoryStore - Block broadcast_98 stored as values in memory (estimated size 325.3 KiB, free 1917.7 MiB)
22:32:11.525 INFO  BlockManagerInfo - Removed broadcast_96_piece0 on localhost:35275 in memory (size: 171.0 KiB, free: 1919.7 MiB)
22:32:11.526 INFO  BlockManagerInfo - Removed broadcast_95_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.8 MiB)
22:32:11.528 INFO  BlockManagerInfo - Removed broadcast_93_piece0 on localhost:35275 in memory (size: 171.0 KiB, free: 1919.9 MiB)
22:32:11.529 INFO  BlockManagerInfo - Removed broadcast_92_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1920.0 MiB)
22:32:11.529 INFO  BlockManagerInfo - Removed broadcast_94_piece0 on localhost:35275 in memory (size: 222.0 B, free: 1920.0 MiB)
22:32:11.534 INFO  MemoryStore - Block broadcast_98_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1919.6 MiB)
22:32:11.534 INFO  BlockManagerInfo - Added broadcast_98_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.9 MiB)
22:32:11.534 INFO  SparkContext - Created broadcast 98 from newAPIHadoopFile at PathSplitSource.java:96
22:32:11.577 INFO  FileInputFormat - Total input files to process : 1
22:32:11.582 INFO  SparkContext - Starting job: count at ReadsSparkSourceUnitTest.java:286
22:32:11.582 INFO  DAGScheduler - Got job 42 (count at ReadsSparkSourceUnitTest.java:286) with 1 output partitions
22:32:11.582 INFO  DAGScheduler - Final stage: ResultStage 51 (count at ReadsSparkSourceUnitTest.java:286)
22:32:11.582 INFO  DAGScheduler - Parents of final stage: List()
22:32:11.582 INFO  DAGScheduler - Missing parents: List()
22:32:11.583 INFO  DAGScheduler - Submitting ResultStage 51 (MapPartitionsRDD[238] at filter at ReadsSparkSource.java:96), which has no missing parents
22:32:11.616 INFO  MemoryStore - Block broadcast_99 stored as values in memory (estimated size 470.5 KiB, free 1919.2 MiB)
22:32:11.619 INFO  MemoryStore - Block broadcast_99_piece0 stored as bytes in memory (estimated size 171.0 KiB, free 1919.0 MiB)
22:32:11.619 INFO  BlockManagerInfo - Added broadcast_99_piece0 in memory on localhost:35275 (size: 171.0 KiB, free: 1919.8 MiB)
22:32:11.619 INFO  SparkContext - Created broadcast 99 from broadcast at DAGScheduler.scala:1580
22:32:11.619 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 51 (MapPartitionsRDD[238] at filter at ReadsSparkSource.java:96) (first 15 tasks are for partitions Vector(0))
22:32:11.619 INFO  TaskSchedulerImpl - Adding task set 51.0 with 1 tasks resource profile 0
22:32:11.620 INFO  TaskSetManager - Starting task 0.0 in stage 51.0 (TID 120) (localhost, executor driver, partition 0, PROCESS_LOCAL, 7901 bytes) 
22:32:11.621 INFO  Executor - Running task 0.0 in stage 51.0 (TID 120)
22:32:11.680 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/engine/CEUTrio.HiSeq.WGS.b37.NA12878.snippet_with_unmapped.bam:0+2184
22:32:11.688 INFO  Executor - Finished task 0.0 in stage 51.0 (TID 120). 989 bytes result sent to driver
22:32:11.689 INFO  TaskSetManager - Finished task 0.0 in stage 51.0 (TID 120) in 69 ms on localhost (executor driver) (1/1)
22:32:11.689 INFO  TaskSchedulerImpl - Removed TaskSet 51.0, whose tasks have all completed, from pool 
22:32:11.689 INFO  DAGScheduler - ResultStage 51 (count at ReadsSparkSourceUnitTest.java:286) finished in 0.106 s
22:32:11.690 INFO  DAGScheduler - Job 42 is finished. Cancelling potential speculative or zombie tasks for this job
22:32:11.690 INFO  TaskSchedulerImpl - Killing all running tasks in stage 51: Stage finished
22:32:11.690 INFO  DAGScheduler - Job 42 finished: count at ReadsSparkSourceUnitTest.java:286, took 0.108338 s
22:32:11.695 INFO  MemoryStore - Block broadcast_100 stored as values in memory (estimated size 325.3 KiB, free 1918.7 MiB)
22:32:11.708 INFO  MemoryStore - Block broadcast_100_piece0 stored as bytes in memory (estimated size 55.6 KiB, free 1918.6 MiB)
22:32:11.708 INFO  BlockManagerInfo - Added broadcast_100_piece0 in memory on localhost:35275 (size: 55.6 KiB, free: 1919.7 MiB)
22:32:11.709 INFO  SparkContext - Created broadcast 100 from newAPIHadoopFile at PathSplitSource.java:96
22:32:11.753 INFO  MemoryStore - Block broadcast_101 stored as values in memory (estimated size 325.6 KiB, free 1918.3 MiB)
22:32:11.764 INFO  MemoryStore - Block broadcast_101_piece0 stored as bytes in memory (estimated size 55.7 KiB, free 1918.3 MiB)
22:32:11.765 INFO  BlockManagerInfo - Added broadcast_101_piece0 in memory on localhost:35275 (size: 55.7 KiB, free: 1919.7 MiB)
22:32:11.766 INFO  SparkContext - Created broadcast 101 from newAPIHadoopFile at PathSplitSource.java:96
22:32:11.807 INFO  FileInputFormat - Total input files to process : 1
22:32:11.808 INFO  FileInputFormat - Total input files to process : 1
22:32:11.824 INFO  MiniDFSCluster - starting cluster: numNameNodes=1, numDataNodes=1
22:32:11.825 INFO  NameNode - Formatting using clusterid: testClusterID
22:32:11.825 INFO  FSEditLog - Edit logging is async:true
22:32:11.836 INFO  FSNamesystem - KeyProvider: null
22:32:11.836 INFO  FSNamesystem - fsLock is fair: true
22:32:11.836 INFO  FSNamesystem - Detailed lock hold time metrics enabled: false
22:32:11.836 INFO  FSNamesystem - fsOwner                = runner (auth:SIMPLE)
22:32:11.836 INFO  FSNamesystem - supergroup             = supergroup
22:32:11.836 INFO  FSNamesystem - isPermissionEnabled    = true
22:32:11.836 INFO  FSNamesystem - isStoragePolicyEnabled = true
22:32:11.836 INFO  FSNamesystem - HA Enabled: false
22:32:11.836 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
22:32:11.836 INFO  DatanodeManager - Slow peers collection thread shutdown
22:32:11.836 INFO  DatanodeManager - dfs.block.invalidate.limit : configured=1000, counted=60, effected=1000
22:32:11.836 INFO  DatanodeManager - dfs.namenode.datanode.registration.ip-hostname-check=true
22:32:11.837 INFO  BlockManager - dfs.namenode.startup.delay.block.deletion.sec is set to 000:00:00:00.000
22:32:11.837 INFO  BlockManager - The block deletion will start around 2025 Mar 28 22:32:11
22:32:11.837 INFO  GSet - Computing capacity for map BlocksMap
22:32:11.837 INFO  GSet - VM type       = 64-bit
22:32:11.837 INFO  GSet - 2.0% max memory 3.4 GB = 70 MB
22:32:11.837 INFO  GSet - capacity      = 2^23 = 8388608 entries
22:32:11.847 INFO  BlockManagerInfo - Removed broadcast_99_piece0 on localhost:35275 in memory (size: 171.0 KiB, free: 1919.8 MiB)
22:32:11.847 INFO  BlockManagerInfo - Removed broadcast_100_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.9 MiB)
22:32:11.847 INFO  BlockManager - Storage policy satisfier is disabled
22:32:11.847 INFO  BlockManager - dfs.block.access.token.enable = false
22:32:11.848 INFO  BlockManagerSafeMode - Using 1000 as SafeModeMonitor Interval
22:32:11.848 INFO  BlockManagerSafeMode - dfs.namenode.safemode.threshold-pct = 0.999
22:32:11.848 INFO  BlockManagerSafeMode - dfs.namenode.safemode.min.datanodes = 0
22:32:11.848 INFO  BlockManagerSafeMode - dfs.namenode.safemode.extension = 0
22:32:11.848 INFO  BlockManager - defaultReplication         = 1
22:32:11.848 INFO  BlockManager - maxReplication             = 512
22:32:11.848 INFO  BlockManager - minReplication             = 1
22:32:11.848 INFO  BlockManager - maxReplicationStreams      = 2
22:32:11.848 INFO  BlockManager - redundancyRecheckInterval  = 3000ms
22:32:11.848 INFO  BlockManager - encryptDataTransfer        = false
22:32:11.848 INFO  BlockManager - maxNumBlocksToLog          = 1000
22:32:11.848 INFO  GSet - Computing capacity for map INodeMap
22:32:11.848 INFO  GSet - VM type       = 64-bit
22:32:11.848 INFO  GSet - 1.0% max memory 3.4 GB = 35 MB
22:32:11.848 INFO  GSet - capacity      = 2^22 = 4194304 entries
22:32:11.858 INFO  FSDirectory - ACLs enabled? true
22:32:11.858 INFO  FSDirectory - POSIX ACL inheritance enabled? true
22:32:11.858 INFO  FSDirectory - XAttrs enabled? true
22:32:11.858 INFO  NameNode - Caching file names occurring more than 10 times
22:32:11.858 INFO  SnapshotManager - Loaded config captureOpenFiles: false, skipCaptureAccessTimeOnlyChange: false, snapshotDiffAllowSnapRootDescendant: true, maxSnapshotFSLimit: 65536, maxSnapshotLimit: 65536
22:32:11.858 INFO  SnapshotManager - dfs.namenode.snapshot.deletion.ordered = false
22:32:11.858 INFO  SnapshotManager - SkipList is disabled
22:32:11.858 INFO  GSet - Computing capacity for map cachedBlocks
22:32:11.858 INFO  GSet - VM type       = 64-bit
22:32:11.858 INFO  GSet - 0.25% max memory 3.4 GB = 8.8 MB
22:32:11.858 INFO  GSet - capacity      = 2^20 = 1048576 entries
22:32:11.858 INFO  BlockManagerInfo - Removed broadcast_97_piece0 on localhost:35275 in memory (size: 212.0 B, free: 1919.9 MiB)
22:32:11.859 INFO  TopMetrics - NNTop conf: dfs.namenode.top.window.num.buckets = 10
22:32:11.859 INFO  TopMetrics - NNTop conf: dfs.namenode.top.num.users = 10
22:32:11.859 INFO  TopMetrics - NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
22:32:11.859 INFO  FSNamesystem - Retry cache on namenode is enabled
22:32:11.859 INFO  FSNamesystem - Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
22:32:11.859 INFO  GSet - Computing capacity for map NameNodeRetryCache
22:32:11.859 INFO  GSet - VM type       = 64-bit
22:32:11.859 INFO  GSet - 0.029999999329447746% max memory 3.4 GB = 1.0 MB
22:32:11.859 INFO  GSet - capacity      = 2^17 = 131072 entries
22:32:11.859 INFO  BlockManagerInfo - Removed broadcast_98_piece0 on localhost:35275 in memory (size: 55.6 KiB, free: 1919.9 MiB)
22:32:11.860 INFO  FSImage - Allocated new BlockPoolId: BP-403303612-10.1.0.27-1743201131860
22:32:11.860 INFO  BlockManagerInfo - Removed broadcast_101_piece0 on localhost:35275 in memory (size: 55.7 KiB, free: 1920.0 MiB)
22:32:11.862 INFO  Storage - Storage directory /tmp/minicluster_storage6542920960376074796/name-0-1 has been successfully formatted.
22:32:11.864 INFO  Storage - Storage directory /tmp/minicluster_storage6542920960376074796/name-0-2 has been successfully formatted.
22:32:11.879 INFO  FSImageFormatProtobuf - Saving image file /tmp/minicluster_storage6542920960376074796/name-0-1/current/fsimage.ckpt_0000000000000000000 using no compression
22:32:11.879 INFO  FSImageFormatProtobuf - Saving image file /tmp/minicluster_storage6542920960376074796/name-0-2/current/fsimage.ckpt_0000000000000000000 using no compression
22:32:11.884 INFO  FSImageFormatProtobuf - Image file /tmp/minicluster_storage6542920960376074796/name-0-2/current/fsimage.ckpt_0000000000000000000 of size 401 bytes saved in 0 seconds .
22:32:11.884 INFO  FSImageFormatProtobuf - Image file /tmp/minicluster_storage6542920960376074796/name-0-1/current/fsimage.ckpt_0000000000000000000 of size 401 bytes saved in 0 seconds .
22:32:11.886 INFO  NNStorageRetentionManager - Going to retain 1 images with txid >= 0
22:32:11.886 INFO  DatanodeManager - Slow peers collection thread shutdown
22:32:11.891 INFO  FSNamesystem - Stopping services started for active state
22:32:11.891 INFO  FSNamesystem - Stopping services started for standby state
22:32:11.892 INFO  NameNode - createNameNode []
22:32:11.893 WARN  MetricsConfig - Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
22:32:11.894 INFO  MetricsSystemImpl - Scheduled Metric snapshot period at 10 second(s).
22:32:11.894 INFO  MetricsSystemImpl - NameNode metrics system started
22:32:11.895 INFO  NameNodeUtils - fs.defaultFS is hdfs://127.0.0.1:0
22:32:11.912 INFO  JvmPauseMonitor - Starting JVM pause monitor
22:32:11.912 INFO  DFSUtil - Filter initializers set : org.apache.hadoop.http.lib.StaticUserWebFilter,org.apache.hadoop.hdfs.web.AuthFilterInitializer
22:32:11.912 INFO  DFSUtil - Starting Web-server for hdfs at: http://localhost:0
22:32:11.914 WARN  AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
22:32:11.916 INFO  HttpServer2 - Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
22:32:11.916 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context hdfs
22:32:11.916 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
22:32:11.918 INFO  HttpServer2 - Added filter AuthFilter (class=org.apache.hadoop.hdfs.web.AuthFilter) to context hdfs
22:32:11.918 INFO  HttpServer2 - Added filter AuthFilter (class=org.apache.hadoop.hdfs.web.AuthFilter) to context static
22:32:11.918 INFO  HttpServer2 - ASYNC_PROFILER_HOME environment variable and async.profiler.home system property not specified. Disabling /prof endpoint.
22:32:11.918 INFO  HttpServer2 - addJerseyResourcePackage: packageName=org.apache.hadoop.hdfs.server.namenode.web.resources;org.apache.hadoop.hdfs.web.resources, pathSpec=/webhdfs/v1/*
22:32:11.918 INFO  HttpServer2 - Jetty bound to port 36095
22:32:11.919 INFO  Server - jetty-9.4.56.v20240826; built: 2024-08-26T17:15:05.868Z; git: ec6782ff5ead824dabdcf47fa98f90a4aedff401; jvm 17.0.6+10
22:32:11.920 INFO  session - DefaultSessionIdManager workerName=node0
22:32:11.920 INFO  session - No SessionScavenger set, using defaults
22:32:11.920 INFO  session - node0 Scavenging every 660000ms
22:32:11.921 WARN  AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
22:32:11.921 INFO  ContextHandler - Started o.e.j.s.ServletContextHandler@2ef28b4b{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,AVAILABLE}
22:32:12.017 INFO  ContextHandler - Started o.e.j.w.WebAppContext@32101098{hdfs,/,file:///tmp/jetty-localhost-36095-hadoop-hdfs-3_4_0-tests_jar-_-any-17575665532991709552/webapp/,AVAILABLE}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/hdfs}
22:32:12.018 INFO  AbstractConnector - Started ServerConnector@4b9eea40{HTTP/1.1, (http/1.1)}{localhost:36095}
22:32:12.018 INFO  Server - Started @34189ms
22:32:12.019 INFO  FSEditLog - Edit logging is async:true
22:32:12.030 INFO  FSNamesystem - KeyProvider: null
22:32:12.030 INFO  FSNamesystem - fsLock is fair: true
22:32:12.030 INFO  FSNamesystem - Detailed lock hold time metrics enabled: false
22:32:12.030 INFO  FSNamesystem - fsOwner                = runner (auth:SIMPLE)
22:32:12.030 INFO  FSNamesystem - supergroup             = supergroup
22:32:12.030 INFO  FSNamesystem - isPermissionEnabled    = true
22:32:12.030 INFO  FSNamesystem - isStoragePolicyEnabled = true
22:32:12.030 INFO  FSNamesystem - HA Enabled: false
22:32:12.030 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
22:32:12.030 INFO  DatanodeManager - Slow peers collection thread shutdown
22:32:12.030 INFO  DatanodeManager - dfs.block.invalidate.limit : configured=1000, counted=60, effected=1000
22:32:12.030 INFO  DatanodeManager - dfs.namenode.datanode.registration.ip-hostname-check=true
22:32:12.030 INFO  BlockManager - dfs.namenode.startup.delay.block.deletion.sec is set to 000:00:00:00.000
22:32:12.031 INFO  BlockManager - The block deletion will start around 2025 Mar 28 22:32:12
22:32:12.031 INFO  GSet - Computing capacity for map BlocksMap
22:32:12.031 INFO  GSet - VM type       = 64-bit
22:32:12.031 INFO  GSet - 2.0% max memory 3.4 GB = 70 MB
22:32:12.031 INFO  GSet - capacity      = 2^23 = 8388608 entries
22:32:12.041 INFO  BlockManager - Storage policy satisfier is disabled
22:32:12.041 INFO  BlockManager - dfs.block.access.token.enable = false
22:32:12.041 INFO  BlockManagerSafeMode - Using 1000 as SafeModeMonitor Interval
22:32:12.041 INFO  BlockManagerSafeMode - dfs.namenode.safemode.threshold-pct = 0.999
22:32:12.041 INFO  BlockManagerSafeMode - dfs.namenode.safemode.min.datanodes = 0
22:32:12.041 INFO  BlockManagerSafeMode - dfs.namenode.safemode.extension = 0
22:32:12.041 INFO  BlockManager - defaultReplication         = 1
22:32:12.041 INFO  BlockManager - maxReplication             = 512
22:32:12.041 INFO  BlockManager - minReplication             = 1
22:32:12.041 INFO  BlockManager - maxReplicationStreams      = 2
22:32:12.041 INFO  BlockManager - redundancyRecheckInterval  = 3000ms
22:32:12.041 INFO  BlockManager - encryptDataTransfer        = false
22:32:12.041 INFO  BlockManager - maxNumBlocksToLog          = 1000
22:32:12.041 INFO  GSet - Computing capacity for map INodeMap
22:32:12.041 INFO  GSet - VM type       = 64-bit
22:32:12.042 INFO  GSet - 1.0% max memory 3.4 GB = 35 MB
22:32:12.042 INFO  GSet - capacity      = 2^22 = 4194304 entries
22:32:12.051 INFO  FSDirectory - ACLs enabled? true
22:32:12.051 INFO  FSDirectory - POSIX ACL inheritance enabled? true
22:32:12.051 INFO  FSDirectory - XAttrs enabled? true
22:32:12.051 INFO  NameNode - Caching file names occurring more than 10 times
22:32:12.051 INFO  SnapshotManager - Loaded config captureOpenFiles: false, skipCaptureAccessTimeOnlyChange: false, snapshotDiffAllowSnapRootDescendant: true, maxSnapshotFSLimit: 65536, maxSnapshotLimit: 65536
22:32:12.051 INFO  SnapshotManager - dfs.namenode.snapshot.deletion.ordered = false
22:32:12.051 INFO  SnapshotManager - SkipList is disabled
22:32:12.051 INFO  GSet - Computing capacity for map cachedBlocks
22:32:12.051 INFO  GSet - VM type       = 64-bit
22:32:12.051 INFO  GSet - 0.25% max memory 3.4 GB = 8.8 MB
22:32:12.051 INFO  GSet - capacity      = 2^20 = 1048576 entries
22:32:12.052 INFO  TopMetrics - NNTop conf: dfs.namenode.top.window.num.buckets = 10
22:32:12.052 INFO  TopMetrics - NNTop conf: dfs.namenode.top.num.users = 10
22:32:12.052 INFO  TopMetrics - NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
22:32:12.052 INFO  FSNamesystem - Retry cache on namenode is enabled
22:32:12.052 INFO  FSNamesystem - Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
22:32:12.052 INFO  GSet - Computing capacity for map NameNodeRetryCache
22:32:12.052 INFO  GSet - VM type       = 64-bit
22:32:12.052 INFO  GSet - 0.029999999329447746% max memory 3.4 GB = 1.0 MB
22:32:12.052 INFO  GSet - capacity      = 2^17 = 131072 entries
22:32:12.054 INFO  Storage - Lock on /tmp/minicluster_storage6542920960376074796/name-0-1/in_use.lock acquired by nodename 2817@fv-az1720-291
22:32:12.055 INFO  Storage - Lock on /tmp/minicluster_storage6542920960376074796/name-0-2/in_use.lock acquired by nodename 2817@fv-az1720-291
22:32:12.055 INFO  FileJournalManager - Recovering unfinalized segments in /tmp/minicluster_storage6542920960376074796/name-0-1/current
22:32:12.055 INFO  FileJournalManager - Recovering unfinalized segments in /tmp/minicluster_storage6542920960376074796/name-0-2/current
22:32:12.055 INFO  FSImage - No edit log streams selected.
22:32:12.055 INFO  FSImage - Planning to load image: FSImageFile(file=/tmp/minicluster_storage6542920960376074796/name-0-1/current/fsimage_0000000000000000000, cpktTxId=0000000000000000000)
22:32:12.056 INFO  FSImageFormatPBINode - Loading 1 INodes.
22:32:12.057 INFO  FSImageFormatPBINode - Successfully loaded 1 inodes
22:32:12.057 INFO  FSImageFormatPBINode - Completed update blocks map and name cache, total waiting duration 0ms.
22:32:12.057 INFO  FSImageFormatProtobuf - Loaded FSImage in 0 seconds.
22:32:12.057 INFO  FSImage - Loaded image for txid 0 from /tmp/minicluster_storage6542920960376074796/name-0-1/current/fsimage_0000000000000000000
22:32:12.057 INFO  FSNamesystem - Need to save fs image? false (staleImage=false, haEnabled=false, isRollingUpgrade=false)
22:32:12.057 INFO  FSEditLog - Starting log segment at 1
22:32:12.062 INFO  NameCache - initialized with 0 entries 0 lookups
22:32:12.063 INFO  FSNamesystem - Finished loading FSImage in 10 msecs
22:32:12.063 INFO  NameNode - RPC server is binding to localhost:0
22:32:12.063 INFO  NameNode - Enable NameNode state context:false
22:32:12.063 INFO  CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue, queueCapacity: 1000, scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler, ipcBackoff: false, ipcFailOver: false.
22:32:12.063 INFO  Server - Listener at localhost:32833
22:32:12.064 INFO  Server - Starting Socket Reader #1 for port 0
22:32:12.066 INFO  NameNode - Clients are to use localhost:32833 to access this namenode/service.
22:32:12.067 INFO  FSNamesystem - Registered FSNamesystemState, ReplicatedBlocksState and ECBlockGroupsState MBeans.
22:32:12.079 INFO  LeaseManager - Number of blocks under construction: 0
22:32:12.079 INFO  DatanodeAdminDefaultMonitor - Initialized the Default Decommission and Maintenance monitor
22:32:12.081 INFO  BlockManager - Start MarkedDeleteBlockScrubber thread
22:32:12.081 INFO  BlockManager - initializing replication queues
22:32:12.082 INFO  StateChange - STATE* Leaving safe mode after 0 secs
22:32:12.082 INFO  StateChange - STATE* Network topology has 0 racks and 0 datanodes
22:32:12.082 INFO  StateChange - STATE* UnderReplicatedBlocks has 0 blocks
22:32:12.087 INFO  BlockManager - Total number of blocks            = 0
22:32:12.087 INFO  BlockManager - Number of invalid blocks          = 0
22:32:12.087 INFO  BlockManager - Number of under-replicated blocks = 0
22:32:12.087 INFO  BlockManager - Number of  over-replicated blocks = 0
22:32:12.087 INFO  BlockManager - Number of blocks being written    = 0
22:32:12.088 INFO  StateChange - STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 6 msec
22:32:12.088 INFO  BlockManager - Reconstruction queues initialisation progress: 0.0, total number of blocks processed: 0/0
22:32:12.090 INFO  Server - IPC Server Responder: starting
22:32:12.094 INFO  Server - IPC Server listener on 0: starting
22:32:12.097 INFO  NameNode - NameNode RPC up at: localhost/127.0.0.1:32833.
22:32:12.097 INFO  FSNamesystem - Starting services required for active state
22:32:12.097 INFO  FSDirectory - Initializing quota with 12 thread(s)
22:32:12.098 INFO  FSDirectory - Quota initialization completed in 0 milliseconds
name space=1
storage space=0
storage types=RAM_DISK=0, SSD=0, DISK=0, ARCHIVE=0, PROVIDED=0, NVDIMM=0
22:32:12.100 INFO  CacheReplicationMonitor - Starting CacheReplicationMonitor with interval 30000 milliseconds
22:32:12.101 INFO  MiniDFSCluster - Starting DataNode 0 with dfs.datanode.data.dir: [DISK]file:/tmp/minicluster_storage6542920960376074796/data/data1,[DISK]file:/tmp/minicluster_storage6542920960376074796/data/data2
22:32:12.102 INFO  ThrottledAsyncChecker - Scheduling a check for [DISK]file:/tmp/minicluster_storage6542920960376074796/data/data1
22:32:12.102 INFO  ThrottledAsyncChecker - Scheduling a check for [DISK]file:/tmp/minicluster_storage6542920960376074796/data/data2
22:32:12.117 INFO  MetricsSystemImpl - DataNode metrics system started (again)
22:32:12.117 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
22:32:12.118 INFO  BlockScanner - Initialized block scanner with targetBytesPerSec 1048576
22:32:12.118 INFO  DataNode - Configured hostname is 127.0.0.1
22:32:12.118 INFO  Util - dfs.datanode.fileio.profiling.sampling.percentage set to 0. Disabling file IO profiling
22:32:12.118 INFO  DataNode - Starting DataNode with maxLockedMemory = 0
22:32:12.120 INFO  DataNode - Opened streaming server at /127.0.0.1:40563
22:32:12.120 INFO  DataNode - Balancing bandwidth is 104857600 bytes/s
22:32:12.120 INFO  DataNode - Number threads for balancing is 100
22:32:12.122 WARN  AuthenticationFilter - Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/runner/hadoop-http-auth-signature-secret
22:32:12.132 INFO  HttpServer2 - Added global filter 'safety' (class=org.apache.hadoop.http.HttpServer2$QuotingInputFilter)
22:32:12.133 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context datanode
22:32:12.133 INFO  HttpServer2 - Added filter static_user_filter (class=org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter) to context static
22:32:12.133 INFO  HttpServer2 - ASYNC_PROFILER_HOME environment variable and async.profiler.home system property not specified. Disabling /prof endpoint.
22:32:12.133 INFO  HttpServer2 - Jetty bound to port 38029
22:32:12.133 INFO  Server - jetty-9.4.56.v20240826; built: 2024-08-26T17:15:05.868Z; git: ec6782ff5ead824dabdcf47fa98f90a4aedff401; jvm 17.0.6+10
22:32:12.135 INFO  session - DefaultSessionIdManager workerName=node0
22:32:12.135 INFO  session - No SessionScavenger set, using defaults
22:32:12.135 INFO  session - node0 Scavenging every 600000ms
22:32:12.136 INFO  ContextHandler - Started o.e.j.s.ServletContextHandler@2e607b8{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,AVAILABLE}
22:32:12.232 INFO  ContextHandler - Started o.e.j.w.WebAppContext@5b5d7140{datanode,/,file:///tmp/jetty-localhost-38029-hadoop-hdfs-3_4_0-tests_jar-_-any-816577712577200095/webapp/,AVAILABLE}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/datanode}
22:32:12.232 INFO  AbstractConnector - Started ServerConnector@facdae3{HTTP/1.1, (http/1.1)}{localhost:38029}
22:32:12.232 INFO  Server - Started @34403ms
22:32:12.235 WARN  DatanodeHttpServer - Got null for restCsrfPreventionFilter - will not do any filtering.
22:32:12.236 INFO  DatanodeHttpServer - Listening HTTP traffic on /127.0.0.1:43911
22:32:12.236 INFO  DataNode - dnUserName = runner
22:32:12.236 INFO  DataNode - supergroup = supergroup
22:32:12.236 INFO  CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue, queueCapacity: 1000, scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler, ipcBackoff: false, ipcFailOver: false.
22:32:12.237 INFO  Server - Listener at localhost:45891
22:32:12.237 INFO  JvmPauseMonitor - Starting JVM pause monitor
22:32:12.237 INFO  Server - Starting Socket Reader #1 for port 0
22:32:12.239 INFO  DataNode - Opened IPC server at /127.0.0.1:45891
22:32:12.256 INFO  DataNode - Refresh request received for nameservices: null
22:32:12.256 INFO  DataNode - Starting BPOfferServices for nameservices: <default>
22:32:12.257 INFO  DataNode - Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:32833 starting to offer service
22:32:12.259 INFO  Server - IPC Server Responder: starting
22:32:12.259 WARN  Server - IPC Server handler 0 on default port 32833, call Call#6 Retry#0 org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol.versionRequest from localhost:36668 / 127.0.0.1:36668
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService.getDescriptor(DatanodeProtocolProtos.java:34977) ~[hadoop-hdfs-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos$DatanodeProtocolService$2.getDescriptorForType(DatanodeProtocolProtos.java:34750) ~[hadoop-hdfs-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:32:12.260 WARN  DataNode - Problem connecting to server: localhost/127.0.0.1:32833
22:32:12.260 INFO  Server - IPC Server listener on 0: starting
22:32:12.265 WARN  Server - IPC Server handler 1 on default port 32833, call Call#7 Retry#0 org.apache.hadoop.hdfs.protocol.ClientProtocol.getDatanodeReport from localhost:36676 / 127.0.0.1:36676
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:32:12.266 WARN  Server - IPC Server handler 2 on default port 32833, call Call#8 Retry#0 org.apache.hadoop.hdfs.protocol.ClientProtocol.getDatanodeReport from localhost:36676 / 127.0.0.1:36676
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java) ~[hadoop-hdfs-client-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) ~[hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) ~[hadoop-common-3.4.0.jar:?]
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) [?:?]
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439) [?:?]
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) [hadoop-common-3.4.0.jar:?]
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203) [hadoop-common-3.4.0.jar:?]
22:32:12.267 WARN  MiniDFSCluster - Tried waitActive() 2 time(s) and failed, giving up.  org.apache.hadoop.ipc.RemoteException(java.lang.NoClassDefFoundError): Could not initialize class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol.getDescriptor(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.getDescriptorForType(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
	at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
	at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3203)

	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584)
	at org.apache.hadoop.ipc.Client.call(Client.java:1529)
	at org.apache.hadoop.ipc.Client.call(Client.java:1426)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258)
	at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139)
	at jdk.proxy3/jdk.proxy3.$Proxy65.getDatanodeReport(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$getDatanodeReport$28(ClientNamenodeProtocolTranslatorPB.java:713)
	at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getDatanodeReport(ClientNamenodeProtocolTranslatorPB.java:713)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:568)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366)
	at jdk.proxy3/jdk.proxy3.$Proxy66.getDatanodeReport(Unknown Source)
	at org.apache.hadoop.hdfs.DFSClient.datanodeReport(DFSClient.java:2134)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2869)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2920)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1848)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:995)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:594)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:533)
	at org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:30)
	at org.broadinstitute.hellbender.testutils.MiniClusterUtils.getMiniCluster(MiniClusterUtils.java:38)
	at org.broadinstitute.hellbender.testutils.MiniClusterUtils.runOnIsolatedMiniCluster(MiniClusterUtils.java:74)
	at org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSourceUnitTest.testReadFromFileAndHDFS(ReadsSparkSourceUnitTest.java:203)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:568)
	at org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
	at org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
	at org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
	at org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
	at org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
	at org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
	at org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
	at org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
	at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
	at org.testng.TestRunner.privateRun(TestRunner.java:829)
	at org.testng.TestRunner.run(TestRunner.java:602)
	at org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
	at org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
	at org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
	at org.testng.SuiteRunner.run(SuiteRunner.java:330)
	at org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
	at org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
	at org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
	at org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
	at org.testng.TestNG.runSuites(TestNG.java:1099)
	at org.testng.TestNG.run(TestNG.java:1067)
	at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
	at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:568)
	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
	at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
	at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
	at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
	at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
	at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
	at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)

22:32:12.267 INFO  MiniDFSCluster - Shutting down the Mini HDFS Cluster
22:32:12.267 INFO  MiniDFSCluster - Shutting down DataNode 0
22:32:12.268 INFO  DataNode - Closing all peers.
22:32:12.273 INFO  ContextHandler - Stopped o.e.j.w.WebAppContext@5b5d7140{datanode,/,null,STOPPED}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/datanode}
22:32:12.273 INFO  AbstractConnector - Stopped ServerConnector@facdae3{HTTP/1.1, (http/1.1)}{localhost:0}
22:32:12.273 INFO  session - node0 Stopped scavenging
22:32:12.274 INFO  ContextHandler - Stopped o.e.j.s.ServletContextHandler@2e607b8{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,STOPPED}
22:32:12.275 INFO  DataNode - Waiting up to 30 seconds for transfer threads to complete
22:32:12.275 INFO  Server - Stopping server on 45891
22:32:12.276 INFO  Server - Stopping IPC Server listener on 0
22:32:12.276 INFO  Server - Stopping IPC Server Responder
22:32:12.276 INFO  DataNode - BPOfferService Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:32833 interrupted while requesting version info from NN
22:32:12.276 ERROR DataNode - Command processor encountered interrupt and exit.
22:32:12.277 WARN  DataNode - Ending command processor service for: Thread[Command processor,5,main]
22:32:12.276 ERROR DataNode - Initialization failed for Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:32833. Exiting. 
java.io.IOException: DN shut down before block pool connected
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.retrieveNamespaceInfo(BPServiceActor.java:276) ~[hadoop-hdfs-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.connectToNNAndHandshake(BPServiceActor.java:306) ~[hadoop-hdfs-3.4.0.jar:?]
	at org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:889) [hadoop-hdfs-3.4.0.jar:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
22:32:12.277 WARN  DataNode - Ending block pool service for: Block pool <registering> (Datanode Uuid unassigned) service to localhost/127.0.0.1:32833
22:32:12.277 INFO  DataNode - Removed Block pool <registering> (Datanode Uuid unassigned)
22:32:12.277 INFO  DataNode - Shutdown complete.
22:32:12.277 WARN  DataSetLockManager - not open lock leak check func
22:32:12.277 INFO  MiniDFSCluster - Shutting down the namenode
22:32:12.277 INFO  FSNamesystem - Stopping services started for active state
22:32:12.278 INFO  FSEditLog - Ending log segment 1, 1
22:32:12.278 INFO  FSEditLog - Number of transactions: 2 Total time for transactions(ms): 0 Number of transactions batched in Syncs: 0 Number of syncs: 3 SyncTimes(ms): 1 1 
22:32:12.278 INFO  FileJournalManager - Finalizing edits file /tmp/minicluster_storage6542920960376074796/name-0-1/current/edits_inprogress_0000000000000000001 -> /tmp/minicluster_storage6542920960376074796/name-0-1/current/edits_0000000000000000001-0000000000000000002
22:32:12.279 INFO  FileJournalManager - Finalizing edits file /tmp/minicluster_storage6542920960376074796/name-0-2/current/edits_inprogress_0000000000000000001 -> /tmp/minicluster_storage6542920960376074796/name-0-2/current/edits_0000000000000000001-0000000000000000002
22:32:12.279 INFO  FSEditLog - FSEditLogAsync was interrupted, exiting
22:32:12.279 INFO  FSNamesystem - LazyPersistFileScrubber was interrupted, exiting
22:32:12.279 INFO  Server - Stopping server on 32833
22:32:12.279 INFO  FSNamesystem - NameNodeEditLogRoller was interrupted, exiting
22:32:12.280 INFO  CacheReplicationMonitor - Shutting down CacheReplicationMonitor
22:32:12.281 INFO  Server - Stopping IPC Server Responder
22:32:12.281 INFO  Server - Stopping IPC Server listener on 0
22:32:12.282 INFO  BlockManager - Stopping MarkedDeleteBlockScrubber.
22:32:12.282 INFO  BlockManager - Stopping RedundancyMonitor.
22:32:12.283 INFO  DatanodeManager - Slow peers collection thread shutdown
22:32:12.289 INFO  FSNamesystem - Stopping services started for active state
22:32:12.289 INFO  FSNamesystem - Stopping services started for standby state
22:32:12.290 INFO  ContextHandler - Stopped o.e.j.w.WebAppContext@32101098{hdfs,/,null,STOPPED}{jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/hdfs}
22:32:12.291 INFO  AbstractConnector - Stopped ServerConnector@4b9eea40{HTTP/1.1, (http/1.1)}{localhost:0}
22:32:12.291 INFO  session - node0 Stopped scavenging
22:32:12.291 INFO  ContextHandler - Stopped o.e.j.s.ServletContextHandler@2ef28b4b{static,/static,jar:file:/home/runner/.gradle/caches/modules-2/files-2.1/org.apache.hadoop/hadoop-hdfs/3.4.0/18f9797a908899efbe4e27f6d5b76420f446a695/hadoop-hdfs-3.4.0-tests.jar!/webapps/static,STOPPED}
22:32:12.292 INFO  MetricsSystemImpl - Stopping DataNode metrics system...
22:32:12.292 INFO  MetricsSystemImpl - DataNode metrics system stopped.
22:32:12.292 INFO  MetricsSystemImpl - DataNode metrics system shutdown complete.