Class org.broadinstitute.hellbender.tools.spark.bwa.BwaAndMarkDuplicatesPipelineSparkIntegrationTest
Failed tests
test
org.apache.spark.SparkException: Job aborted.
at app//org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:106)
at app//org.apache.spark.rdd.PairRDDFunctions.$anonfun$saveAsNewAPIHadoopDataset$1(PairRDDFunctions.scala:1078)
at app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
at app//org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at app//org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at app//org.apache.spark.rdd.RDD.withScope(RDD.scala:407)
at app//org.apache.spark.rdd.PairRDDFunctions.saveAsNewAPIHadoopDataset(PairRDDFunctions.scala:1076)
at app//org.apache.spark.rdd.PairRDDFunctions.$anonfun$saveAsNewAPIHadoopFile$2(PairRDDFunctions.scala:995)
at app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
at app//org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at app//org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at app//org.apache.spark.rdd.RDD.withScope(RDD.scala:407)
at app//org.apache.spark.rdd.PairRDDFunctions.saveAsNewAPIHadoopFile(PairRDDFunctions.scala:986)
at app//org.apache.spark.api.java.JavaPairRDD.saveAsNewAPIHadoopFile(JavaPairRDD.scala:825)
at app//org.disq_bio.disq.impl.formats.bam.BamSink.save(BamSink.java:93)
at app//org.disq_bio.disq.HtsjdkReadsRddStorage.write(HtsjdkReadsRddStorage.java:233)
at app//org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSink.writeReads(ReadsSparkSink.java:142)
at app//org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSink.writeReads(ReadsSparkSink.java:111)
at app//org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSink.writeReads(ReadsSparkSink.java:67)
at app//org.broadinstitute.hellbender.tools.spark.pipelines.BwaAndMarkDuplicatesPipelineSpark.runTool(BwaAndMarkDuplicatesPipelineSpark.java:71)
at app//org.broadinstitute.hellbender.engine.spark.GATKSparkTool.runPipeline(GATKSparkTool.java:535)
at app//org.broadinstitute.hellbender.engine.spark.SparkCommandLineProgram.doWork(SparkCommandLineProgram.java:31)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.runTool(CommandLineProgram.java:150)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMainPostParseArgs(CommandLineProgram.java:203)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMain(CommandLineProgram.java:222)
at app//org.broadinstitute.hellbender.Main.runCommandLineProgram(Main.java:166)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:152)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:193)
at app//org.broadinstitute.hellbender.CommandLineProgramTest.runCommandLine(CommandLineProgramTest.java:27)
at app//org.broadinstitute.hellbender.testutils.CommandLineProgramTester.runCommandLine(CommandLineProgramTester.java:108)
at app//org.broadinstitute.hellbender.tools.spark.bwa.BwaAndMarkDuplicatesPipelineSparkIntegrationTest.test(BwaAndMarkDuplicatesPipelineSparkIntegrationTest.java:37)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at app//org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
at app//org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
at app//org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
at app//org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
at app//org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
at app//org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
at java.base@17.0.6/java.util.ArrayList.forEach(ArrayList.java:1511)
at app//org.testng.TestRunner.privateRun(TestRunner.java:829)
at app//org.testng.TestRunner.run(TestRunner.java:602)
at app//org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
at app//org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
at app//org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
at app//org.testng.SuiteRunner.run(SuiteRunner.java:330)
at app//org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
at app//org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
at app//org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
at app//org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
at app//org.testng.TestNG.runSuites(TestNG.java:1099)
at app//org.testng.TestNG.run(TestNG.java:1067)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 232.0 failed 1 times, most recent failure: Lost task 0.0 in stage 232.0 (TID 286) (localhost executor driver): java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24)
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine$ReadAligner.<init>(BwaSparkEngine.java:122)
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine.lambda$align$88f889e2$1(BwaSparkEngine.java:104)
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Driver stacktrace:
at app//org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2844)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2780)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2779)
at app//scala.collection.immutable.List.foreach(List.scala:334)
at app//org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2779)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1242)
at app//scala.Option.foreach(Option.scala:437)
at app//org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3048)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2982)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2971)
at app//org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
at app//org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:984)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2398)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2419)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2451)
at app//org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:83)
... 76 more
Caused by: java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24)
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine$ReadAligner.<init>(BwaSparkEngine.java:122)
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine.lambda$align$88f889e2$1(BwaSparkEngine.java:104)
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Tests
| Test | Duration | Result |
| ---- | -------- | ------ |
| test | 0.601s   | failed |
Standard error
17:46:38.368 ERROR Executor - Exception in task 0.0 in stage 232.0 (TID 286)
java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447) ~[gatk-bwamem-jni-1.0.4.jar:?]
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330) ~[gatk-bwamem-jni-1.0.4.jar:?]
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine$ReadAligner.<init>(BwaSparkEngine.java:122) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine.lambda$align$88f889e2$1(BwaSparkEngine.java:104) ~[main/:?]
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
17:46:38.383 ERROR TaskSetManager - Task 0 in stage 232.0 failed 1 times; aborting job
17:46:38.387 ERROR SparkHadoopWriter - Aborting job job_202510271746387483779854923589915_2020.
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 232.0 failed 1 times, most recent failure: Lost task 0.0 in stage 232.0 (TID 286) (localhost executor driver): java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24)
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine$ReadAligner.<init>(BwaSparkEngine.java:122)
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine.lambda$align$88f889e2$1(BwaSparkEngine.java:104)
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2844) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2780) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2779) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at scala.collection.immutable.List.foreach(List.scala:334) ~[scala-library-2.13.14.jar:?]
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2779) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1242) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1242) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at scala.Option.foreach(Option.scala:437) ~[scala-library-2.13.14.jar:?]
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1242) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3048) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2982) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2971) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:984) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2398) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2419) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2451) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:83) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.PairRDDFunctions.$anonfun$saveAsNewAPIHadoopDataset$1(PairRDDFunctions.scala:1078) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18) ~[scala-library-2.13.14.jar:?]
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.withScope(RDD.scala:407) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.PairRDDFunctions.saveAsNewAPIHadoopDataset(PairRDDFunctions.scala:1076) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.PairRDDFunctions.$anonfun$saveAsNewAPIHadoopFile$2(PairRDDFunctions.scala:995) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18) ~[scala-library-2.13.14.jar:?]
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.withScope(RDD.scala:407) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.PairRDDFunctions.saveAsNewAPIHadoopFile(PairRDDFunctions.scala:986) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.api.java.JavaPairRDD.saveAsNewAPIHadoopFile(JavaPairRDD.scala:825) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.disq_bio.disq.impl.formats.bam.BamSink.save(BamSink.java:93) ~[disq-0.3.8.jar:?]
at org.disq_bio.disq.HtsjdkReadsRddStorage.write(HtsjdkReadsRddStorage.java:233) ~[disq-0.3.8.jar:?]
at org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSink.writeReads(ReadsSparkSink.java:142) ~[main/:?]
at org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSink.writeReads(ReadsSparkSink.java:111) ~[main/:?]
at org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSink.writeReads(ReadsSparkSink.java:67) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.pipelines.BwaAndMarkDuplicatesPipelineSpark.runTool(BwaAndMarkDuplicatesPipelineSpark.java:71) ~[main/:?]
at org.broadinstitute.hellbender.engine.spark.GATKSparkTool.runPipeline(GATKSparkTool.java:535) ~[main/:?]
at org.broadinstitute.hellbender.engine.spark.SparkCommandLineProgram.doWork(SparkCommandLineProgram.java:31) ~[main/:?]
at org.broadinstitute.hellbender.cmdline.CommandLineProgram.runTool(CommandLineProgram.java:150) ~[main/:?]
at org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMainPostParseArgs(CommandLineProgram.java:203) ~[main/:?]
at org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMain(CommandLineProgram.java:222) ~[main/:?]
at org.broadinstitute.hellbender.Main.runCommandLineProgram(Main.java:166) ~[main/:?]
at org.broadinstitute.hellbender.Main.instanceMain(Main.java:152) ~[main/:?]
at org.broadinstitute.hellbender.Main.instanceMain(Main.java:193) ~[main/:?]
at org.broadinstitute.hellbender.CommandLineProgramTest.runCommandLine(CommandLineProgramTest.java:27) ~[test/:?]
at org.broadinstitute.hellbender.testutils.CommandLineProgramTester.runCommandLine(CommandLineProgramTester.java:108) ~[testUtils/:?]
at org.broadinstitute.hellbender.tools.spark.bwa.BwaAndMarkDuplicatesPipelineSparkIntegrationTest.test(BwaAndMarkDuplicatesPipelineSparkIntegrationTest.java:37) ~[test/:?]
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.base/java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
at org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139) ~[testng-7.7.0.jar:7.7.0]
at org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677) ~[testng-7.7.0.jar:7.7.0]
at org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221) ~[testng-7.7.0.jar:7.7.0]
at org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50) ~[testng-7.7.0.jar:7.7.0]
at org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969) ~[testng-7.7.0.jar:7.7.0]
at org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194) ~[testng-7.7.0.jar:7.7.0]
at org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148) ~[testng-7.7.0.jar:7.7.0]
at org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128) ~[testng-7.7.0.jar:7.7.0]
at java.base/java.util.ArrayList.forEach(ArrayList.java:1511) [?:?]
at org.testng.TestRunner.privateRun(TestRunner.java:829) [testng-7.7.0.jar:7.7.0]
at org.testng.TestRunner.run(TestRunner.java:602) [testng-7.7.0.jar:7.7.0]
at org.testng.SuiteRunner.runTest(SuiteRunner.java:437) [testng-7.7.0.jar:7.7.0]
at org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431) [testng-7.7.0.jar:7.7.0]
at org.testng.SuiteRunner.privateRun(SuiteRunner.java:391) [testng-7.7.0.jar:7.7.0]
at org.testng.SuiteRunner.run(SuiteRunner.java:330) [testng-7.7.0.jar:7.7.0]
at org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52) [testng-7.7.0.jar:7.7.0]
at org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95) [testng-7.7.0.jar:7.7.0]
at org.testng.TestNG.runSuitesSequentially(TestNG.java:1256) [testng-7.7.0.jar:7.7.0]
at org.testng.TestNG.runSuitesLocally(TestNG.java:1176) [testng-7.7.0.jar:7.7.0]
at org.testng.TestNG.runSuites(TestNG.java:1099) [testng-7.7.0.jar:7.7.0]
at org.testng.TestNG.run(TestNG.java:1067) [testng-7.7.0.jar:7.7.0]
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153) [gradle-testing-jvm-infrastructure-8.10.2.jar:8.10.2]
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95) [gradle-testing-jvm-infrastructure-8.10.2.jar:8.10.2]
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63) [gradle-testing-base-infrastructure-8.10.2.jar:8.10.2]
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.base/java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36) [gradle-messaging-8.10.2.jar:8.10.2]
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-8.10.2.jar:8.10.2]
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33) [gradle-messaging-8.10.2.jar:8.10.2]
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92) [gradle-messaging-8.10.2.jar:8.10.2]
at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source) [?:?]
at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200) [gradle-testing-base-infrastructure-8.10.2.jar:8.10.2]
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132) [gradle-testing-base-infrastructure-8.10.2.jar:8.10.2]
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103) [gradle-testing-base-infrastructure-8.10.2.jar:8.10.2]
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63) [gradle-testing-base-infrastructure-8.10.2.jar:8.10.2]
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56) [gradle-worker-main-8.10.2.jar:8.10.2]
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121) [gradle-worker-main-8.10.2.jar:8.10.2]
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71) [gradle-worker-main-8.10.2.jar:8.10.2]
at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69) [gradle-worker.jar:?]
at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74) [gradle-worker.jar:?]
Caused by: java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447) ~[gatk-bwamem-jni-1.0.4.jar:?]
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330) ~[gatk-bwamem-jni-1.0.4.jar:?]
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine$ReadAligner.<init>(BwaSparkEngine.java:122) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.bwa.BwaSparkEngine.lambda$align$88f889e2$1(BwaSparkEngine.java:104) ~[main/:?]
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) ~[?:?]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) ~[?:?]
at java.base/java.lang.Thread.run(Thread.java:833) ~[?:?]
[October 27, 2025 at 5:46:38 PM UTC] org.broadinstitute.hellbender.tools.spark.pipelines.BwaAndMarkDuplicatesPipelineSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=935329792