Class org.broadinstitute.hellbender.tools.spark.pathseq.PathSeqPipelineSparkIntegrationTest
Failed tests
testPipelineTool[0](pipeline_input.bam, pipeline_output.bam, pipeline_output.txt, pipeline_output.filter.metrics, pipeline_output.score.metrics, false)
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 394.0 failed 1 times, most recent failure: Lost task 0.0 in stage 394.0 (TID 413) (localhost executor driver): java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64.Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24)
at org.broadinstitute.hellbender.tools.spark.pathseq.PSBwaFilter.<init>(PSBwaFilter.java:29)
at org.broadinstitute.hellbender.tools.spark.pathseq.PSFilter.lambda$doBwaFilter$9ded5e08$1(PSFilter.java:216)
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Driver stacktrace:
at app//org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2844)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2780)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2779)
at app//scala.collection.immutable.List.foreach(List.scala:334)
at app//org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2779)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1242)
at app//scala.Option.foreach(Option.scala:437)
at app//org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3048)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2982)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2971)
at app//org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
at app//org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:984)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2398)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2419)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2438)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2463)
at app//org.apache.spark.rdd.RDD.count(RDD.scala:1293)
at app//org.apache.spark.api.java.JavaRDDLike.count(JavaRDDLike.scala:469)
at app//org.apache.spark.api.java.JavaRDDLike.count$(JavaRDDLike.scala:469)
at app//org.apache.spark.api.java.AbstractJavaRDDLike.count(JavaRDDLike.scala:45)
at app//org.broadinstitute.hellbender.tools.spark.pathseq.loggers.PSFilterFileLogger.logReadsAfterHostFilter(PSFilterFileLogger.java:47)
at app//org.broadinstitute.hellbender.tools.spark.pathseq.PSFilter.doFilter(PSFilter.java:284)
at app//org.broadinstitute.hellbender.tools.spark.pathseq.PathSeqPipelineSpark.runTool(PathSeqPipelineSpark.java:238)
at app//org.broadinstitute.hellbender.engine.spark.GATKSparkTool.runPipeline(GATKSparkTool.java:535)
at app//org.broadinstitute.hellbender.engine.spark.SparkCommandLineProgram.doWork(SparkCommandLineProgram.java:31)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.runTool(CommandLineProgram.java:150)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMainPostParseArgs(CommandLineProgram.java:203)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMain(CommandLineProgram.java:222)
at app//org.broadinstitute.hellbender.Main.runCommandLineProgram(Main.java:166)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:152)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:193)
at app//org.broadinstitute.hellbender.CommandLineProgramTest.runCommandLine(CommandLineProgramTest.java:27)
at app//org.broadinstitute.hellbender.testutils.CommandLineProgramTester.runCommandLine(CommandLineProgramTester.java:112)
at app//org.broadinstitute.hellbender.tools.spark.pathseq.PathSeqPipelineSparkIntegrationTest.testPipelineTool(PathSeqPipelineSparkIntegrationTest.java:76)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at app//org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
at app//org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
at app//org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
at app//org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
at app//org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
at app//org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
at java.base@17.0.6/java.util.ArrayList.forEach(ArrayList.java:1511)
at app//org.testng.TestRunner.privateRun(TestRunner.java:829)
at app//org.testng.TestRunner.run(TestRunner.java:602)
at app//org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
at app//org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
at app//org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
at app//org.testng.SuiteRunner.run(SuiteRunner.java:330)
at app//org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
at app//org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
at app//org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
at app//org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
at app//org.testng.TestNG.runSuites(TestNG.java:1099)
at app//org.testng.TestNG.run(TestNG.java:1067)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Suppressed: java.lang.IllegalStateException: Cannot compute metrics if primary, pre-aligned host, quality, host, duplicate, or final paired read counts are not initialized
at org.broadinstitute.hellbender.tools.spark.pathseq.loggers.PSFilterMetrics.computeDerivedMetrics(PSFilterMetrics.java:75)
at org.broadinstitute.hellbender.tools.spark.pathseq.loggers.PSFilterFileLogger.close(PSFilterFileLogger.java:64)
at org.broadinstitute.hellbender.tools.spark.pathseq.PathSeqPipelineSpark.runTool(PathSeqPipelineSpark.java:236)
... 57 more
Caused by: java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64.Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24)
at org.broadinstitute.hellbender.tools.spark.pathseq.PSBwaFilter.<init>(PSBwaFilter.java:29)
at org.broadinstitute.hellbender.tools.spark.pathseq.PSFilter.lambda$doBwaFilter$9ded5e08$1(PSFilter.java:216)
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
testPipelineTool[1](pipeline_input_aligned.bam, pipeline_output_aligned.bam, pipeline_output_aligned.txt, pipeline_output_aligned.filter.metrics, pipeline_output_aligned.score.metrics, true)
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 399.0 failed 1 times, most recent failure: Lost task 0.0 in stage 399.0 (TID 418) (localhost executor driver): java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64.Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24)
at org.broadinstitute.hellbender.tools.spark.pathseq.PSBwaFilter.<init>(PSBwaFilter.java:29)
at org.broadinstitute.hellbender.tools.spark.pathseq.PSFilter.lambda$doBwaFilter$9ded5e08$1(PSFilter.java:216)
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Driver stacktrace:
at app//org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2844)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2780)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2779)
at app//scala.collection.immutable.List.foreach(List.scala:334)
at app//org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2779)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1242)
at app//scala.Option.foreach(Option.scala:437)
at app//org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3048)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2982)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2971)
at app//org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
at app//org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:984)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2398)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2419)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2438)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2463)
at app//org.apache.spark.rdd.RDD.count(RDD.scala:1293)
at app//org.apache.spark.api.java.JavaRDDLike.count(JavaRDDLike.scala:469)
at app//org.apache.spark.api.java.JavaRDDLike.count$(JavaRDDLike.scala:469)
at app//org.apache.spark.api.java.AbstractJavaRDDLike.count(JavaRDDLike.scala:45)
at app//org.broadinstitute.hellbender.tools.spark.pathseq.loggers.PSFilterFileLogger.logReadsAfterHostFilter(PSFilterFileLogger.java:47)
at app//org.broadinstitute.hellbender.tools.spark.pathseq.PSFilter.doFilter(PSFilter.java:284)
at app//org.broadinstitute.hellbender.tools.spark.pathseq.PathSeqPipelineSpark.runTool(PathSeqPipelineSpark.java:238)
at app//org.broadinstitute.hellbender.engine.spark.GATKSparkTool.runPipeline(GATKSparkTool.java:535)
at app//org.broadinstitute.hellbender.engine.spark.SparkCommandLineProgram.doWork(SparkCommandLineProgram.java:31)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.runTool(CommandLineProgram.java:150)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMainPostParseArgs(CommandLineProgram.java:203)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMain(CommandLineProgram.java:222)
at app//org.broadinstitute.hellbender.Main.runCommandLineProgram(Main.java:166)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:152)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:193)
at app//org.broadinstitute.hellbender.CommandLineProgramTest.runCommandLine(CommandLineProgramTest.java:27)
at app//org.broadinstitute.hellbender.testutils.CommandLineProgramTester.runCommandLine(CommandLineProgramTester.java:112)
at app//org.broadinstitute.hellbender.tools.spark.pathseq.PathSeqPipelineSparkIntegrationTest.testPipelineTool(PathSeqPipelineSparkIntegrationTest.java:76)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at app//org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
at app//org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
at app//org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
at app//org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
at app//org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
at app//org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
at java.base@17.0.6/java.util.ArrayList.forEach(ArrayList.java:1511)
at app//org.testng.TestRunner.privateRun(TestRunner.java:829)
at app//org.testng.TestRunner.run(TestRunner.java:602)
at app//org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
at app//org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
at app//org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
at app//org.testng.SuiteRunner.run(SuiteRunner.java:330)
at app//org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
at app//org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
at app//org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
at app//org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
at app//org.testng.TestNG.runSuites(TestNG.java:1099)
at app//org.testng.TestNG.run(TestNG.java:1067)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Suppressed: java.lang.IllegalStateException: Cannot compute metrics if primary, pre-aligned host, quality, host, duplicate, or final paired read counts are not initialized
at org.broadinstitute.hellbender.tools.spark.pathseq.loggers.PSFilterMetrics.computeDerivedMetrics(PSFilterMetrics.java:75)
at org.broadinstitute.hellbender.tools.spark.pathseq.loggers.PSFilterFileLogger.close(PSFilterFileLogger.java:64)
at org.broadinstitute.hellbender.tools.spark.pathseq.PathSeqPipelineSpark.runTool(PathSeqPipelineSpark.java:236)
... 57 more
Caused by: java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64.Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330)
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24)
at org.broadinstitute.hellbender.tools.spark.pathseq.PSBwaFilter.<init>(PSBwaFilter.java:29)
at org.broadinstitute.hellbender.tools.spark.pathseq.PSFilter.lambda$doBwaFilter$9ded5e08$1(PSFilter.java:216)
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855)
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Tests
| Test | Duration | Result |
| --- | --- | --- |
| testPipelineTool[0](pipeline_input.bam, pipeline_output.bam, pipeline_output.txt, pipeline_output.filter.metrics, pipeline_output.score.metrics, false) | 1.136s | failed |
| testPipelineTool[1](pipeline_input_aligned.bam, pipeline_output_aligned.bam, pipeline_output_aligned.txt, pipeline_output_aligned.filter.metrics, pipeline_output_aligned.score.metrics, true) | 0.532s | failed |
Standard error
17:46:48.175 ERROR Executor - Exception in task 0.0 in stage 394.0 (TID 413)
java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64.Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447) ~[gatk-bwamem-jni-1.0.4.jar:?]
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330) ~[gatk-bwamem-jni-1.0.4.jar:?]
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.pathseq.PSBwaFilter.<init>(PSBwaFilter.java:29) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.pathseq.PSFilter.lambda$doBwaFilter$9ded5e08$1(PSFilter.java:216) ~[main/:?]
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
17:46:48.176 ERROR TaskSetManager - Task 0 in stage 394.0 failed 1 times; aborting job
[October 27, 2025 at 5:46:48 PM UTC] org.broadinstitute.hellbender.tools.spark.pathseq.PathSeqPipelineSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=935329792
17:46:48.706 ERROR Executor - Exception in task 0.0 in stage 399.0 (TID 418)
java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64.Set property LIBBWA_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.loadNativeLibrary(BwaMemIndex.java:447) ~[gatk-bwamem-jni-1.0.4.jar:?]
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndex.<init>(BwaMemIndex.java:330) ~[gatk-bwamem-jni-1.0.4.jar:?]
at org.broadinstitute.hellbender.utils.bwa.BwaMemIndexCache.getInstance(BwaMemIndexCache.java:24) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.pathseq.PSBwaFilter.<init>(PSBwaFilter.java:29) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.pathseq.PSFilter.lambda$doBwaFilter$9ded5e08$1(PSFilter.java:216) ~[main/:?]
at org.apache.spark.api.java.JavaRDDLike.$anonfun$mapPartitions$1(JavaRDDLike.scala:153) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:855) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:855) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.rdd.RDD.iterator(RDD.scala:328) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
17:46:48.708 ERROR TaskSetManager - Task 0 in stage 399.0 failed 1 times; aborting job
[October 27, 2025 at 5:46:48 PM UTC] org.broadinstitute.hellbender.tools.spark.pathseq.PathSeqPipelineSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=935329792
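Both failures are environmental rather than test logic errors: the bundled fermi-lite/bwa JNI binaries ship only for x86_64/amd64, so on an aarch64 host the LIBBWA_PATH property must point at a natively built libbwa shared library before any BwaMemIndex is constructed. The snippet below is a minimal sketch of that idea, assuming LIBBWA_PATH is read as a JVM system property (as the error message suggests) and that `/opt/bwa-jni/libbwa.so` is a hypothetical location of a library built for aarch64; it is not part of the GATK code base or test harness.

```java
// Minimal sketch (hypothetical, not part of GATK): make the bwa-mem JNI loader use a
// natively compiled libbwa on aarch64 by setting LIBBWA_PATH before any BwaMemIndex
// is created. Build gatk-bwamem-jni for your architecture and substitute the real
// path to the resulting shared object.
public final class Aarch64BwaLauncher {
    public static void main(final String[] args) {
        // Assumption: BwaMemIndex.loadNativeLibrary honors the LIBBWA_PATH system
        // property, per the IllegalStateException message in the stack traces above.
        System.setProperty("LIBBWA_PATH", "/opt/bwa-jni/libbwa.so"); // hypothetical path
        // Run the tool in the same JVM so the property is visible to the loader, e.g.:
        // org.broadinstitute.hellbender.Main.main(args);
    }
}
```

When the integration tests run under Gradle, the property would also have to reach the test worker JVM (for example via a `systemProperty` entry on the test task), since a property set only on the Gradle command line is not automatically forwarded to forked test workers; the exact wiring is build-specific.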