Class org.broadinstitute.hellbender.tools.spark.sv.integration.StructuralVariationDiscoveryPipelineSparkIntegrationTest
Failed tests
testSVDiscoverPipelineRunnableLocal[0](StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs{bam-loc='/home/runner/work/gatk/gatk/src/test/resources/large//sv/SVIntegrationTest_hg19.bam', kmer-ignore-list-loc='src/test/resources/org/broadinstitute/hellbender/tools/spark/sv/integration/inputs/dummy.kill.kmers', aligner-fef-index-img-loc='/home/runner/work/gatk/gatk/src/test/resources/large/human_g1k_v37.20.21.fasta.img', cnv-calls-loc='src/test/resources/org/broadinstitute/hellbender/tools/spark/sv/integration/inputs/hg19_DEL_cnv_calls.vcf', output-dir='/tmp/new921381074256615202'})
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 2389.0 failed 1 times, most recent failure: Lost task 0.0 in stage 2389.0 (TID 1892) (localhost executor driver): java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBFML_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.loadNativeLibrary(FermiLiteAssembler.java:137)
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.<init>(FermiLiteAssembler.java:24)
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:72)
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:23)
at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070)
at scala.collection.Iterator$$anon$9.next(Iterator.scala:584)
at scala.collection.mutable.Growable.addAll(Growable.scala:62)
at scala.collection.mutable.Growable.addAll$(Growable.scala:57)
at scala.collection.mutable.ArrayBuilder.addAll(ArrayBuilder.scala:75)
at scala.collection.IterableOnceOps.toArray(IterableOnce.scala:1505)
at scala.collection.IterableOnceOps.toArray$(IterableOnce.scala:1498)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1303)
at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)
at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Driver stacktrace:
at app//org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2844)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2780)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2779)
at app//scala.collection.immutable.List.foreach(List.scala:334)
at app//org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2779)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1242)
at app//scala.Option.foreach(Option.scala:437)
at app//org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3048)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2982)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2971)
at app//org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
at app//org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:984)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2398)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2419)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2438)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2463)
at app//org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1046)
at app//org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at app//org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at app//org.apache.spark.rdd.RDD.withScope(RDD.scala:407)
at app//org.apache.spark.rdd.RDD.collect(RDD.scala:1045)
at app//org.apache.spark.api.java.JavaRDDLike.collect(JavaRDDLike.scala:362)
at app//org.apache.spark.api.java.JavaRDDLike.collect$(JavaRDDLike.scala:361)
at app//org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
at app//org.broadinstitute.hellbender.tools.spark.sv.evidence.FindBreakpointEvidenceSpark.handleAssemblies(FindBreakpointEvidenceSpark.java:596)
at app//org.broadinstitute.hellbender.tools.spark.sv.evidence.FindBreakpointEvidenceSpark.gatherEvidenceAndWriteContigSamFile(FindBreakpointEvidenceSpark.java:189)
at app//org.broadinstitute.hellbender.tools.spark.sv.StructuralVariationDiscoveryPipelineSpark.runTool(StructuralVariationDiscoveryPipelineSpark.java:163)
at app//org.broadinstitute.hellbender.engine.spark.GATKSparkTool.runPipeline(GATKSparkTool.java:535)
at app//org.broadinstitute.hellbender.engine.spark.SparkCommandLineProgram.doWork(SparkCommandLineProgram.java:31)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.runTool(CommandLineProgram.java:150)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMainPostParseArgs(CommandLineProgram.java:203)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMain(CommandLineProgram.java:222)
at app//org.broadinstitute.hellbender.Main.runCommandLineProgram(Main.java:166)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:152)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:193)
at app//org.broadinstitute.hellbender.CommandLineProgramTest.runCommandLine(CommandLineProgramTest.java:27)
at app//org.broadinstitute.hellbender.tools.spark.sv.integration.StructuralVariationDiscoveryPipelineSparkIntegrationTest.testSVDiscoverPipelineRunnableLocal(StructuralVariationDiscoveryPipelineSparkIntegrationTest.java:105)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at app//org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
at app//org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
at app//org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
at app//org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
at app//org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
at app//org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
at java.base@17.0.6/java.util.ArrayList.forEach(ArrayList.java:1511)
at app//org.testng.TestRunner.privateRun(TestRunner.java:829)
at app//org.testng.TestRunner.run(TestRunner.java:602)
at app//org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
at app//org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
at app//org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
at app//org.testng.SuiteRunner.run(SuiteRunner.java:330)
at app//org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
at app//org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
at app//org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
at app//org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
at app//org.testng.TestNG.runSuites(TestNG.java:1099)
at app//org.testng.TestNG.run(TestNG.java:1067)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBFML_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.loadNativeLibrary(FermiLiteAssembler.java:137)
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.<init>(FermiLiteAssembler.java:24)
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:72)
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:23)
at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070)
at scala.collection.Iterator$$anon$9.next(Iterator.scala:584)
at scala.collection.mutable.Growable.addAll(Growable.scala:62)
at scala.collection.mutable.Growable.addAll$(Growable.scala:57)
at scala.collection.mutable.ArrayBuilder.addAll(ArrayBuilder.scala:75)
at scala.collection.IterableOnceOps.toArray(IterableOnce.scala:1505)
at scala.collection.IterableOnceOps.toArray$(IterableOnce.scala:1498)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1303)
at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)
at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
testSVDiscoverPipelineRunnableMiniCluster[0](StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs{bam-loc='/home/runner/work/gatk/gatk/src/test/resources/large//sv/SVIntegrationTest_hg19.bam', kmer-ignore-list-loc='src/test/resources/org/broadinstitute/hellbender/tools/spark/sv/integration/inputs/dummy.kill.kmers', aligner-fef-index-img-loc='/home/runner/work/gatk/gatk/src/test/resources/large/human_g1k_v37.20.21.fasta.img', cnv-calls-loc='src/test/resources/org/broadinstitute/hellbender/tools/spark/sv/integration/inputs/hg19_DEL_cnv_calls.vcf', output-dir='/tmp/new10220892705178568894'})
org.apache.spark.SparkException: Job aborted due to stage failure: Task 2 in stage 2402.0 failed 1 times, most recent failure: Lost task 2.0 in stage 2402.0 (TID 1911) (localhost executor driver): java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBFML_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.loadNativeLibrary(FermiLiteAssembler.java:137)
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.<init>(FermiLiteAssembler.java:24)
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:72)
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:23)
at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070)
at scala.collection.Iterator$$anon$9.next(Iterator.scala:584)
at scala.collection.mutable.Growable.addAll(Growable.scala:62)
at scala.collection.mutable.Growable.addAll$(Growable.scala:57)
at scala.collection.mutable.ArrayBuilder.addAll(ArrayBuilder.scala:75)
at scala.collection.IterableOnceOps.toArray(IterableOnce.scala:1505)
at scala.collection.IterableOnceOps.toArray$(IterableOnce.scala:1498)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1303)
at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)
at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Driver stacktrace:
at app//org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2844)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2780)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2779)
at app//scala.collection.immutable.List.foreach(List.scala:334)
at app//org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2779)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1242)
at app//scala.Option.foreach(Option.scala:437)
at app//org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1242)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3048)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2982)
at app//org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2971)
at app//org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
at app//org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:984)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2398)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2419)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2438)
at app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2463)
at app//org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1046)
at app//org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at app//org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at app//org.apache.spark.rdd.RDD.withScope(RDD.scala:407)
at app//org.apache.spark.rdd.RDD.collect(RDD.scala:1045)
at app//org.apache.spark.api.java.JavaRDDLike.collect(JavaRDDLike.scala:362)
at app//org.apache.spark.api.java.JavaRDDLike.collect$(JavaRDDLike.scala:361)
at app//org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
at app//org.broadinstitute.hellbender.tools.spark.sv.evidence.FindBreakpointEvidenceSpark.handleAssemblies(FindBreakpointEvidenceSpark.java:596)
at app//org.broadinstitute.hellbender.tools.spark.sv.evidence.FindBreakpointEvidenceSpark.gatherEvidenceAndWriteContigSamFile(FindBreakpointEvidenceSpark.java:189)
at app//org.broadinstitute.hellbender.tools.spark.sv.StructuralVariationDiscoveryPipelineSpark.runTool(StructuralVariationDiscoveryPipelineSpark.java:163)
at app//org.broadinstitute.hellbender.engine.spark.GATKSparkTool.runPipeline(GATKSparkTool.java:535)
at app//org.broadinstitute.hellbender.engine.spark.SparkCommandLineProgram.doWork(SparkCommandLineProgram.java:31)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.runTool(CommandLineProgram.java:150)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMainPostParseArgs(CommandLineProgram.java:203)
at app//org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMain(CommandLineProgram.java:222)
at app//org.broadinstitute.hellbender.Main.runCommandLineProgram(Main.java:166)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:152)
at app//org.broadinstitute.hellbender.Main.instanceMain(Main.java:193)
at app//org.broadinstitute.hellbender.CommandLineProgramTest.runCommandLine(CommandLineProgramTest.java:27)
at app//org.broadinstitute.hellbender.tools.spark.sv.integration.StructuralVariationDiscoveryPipelineSparkIntegrationTest.lambda$testSVDiscoverPipelineRunnableMiniCluster$0(StructuralVariationDiscoveryPipelineSparkIntegrationTest.java:175)
at app//org.broadinstitute.hellbender.testutils.MiniClusterUtils.runOnIsolatedMiniCluster(MiniClusterUtils.java:75)
at app//org.broadinstitute.hellbender.tools.spark.sv.integration.StructuralVariationDiscoveryPipelineSparkIntegrationTest.testSVDiscoverPipelineRunnableMiniCluster(StructuralVariationDiscoveryPipelineSparkIntegrationTest.java:124)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at app//org.testng.internal.invokers.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:139)
at app//org.testng.internal.invokers.TestInvoker.invokeMethod(TestInvoker.java:677)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethod(TestInvoker.java:221)
at app//org.testng.internal.invokers.MethodRunner.runInSequence(MethodRunner.java:50)
at app//org.testng.internal.invokers.TestInvoker$MethodInvocationAgent.invoke(TestInvoker.java:969)
at app//org.testng.internal.invokers.TestInvoker.invokeTestMethods(TestInvoker.java:194)
at app//org.testng.internal.invokers.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:148)
at app//org.testng.internal.invokers.TestMethodWorker.run(TestMethodWorker.java:128)
at java.base@17.0.6/java.util.ArrayList.forEach(ArrayList.java:1511)
at app//org.testng.TestRunner.privateRun(TestRunner.java:829)
at app//org.testng.TestRunner.run(TestRunner.java:602)
at app//org.testng.SuiteRunner.runTest(SuiteRunner.java:437)
at app//org.testng.SuiteRunner.runSequentially(SuiteRunner.java:431)
at app//org.testng.SuiteRunner.privateRun(SuiteRunner.java:391)
at app//org.testng.SuiteRunner.run(SuiteRunner.java:330)
at app//org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
at app//org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:95)
at app//org.testng.TestNG.runSuitesSequentially(TestNG.java:1256)
at app//org.testng.TestNG.runSuitesLocally(TestNG.java:1176)
at app//org.testng.TestNG.runSuites(TestNG.java:1099)
at app//org.testng.TestNG.run(TestNG.java:1067)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.runTests(TestNGTestClassProcessor.java:153)
at org.gradle.api.internal.tasks.testing.testng.TestNGTestClassProcessor.stop(TestNGTestClassProcessor.java:95)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.stop(SuiteTestClassProcessor.java:63)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base@17.0.6/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base@17.0.6/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base@17.0.6/java.lang.reflect.Method.invoke(Method.java:568)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:92)
at jdk.proxy1/jdk.proxy1.$Proxy4.stop(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$3.run(TestWorker.java:200)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:132)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:103)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:63)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:121)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:71)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at app//worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBFML_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.loadNativeLibrary(FermiLiteAssembler.java:137)
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.<init>(FermiLiteAssembler.java:24)
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:72)
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:23)
at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070)
at scala.collection.Iterator$$anon$9.next(Iterator.scala:584)
at scala.collection.mutable.Growable.addAll(Growable.scala:62)
at scala.collection.mutable.Growable.addAll$(Growable.scala:57)
at scala.collection.mutable.ArrayBuilder.addAll(ArrayBuilder.scala:75)
at scala.collection.IterableOnceOps.toArray(IterableOnce.scala:1505)
at scala.collection.IterableOnceOps.toArray$(IterableOnce.scala:1498)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1303)
at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046)
at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
at org.apache.spark.scheduler.Task.run(Task.scala:141)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
Tests
| Test | Duration | Result |
| --- | --- | --- |
| testSVDiscoverPipelineRunnableLocal[0](StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs{bam-loc='/home/runner/work/gatk/gatk/src/test/resources/large//sv/SVIntegrationTest_hg19.bam', kmer-ignore-list-loc='src/test/resources/org/broadinstitute/hellbender/tools/spark/sv/integration/inputs/dummy.kill.kmers', aligner-fef-index-img-loc='/home/runner/work/gatk/gatk/src/test/resources/large/human_g1k_v37.20.21.fasta.img', cnv-calls-loc='src/test/resources/org/broadinstitute/hellbender/tools/spark/sv/integration/inputs/hg19_DEL_cnv_calls.vcf', output-dir='/tmp/new921381074256615202'}) | 1.872s | failed |
| testSVDiscoverPipelineRunnableMiniCluster[0](StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs{bam-loc='/home/runner/work/gatk/gatk/src/test/resources/large//sv/SVIntegrationTest_hg19.bam', kmer-ignore-list-loc='src/test/resources/org/broadinstitute/hellbender/tools/spark/sv/integration/inputs/dummy.kill.kmers', aligner-fef-index-img-loc='/home/runner/work/gatk/gatk/src/test/resources/large/human_g1k_v37.20.21.fasta.img', cnv-calls-loc='src/test/resources/org/broadinstitute/hellbender/tools/spark/sv/integration/inputs/hg19_DEL_cnv_calls.vcf', output-dir='/tmp/new10220892705178568894'}) | 4.134s | failed |
Standard error
17:48:36.849 ERROR Executor - Exception in task 1.0 in stage 2389.0 (TID 1893)
java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBFML_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.loadNativeLibrary(FermiLiteAssembler.java:137) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.<init>(FermiLiteAssembler.java:24) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:72) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:23) ~[main/:?]
at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at scala.collection.Iterator$$anon$9.next(Iterator.scala:584) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll(Growable.scala:62) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll$(Growable.scala:57) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.ArrayBuilder.addAll(ArrayBuilder.scala:75) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray(IterableOnce.scala:1505) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray$(IterableOnce.scala:1498) ~[scala-library-2.13.14.jar:?]
at scala.collection.AbstractIterator.toArray(Iterator.scala:1303) ~[scala-library-2.13.14.jar:?]
at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
17:48:36.850 ERROR Executor - Exception in task 0.0 in stage 2389.0 (TID 1892)
java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBFML_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.loadNativeLibrary(FermiLiteAssembler.java:137) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.<init>(FermiLiteAssembler.java:24) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:72) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:23) ~[main/:?]
at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at scala.collection.Iterator$$anon$9.next(Iterator.scala:584) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll(Growable.scala:62) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll$(Growable.scala:57) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.ArrayBuilder.addAll(ArrayBuilder.scala:75) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray(IterableOnce.scala:1505) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray$(IterableOnce.scala:1498) ~[scala-library-2.13.14.jar:?]
at scala.collection.AbstractIterator.toArray(Iterator.scala:1303) ~[scala-library-2.13.14.jar:?]
at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
17:48:36.849 ERROR Executor - Exception in task 2.0 in stage 2389.0 (TID 1894)
java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBFML_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.loadNativeLibrary(FermiLiteAssembler.java:137) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.<init>(FermiLiteAssembler.java:24) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:72) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:23) ~[main/:?]
at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at scala.collection.Iterator$$anon$9.next(Iterator.scala:584) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll(Growable.scala:62) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll$(Growable.scala:57) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.ArrayBuilder.addAll(ArrayBuilder.scala:75) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray(IterableOnce.scala:1505) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray$(IterableOnce.scala:1498) ~[scala-library-2.13.14.jar:?]
at scala.collection.AbstractIterator.toArray(Iterator.scala:1303) ~[scala-library-2.13.14.jar:?]
at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
17:48:36.851 ERROR TaskSetManager - Task 0 in stage 2389.0 failed 1 times; aborting job
[October 27, 2025 at 5:48:36 PM UTC] org.broadinstitute.hellbender.tools.spark.sv.StructuralVariationDiscoveryPipelineSpark done. Elapsed time: 0.03 minutes.
Runtime.totalMemory()=2260729856
17:48:40.964 ERROR Executor - Exception in task 2.0 in stage 2402.0 (TID 1911)
java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBFML_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.loadNativeLibrary(FermiLiteAssembler.java:137) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.<init>(FermiLiteAssembler.java:24) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:72) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:23) ~[main/:?]
at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at scala.collection.Iterator$$anon$9.next(Iterator.scala:584) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll(Growable.scala:62) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll$(Growable.scala:57) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.ArrayBuilder.addAll(ArrayBuilder.scala:75) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray(IterableOnce.scala:1505) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray$(IterableOnce.scala:1498) ~[scala-library-2.13.14.jar:?]
at scala.collection.AbstractIterator.toArray(Iterator.scala:1303) ~[scala-library-2.13.14.jar:?]
at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
17:48:40.964 ERROR Executor - Exception in task 0.0 in stage 2402.0 (TID 1909)
java.lang.IllegalStateException: We have pre-built fermi-lite binaries only for x86_64 and amd64. Your os.arch is aarch64. Set property LIBFML_PATH to point to a native library for your architecture.
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.loadNativeLibrary(FermiLiteAssembler.java:137) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler.<init>(FermiLiteAssembler.java:24) ~[gatk-fermilite-jni-1.2.0.jar:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:72) ~[main/:?]
at org.broadinstitute.hellbender.tools.spark.sv.evidence.FermiLiteAssemblyHandler.apply(FermiLiteAssemblyHandler.java:23) ~[main/:?]
at org.apache.spark.api.java.JavaPairRDD$.$anonfun$toScalaFunction$1(JavaPairRDD.scala:1070) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at scala.collection.Iterator$$anon$9.next(Iterator.scala:584) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll(Growable.scala:62) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.Growable.addAll$(Growable.scala:57) ~[scala-library-2.13.14.jar:?]
at scala.collection.mutable.ArrayBuilder.addAll(ArrayBuilder.scala:75) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray(IterableOnce.scala:1505) ~[scala-library-2.13.14.jar:?]
at scala.collection.IterableOnceOps.toArray$(IterableOnce.scala:1498) ~[scala-library-2.13.14.jar:?]
at scala.collection.AbstractIterator.toArray(Iterator.scala:1303) ~[scala-library-2.13.14.jar:?]
at org.apache.spark.rdd.RDD.$anonfun$collect$2(RDD.scala:1046) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.SparkContext.$anonfun$runJob$5(SparkContext.scala:2438) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
17:48:40.965 ERROR TaskSetManager - Task 2 in stage 2402.0 failed 1 times; aborting job
[October 27, 2025 at 5:48:40 PM UTC] org.broadinstitute.hellbender.tools.spark.sv.StructuralVariationDiscoveryPipelineSpark done. Elapsed time: 0.04 minutes.
Runtime.totalMemory()=2178940928
17:48:40.972 ERROR DataNode - Command processor encountered interrupt and exit.
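Note on the root cause: every failure above traces back to the same IllegalStateException, raised because the bundled fermi-lite JNI library ships pre-built only for x86_64/amd64 while the CI runner is aarch64. The exception names a JVM system property, LIBFML_PATH, as the escape hatch. The sketch below is not part of the test report; it assumes, as the message implies, that gatk-fermilite-jni consults that property before falling back to its bundled binaries. The class name and library path are hypothetical, and the constructor call only illustrates where the property must already be set.

```java
// Hedged sketch, not part of the test report: assumes gatk-fermilite-jni honors the
// LIBFML_PATH system property named in the IllegalStateException above. The path is
// hypothetical and must point to a fermi-lite JNI library built for aarch64.
public final class LibFmlPathWorkaround {
    public static void main(final String[] args) {
        // Must be set before the first FermiLiteAssembler is created, because the
        // native library is loaded from the constructor (FermiLiteAssembler.java:24
        // in the traces above).
        System.setProperty("LIBFML_PATH", "/opt/fermi-lite/libfml_aarch64.so"); // hypothetical path

        // With a valid aarch64 build of the fermi-lite JNI library at that path,
        // constructing the assembler should succeed instead of throwing the
        // IllegalStateException seen in the failures above.
        new org.broadinstitute.hellbender.utils.fermi.FermiLiteAssembler();
        System.out.println("fermi-lite native library loaded via LIBFML_PATH");
    }
}
```

For the failing Gradle tests, the property would need to reach the JVM that actually constructs FermiLiteAssembler (the test worker in local mode, the Spark executor JVMs otherwise), for example via a systemProperty entry on the Gradle test task; treat that as an assumption about the build setup rather than a verified command.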