Class org.broadinstitute.hellbender.tools.spark.pipelines.MarkDuplicatesSparkIntegrationTest

152 tests, 0 failures, 0 ignored, 1m0.92s duration, 100% successful

Tests

Test Duration Result
assertThatExpectedOutputUpdateToggleIsDisabled 0s passed
testAssertAssumeUnsortedFilesAreQueryGroupedFiles 0.549s passed
testAssertCorrectSortOrderMultipleBams 0.123s passed
testAssertCorrectSortOrderMultipleBamsOverriding 0.508s passed
testBulkFragmentsNoDuplicates 0.995s passed
testBulkFragmentsWithDuplicates 2.041s passed
testDifferentChromosomesInOppositeOrder 0.302s passed
testDuplicateDetectionDataProviderWithMetrics[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.chr1.1-1K.unmarkedDups.markDuplicate.metrics) 0.286s passed
testDuplicateDetectionDataProviderWithMetrics[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.optical_dupes.markDuplicate.metrics) 0.252s passed
testDuplicateDetectionDataProviderWithMetrics[2](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.inputSingleLibrarySolexa16404.metrics) 0.278s passed
testFlowModeFlag 1.587s passed
testHashCollisionHandling 1.127s passed
testMDOrder[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug.bam) 0.352s passed
testMDOrder[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug2.bam) 0.285s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[0](true, true, true) 0.296s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[1](true, false, true) 0.302s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[2](false, true, true) 0.309s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[3](true, true, false) 0.328s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[4](true, false, false) 0.304s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[5](false, true, false) 0.318s passed
testMappedFragmentAndMappedPairFirstOfPairNonPrimary 0.310s passed
testMappedFragmentAndMatePairFirstUnmapped 0.309s passed
testMappedFragmentAndMatePairSecondUnmapped 0.324s passed
testMappedPairAndMappedFragmentAndMatePairFirstUnmapped 0.305s passed
testMappedPairAndMappedFragmentAndMatePairSecondUnmapped 0.336s passed
testMappedPairAndMatePairFirstOppositeStrandSecondUnmapped 0.297s passed
testMappedPairAndMatePairFirstUnmapped 0.309s passed
testMappedPairAndMatePairSecondUnmapped 0.302s passed
testMappedPairWithFirstEndSamePositionAndOther 0.302s passed
testMappedPairWithSamePosition 0.310s passed
testMappedPairWithSamePositionSameCigar 0.306s passed
testMarkDuplicatesSparkDeletingDuplicateReads[0]([Ljava.io.File;@51e9ffb4, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.381s passed
testMarkDuplicatesSparkDeletingDuplicateReads[1]([Ljava.io.File;@16c116a8, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.380s passed
testMarkDuplicatesSparkDeletingDuplicateReads[2]([Ljava.io.File;@2adb0932, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.397s passed
testMarkDuplicatesSparkDeletingDuplicateReads[3]([Ljava.io.File;@28b51c55, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.349s passed
testMarkDuplicatesSparkDeletingDuplicateReads[4]([Ljava.io.File;@20c4b01e, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.336s passed
testMarkDuplicatesSparkDeletingDuplicateReads[5]([Ljava.io.File;@13aeea16, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.201s passed
testMarkDuplicatesSparkDeletingDuplicateReads[6]([Ljava.io.File;@1230436f, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.162s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[0]([Ljava.io.File;@4166cfba, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.376s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[1]([Ljava.io.File;@36cb55ef, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.392s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[2]([Ljava.io.File;@68623472, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.391s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[3]([Ljava.io.File;@5a9eeaca, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.337s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[4]([Ljava.io.File;@6f3c6214, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.340s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[5]([Ljava.io.File;@2eb84ddd, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.194s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[6]([Ljava.io.File;@ea0cef7, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.217s passed
testMarkDuplicatesSparkIntegrationTestLocal[0]([Ljava.io.File;@1dcea992, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.342s passed
testMarkDuplicatesSparkIntegrationTestLocal[1]([Ljava.io.File;@7b7dd9d8, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.372s passed
testMarkDuplicatesSparkIntegrationTestLocal[2]([Ljava.io.File;@27d9b7f8, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.388s passed
testMarkDuplicatesSparkIntegrationTestLocal[3]([Ljava.io.File;@50840185, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.329s passed
testMarkDuplicatesSparkIntegrationTestLocal[4]([Ljava.io.File;@46eaacb4, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.344s passed
testMarkDuplicatesSparkIntegrationTestLocal[5]([Ljava.io.File;@9ff10d3, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.178s passed
testMarkDuplicatesSparkIntegrationTestLocal[6]([Ljava.io.File;@5b3a2e5, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.233s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[0]([Ljava.io.File;@c5119ef, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.351s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[1]([Ljava.io.File;@5e050e90, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.364s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[2]([Ljava.io.File;@df207ba, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.355s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[3]([Ljava.io.File;@48e0459, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.338s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[4]([Ljava.io.File;@54ed2587, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.326s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[5]([Ljava.io.File;@39032698, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.139s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[6]([Ljava.io.File;@4a6d89e6, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.182s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[0]([Ljava.io.File;@28a71692, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.344s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[1]([Ljava.io.File;@4b507338, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.371s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[2]([Ljava.io.File;@303e73c7, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.367s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[3]([Ljava.io.File;@7c43125d, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.320s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[4]([Ljava.io.File;@48320c90, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.332s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[5]([Ljava.io.File;@dc26fd3, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.175s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[6]([Ljava.io.File;@2f668033, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.187s passed
testMatePairFirstUnmapped 0.299s passed
testMatePairSecondUnmapped 0.303s passed
testNoReadGroupInRead 0.235s passed
testNonExistantReadGroupInRead 0.235s passed
testNullOpticalDuplicates 0.328s passed
testOpticalDuplicateClusterOneEndSamePositionOneCluster 0.305s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicatesWithinPixelDistance 0.296s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.308s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.314s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.316s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.305s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.319s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.297s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.310s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.305s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.316s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.301s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.307s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.302s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.315s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.299s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.306s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.303s passed
testOpticalDuplicateClusterSamePositionOneOpticalDuplicatesWithinPixelDistance 0.314s passed
testOpticalDuplicateClustersAddingSecondEndFirstSameCoordinate 0.303s passed
testOpticalDuplicateFinding 0.298s passed
testOpticalDuplicateFindingPxelDistance 0.301s passed
testOpticalDuplicatesAndPCRDuplicatesOrientationDifference 0.313s passed
testOpticalDuplicatesDifferentReadGroups 0.309s passed
testOpticalDuplicatesTheSameReadGroup 0.304s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.300s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.298s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.310s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.322s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.300s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.302s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.303s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.311s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.298s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.301s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.311s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.306s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.299s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.313s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.320s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.305s passed
testPathologicalOrderingAtTheSamePosition 0.317s passed
testReadSameStartPositionOrientationOverride 0.298s passed
testReadsHaveSameStartPositionButDifferentChromosomeNonEquivalence 0.300s passed
testSecondEndIsBeforeFirstInCoordinate 0.313s passed
testSingleMappedFragment 0.319s passed
testSingleMappedFragmentAndSingleMappedPair 0.299s passed
testSingleMappedFragmentAndTwoMappedPairs 0.317s passed
testSingleMappedPair 0.307s passed
testSingleUnmappedFragment 0.313s passed
testSingleUnmappedPair 0.295s passed
testStackOverFlowPairSetSwap 0.309s passed
testSupplementaryReadUnmappedMate 0.313s passed
testThreeGroupsOnDifferentChromosomesOfThreeMappedPairs 0.306s passed
testThreeMappedPairs 0.300s passed
testThreeMappedPairsWithMatchingSecondMate 0.297s passed
testTwoGroupsOnDifferentChromosomesOfThreeMappedPairs 0.305s passed
testTwoGroupsOnDifferentChromosomesOfTwoFragments 0.321s passed
testTwoGroupsOnDifferentChromosomesOfTwoMappedPairs 0.299s passed
testTwoMappedFragments 0.297s passed
testTwoMappedPairWithSamePosition 0.313s passed
testTwoMappedPairWithSamePositionDifferentStrands 0.311s passed
testTwoMappedPairWithSamePositionDifferentStrands2 0.302s passed
testTwoMappedPairs 0.312s passed
testTwoMappedPairsAndMappedSecondaryFragment 0.305s passed
testTwoMappedPairsAndTerminalUnmappedFragment 0.303s passed
testTwoMappedPairsAndTerminalUnmappedPair 0.313s passed
testTwoMappedPairsMatesSoftClipped 0.322s passed
testTwoMappedPairsWithOppositeOrientations 0.300s passed
testTwoMappedPairsWithOppositeOrientationsNumberTwo 0.310s passed
testTwoMappedPairsWithSoftClipping 0.299s passed
testTwoMappedPairsWithSoftClippingBoth 0.307s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnly 0.293s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnlyNoMateCigar 0.302s passed
testTwoMappedPairsWithSupplementaryReads[0](true, true, true) 0.302s passed
testTwoMappedPairsWithSupplementaryReads[1](true, false, true) 0.304s passed
testTwoMappedPairsWithSupplementaryReads[2](false, true, true) 0.294s passed
testTwoMappedPairsWithSupplementaryReads[3](true, true, false) 0.296s passed
testTwoMappedPairsWithSupplementaryReads[4](true, false, false) 0.314s passed
testTwoMappedPairsWithSupplementaryReads[5](false, true, false) 0.295s passed
testTwoUnmappedFragments 0.292s passed
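
All 152 tests above passed. As a convenience (not part of the report itself), a single test class like this one can usually be re-run in isolation from a GATK checkout using Gradle's standard test filter; the exact task configuration may differ in your environment:

    ./gradlew test --tests org.broadinstitute.hellbender.tools.spark.pipelines.MarkDuplicatesSparkIntegrationTest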

Standard error

[May 27, 2025 at 4:35:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1690304512
[May 27, 2025 at 4:35:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1690304512
[May 27, 2025 at 4:35:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
16:35:15.464 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:15.464 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
16:35:15.464 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:35:15.464 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
16:35:15.464 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
16:35:15.464 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:35:15 PM UTC
16:35:15.464 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:15.464 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:15.465 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:35:15.465 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:35:15.465 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:35:15.465 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:35:15.465 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:35:15.465 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:35:15.465 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:35:15.465 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:35:15.465 INFO  MarkDuplicatesSpark - Initializing engine
16:35:15.465 INFO  MarkDuplicatesSpark - Done initializing engine
16:35:15.465 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:35:15.468 INFO  MemoryStore - Block broadcast_1371 stored as values in memory (estimated size 306.3 KiB, free 1898.9 MiB)
16:35:15.478 INFO  MemoryStore - Block broadcast_1371_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1898.8 MiB)
16:35:15.478 INFO  BlockManagerInfo - Added broadcast_1371_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1918.8 MiB)
16:35:15.478 INFO  SparkContext - Created broadcast 1371 from newAPIHadoopFile at PathSplitSource.java:96
16:35:15.498 INFO  MemoryStore - Block broadcast_1372 stored as values in memory (estimated size 306.3 KiB, free 1898.5 MiB)
16:35:15.505 INFO  MemoryStore - Block broadcast_1372_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1898.4 MiB)
16:35:15.505 INFO  BlockManagerInfo - Added broadcast_1372_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1918.7 MiB)
16:35:15.505 INFO  SparkContext - Created broadcast 1372 from newAPIHadoopFile at PathSplitSource.java:96
16:35:15.538 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:35:15.539 INFO  FileInputFormat - Total input files to process : 1
16:35:15.540 INFO  DAGScheduler - Registering RDD 3483 (mapToPair at SparkUtils.java:161) as input to shuffle 100
16:35:15.540 INFO  DAGScheduler - Got job 423 (collect at SparkUtils.java:205) with 1 output partitions
16:35:15.540 INFO  DAGScheduler - Final stage: ResultStage 735 (collect at SparkUtils.java:205)
16:35:15.540 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 734)
16:35:15.540 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 734)
16:35:15.541 INFO  DAGScheduler - Submitting ShuffleMapStage 734 (MapPartitionsRDD[3483] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:15.558 INFO  MemoryStore - Block broadcast_1373 stored as values in memory (estimated size 460.4 KiB, free 1898.0 MiB)
16:35:15.561 INFO  MemoryStore - Block broadcast_1373_piece0 stored as bytes in memory (estimated size 202.5 KiB, free 1897.8 MiB)
16:35:15.561 INFO  BlockManagerInfo - Added broadcast_1373_piece0 in memory on localhost:46629 (size: 202.5 KiB, free: 1918.5 MiB)
16:35:15.561 INFO  SparkContext - Created broadcast 1373 from broadcast at DAGScheduler.scala:1580
16:35:15.562 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 734 (MapPartitionsRDD[3483] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:15.562 INFO  TaskSchedulerImpl - Adding task set 734.0 with 1 tasks resource profile 0
16:35:15.562 INFO  TaskSetManager - Starting task 0.0 in stage 734.0 (TID 676) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9735 bytes) 
16:35:15.564 INFO  Executor - Running task 0.0 in stage 734.0 (TID 676)
16:35:15.598 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam:0+1507
16:35:15.603 INFO  Executor - Finished task 0.0 in stage 734.0 (TID 676). 1148 bytes result sent to driver
16:35:15.604 INFO  TaskSetManager - Finished task 0.0 in stage 734.0 (TID 676) in 42 ms on localhost (executor driver) (1/1)
16:35:15.604 INFO  TaskSchedulerImpl - Removed TaskSet 734.0, whose tasks have all completed, from pool 
16:35:15.604 INFO  DAGScheduler - ShuffleMapStage 734 (mapToPair at SparkUtils.java:161) finished in 0.063 s
16:35:15.604 INFO  DAGScheduler - looking for newly runnable stages
16:35:15.604 INFO  DAGScheduler - running: HashSet()
16:35:15.604 INFO  DAGScheduler - waiting: HashSet(ResultStage 735)
16:35:15.605 INFO  DAGScheduler - failed: HashSet()
16:35:15.605 INFO  DAGScheduler - Submitting ResultStage 735 (MapPartitionsRDD[3486] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:35:15.605 INFO  MemoryStore - Block broadcast_1374 stored as values in memory (estimated size 7.4 KiB, free 1897.8 MiB)
16:35:15.606 INFO  MemoryStore - Block broadcast_1374_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1897.8 MiB)
16:35:15.606 INFO  BlockManagerInfo - Added broadcast_1374_piece0 in memory on localhost:46629 (size: 4.1 KiB, free: 1918.5 MiB)
16:35:15.606 INFO  SparkContext - Created broadcast 1374 from broadcast at DAGScheduler.scala:1580
16:35:15.606 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 735 (MapPartitionsRDD[3486] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:35:15.607 INFO  TaskSchedulerImpl - Adding task set 735.0 with 1 tasks resource profile 0
16:35:15.607 INFO  TaskSetManager - Starting task 0.0 in stage 735.0 (TID 677) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:15.607 INFO  Executor - Running task 0.0 in stage 735.0 (TID 677)
16:35:15.609 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.609 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.611 INFO  Executor - Finished task 0.0 in stage 735.0 (TID 677). 2238 bytes result sent to driver
16:35:15.612 INFO  TaskSetManager - Finished task 0.0 in stage 735.0 (TID 677) in 5 ms on localhost (executor driver) (1/1)
16:35:15.612 INFO  TaskSchedulerImpl - Removed TaskSet 735.0, whose tasks have all completed, from pool 
16:35:15.612 INFO  DAGScheduler - ResultStage 735 (collect at SparkUtils.java:205) finished in 0.007 s
16:35:15.612 INFO  DAGScheduler - Job 423 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:15.612 INFO  TaskSchedulerImpl - Killing all running tasks in stage 735: Stage finished
16:35:15.612 INFO  DAGScheduler - Job 423 finished: collect at SparkUtils.java:205, took 0.074125 s
16:35:15.618 INFO  MemoryStore - Block broadcast_1375 stored as values in memory (estimated size 1632.0 B, free 1897.8 MiB)
16:35:15.619 INFO  MemoryStore - Block broadcast_1375_piece0 stored as bytes in memory (estimated size 145.0 B, free 1897.8 MiB)
16:35:15.619 INFO  BlockManagerInfo - Added broadcast_1375_piece0 in memory on localhost:46629 (size: 145.0 B, free: 1918.5 MiB)
16:35:15.619 INFO  SparkContext - Created broadcast 1375 from broadcast at MarkDuplicatesSparkUtils.java:126
16:35:15.620 INFO  MemoryStore - Block broadcast_1376 stored as values in memory (estimated size 304.0 B, free 1897.8 MiB)
16:35:15.620 INFO  MemoryStore - Block broadcast_1376_piece0 stored as bytes in memory (estimated size 37.0 B, free 1897.8 MiB)
16:35:15.620 INFO  BlockManagerInfo - Added broadcast_1376_piece0 in memory on localhost:46629 (size: 37.0 B, free: 1918.5 MiB)
16:35:15.620 INFO  SparkContext - Created broadcast 1376 from broadcast at MarkDuplicatesSparkUtils.java:127
16:35:15.637 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:35:15.638 INFO  DAGScheduler - Registering RDD 3495 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 103
16:35:15.638 INFO  DAGScheduler - Registering RDD 3499 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 102
16:35:15.638 INFO  DAGScheduler - Registering RDD 3503 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 101
16:35:15.638 INFO  DAGScheduler - Got job 424 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:35:15.638 INFO  DAGScheduler - Final stage: ResultStage 740 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:35:15.639 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 739)
16:35:15.639 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 739)
16:35:15.639 INFO  DAGScheduler - Submitting ShuffleMapStage 737 (MapPartitionsRDD[3495] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:35:15.640 INFO  MemoryStore - Block broadcast_1377 stored as values in memory (estimated size 24.2 KiB, free 1897.7 MiB)
16:35:15.640 INFO  MemoryStore - Block broadcast_1377_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1897.7 MiB)
16:35:15.640 INFO  BlockManagerInfo - Added broadcast_1377_piece0 in memory on localhost:46629 (size: 12.6 KiB, free: 1918.5 MiB)
16:35:15.641 INFO  SparkContext - Created broadcast 1377 from broadcast at DAGScheduler.scala:1580
16:35:15.641 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 737 (MapPartitionsRDD[3495] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:35:15.641 INFO  TaskSchedulerImpl - Adding task set 737.0 with 1 tasks resource profile 0
16:35:15.641 INFO  TaskSetManager - Starting task 0.0 in stage 737.0 (TID 678) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
16:35:15.642 INFO  Executor - Running task 0.0 in stage 737.0 (TID 678)
16:35:15.644 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.644 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.648 INFO  Executor - Finished task 0.0 in stage 737.0 (TID 678). 1922 bytes result sent to driver
16:35:15.648 INFO  TaskSetManager - Finished task 0.0 in stage 737.0 (TID 678) in 7 ms on localhost (executor driver) (1/1)
16:35:15.648 INFO  DAGScheduler - ShuffleMapStage 737 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.009 s
16:35:15.648 INFO  DAGScheduler - looking for newly runnable stages
16:35:15.648 INFO  DAGScheduler - running: HashSet()
16:35:15.648 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 738, ShuffleMapStage 739, ResultStage 740)
16:35:15.648 INFO  DAGScheduler - failed: HashSet()
16:35:15.649 INFO  DAGScheduler - Submitting ShuffleMapStage 738 (MapPartitionsRDD[3499] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:35:15.649 INFO  TaskSchedulerImpl - Removed TaskSet 737.0, whose tasks have all completed, from pool 
16:35:15.650 INFO  MemoryStore - Block broadcast_1378 stored as values in memory (estimated size 28.3 KiB, free 1897.7 MiB)
16:35:15.650 INFO  MemoryStore - Block broadcast_1378_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1897.7 MiB)
16:35:15.650 INFO  BlockManagerInfo - Added broadcast_1378_piece0 in memory on localhost:46629 (size: 14.8 KiB, free: 1918.5 MiB)
16:35:15.650 INFO  SparkContext - Created broadcast 1378 from broadcast at DAGScheduler.scala:1580
16:35:15.651 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 738 (MapPartitionsRDD[3499] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:35:15.651 INFO  TaskSchedulerImpl - Adding task set 738.0 with 1 tasks resource profile 0
16:35:15.651 INFO  TaskSetManager - Starting task 0.0 in stage 738.0 (TID 679) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
16:35:15.651 INFO  Executor - Running task 0.0 in stage 738.0 (TID 679)
16:35:15.654 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.654 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.659 INFO  Executor - Finished task 0.0 in stage 738.0 (TID 679). 1922 bytes result sent to driver
16:35:15.659 INFO  TaskSetManager - Finished task 0.0 in stage 738.0 (TID 679) in 8 ms on localhost (executor driver) (1/1)
16:35:15.660 INFO  TaskSchedulerImpl - Removed TaskSet 738.0, whose tasks have all completed, from pool 
16:35:15.660 INFO  DAGScheduler - ShuffleMapStage 738 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.011 s
16:35:15.660 INFO  DAGScheduler - looking for newly runnable stages
16:35:15.660 INFO  DAGScheduler - running: HashSet()
16:35:15.660 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 739, ResultStage 740)
16:35:15.660 INFO  DAGScheduler - failed: HashSet()
16:35:15.660 INFO  DAGScheduler - Submitting ShuffleMapStage 739 (MapPartitionsRDD[3503] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:35:15.661 INFO  MemoryStore - Block broadcast_1379 stored as values in memory (estimated size 19.0 KiB, free 1897.7 MiB)
16:35:15.661 INFO  MemoryStore - Block broadcast_1379_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1897.7 MiB)
16:35:15.662 INFO  BlockManagerInfo - Added broadcast_1379_piece0 in memory on localhost:46629 (size: 9.7 KiB, free: 1918.5 MiB)
16:35:15.662 INFO  SparkContext - Created broadcast 1379 from broadcast at DAGScheduler.scala:1580
16:35:15.662 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 739 (MapPartitionsRDD[3503] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:35:15.662 INFO  TaskSchedulerImpl - Adding task set 739.0 with 1 tasks resource profile 0
16:35:15.663 INFO  TaskSetManager - Starting task 0.0 in stage 739.0 (TID 680) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:15.663 INFO  Executor - Running task 0.0 in stage 739.0 (TID 680)
16:35:15.665 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.665 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.667 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.667 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.669 INFO  Executor - Finished task 0.0 in stage 739.0 (TID 680). 1922 bytes result sent to driver
16:35:15.669 INFO  TaskSetManager - Finished task 0.0 in stage 739.0 (TID 680) in 7 ms on localhost (executor driver) (1/1)
16:35:15.670 INFO  DAGScheduler - ShuffleMapStage 739 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.010 s
16:35:15.670 INFO  DAGScheduler - looking for newly runnable stages
16:35:15.670 INFO  DAGScheduler - running: HashSet()
16:35:15.670 INFO  DAGScheduler - waiting: HashSet(ResultStage 740)
16:35:15.670 INFO  DAGScheduler - failed: HashSet()
16:35:15.670 INFO  DAGScheduler - Submitting ResultStage 740 (MapPartitionsRDD[3505] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:35:15.671 INFO  MemoryStore - Block broadcast_1380 stored as values in memory (estimated size 20.2 KiB, free 1897.6 MiB)
16:35:15.671 INFO  MemoryStore - Block broadcast_1380_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1897.6 MiB)
16:35:15.672 INFO  TaskSchedulerImpl - Removed TaskSet 739.0, whose tasks have all completed, from pool 
16:35:15.672 INFO  BlockManagerInfo - Added broadcast_1380_piece0 in memory on localhost:46629 (size: 10.6 KiB, free: 1918.5 MiB)
16:35:15.672 INFO  SparkContext - Created broadcast 1380 from broadcast at DAGScheduler.scala:1580
16:35:15.673 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 740 (MapPartitionsRDD[3505] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:35:15.673 INFO  TaskSchedulerImpl - Adding task set 740.0 with 1 tasks resource profile 0
16:35:15.673 INFO  TaskSetManager - Starting task 0.0 in stage 740.0 (TID 681) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:15.674 INFO  Executor - Running task 0.0 in stage 740.0 (TID 681)
16:35:15.676 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.676 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.679 INFO  Executor - Finished task 0.0 in stage 740.0 (TID 681). 1944 bytes result sent to driver
16:35:15.679 INFO  TaskSetManager - Finished task 0.0 in stage 740.0 (TID 681) in 6 ms on localhost (executor driver) (1/1)
16:35:15.679 INFO  TaskSchedulerImpl - Removed TaskSet 740.0, whose tasks have all completed, from pool 
16:35:15.680 INFO  DAGScheduler - ResultStage 740 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.010 s
16:35:15.680 INFO  DAGScheduler - Job 424 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:15.680 INFO  TaskSchedulerImpl - Killing all running tasks in stage 740: Stage finished
16:35:15.680 INFO  DAGScheduler - Job 424 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.043162 s
16:35:15.695 INFO  MemoryStore - Block broadcast_1381 stored as values in memory (estimated size 20.3 KiB, free 1897.6 MiB)
16:35:15.695 INFO  MemoryStore - Block broadcast_1381_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1897.6 MiB)
16:35:15.695 INFO  BlockManagerInfo - Added broadcast_1381_piece0 in memory on localhost:46629 (size: 1850.0 B, free: 1918.5 MiB)
16:35:15.695 INFO  SparkContext - Created broadcast 1381 from broadcast at ReadsSparkSink.java:133
16:35:15.696 INFO  MemoryStore - Block broadcast_1382 stored as values in memory (estimated size 20.4 KiB, free 1897.6 MiB)
16:35:15.697 INFO  MemoryStore - Block broadcast_1382_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1897.6 MiB)
16:35:15.697 INFO  BlockManagerInfo - Added broadcast_1382_piece0 in memory on localhost:46629 (size: 1850.0 B, free: 1918.5 MiB)
16:35:15.697 INFO  SparkContext - Created broadcast 1382 from broadcast at BamSink.java:76
16:35:15.699 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:35:15.699 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:15.699 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:35:15.717 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:35:15.718 INFO  DAGScheduler - Registering RDD 3507 (mapToPair at SparkUtils.java:161) as input to shuffle 104
16:35:15.718 INFO  DAGScheduler - Got job 425 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:35:15.718 INFO  DAGScheduler - Final stage: ResultStage 745 (runJob at SparkHadoopWriter.scala:83)
16:35:15.718 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 744)
16:35:15.718 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 744)
16:35:15.719 INFO  DAGScheduler - Submitting ShuffleMapStage 744 (MapPartitionsRDD[3507] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:15.720 INFO  MemoryStore - Block broadcast_1383 stored as values in memory (estimated size 18.1 KiB, free 1897.6 MiB)
16:35:15.720 INFO  MemoryStore - Block broadcast_1383_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1897.6 MiB)
16:35:15.720 INFO  BlockManagerInfo - Added broadcast_1383_piece0 in memory on localhost:46629 (size: 9.5 KiB, free: 1918.5 MiB)
16:35:15.720 INFO  SparkContext - Created broadcast 1383 from broadcast at DAGScheduler.scala:1580
16:35:15.721 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 744 (MapPartitionsRDD[3507] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:15.721 INFO  TaskSchedulerImpl - Adding task set 744.0 with 1 tasks resource profile 0
16:35:15.721 INFO  TaskSetManager - Starting task 0.0 in stage 744.0 (TID 682) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:15.722 INFO  Executor - Running task 0.0 in stage 744.0 (TID 682)
16:35:15.724 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.724 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.727 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.727 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.728 INFO  Executor - Finished task 0.0 in stage 744.0 (TID 682). 1922 bytes result sent to driver
16:35:15.729 INFO  TaskSetManager - Finished task 0.0 in stage 744.0 (TID 682) in 8 ms on localhost (executor driver) (1/1)
16:35:15.729 INFO  TaskSchedulerImpl - Removed TaskSet 744.0, whose tasks have all completed, from pool 
16:35:15.730 INFO  DAGScheduler - ShuffleMapStage 744 (mapToPair at SparkUtils.java:161) finished in 0.011 s
16:35:15.730 INFO  DAGScheduler - looking for newly runnable stages
16:35:15.730 INFO  DAGScheduler - running: HashSet()
16:35:15.730 INFO  DAGScheduler - waiting: HashSet(ResultStage 745)
16:35:15.730 INFO  DAGScheduler - failed: HashSet()
16:35:15.730 INFO  DAGScheduler - Submitting ResultStage 745 (MapPartitionsRDD[3512] at mapToPair at BamSink.java:91), which has no missing parents
16:35:15.741 INFO  MemoryStore - Block broadcast_1384 stored as values in memory (estimated size 163.7 KiB, free 1897.4 MiB)
16:35:15.742 INFO  MemoryStore - Block broadcast_1384_piece0 stored as bytes in memory (estimated size 73.9 KiB, free 1897.3 MiB)
16:35:15.742 INFO  BlockManagerInfo - Added broadcast_1384_piece0 in memory on localhost:46629 (size: 73.9 KiB, free: 1918.4 MiB)
16:35:15.743 INFO  SparkContext - Created broadcast 1384 from broadcast at DAGScheduler.scala:1580
16:35:15.743 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 745 (MapPartitionsRDD[3512] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
16:35:15.743 INFO  TaskSchedulerImpl - Adding task set 745.0 with 1 tasks resource profile 0
16:35:15.743 INFO  TaskSetManager - Starting task 0.0 in stage 745.0 (TID 683) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:15.744 INFO  Executor - Running task 0.0 in stage 745.0 (TID 683)
16:35:15.750 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.750 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.753 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:35:15.753 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:15.753 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:35:15.753 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:35:15.753 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:15.753 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:35:15.774 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271635156934519975368986411_3512_r_000000_0' to file:/tmp/local118027614369008844995/markdups5962609011649382606.bam.parts/_temporary/0/task_202505271635156934519975368986411_3512_r_000000
16:35:15.774 INFO  SparkHadoopMapRedUtil - attempt_202505271635156934519975368986411_3512_r_000000_0: Committed. Elapsed time: 0 ms.
16:35:15.775 INFO  Executor - Finished task 0.0 in stage 745.0 (TID 683). 1858 bytes result sent to driver
16:35:15.775 INFO  TaskSetManager - Finished task 0.0 in stage 745.0 (TID 683) in 32 ms on localhost (executor driver) (1/1)
16:35:15.775 INFO  TaskSchedulerImpl - Removed TaskSet 745.0, whose tasks have all completed, from pool 
16:35:15.775 INFO  DAGScheduler - ResultStage 745 (runJob at SparkHadoopWriter.scala:83) finished in 0.045 s
16:35:15.776 INFO  DAGScheduler - Job 425 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:15.776 INFO  TaskSchedulerImpl - Killing all running tasks in stage 745: Stage finished
16:35:15.776 INFO  DAGScheduler - Job 425 finished: runJob at SparkHadoopWriter.scala:83, took 0.058947 s
16:35:15.777 INFO  SparkHadoopWriter - Start to commit write Job job_202505271635156934519975368986411_3512.
16:35:15.782 INFO  SparkHadoopWriter - Write Job job_202505271635156934519975368986411_3512 committed. Elapsed time: 5 ms.
16:35:15.795 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local118027614369008844995/markdups5962609011649382606.bam
16:35:15.799 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local118027614369008844995/markdups5962609011649382606.bam done
16:35:15.799 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local118027614369008844995/markdups5962609011649382606.bam.parts/ to /tmp/local118027614369008844995/markdups5962609011649382606.bam.sbi
16:35:15.804 INFO  IndexFileMerger - Done merging .sbi files
16:35:15.804 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local118027614369008844995/markdups5962609011649382606.bam.parts/ to /tmp/local118027614369008844995/markdups5962609011649382606.bam.bai
16:35:15.808 INFO  IndexFileMerger - Done merging .bai files
16:35:15.808 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:35:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
16:35:15.815 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:15.816 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
16:35:15.816 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:35:15.816 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
16:35:15.816 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
16:35:15.816 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:35:15 PM UTC
16:35:15.816 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:15.816 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:15.816 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:35:15.816 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:35:15.816 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:35:15.816 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:35:15.816 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:35:15.816 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:35:15.816 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:35:15.816 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:35:15.816 INFO  MarkDuplicatesSpark - Initializing engine
16:35:15.816 INFO  MarkDuplicatesSpark - Done initializing engine
16:35:15.816 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:35:15.819 INFO  MemoryStore - Block broadcast_1385 stored as values in memory (estimated size 306.3 KiB, free 1897.0 MiB)
16:35:15.825 INFO  MemoryStore - Block broadcast_1385_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1897.0 MiB)
16:35:15.825 INFO  BlockManagerInfo - Added broadcast_1385_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1918.3 MiB)
16:35:15.826 INFO  SparkContext - Created broadcast 1385 from newAPIHadoopFile at PathSplitSource.java:96
16:35:15.845 INFO  MemoryStore - Block broadcast_1386 stored as values in memory (estimated size 306.3 KiB, free 1896.7 MiB)
16:35:15.852 INFO  MemoryStore - Block broadcast_1386_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1896.6 MiB)
16:35:15.852 INFO  BlockManagerInfo - Added broadcast_1386_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1918.3 MiB)
16:35:15.852 INFO  SparkContext - Created broadcast 1386 from newAPIHadoopFile at PathSplitSource.java:96
16:35:15.884 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:35:15.885 INFO  FileInputFormat - Total input files to process : 1
16:35:15.885 INFO  DAGScheduler - Registering RDD 3526 (mapToPair at SparkUtils.java:161) as input to shuffle 105
16:35:15.885 INFO  DAGScheduler - Got job 426 (collect at SparkUtils.java:205) with 1 output partitions
16:35:15.885 INFO  DAGScheduler - Final stage: ResultStage 747 (collect at SparkUtils.java:205)
16:35:15.885 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 746)
16:35:15.885 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 746)
16:35:15.885 INFO  DAGScheduler - Submitting ShuffleMapStage 746 (MapPartitionsRDD[3526] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:15.902 INFO  MemoryStore - Block broadcast_1387 stored as values in memory (estimated size 460.4 KiB, free 1896.2 MiB)
16:35:15.905 INFO  MemoryStore - Block broadcast_1387_piece0 stored as bytes in memory (estimated size 202.5 KiB, free 1896.0 MiB)
16:35:15.905 INFO  BlockManagerInfo - Added broadcast_1387_piece0 in memory on localhost:46629 (size: 202.5 KiB, free: 1918.1 MiB)
16:35:15.905 INFO  SparkContext - Created broadcast 1387 from broadcast at DAGScheduler.scala:1580
16:35:15.905 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 746 (MapPartitionsRDD[3526] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:15.905 INFO  TaskSchedulerImpl - Adding task set 746.0 with 1 tasks resource profile 0
16:35:15.906 INFO  TaskSetManager - Starting task 0.0 in stage 746.0 (TID 684) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9736 bytes) 
16:35:15.906 INFO  Executor - Running task 0.0 in stage 746.0 (TID 684)
16:35:15.935 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam:0+1856
16:35:15.939 INFO  Executor - Finished task 0.0 in stage 746.0 (TID 684). 1148 bytes result sent to driver
16:35:15.939 INFO  TaskSetManager - Finished task 0.0 in stage 746.0 (TID 684) in 34 ms on localhost (executor driver) (1/1)
16:35:15.939 INFO  TaskSchedulerImpl - Removed TaskSet 746.0, whose tasks have all completed, from pool 
16:35:15.939 INFO  DAGScheduler - ShuffleMapStage 746 (mapToPair at SparkUtils.java:161) finished in 0.053 s
16:35:15.939 INFO  DAGScheduler - looking for newly runnable stages
16:35:15.939 INFO  DAGScheduler - running: HashSet()
16:35:15.939 INFO  DAGScheduler - waiting: HashSet(ResultStage 747)
16:35:15.939 INFO  DAGScheduler - failed: HashSet()
16:35:15.940 INFO  DAGScheduler - Submitting ResultStage 747 (MapPartitionsRDD[3529] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:35:15.940 INFO  MemoryStore - Block broadcast_1388 stored as values in memory (estimated size 7.4 KiB, free 1896.0 MiB)
16:35:15.940 INFO  MemoryStore - Block broadcast_1388_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1895.9 MiB)
16:35:15.941 INFO  BlockManagerInfo - Added broadcast_1388_piece0 in memory on localhost:46629 (size: 4.1 KiB, free: 1918.1 MiB)
16:35:15.941 INFO  SparkContext - Created broadcast 1388 from broadcast at DAGScheduler.scala:1580
16:35:15.941 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 747 (MapPartitionsRDD[3529] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:35:15.941 INFO  TaskSchedulerImpl - Adding task set 747.0 with 1 tasks resource profile 0
16:35:15.941 INFO  TaskSetManager - Starting task 0.0 in stage 747.0 (TID 685) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:15.942 INFO  Executor - Running task 0.0 in stage 747.0 (TID 685)
16:35:15.942 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.943 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.944 INFO  Executor - Finished task 0.0 in stage 747.0 (TID 685). 2495 bytes result sent to driver
16:35:15.944 INFO  TaskSetManager - Finished task 0.0 in stage 747.0 (TID 685) in 3 ms on localhost (executor driver) (1/1)
16:35:15.944 INFO  TaskSchedulerImpl - Removed TaskSet 747.0, whose tasks have all completed, from pool 
16:35:15.944 INFO  DAGScheduler - ResultStage 747 (collect at SparkUtils.java:205) finished in 0.004 s
16:35:15.945 INFO  DAGScheduler - Job 426 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:15.945 INFO  TaskSchedulerImpl - Killing all running tasks in stage 747: Stage finished
16:35:15.945 INFO  DAGScheduler - Job 426 finished: collect at SparkUtils.java:205, took 0.060562 s
16:35:15.949 INFO  MemoryStore - Block broadcast_1389 stored as values in memory (estimated size 1632.0 B, free 1895.9 MiB)
16:35:15.949 INFO  MemoryStore - Block broadcast_1389_piece0 stored as bytes in memory (estimated size 145.0 B, free 1895.9 MiB)
16:35:15.950 INFO  BlockManagerInfo - Added broadcast_1389_piece0 in memory on localhost:46629 (size: 145.0 B, free: 1918.1 MiB)
16:35:15.950 INFO  SparkContext - Created broadcast 1389 from broadcast at MarkDuplicatesSparkUtils.java:126
16:35:15.950 INFO  MemoryStore - Block broadcast_1390 stored as values in memory (estimated size 304.0 B, free 1895.9 MiB)
16:35:15.950 INFO  MemoryStore - Block broadcast_1390_piece0 stored as bytes in memory (estimated size 37.0 B, free 1895.9 MiB)
16:35:15.950 INFO  BlockManagerInfo - Added broadcast_1390_piece0 in memory on localhost:46629 (size: 37.0 B, free: 1918.1 MiB)
16:35:15.951 INFO  SparkContext - Created broadcast 1390 from broadcast at MarkDuplicatesSparkUtils.java:127
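Broadcasts 1389 and 1390 are small driver-side values shipped once per executor rather than captured in every task closure. Below is a hedged sketch of that broadcast pattern in Spark's Java API; the lookup map and the names libraryA/libraryB are invented for illustration and are not the values actually broadcast at MarkDuplicatesSparkUtils.java:126-127.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class BroadcastSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("broadcast-sketch").setMaster("local[1]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // Small driver-side lookup table; broadcasting ships it once per executor.
            Map<String, Integer> libraryIndex = new HashMap<>();
            libraryIndex.put("libraryA", 0);
            libraryIndex.put("libraryB", 1);
            Broadcast<Map<String, Integer>> bcast = sc.broadcast(libraryIndex);

            JavaRDD<String> libs = sc.parallelize(Arrays.asList("libraryA", "libraryB"));
            // Tasks read the broadcast value instead of serializing the map with each task.
            libs.map(lib -> bcast.value().get(lib))
                .collect()
                .forEach(System.out::println);
        }
    }
}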
16:35:15.963 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:35:15.963 INFO  DAGScheduler - Registering RDD 3538 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 108
16:35:15.963 INFO  DAGScheduler - Registering RDD 3542 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 107
16:35:15.963 INFO  DAGScheduler - Registering RDD 3546 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 106
16:35:15.964 INFO  DAGScheduler - Got job 427 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:35:15.964 INFO  DAGScheduler - Final stage: ResultStage 752 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:35:15.964 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 751)
16:35:15.964 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 751)
16:35:15.964 INFO  DAGScheduler - Submitting ShuffleMapStage 749 (MapPartitionsRDD[3538] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:35:15.965 INFO  MemoryStore - Block broadcast_1391 stored as values in memory (estimated size 24.2 KiB, free 1895.9 MiB)
16:35:15.965 INFO  MemoryStore - Block broadcast_1391_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1895.9 MiB)
16:35:15.965 INFO  BlockManagerInfo - Added broadcast_1391_piece0 in memory on localhost:46629 (size: 12.6 KiB, free: 1918.1 MiB)
16:35:15.966 INFO  SparkContext - Created broadcast 1391 from broadcast at DAGScheduler.scala:1580
16:35:15.966 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 749 (MapPartitionsRDD[3538] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:35:15.966 INFO  TaskSchedulerImpl - Adding task set 749.0 with 1 tasks resource profile 0
16:35:15.966 INFO  TaskSetManager - Starting task 0.0 in stage 749.0 (TID 686) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
16:35:15.966 INFO  Executor - Running task 0.0 in stage 749.0 (TID 686)
16:35:15.968 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.968 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.971 INFO  Executor - Finished task 0.0 in stage 749.0 (TID 686). 1922 bytes result sent to driver
16:35:15.971 INFO  TaskSetManager - Finished task 0.0 in stage 749.0 (TID 686) in 5 ms on localhost (executor driver) (1/1)
16:35:15.971 INFO  TaskSchedulerImpl - Removed TaskSet 749.0, whose tasks have all completed, from pool 
16:35:15.971 INFO  DAGScheduler - ShuffleMapStage 749 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.007 s
16:35:15.971 INFO  DAGScheduler - looking for newly runnable stages
16:35:15.971 INFO  DAGScheduler - running: HashSet()
16:35:15.972 INFO  DAGScheduler - waiting: HashSet(ResultStage 752, ShuffleMapStage 750, ShuffleMapStage 751)
16:35:15.972 INFO  DAGScheduler - failed: HashSet()
16:35:15.972 INFO  DAGScheduler - Submitting ShuffleMapStage 750 (MapPartitionsRDD[3542] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:35:15.972 INFO  MemoryStore - Block broadcast_1392 stored as values in memory (estimated size 28.3 KiB, free 1895.9 MiB)
16:35:15.973 INFO  MemoryStore - Block broadcast_1392_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1895.9 MiB)
16:35:15.973 INFO  BlockManagerInfo - Added broadcast_1392_piece0 in memory on localhost:46629 (size: 14.8 KiB, free: 1918.0 MiB)
16:35:15.973 INFO  SparkContext - Created broadcast 1392 from broadcast at DAGScheduler.scala:1580
16:35:15.973 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 750 (MapPartitionsRDD[3542] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:35:15.973 INFO  TaskSchedulerImpl - Adding task set 750.0 with 1 tasks resource profile 0
16:35:15.974 INFO  TaskSetManager - Starting task 0.0 in stage 750.0 (TID 687) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
16:35:15.974 INFO  Executor - Running task 0.0 in stage 750.0 (TID 687)
16:35:15.975 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.975 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.979 INFO  Executor - Finished task 0.0 in stage 750.0 (TID 687). 1879 bytes result sent to driver
16:35:15.979 INFO  TaskSetManager - Finished task 0.0 in stage 750.0 (TID 687) in 5 ms on localhost (executor driver) (1/1)
16:35:15.980 INFO  TaskSchedulerImpl - Removed TaskSet 750.0, whose tasks have all completed, from pool 
16:35:15.980 INFO  DAGScheduler - ShuffleMapStage 750 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.008 s
16:35:15.980 INFO  DAGScheduler - looking for newly runnable stages
16:35:15.980 INFO  DAGScheduler - running: HashSet()
16:35:15.980 INFO  DAGScheduler - waiting: HashSet(ResultStage 752, ShuffleMapStage 751)
16:35:15.980 INFO  DAGScheduler - failed: HashSet()
16:35:15.980 INFO  DAGScheduler - Submitting ShuffleMapStage 751 (MapPartitionsRDD[3546] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:35:15.981 INFO  MemoryStore - Block broadcast_1393 stored as values in memory (estimated size 19.0 KiB, free 1895.9 MiB)
16:35:15.981 INFO  MemoryStore - Block broadcast_1393_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1895.8 MiB)
16:35:15.981 INFO  BlockManagerInfo - Added broadcast_1393_piece0 in memory on localhost:46629 (size: 9.7 KiB, free: 1918.0 MiB)
16:35:15.981 INFO  SparkContext - Created broadcast 1393 from broadcast at DAGScheduler.scala:1580
16:35:15.982 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 751 (MapPartitionsRDD[3546] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:35:15.982 INFO  TaskSchedulerImpl - Adding task set 751.0 with 1 tasks resource profile 0
16:35:15.982 INFO  TaskSetManager - Starting task 0.0 in stage 751.0 (TID 688) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:15.982 INFO  Executor - Running task 0.0 in stage 751.0 (TID 688)
16:35:15.983 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.983 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.985 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.985 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.986 INFO  Executor - Finished task 0.0 in stage 751.0 (TID 688). 1879 bytes result sent to driver
16:35:15.986 INFO  TaskSetManager - Finished task 0.0 in stage 751.0 (TID 688) in 4 ms on localhost (executor driver) (1/1)
16:35:15.987 INFO  TaskSchedulerImpl - Removed TaskSet 751.0, whose tasks have all completed, from pool 
16:35:15.987 INFO  DAGScheduler - ShuffleMapStage 751 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.007 s
16:35:15.987 INFO  DAGScheduler - looking for newly runnable stages
16:35:15.987 INFO  DAGScheduler - running: HashSet()
16:35:15.987 INFO  DAGScheduler - waiting: HashSet(ResultStage 752)
16:35:15.987 INFO  DAGScheduler - failed: HashSet()
16:35:15.987 INFO  DAGScheduler - Submitting ResultStage 752 (MapPartitionsRDD[3548] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:35:15.988 INFO  MemoryStore - Block broadcast_1394 stored as values in memory (estimated size 20.2 KiB, free 1895.8 MiB)
16:35:15.988 INFO  MemoryStore - Block broadcast_1394_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1895.8 MiB)
16:35:15.988 INFO  BlockManagerInfo - Added broadcast_1394_piece0 in memory on localhost:46629 (size: 10.6 KiB, free: 1918.0 MiB)
16:35:15.988 INFO  SparkContext - Created broadcast 1394 from broadcast at DAGScheduler.scala:1580
16:35:15.989 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 752 (MapPartitionsRDD[3548] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:35:15.989 INFO  TaskSchedulerImpl - Adding task set 752.0 with 1 tasks resource profile 0
16:35:15.989 INFO  TaskSetManager - Starting task 0.0 in stage 752.0 (TID 689) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:15.989 INFO  Executor - Running task 0.0 in stage 752.0 (TID 689)
16:35:15.991 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:15.991 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:15.993 INFO  Executor - Finished task 0.0 in stage 752.0 (TID 689). 2053 bytes result sent to driver
16:35:15.993 INFO  TaskSetManager - Finished task 0.0 in stage 752.0 (TID 689) in 4 ms on localhost (executor driver) (1/1)
16:35:15.993 INFO  TaskSchedulerImpl - Removed TaskSet 752.0, whose tasks have all completed, from pool 
16:35:15.993 INFO  DAGScheduler - ResultStage 752 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.006 s
16:35:15.993 INFO  DAGScheduler - Job 427 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:15.993 INFO  TaskSchedulerImpl - Killing all running tasks in stage 752: Stage finished
16:35:15.993 INFO  DAGScheduler - Job 427 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.030629 s
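Job 427 ends with collectAsMap, which brings the per-key results of the preceding shuffle stages back to the driver as a java.util.Map. A minimal sketch of that pattern (reduceByKey feeding collectAsMap) follows; the library names and counts are made up for illustration and do not reflect the duplicate metrics actually computed here.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Map;

public class CollectAsMapSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("collect-as-map").setMaster("local[1]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // (library, count) pairs; the values here are placeholders.
            JavaPairRDD<String, Integer> perLibraryCounts = sc.parallelizePairs(Arrays.asList(
                    new Tuple2<>("libraryA", 3),
                    new Tuple2<>("libraryA", 1),
                    new Tuple2<>("libraryB", 2)));

            // reduceByKey runs as a ShuffleMapStage; collectAsMap is the ResultStage that
            // returns the per-library totals to the driver as a java.util.Map.
            Map<String, Integer> totals = perLibraryCounts
                    .reduceByKey(Integer::sum)
                    .collectAsMap();

            totals.forEach((lib, n) -> System.out.println(lib + " -> " + n));
        }
    }
}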
16:35:16.004 INFO  MemoryStore - Block broadcast_1395 stored as values in memory (estimated size 20.3 KiB, free 1895.8 MiB)
16:35:16.005 INFO  MemoryStore - Block broadcast_1395_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1895.8 MiB)
16:35:16.005 INFO  BlockManagerInfo - Added broadcast_1395_piece0 in memory on localhost:46629 (size: 1850.0 B, free: 1918.0 MiB)
16:35:16.005 INFO  SparkContext - Created broadcast 1395 from broadcast at ReadsSparkSink.java:133
16:35:16.006 INFO  MemoryStore - Block broadcast_1396 stored as values in memory (estimated size 20.4 KiB, free 1895.8 MiB)
16:35:16.006 INFO  MemoryStore - Block broadcast_1396_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1895.8 MiB)
16:35:16.006 INFO  BlockManagerInfo - Added broadcast_1396_piece0 in memory on localhost:46629 (size: 1850.0 B, free: 1918.0 MiB)
16:35:16.006 INFO  SparkContext - Created broadcast 1396 from broadcast at BamSink.java:76
16:35:16.008 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:35:16.008 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:16.008 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory: false, ignore cleanup failures: false
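The sink writes through Hadoop's FileOutputCommitter with algorithm version 1: tasks write under a _temporary directory and the job commit renames the output into place, which matches the attempt paths seen later in this log. The sketch below shows how that algorithm version can be selected via the Hadoop configuration; the property name is Hadoop's documented mapreduce.fileoutputcommitter.algorithm.version, and the helper class is illustrative rather than GATK code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class CommitterConfigSketch {
    // Build a Hadoop Job configured for FileOutputCommitter algorithm version 1.
    public static Job newJobWithV1Committer(String outputDir) throws IOException {
        Configuration conf = new Configuration();
        // Algorithm 1: task output lands under _temporary/<attempt>, then job commit
        // renames it into the final output directory.
        conf.setInt("mapreduce.fileoutputcommitter.algorithm.version", 1);
        Job job = Job.getInstance(conf, "committer-sketch");
        FileOutputFormat.setOutputPath(job, new Path(outputDir));
        return job;
    }
}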
16:35:16.024 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:35:16.025 INFO  DAGScheduler - Registering RDD 3550 (mapToPair at SparkUtils.java:161) as input to shuffle 109
16:35:16.025 INFO  DAGScheduler - Got job 428 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:35:16.025 INFO  DAGScheduler - Final stage: ResultStage 757 (runJob at SparkHadoopWriter.scala:83)
16:35:16.025 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 756)
16:35:16.025 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 756)
16:35:16.025 INFO  DAGScheduler - Submitting ShuffleMapStage 756 (MapPartitionsRDD[3550] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:16.026 INFO  MemoryStore - Block broadcast_1397 stored as values in memory (estimated size 18.1 KiB, free 1895.8 MiB)
16:35:16.026 INFO  MemoryStore - Block broadcast_1397_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1895.7 MiB)
16:35:16.026 INFO  BlockManagerInfo - Added broadcast_1397_piece0 in memory on localhost:46629 (size: 9.5 KiB, free: 1918.0 MiB)
16:35:16.027 INFO  SparkContext - Created broadcast 1397 from broadcast at DAGScheduler.scala:1580
16:35:16.027 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 756 (MapPartitionsRDD[3550] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:16.027 INFO  TaskSchedulerImpl - Adding task set 756.0 with 1 tasks resource profile 0
16:35:16.027 INFO  TaskSetManager - Starting task 0.0 in stage 756.0 (TID 690) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:16.027 INFO  Executor - Running task 0.0 in stage 756.0 (TID 690)
16:35:16.029 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:16.029 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:16.031 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:16.031 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:16.032 INFO  Executor - Finished task 0.0 in stage 756.0 (TID 690). 1922 bytes result sent to driver
16:35:16.032 INFO  TaskSetManager - Finished task 0.0 in stage 756.0 (TID 690) in 5 ms on localhost (executor driver) (1/1)
16:35:16.032 INFO  TaskSchedulerImpl - Removed TaskSet 756.0, whose tasks have all completed, from pool 
16:35:16.032 INFO  DAGScheduler - ShuffleMapStage 756 (mapToPair at SparkUtils.java:161) finished in 0.007 s
16:35:16.032 INFO  DAGScheduler - looking for newly runnable stages
16:35:16.032 INFO  DAGScheduler - running: HashSet()
16:35:16.032 INFO  DAGScheduler - waiting: HashSet(ResultStage 757)
16:35:16.033 INFO  DAGScheduler - failed: HashSet()
16:35:16.033 INFO  DAGScheduler - Submitting ResultStage 757 (MapPartitionsRDD[3555] at mapToPair at BamSink.java:91), which has no missing parents
16:35:16.039 INFO  MemoryStore - Block broadcast_1398 stored as values in memory (estimated size 163.7 KiB, free 1895.6 MiB)
16:35:16.040 INFO  MemoryStore - Block broadcast_1398_piece0 stored as bytes in memory (estimated size 73.9 KiB, free 1895.5 MiB)
16:35:16.040 INFO  BlockManagerInfo - Added broadcast_1398_piece0 in memory on localhost:46629 (size: 73.9 KiB, free: 1917.9 MiB)
16:35:16.040 INFO  SparkContext - Created broadcast 1398 from broadcast at DAGScheduler.scala:1580
16:35:16.040 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 757 (MapPartitionsRDD[3555] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
16:35:16.040 INFO  TaskSchedulerImpl - Adding task set 757.0 with 1 tasks resource profile 0
16:35:16.041 INFO  TaskSetManager - Starting task 0.0 in stage 757.0 (TID 691) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:16.041 INFO  Executor - Running task 0.0 in stage 757.0 (TID 691)
16:35:16.044 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:16.044 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:16.046 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:35:16.047 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:16.047 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory: false, ignore cleanup failures: false
16:35:16.047 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:35:16.047 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:16.047 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory: false, ignore cleanup failures: false
16:35:16.065 INFO  FileOutputCommitter - Saved output of task 'attempt_2025052716351641793918006478351_3555_r_000000_0' to file:/tmp/local118027614369008844995/markdups11187611159168454400.bam.parts/_temporary/0/task_2025052716351641793918006478351_3555_r_000000
16:35:16.065 INFO  SparkHadoopMapRedUtil - attempt_2025052716351641793918006478351_3555_r_000000_0: Committed. Elapsed time: 0 ms.
16:35:16.065 INFO  Executor - Finished task 0.0 in stage 757.0 (TID 691). 1858 bytes result sent to driver
16:35:16.065 INFO  TaskSetManager - Finished task 0.0 in stage 757.0 (TID 691) in 24 ms on localhost (executor driver) (1/1)
16:35:16.065 INFO  TaskSchedulerImpl - Removed TaskSet 757.0, whose tasks have all completed, from pool 
16:35:16.066 INFO  DAGScheduler - ResultStage 757 (runJob at SparkHadoopWriter.scala:83) finished in 0.032 s
16:35:16.066 INFO  DAGScheduler - Job 428 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:16.066 INFO  TaskSchedulerImpl - Killing all running tasks in stage 757: Stage finished
16:35:16.066 INFO  DAGScheduler - Job 428 finished: runJob at SparkHadoopWriter.scala:83, took 0.041548 s
16:35:16.066 INFO  SparkHadoopWriter - Start to commit write Job job_2025052716351641793918006478351_3555.
16:35:16.070 INFO  SparkHadoopWriter - Write Job job_2025052716351641793918006478351_3555 committed. Elapsed time: 4 ms.
16:35:16.080 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local118027614369008844995/markdups11187611159168454400.bam
16:35:16.083 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local118027614369008844995/markdups11187611159168454400.bam done
16:35:16.084 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local118027614369008844995/markdups11187611159168454400.bam.parts/ to /tmp/local118027614369008844995/markdups11187611159168454400.bam.sbi
16:35:16.089 INFO  IndexFileMerger - Done merging .sbi files
16:35:16.089 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local118027614369008844995/markdups11187611159168454400.bam.parts/ to /tmp/local118027614369008844995/markdups11187611159168454400.bam.bai
16:35:16.093 INFO  IndexFileMerger - Done merging .bai files
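After the write job commits, the part files are concatenated into a single BAM and the per-part .sbi and .bai indexes are merged. The sketch below illustrates only the generic part-file concatenation step using plain java.nio on a local filesystem; it is not the HadoopFileSystemWrapper or IndexFileMerger implementation, and real BAM/index merging needs format-aware handling that is omitted here.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class PartFileMergeSketch {
    // Concatenate "part-*" files from a Spark output directory into a single file,
    // in lexical order, as a stand-in for the concatenation step in the log above.
    public static void merge(Path partsDir, Path target) throws IOException {
        List<Path> parts;
        try (Stream<Path> listing = Files.list(partsDir)) {
            parts = listing
                    .filter(p -> p.getFileName().toString().startsWith("part-"))
                    .sorted(Comparator.comparing(p -> p.getFileName().toString()))
                    .collect(Collectors.toList());
        }
        try (OutputStream out = Files.newOutputStream(target,
                StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
            for (Path part : parts) {
                Files.copy(part, out); // append each part file in order
            }
        }
    }

    public static void main(String[] args) throws IOException {
        merge(Paths.get(args[0]), Paths.get(args[1]));
    }
}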
16:35:16.093 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:35:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:19 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:20 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:24 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:27 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:40 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.03 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:40 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
16:35:40.906 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:40.906 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
16:35:40.906 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:35:40.906 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
16:35:40.906 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
16:35:40.906 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:35:40 PM UTC
16:35:40.906 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:40.907 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:40.907 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:35:40.907 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:35:40.907 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:35:40.907 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:35:40.907 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:35:40.907 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:35:40.907 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:35:40.907 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:35:40.907 INFO  MarkDuplicatesSpark - Initializing engine
16:35:40.907 INFO  MarkDuplicatesSpark - Done initializing engine
16:35:40.907 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:35:40.909 INFO  MemoryStore - Block broadcast_1975 stored as values in memory (estimated size 306.3 KiB, free 1912.1 MiB)
16:35:40.916 INFO  MemoryStore - Block broadcast_1975_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1912.1 MiB)
16:35:40.916 INFO  BlockManagerInfo - Added broadcast_1975_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1919.0 MiB)
16:35:40.916 INFO  SparkContext - Created broadcast 1975 from newAPIHadoopFile at PathSplitSource.java:96
16:35:40.937 INFO  MemoryStore - Block broadcast_1976 stored as values in memory (estimated size 306.3 KiB, free 1911.8 MiB)
16:35:40.943 INFO  MemoryStore - Block broadcast_1976_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1911.7 MiB)
16:35:40.943 INFO  BlockManagerInfo - Added broadcast_1976_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1919.0 MiB)
16:35:40.943 INFO  SparkContext - Created broadcast 1976 from newAPIHadoopFile at PathSplitSource.java:96
16:35:40.976 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:35:40.977 INFO  FileInputFormat - Total input files to process : 1
16:35:40.977 INFO  DAGScheduler - Registering RDD 5447 (mapToPair at SparkUtils.java:161) as input to shuffle 300
16:35:40.977 INFO  DAGScheduler - Got job 559 (collect at SparkUtils.java:205) with 1 output partitions
16:35:40.977 INFO  DAGScheduler - Final stage: ResultStage 1219 (collect at SparkUtils.java:205)
16:35:40.977 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1218)
16:35:40.977 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1218)
16:35:40.978 INFO  DAGScheduler - Submitting ShuffleMapStage 1218 (MapPartitionsRDD[5447] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:41.003 INFO  MemoryStore - Block broadcast_1977 stored as values in memory (estimated size 493.8 KiB, free 1911.2 MiB)
16:35:41.006 INFO  MemoryStore - Block broadcast_1977_piece0 stored as bytes in memory (estimated size 210.1 KiB, free 1911.0 MiB)
16:35:41.006 INFO  BlockManagerInfo - Added broadcast_1977_piece0 in memory on localhost:46629 (size: 210.1 KiB, free: 1918.8 MiB)
16:35:41.006 INFO  SparkContext - Created broadcast 1977 from broadcast at DAGScheduler.scala:1580
16:35:41.006 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1218 (MapPartitionsRDD[5447] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:41.006 INFO  TaskSchedulerImpl - Adding task set 1218.0 with 1 tasks resource profile 0
16:35:41.007 INFO  TaskSetManager - Starting task 0.0 in stage 1218.0 (TID 1062) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9758 bytes) 
16:35:41.007 INFO  Executor - Running task 0.0 in stage 1218.0 (TID 1062)
16:35:41.037 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam:0+13412
16:35:41.042 INFO  Executor - Finished task 0.0 in stage 1218.0 (TID 1062). 1148 bytes result sent to driver
16:35:41.042 INFO  TaskSetManager - Finished task 0.0 in stage 1218.0 (TID 1062) in 35 ms on localhost (executor driver) (1/1)
16:35:41.042 INFO  TaskSchedulerImpl - Removed TaskSet 1218.0, whose tasks have all completed, from pool 
16:35:41.042 INFO  DAGScheduler - ShuffleMapStage 1218 (mapToPair at SparkUtils.java:161) finished in 0.064 s
16:35:41.042 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.043 INFO  DAGScheduler - running: HashSet()
16:35:41.043 INFO  DAGScheduler - waiting: HashSet(ResultStage 1219)
16:35:41.043 INFO  DAGScheduler - failed: HashSet()
16:35:41.043 INFO  DAGScheduler - Submitting ResultStage 1219 (MapPartitionsRDD[5450] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:35:41.043 INFO  MemoryStore - Block broadcast_1978 stored as values in memory (estimated size 7.4 KiB, free 1911.0 MiB)
16:35:41.044 INFO  MemoryStore - Block broadcast_1978_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1911.0 MiB)
16:35:41.044 INFO  BlockManagerInfo - Added broadcast_1978_piece0 in memory on localhost:46629 (size: 4.1 KiB, free: 1918.8 MiB)
16:35:41.044 INFO  SparkContext - Created broadcast 1978 from broadcast at DAGScheduler.scala:1580
16:35:41.044 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1219 (MapPartitionsRDD[5450] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:35:41.044 INFO  TaskSchedulerImpl - Adding task set 1219.0 with 1 tasks resource profile 0
16:35:41.044 INFO  TaskSetManager - Starting task 0.0 in stage 1219.0 (TID 1063) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:41.045 INFO  Executor - Running task 0.0 in stage 1219.0 (TID 1063)
16:35:41.046 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.046 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.048 INFO  Executor - Finished task 0.0 in stage 1219.0 (TID 1063). 2329 bytes result sent to driver
16:35:41.048 INFO  TaskSetManager - Finished task 0.0 in stage 1219.0 (TID 1063) in 4 ms on localhost (executor driver) (1/1)
16:35:41.048 INFO  TaskSchedulerImpl - Removed TaskSet 1219.0, whose tasks have all completed, from pool 
16:35:41.048 INFO  DAGScheduler - ResultStage 1219 (collect at SparkUtils.java:205) finished in 0.005 s
16:35:41.049 INFO  DAGScheduler - Job 559 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:41.049 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1219: Stage finished
16:35:41.049 INFO  DAGScheduler - Job 559 finished: collect at SparkUtils.java:205, took 0.072417 s
16:35:41.053 INFO  MemoryStore - Block broadcast_1979 stored as values in memory (estimated size 7.8 KiB, free 1911.0 MiB)
16:35:41.053 INFO  MemoryStore - Block broadcast_1979_piece0 stored as bytes in memory (estimated size 540.0 B, free 1911.0 MiB)
16:35:41.053 INFO  BlockManagerInfo - Added broadcast_1979_piece0 in memory on localhost:46629 (size: 540.0 B, free: 1918.8 MiB)
16:35:41.053 INFO  SparkContext - Created broadcast 1979 from broadcast at MarkDuplicatesSparkUtils.java:126
16:35:41.054 INFO  MemoryStore - Block broadcast_1980 stored as values in memory (estimated size 2.6 KiB, free 1911.0 MiB)
16:35:41.054 INFO  MemoryStore - Block broadcast_1980_piece0 stored as bytes in memory (estimated size 209.0 B, free 1911.0 MiB)
16:35:41.054 INFO  BlockManagerInfo - Added broadcast_1980_piece0 in memory on localhost:46629 (size: 209.0 B, free: 1918.8 MiB)
16:35:41.054 INFO  SparkContext - Created broadcast 1980 from broadcast at MarkDuplicatesSparkUtils.java:127
16:35:41.068 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:35:41.068 INFO  DAGScheduler - Registering RDD 5459 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 303
16:35:41.068 INFO  DAGScheduler - Registering RDD 5463 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 302
16:35:41.068 INFO  DAGScheduler - Registering RDD 5467 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 301
16:35:41.069 INFO  DAGScheduler - Got job 560 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:35:41.069 INFO  DAGScheduler - Final stage: ResultStage 1224 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:35:41.069 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1223)
16:35:41.069 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1223)
16:35:41.069 INFO  DAGScheduler - Submitting ShuffleMapStage 1221 (MapPartitionsRDD[5459] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:35:41.070 INFO  MemoryStore - Block broadcast_1981 stored as values in memory (estimated size 57.6 KiB, free 1910.9 MiB)
16:35:41.070 INFO  MemoryStore - Block broadcast_1981_piece0 stored as bytes in memory (estimated size 19.8 KiB, free 1910.9 MiB)
16:35:41.071 INFO  BlockManagerInfo - Added broadcast_1981_piece0 in memory on localhost:46629 (size: 19.8 KiB, free: 1918.8 MiB)
16:35:41.071 INFO  SparkContext - Created broadcast 1981 from broadcast at DAGScheduler.scala:1580
16:35:41.071 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1221 (MapPartitionsRDD[5459] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:35:41.071 INFO  TaskSchedulerImpl - Adding task set 1221.0 with 1 tasks resource profile 0
16:35:41.071 INFO  TaskSetManager - Starting task 0.0 in stage 1221.0 (TID 1064) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
16:35:41.071 INFO  Executor - Running task 0.0 in stage 1221.0 (TID 1064)
16:35:41.073 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.073 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.077 INFO  Executor - Finished task 0.0 in stage 1221.0 (TID 1064). 1922 bytes result sent to driver
16:35:41.077 INFO  TaskSetManager - Finished task 0.0 in stage 1221.0 (TID 1064) in 6 ms on localhost (executor driver) (1/1)
16:35:41.077 INFO  TaskSchedulerImpl - Removed TaskSet 1221.0, whose tasks have all completed, from pool 
16:35:41.077 INFO  DAGScheduler - ShuffleMapStage 1221 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.008 s
16:35:41.077 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.077 INFO  DAGScheduler - running: HashSet()
16:35:41.078 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1222, ShuffleMapStage 1223, ResultStage 1224)
16:35:41.078 INFO  DAGScheduler - failed: HashSet()
16:35:41.078 INFO  DAGScheduler - Submitting ShuffleMapStage 1222 (MapPartitionsRDD[5463] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:35:41.079 INFO  MemoryStore - Block broadcast_1982 stored as values in memory (estimated size 61.7 KiB, free 1910.9 MiB)
16:35:41.079 INFO  MemoryStore - Block broadcast_1982_piece0 stored as bytes in memory (estimated size 22.0 KiB, free 1910.8 MiB)
16:35:41.080 INFO  BlockManagerInfo - Added broadcast_1982_piece0 in memory on localhost:46629 (size: 22.0 KiB, free: 1918.7 MiB)
16:35:41.080 INFO  SparkContext - Created broadcast 1982 from broadcast at DAGScheduler.scala:1580
16:35:41.080 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1222 (MapPartitionsRDD[5463] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:35:41.080 INFO  TaskSchedulerImpl - Adding task set 1222.0 with 1 tasks resource profile 0
16:35:41.080 INFO  TaskSetManager - Starting task 0.0 in stage 1222.0 (TID 1065) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
16:35:41.080 INFO  Executor - Running task 0.0 in stage 1222.0 (TID 1065)
16:35:41.082 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.0 KiB) non-empty blocks including 1 (2.0 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.082 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.088 INFO  Executor - Finished task 0.0 in stage 1222.0 (TID 1065). 1922 bytes result sent to driver
16:35:41.088 INFO  TaskSetManager - Finished task 0.0 in stage 1222.0 (TID 1065) in 8 ms on localhost (executor driver) (1/1)
16:35:41.088 INFO  TaskSchedulerImpl - Removed TaskSet 1222.0, whose tasks have all completed, from pool 
16:35:41.088 INFO  DAGScheduler - ShuffleMapStage 1222 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.010 s
16:35:41.088 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.088 INFO  DAGScheduler - running: HashSet()
16:35:41.088 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1223, ResultStage 1224)
16:35:41.088 INFO  DAGScheduler - failed: HashSet()
16:35:41.088 INFO  DAGScheduler - Submitting ShuffleMapStage 1223 (MapPartitionsRDD[5467] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:35:41.089 INFO  MemoryStore - Block broadcast_1983 stored as values in memory (estimated size 52.4 KiB, free 1910.8 MiB)
16:35:41.090 INFO  MemoryStore - Block broadcast_1983_piece0 stored as bytes in memory (estimated size 17.3 KiB, free 1910.8 MiB)
16:35:41.090 INFO  BlockManagerInfo - Added broadcast_1983_piece0 in memory on localhost:46629 (size: 17.3 KiB, free: 1918.7 MiB)
16:35:41.090 INFO  SparkContext - Created broadcast 1983 from broadcast at DAGScheduler.scala:1580
16:35:41.090 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1223 (MapPartitionsRDD[5467] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:35:41.090 INFO  TaskSchedulerImpl - Adding task set 1223.0 with 1 tasks resource profile 0
16:35:41.090 INFO  TaskSetManager - Starting task 0.0 in stage 1223.0 (TID 1066) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:41.091 INFO  Executor - Running task 0.0 in stage 1223.0 (TID 1066)
16:35:41.092 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.092 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.094 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.094 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.096 INFO  Executor - Finished task 0.0 in stage 1223.0 (TID 1066). 1922 bytes result sent to driver
16:35:41.096 INFO  TaskSetManager - Finished task 0.0 in stage 1223.0 (TID 1066) in 6 ms on localhost (executor driver) (1/1)
16:35:41.096 INFO  TaskSchedulerImpl - Removed TaskSet 1223.0, whose tasks have all completed, from pool 
16:35:41.096 INFO  DAGScheduler - ShuffleMapStage 1223 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.008 s
16:35:41.096 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.096 INFO  DAGScheduler - running: HashSet()
16:35:41.096 INFO  DAGScheduler - waiting: HashSet(ResultStage 1224)
16:35:41.096 INFO  DAGScheduler - failed: HashSet()
16:35:41.097 INFO  DAGScheduler - Submitting ResultStage 1224 (MapPartitionsRDD[5469] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:35:41.097 INFO  MemoryStore - Block broadcast_1984 stored as values in memory (estimated size 53.6 KiB, free 1910.7 MiB)
16:35:41.098 INFO  MemoryStore - Block broadcast_1984_piece0 stored as bytes in memory (estimated size 18.0 KiB, free 1910.7 MiB)
16:35:41.098 INFO  BlockManagerInfo - Added broadcast_1984_piece0 in memory on localhost:46629 (size: 18.0 KiB, free: 1918.7 MiB)
16:35:41.098 INFO  SparkContext - Created broadcast 1984 from broadcast at DAGScheduler.scala:1580
16:35:41.098 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1224 (MapPartitionsRDD[5469] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:35:41.098 INFO  TaskSchedulerImpl - Adding task set 1224.0 with 1 tasks resource profile 0
16:35:41.099 INFO  TaskSetManager - Starting task 0.0 in stage 1224.0 (TID 1067) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:41.099 INFO  Executor - Running task 0.0 in stage 1224.0 (TID 1067)
16:35:41.101 INFO  ShuffleBlockFetcherIterator - Getting 1 (251.0 B) non-empty blocks including 1 (251.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.101 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.102 INFO  Executor - Finished task 0.0 in stage 1224.0 (TID 1067). 2371 bytes result sent to driver
16:35:41.102 INFO  TaskSetManager - Finished task 0.0 in stage 1224.0 (TID 1067) in 3 ms on localhost (executor driver) (1/1)
16:35:41.102 INFO  TaskSchedulerImpl - Removed TaskSet 1224.0, whose tasks have all completed, from pool 
16:35:41.102 INFO  DAGScheduler - ResultStage 1224 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.005 s
16:35:41.102 INFO  DAGScheduler - Job 560 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:41.102 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1224: Stage finished
16:35:41.102 INFO  DAGScheduler - Job 560 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.034491 s
16:35:41.114 INFO  MemoryStore - Block broadcast_1985 stored as values in memory (estimated size 179.4 KiB, free 1910.5 MiB)
16:35:41.115 INFO  MemoryStore - Block broadcast_1985_piece0 stored as bytes in memory (estimated size 7.9 KiB, free 1910.5 MiB)
16:35:41.115 INFO  BlockManagerInfo - Added broadcast_1985_piece0 in memory on localhost:46629 (size: 7.9 KiB, free: 1918.7 MiB)
16:35:41.115 INFO  SparkContext - Created broadcast 1985 from broadcast at ReadsSparkSink.java:133
16:35:41.118 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:35:41.118 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:41.118 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory: false, ignore cleanup failures: false
16:35:41.134 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:35:41.134 INFO  DAGScheduler - Registering RDD 5471 (mapToPair at SparkUtils.java:161) as input to shuffle 304
16:35:41.134 INFO  DAGScheduler - Got job 561 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:35:41.134 INFO  DAGScheduler - Final stage: ResultStage 1229 (runJob at SparkHadoopWriter.scala:83)
16:35:41.135 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1228)
16:35:41.135 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1228)
16:35:41.135 INFO  DAGScheduler - Submitting ShuffleMapStage 1228 (MapPartitionsRDD[5471] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:41.136 INFO  MemoryStore - Block broadcast_1986 stored as values in memory (estimated size 51.5 KiB, free 1910.5 MiB)
16:35:41.136 INFO  MemoryStore - Block broadcast_1986_piece0 stored as bytes in memory (estimated size 16.8 KiB, free 1910.4 MiB)
16:35:41.136 INFO  BlockManagerInfo - Added broadcast_1986_piece0 in memory on localhost:46629 (size: 16.8 KiB, free: 1918.7 MiB)
16:35:41.136 INFO  SparkContext - Created broadcast 1986 from broadcast at DAGScheduler.scala:1580
16:35:41.136 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1228 (MapPartitionsRDD[5471] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:41.136 INFO  TaskSchedulerImpl - Adding task set 1228.0 with 1 tasks resource profile 0
16:35:41.137 INFO  TaskSetManager - Starting task 0.0 in stage 1228.0 (TID 1068) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:41.137 INFO  Executor - Running task 0.0 in stage 1228.0 (TID 1068)
16:35:41.138 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.139 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.141 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.141 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.143 INFO  Executor - Finished task 0.0 in stage 1228.0 (TID 1068). 1922 bytes result sent to driver
16:35:41.143 INFO  TaskSetManager - Finished task 0.0 in stage 1228.0 (TID 1068) in 6 ms on localhost (executor driver) (1/1)
16:35:41.143 INFO  TaskSchedulerImpl - Removed TaskSet 1228.0, whose tasks have all completed, from pool 
16:35:41.143 INFO  DAGScheduler - ShuffleMapStage 1228 (mapToPair at SparkUtils.java:161) finished in 0.008 s
16:35:41.143 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.143 INFO  DAGScheduler - running: HashSet()
16:35:41.143 INFO  DAGScheduler - waiting: HashSet(ResultStage 1229)
16:35:41.143 INFO  DAGScheduler - failed: HashSet()
16:35:41.143 INFO  DAGScheduler - Submitting ResultStage 1229 (MapPartitionsRDD[5477] at saveAsTextFile at SamSink.java:65), which has no missing parents
16:35:41.150 INFO  MemoryStore - Block broadcast_1987 stored as values in memory (estimated size 196.7 KiB, free 1910.3 MiB)
16:35:41.151 INFO  MemoryStore - Block broadcast_1987_piece0 stored as bytes in memory (estimated size 80.9 KiB, free 1910.2 MiB)
16:35:41.151 INFO  BlockManagerInfo - Added broadcast_1987_piece0 in memory on localhost:46629 (size: 80.9 KiB, free: 1918.6 MiB)
16:35:41.151 INFO  SparkContext - Created broadcast 1987 from broadcast at DAGScheduler.scala:1580
16:35:41.151 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1229 (MapPartitionsRDD[5477] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
16:35:41.151 INFO  TaskSchedulerImpl - Adding task set 1229.0 with 1 tasks resource profile 0
16:35:41.151 INFO  TaskSetManager - Starting task 0.0 in stage 1229.0 (TID 1069) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:41.152 INFO  Executor - Running task 0.0 in stage 1229.0 (TID 1069)
16:35:41.155 INFO  ShuffleBlockFetcherIterator - Getting 1 (14.8 KiB) non-empty blocks including 1 (14.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.155 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.157 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:35:41.157 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:41.157 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory: false, ignore cleanup failures: false
16:35:41.168 INFO  FileOutputCommitter - Saved output of task 'attempt_20250527163541844650956114153274_5477_m_000000_0' to file:/tmp/MarkDups4343932815612202102/MarkDups.sam.parts/_temporary/0/task_20250527163541844650956114153274_5477_m_000000
16:35:41.169 INFO  SparkHadoopMapRedUtil - attempt_20250527163541844650956114153274_5477_m_000000_0: Committed. Elapsed time: 0 ms.
16:35:41.169 INFO  Executor - Finished task 0.0 in stage 1229.0 (TID 1069). 1858 bytes result sent to driver
16:35:41.169 INFO  TaskSetManager - Finished task 0.0 in stage 1229.0 (TID 1069) in 18 ms on localhost (executor driver) (1/1)
16:35:41.169 INFO  TaskSchedulerImpl - Removed TaskSet 1229.0, whose tasks have all completed, from pool 
16:35:41.169 INFO  DAGScheduler - ResultStage 1229 (runJob at SparkHadoopWriter.scala:83) finished in 0.026 s
16:35:41.169 INFO  DAGScheduler - Job 561 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:41.169 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1229: Stage finished
16:35:41.170 INFO  DAGScheduler - Job 561 finished: runJob at SparkHadoopWriter.scala:83, took 0.035482 s
16:35:41.170 INFO  SparkHadoopWriter - Start to commit write Job job_20250527163541844650956114153274_5477.
16:35:41.174 INFO  SparkHadoopWriter - Write Job job_20250527163541844650956114153274_5477 committed. Elapsed time: 4 ms.
16:35:41.182 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups4343932815612202102/MarkDups.sam
16:35:41.186 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups4343932815612202102/MarkDups.sam done
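Unlike the earlier BAM run, this invocation writes SAM text output via saveAsTextFile (SamSink.java:65): each partition becomes a part file under a .parts directory, which is committed and then concatenated as shown above. A minimal sketch of the saveAsTextFile call in Spark's Java API follows; the record strings and output path are placeholders, not the tool's real SAM serialization.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.Arrays;

public class TextSinkSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("text-sink").setMaster("local[1]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // Stand-in for SAM records as tab-separated text lines (illustrative only).
            sc.parallelize(Arrays.asList("r1\t0\tchr1\t100", "r2\t1024\tchr1\t100"))
              // Each partition is written as one part-NNNNN file under the target
              // directory, committed through the Hadoop FileOutputCommitter.
              .saveAsTextFile(args.length > 0 ? args[0] : "/tmp/markdups-sketch.sam.parts");
        }
    }
}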
16:35:41.186 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:35:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
16:35:41.192 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:41.192 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
16:35:41.192 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:35:41.192 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
16:35:41.192 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
16:35:41.192 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:35:41 PM UTC
16:35:41.192 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:41.192 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:41.192 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:35:41.192 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:35:41.192 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:35:41.192 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:35:41.192 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:35:41.192 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:35:41.192 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:35:41.192 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:35:41.192 INFO  MarkDuplicatesSpark - Initializing engine
16:35:41.192 INFO  MarkDuplicatesSpark - Done initializing engine
16:35:41.192 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:35:41.195 INFO  MemoryStore - Block broadcast_1988 stored as values in memory (estimated size 306.3 KiB, free 1909.9 MiB)
16:35:41.203 INFO  MemoryStore - Block broadcast_1988_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1909.8 MiB)
16:35:41.203 INFO  BlockManagerInfo - Added broadcast_1988_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1918.5 MiB)
16:35:41.203 INFO  SparkContext - Created broadcast 1988 from newAPIHadoopFile at PathSplitSource.java:96
16:35:41.223 INFO  MemoryStore - Block broadcast_1989 stored as values in memory (estimated size 306.3 KiB, free 1909.5 MiB)
16:35:41.229 INFO  MemoryStore - Block broadcast_1989_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1909.4 MiB)
16:35:41.229 INFO  BlockManagerInfo - Added broadcast_1989_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1918.5 MiB)
16:35:41.230 INFO  SparkContext - Created broadcast 1989 from newAPIHadoopFile at PathSplitSource.java:96
16:35:41.261 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:35:41.262 INFO  FileInputFormat - Total input files to process : 1
16:35:41.262 INFO  DAGScheduler - Registering RDD 5491 (mapToPair at SparkUtils.java:161) as input to shuffle 305
16:35:41.262 INFO  DAGScheduler - Got job 562 (collect at SparkUtils.java:205) with 1 output partitions
16:35:41.262 INFO  DAGScheduler - Final stage: ResultStage 1231 (collect at SparkUtils.java:205)
16:35:41.262 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1230)
16:35:41.262 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1230)
16:35:41.262 INFO  DAGScheduler - Submitting ShuffleMapStage 1230 (MapPartitionsRDD[5491] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:41.279 INFO  MemoryStore - Block broadcast_1990 stored as values in memory (estimated size 457.4 KiB, free 1909.0 MiB)
16:35:41.281 INFO  MemoryStore - Block broadcast_1990_piece0 stored as bytes in memory (estimated size 201.1 KiB, free 1908.8 MiB)
16:35:41.281 INFO  BlockManagerInfo - Added broadcast_1990_piece0 in memory on localhost:46629 (size: 201.1 KiB, free: 1918.3 MiB)
16:35:41.281 INFO  SparkContext - Created broadcast 1990 from broadcast at DAGScheduler.scala:1580
16:35:41.281 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1230 (MapPartitionsRDD[5491] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:41.281 INFO  TaskSchedulerImpl - Adding task set 1230.0 with 1 tasks resource profile 0
16:35:41.282 INFO  TaskSetManager - Starting task 0.0 in stage 1230.0 (TID 1070) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9738 bytes) 
16:35:41.282 INFO  Executor - Running task 0.0 in stage 1230.0 (TID 1070)
16:35:41.311 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam:0+678
16:35:41.314 INFO  Executor - Finished task 0.0 in stage 1230.0 (TID 1070). 1148 bytes result sent to driver
16:35:41.315 INFO  TaskSetManager - Finished task 0.0 in stage 1230.0 (TID 1070) in 33 ms on localhost (executor driver) (1/1)
16:35:41.315 INFO  TaskSchedulerImpl - Removed TaskSet 1230.0, whose tasks have all completed, from pool 
16:35:41.315 INFO  DAGScheduler - ShuffleMapStage 1230 (mapToPair at SparkUtils.java:161) finished in 0.053 s
16:35:41.315 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.315 INFO  DAGScheduler - running: HashSet()
16:35:41.315 INFO  DAGScheduler - waiting: HashSet(ResultStage 1231)
16:35:41.315 INFO  DAGScheduler - failed: HashSet()
16:35:41.315 INFO  DAGScheduler - Submitting ResultStage 1231 (MapPartitionsRDD[5494] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:35:41.316 INFO  MemoryStore - Block broadcast_1991 stored as values in memory (estimated size 7.4 KiB, free 1908.8 MiB)
16:35:41.316 INFO  MemoryStore - Block broadcast_1991_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1908.8 MiB)
16:35:41.316 INFO  BlockManagerInfo - Added broadcast_1991_piece0 in memory on localhost:46629 (size: 4.1 KiB, free: 1918.3 MiB)
16:35:41.316 INFO  SparkContext - Created broadcast 1991 from broadcast at DAGScheduler.scala:1580
16:35:41.316 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1231 (MapPartitionsRDD[5494] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:35:41.317 INFO  TaskSchedulerImpl - Adding task set 1231.0 with 1 tasks resource profile 0
16:35:41.317 INFO  TaskSetManager - Starting task 0.0 in stage 1231.0 (TID 1071) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:41.317 INFO  Executor - Running task 0.0 in stage 1231.0 (TID 1071)
16:35:41.318 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.318 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.320 INFO  Executor - Finished task 0.0 in stage 1231.0 (TID 1071). 2269 bytes result sent to driver
16:35:41.320 INFO  TaskSetManager - Finished task 0.0 in stage 1231.0 (TID 1071) in 3 ms on localhost (executor driver) (1/1)
16:35:41.320 INFO  TaskSchedulerImpl - Removed TaskSet 1231.0, whose tasks have all completed, from pool 
16:35:41.320 INFO  DAGScheduler - ResultStage 1231 (collect at SparkUtils.java:205) finished in 0.005 s
16:35:41.320 INFO  DAGScheduler - Job 562 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:41.320 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1231: Stage finished
16:35:41.320 INFO  DAGScheduler - Job 562 finished: collect at SparkUtils.java:205, took 0.058999 s
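[Annotation] Job 562 is the initial pass over the input: a mapToPair (SparkUtils.java:161) keys the reads and registers a shuffle, and a driver-side collect (SparkUtils.java:205) pulls the result back, which is why the log shows exactly one ShuffleMapStage followed by one ResultStage. The shuffle-then-collect pattern, reduced to a toy example with invented keys rather than the tool's real read data, looks like this (assumes the JavaSparkContext from the sketch above):

    import java.util.Arrays;
    import java.util.List;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import scala.Tuple2;

    static List<Tuple2<String, Integer>> shuffleThenCollect(JavaSparkContext sc) {
        // Toy stand-in for the reads loaded from the BAM split.
        JavaPairRDD<String, Integer> keyed = sc
                .parallelize(Arrays.asList("readA", "readB", "readA"))
                .mapToPair(name -> new Tuple2<>(name, 1));   // feeds the ShuffleMapStage
        // reduceByKey forces the shuffle; collect() is the ResultStage seen in the log.
        return keyed.reduceByKey(Integer::sum).collect();
    }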
16:35:41.324 INFO  MemoryStore - Block broadcast_1992 stored as values in memory (estimated size 136.0 B, free 1908.8 MiB)
16:35:41.325 INFO  MemoryStore - Block broadcast_1992_piece0 stored as bytes in memory (estimated size 24.0 B, free 1908.8 MiB)
16:35:41.325 INFO  BlockManagerInfo - Added broadcast_1992_piece0 in memory on localhost:46629 (size: 24.0 B, free: 1918.3 MiB)
16:35:41.325 INFO  SparkContext - Created broadcast 1992 from broadcast at MarkDuplicatesSparkUtils.java:126
16:35:41.325 INFO  MemoryStore - Block broadcast_1993 stored as values in memory (estimated size 136.0 B, free 1908.8 MiB)
16:35:41.326 INFO  MemoryStore - Block broadcast_1993_piece0 stored as bytes in memory (estimated size 21.0 B, free 1908.8 MiB)
16:35:41.326 INFO  BlockManagerInfo - Added broadcast_1993_piece0 in memory on localhost:46629 (size: 21.0 B, free: 1918.3 MiB)
16:35:41.326 INFO  SparkContext - Created broadcast 1993 from broadcast at MarkDuplicatesSparkUtils.java:127
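[Annotation] Before the metrics pass, two very small values (136-byte blocks per the MemoryStore lines above) are broadcast from MarkDuplicatesSparkUtils.java:126-127 so every task can read them without the data being reshipped per task. A minimal sketch of that broadcast pattern, using a placeholder map rather than the tool's real metadata:

    import java.util.Map;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.broadcast.Broadcast;

    static Broadcast<Map<String, Integer>> shipLookup(JavaSparkContext sc,
                                                      Map<String, Integer> smallLookup) {
        // Each call surfaces in the log as "Created broadcast N from broadcast at ..."
        return sc.broadcast(smallLookup);
    }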
16:35:41.337 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:35:41.338 INFO  DAGScheduler - Registering RDD 5503 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 308
16:35:41.338 INFO  DAGScheduler - Registering RDD 5507 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 307
16:35:41.338 INFO  DAGScheduler - Registering RDD 5511 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 306
16:35:41.338 INFO  DAGScheduler - Got job 563 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:35:41.338 INFO  DAGScheduler - Final stage: ResultStage 1236 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:35:41.338 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1235)
16:35:41.338 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1235)
16:35:41.338 INFO  DAGScheduler - Submitting ShuffleMapStage 1233 (MapPartitionsRDD[5503] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:35:41.339 INFO  MemoryStore - Block broadcast_1994 stored as values in memory (estimated size 21.1 KiB, free 1908.8 MiB)
16:35:41.339 INFO  MemoryStore - Block broadcast_1994_piece0 stored as bytes in memory (estimated size 10.9 KiB, free 1908.8 MiB)
16:35:41.339 INFO  BlockManagerInfo - Added broadcast_1994_piece0 in memory on localhost:46629 (size: 10.9 KiB, free: 1918.3 MiB)
16:35:41.340 INFO  SparkContext - Created broadcast 1994 from broadcast at DAGScheduler.scala:1580
16:35:41.340 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1233 (MapPartitionsRDD[5503] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:35:41.340 INFO  TaskSchedulerImpl - Adding task set 1233.0 with 1 tasks resource profile 0
16:35:41.340 INFO  TaskSetManager - Starting task 0.0 in stage 1233.0 (TID 1072) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
16:35:41.340 INFO  Executor - Running task 0.0 in stage 1233.0 (TID 1072)
16:35:41.342 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.342 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.344 INFO  Executor - Finished task 0.0 in stage 1233.0 (TID 1072). 1879 bytes result sent to driver
16:35:41.344 INFO  TaskSetManager - Finished task 0.0 in stage 1233.0 (TID 1072) in 4 ms on localhost (executor driver) (1/1)
16:35:41.344 INFO  TaskSchedulerImpl - Removed TaskSet 1233.0, whose tasks have all completed, from pool 
16:35:41.344 INFO  DAGScheduler - ShuffleMapStage 1233 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.006 s
16:35:41.344 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.344 INFO  DAGScheduler - running: HashSet()
16:35:41.344 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1234, ShuffleMapStage 1235, ResultStage 1236)
16:35:41.344 INFO  DAGScheduler - failed: HashSet()
16:35:41.344 INFO  DAGScheduler - Submitting ShuffleMapStage 1234 (MapPartitionsRDD[5507] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:35:41.345 INFO  MemoryStore - Block broadcast_1995 stored as values in memory (estimated size 25.2 KiB, free 1908.7 MiB)
16:35:41.345 INFO  MemoryStore - Block broadcast_1995_piece0 stored as bytes in memory (estimated size 13.1 KiB, free 1908.7 MiB)
16:35:41.345 INFO  BlockManagerInfo - Added broadcast_1995_piece0 in memory on localhost:46629 (size: 13.1 KiB, free: 1918.2 MiB)
16:35:41.345 INFO  SparkContext - Created broadcast 1995 from broadcast at DAGScheduler.scala:1580
16:35:41.346 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1234 (MapPartitionsRDD[5507] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:35:41.346 INFO  TaskSchedulerImpl - Adding task set 1234.0 with 1 tasks resource profile 0
16:35:41.346 INFO  TaskSetManager - Starting task 0.0 in stage 1234.0 (TID 1073) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
16:35:41.346 INFO  Executor - Running task 0.0 in stage 1234.0 (TID 1073)
16:35:41.347 INFO  ShuffleBlockFetcherIterator - Getting 1 (106.0 B) non-empty blocks including 1 (106.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.347 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.350 INFO  Executor - Finished task 0.0 in stage 1234.0 (TID 1073). 1879 bytes result sent to driver
16:35:41.351 INFO  TaskSetManager - Finished task 0.0 in stage 1234.0 (TID 1073) in 5 ms on localhost (executor driver) (1/1)
16:35:41.351 INFO  TaskSchedulerImpl - Removed TaskSet 1234.0, whose tasks have all completed, from pool 
16:35:41.351 INFO  DAGScheduler - ShuffleMapStage 1234 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.007 s
16:35:41.351 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.351 INFO  DAGScheduler - running: HashSet()
16:35:41.351 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1235, ResultStage 1236)
16:35:41.351 INFO  DAGScheduler - failed: HashSet()
16:35:41.351 INFO  DAGScheduler - Submitting ShuffleMapStage 1235 (MapPartitionsRDD[5511] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:35:41.351 INFO  MemoryStore - Block broadcast_1996 stored as values in memory (estimated size 15.9 KiB, free 1908.7 MiB)
16:35:41.352 INFO  MemoryStore - Block broadcast_1996_piece0 stored as bytes in memory (estimated size 8.3 KiB, free 1908.7 MiB)
16:35:41.352 INFO  BlockManagerInfo - Added broadcast_1996_piece0 in memory on localhost:46629 (size: 8.3 KiB, free: 1918.2 MiB)
16:35:41.352 INFO  SparkContext - Created broadcast 1996 from broadcast at DAGScheduler.scala:1580
16:35:41.352 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1235 (MapPartitionsRDD[5511] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:35:41.352 INFO  TaskSchedulerImpl - Adding task set 1235.0 with 1 tasks resource profile 0
16:35:41.353 INFO  TaskSetManager - Starting task 0.0 in stage 1235.0 (TID 1074) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:41.353 INFO  Executor - Running task 0.0 in stage 1235.0 (TID 1074)
16:35:41.354 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.354 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.355 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.355 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.356 INFO  Executor - Finished task 0.0 in stage 1235.0 (TID 1074). 1879 bytes result sent to driver
16:35:41.356 INFO  TaskSetManager - Finished task 0.0 in stage 1235.0 (TID 1074) in 4 ms on localhost (executor driver) (1/1)
16:35:41.356 INFO  TaskSchedulerImpl - Removed TaskSet 1235.0, whose tasks have all completed, from pool 
16:35:41.356 INFO  DAGScheduler - ShuffleMapStage 1235 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.005 s
16:35:41.356 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.356 INFO  DAGScheduler - running: HashSet()
16:35:41.356 INFO  DAGScheduler - waiting: HashSet(ResultStage 1236)
16:35:41.356 INFO  DAGScheduler - failed: HashSet()
16:35:41.356 INFO  DAGScheduler - Submitting ResultStage 1236 (MapPartitionsRDD[5513] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:35:41.357 INFO  MemoryStore - Block broadcast_1997 stored as values in memory (estimated size 17.1 KiB, free 1908.7 MiB)
16:35:41.357 INFO  MemoryStore - Block broadcast_1997_piece0 stored as bytes in memory (estimated size 9.0 KiB, free 1908.7 MiB)
16:35:41.358 INFO  BlockManagerInfo - Added broadcast_1997_piece0 in memory on localhost:46629 (size: 9.0 KiB, free: 1918.2 MiB)
16:35:41.358 INFO  SparkContext - Created broadcast 1997 from broadcast at DAGScheduler.scala:1580
16:35:41.358 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1236 (MapPartitionsRDD[5513] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:35:41.358 INFO  TaskSchedulerImpl - Adding task set 1236.0 with 1 tasks resource profile 0
16:35:41.358 INFO  TaskSetManager - Starting task 0.0 in stage 1236.0 (TID 1075) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:41.358 INFO  Executor - Running task 0.0 in stage 1236.0 (TID 1075)
16:35:41.360 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.360 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.361 INFO  Executor - Finished task 0.0 in stage 1236.0 (TID 1075). 1930 bytes result sent to driver
16:35:41.362 INFO  TaskSetManager - Finished task 0.0 in stage 1236.0 (TID 1075) in 4 ms on localhost (executor driver) (1/1)
16:35:41.362 INFO  TaskSchedulerImpl - Removed TaskSet 1236.0, whose tasks have all completed, from pool 
16:35:41.362 INFO  DAGScheduler - ResultStage 1236 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.005 s
16:35:41.362 INFO  DAGScheduler - Job 563 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:41.362 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1236: Stage finished
16:35:41.362 INFO  DAGScheduler - Job 563 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.024559 s
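[Annotation] Job 563 is the duplicate-marking/metrics pass proper: three shuffles (the flatMapToPair at MarkDuplicatesSparkUtils.java:130, the mapToPair at MarkDuplicatesSpark.java:217, and the mapToPair at MarkDuplicatesSparkUtils.java:497 registered above) ending in a collectAsMap that returns the per-library tallies to the driver. The collectAsMap ending, sketched with placeholder library names and counts rather than the tool's real metrics records:

    import java.util.Arrays;
    import java.util.Map;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import scala.Tuple2;

    static Map<String, Integer> perLibraryCounts(JavaSparkContext sc) {
        JavaPairRDD<String, Integer> byLibrary = sc.parallelizePairs(Arrays.asList(
                new Tuple2<>("Solexa-16404", 1),   // placeholder library/count pairs
                new Tuple2<>("Solexa-16404", 1),
                new Tuple2<>("Solexa-16416", 1)));
        // reduceByKey aggregates per library; collectAsMap brings the small result to the driver.
        return byLibrary.reduceByKey(Integer::sum).collectAsMap();
    }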
16:35:41.373 INFO  MemoryStore - Block broadcast_1998 stored as values in memory (estimated size 5.6 KiB, free 1908.7 MiB)
16:35:41.373 INFO  MemoryStore - Block broadcast_1998_piece0 stored as bytes in memory (estimated size 500.0 B, free 1908.7 MiB)
16:35:41.373 INFO  BlockManagerInfo - Added broadcast_1998_piece0 in memory on localhost:46629 (size: 500.0 B, free: 1918.2 MiB)
16:35:41.373 INFO  SparkContext - Created broadcast 1998 from broadcast at ReadsSparkSink.java:133
16:35:41.376 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:35:41.376 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:41.376 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:35:41.392 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:35:41.392 INFO  DAGScheduler - Registering RDD 5515 (mapToPair at SparkUtils.java:161) as input to shuffle 309
16:35:41.393 INFO  DAGScheduler - Got job 564 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:35:41.393 INFO  DAGScheduler - Final stage: ResultStage 1241 (runJob at SparkHadoopWriter.scala:83)
16:35:41.393 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1240)
16:35:41.393 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1240)
16:35:41.393 INFO  DAGScheduler - Submitting ShuffleMapStage 1240 (MapPartitionsRDD[5515] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:41.393 INFO  MemoryStore - Block broadcast_1999 stored as values in memory (estimated size 15.0 KiB, free 1908.7 MiB)
16:35:41.394 INFO  MemoryStore - Block broadcast_1999_piece0 stored as bytes in memory (estimated size 7.8 KiB, free 1908.6 MiB)
16:35:41.394 INFO  BlockManagerInfo - Added broadcast_1999_piece0 in memory on localhost:46629 (size: 7.8 KiB, free: 1918.2 MiB)
16:35:41.394 INFO  SparkContext - Created broadcast 1999 from broadcast at DAGScheduler.scala:1580
16:35:41.394 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1240 (MapPartitionsRDD[5515] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:41.394 INFO  TaskSchedulerImpl - Adding task set 1240.0 with 1 tasks resource profile 0
16:35:41.394 INFO  TaskSetManager - Starting task 0.0 in stage 1240.0 (TID 1076) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:41.395 INFO  Executor - Running task 0.0 in stage 1240.0 (TID 1076)
16:35:41.396 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.396 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.397 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.397 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.398 INFO  Executor - Finished task 0.0 in stage 1240.0 (TID 1076). 1879 bytes result sent to driver
16:35:41.399 INFO  TaskSetManager - Finished task 0.0 in stage 1240.0 (TID 1076) in 4 ms on localhost (executor driver) (1/1)
16:35:41.399 INFO  TaskSchedulerImpl - Removed TaskSet 1240.0, whose tasks have all completed, from pool 
16:35:41.399 INFO  DAGScheduler - ShuffleMapStage 1240 (mapToPair at SparkUtils.java:161) finished in 0.006 s
16:35:41.399 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.399 INFO  DAGScheduler - running: HashSet()
16:35:41.399 INFO  DAGScheduler - waiting: HashSet(ResultStage 1241)
16:35:41.399 INFO  DAGScheduler - failed: HashSet()
16:35:41.399 INFO  DAGScheduler - Submitting ResultStage 1241 (MapPartitionsRDD[5521] at saveAsTextFile at SamSink.java:65), which has no missing parents
16:35:41.405 INFO  MemoryStore - Block broadcast_2000 stored as values in memory (estimated size 160.3 KiB, free 1908.5 MiB)
16:35:41.406 INFO  MemoryStore - Block broadcast_2000_piece0 stored as bytes in memory (estimated size 72.1 KiB, free 1908.4 MiB)
16:35:41.406 INFO  BlockManagerInfo - Added broadcast_2000_piece0 in memory on localhost:46629 (size: 72.1 KiB, free: 1918.1 MiB)
16:35:41.406 INFO  SparkContext - Created broadcast 2000 from broadcast at DAGScheduler.scala:1580
16:35:41.406 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1241 (MapPartitionsRDD[5521] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
16:35:41.406 INFO  TaskSchedulerImpl - Adding task set 1241.0 with 1 tasks resource profile 0
16:35:41.407 INFO  TaskSetManager - Starting task 0.0 in stage 1241.0 (TID 1077) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:41.407 INFO  Executor - Running task 0.0 in stage 1241.0 (TID 1077)
16:35:41.410 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.410 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.411 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:35:41.411 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:41.411 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:35:41.423 INFO  FileOutputCommitter - Saved output of task 'attempt_20250527163541188289393465619385_5521_m_000000_0' to file:/tmp/MarkDups18096815761496668518/MarkDups.sam.parts/_temporary/0/task_20250527163541188289393465619385_5521_m_000000
16:35:41.423 INFO  SparkHadoopMapRedUtil - attempt_20250527163541188289393465619385_5521_m_000000_0: Committed. Elapsed time: 0 ms.
16:35:41.423 INFO  Executor - Finished task 0.0 in stage 1241.0 (TID 1077). 1858 bytes result sent to driver
16:35:41.424 INFO  TaskSetManager - Finished task 0.0 in stage 1241.0 (TID 1077) in 17 ms on localhost (executor driver) (1/1)
16:35:41.424 INFO  TaskSchedulerImpl - Removed TaskSet 1241.0, whose tasks have all completed, from pool 
16:35:41.424 INFO  DAGScheduler - ResultStage 1241 (runJob at SparkHadoopWriter.scala:83) finished in 0.025 s
16:35:41.424 INFO  DAGScheduler - Job 564 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:41.424 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1241: Stage finished
16:35:41.424 INFO  DAGScheduler - Job 564 finished: runJob at SparkHadoopWriter.scala:83, took 0.031695 s
16:35:41.424 INFO  SparkHadoopWriter - Start to commit write Job job_20250527163541188289393465619385_5521.
16:35:41.428 INFO  SparkHadoopWriter - Write Job job_20250527163541188289393465619385_5521 committed. Elapsed time: 3 ms.
16:35:41.435 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups18096815761496668518/MarkDups.sam
16:35:41.439 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups18096815761496668518/MarkDups.sam done
16:35:41.439 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:35:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
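[Annotation] The write path in Job 564 explains the two HadoopFileSystemWrapper lines: the ResultStage saves the records as text through a Hadoop FileOutputCommitter into MarkDups.sam.parts, and the wrapper then concatenates the parts into the single /tmp/.../MarkDups.sam. A reduced sketch of the save step only (the header handling and concatenation are GATK-specific and omitted), with an invented output path and placeholder SAM lines:

    import java.util.Arrays;
    import org.apache.spark.api.java.JavaSparkContext;

    static void writeSamText(JavaSparkContext sc, String outputDir) {
        // Placeholder SAM-formatted lines; the real RDD carries the tool's reads as text.
        sc.parallelize(Arrays.asList(
                  "readA\t99\tchr1\t100\t60\t50M\t=\t300\t250\t*\t*",
                  "readB\t147\tchr1\t300\t60\t50M\t=\t100\t-250\t*\t*"))
          .coalesce(1)                    // single part, matching this single-partition test
          .saveAsTextFile(outputDir);     // writes part files under outputDir; the tool then
                                          // concatenates them into the final .sam, as logged
    }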
16:35:41.445 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:41.445 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
16:35:41.445 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:35:41.445 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
16:35:41.445 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
16:35:41.445 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:35:41 PM UTC
16:35:41.445 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:41.445 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:35:41.445 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:35:41.445 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:35:41.445 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:35:41.445 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:35:41.445 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:35:41.445 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:35:41.445 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:35:41.445 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:35:41.445 INFO  MarkDuplicatesSpark - Initializing engine
16:35:41.445 INFO  MarkDuplicatesSpark - Done initializing engine
16:35:41.445 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:35:41.447 INFO  MemoryStore - Block broadcast_2001 stored as values in memory (estimated size 306.3 KiB, free 1908.1 MiB)
16:35:41.454 INFO  MemoryStore - Block broadcast_2001_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1908.1 MiB)
16:35:41.454 INFO  BlockManagerInfo - Added broadcast_2001_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1918.1 MiB)
16:35:41.454 INFO  SparkContext - Created broadcast 2001 from newAPIHadoopFile at PathSplitSource.java:96
16:35:41.474 INFO  MemoryStore - Block broadcast_2002 stored as values in memory (estimated size 306.3 KiB, free 1907.8 MiB)
16:35:41.480 INFO  MemoryStore - Block broadcast_2002_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1907.7 MiB)
16:35:41.480 INFO  BlockManagerInfo - Added broadcast_2002_piece0 in memory on localhost:46629 (size: 64.4 KiB, free: 1918.0 MiB)
16:35:41.480 INFO  SparkContext - Created broadcast 2002 from newAPIHadoopFile at PathSplitSource.java:96
16:35:41.513 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:35:41.513 INFO  FileInputFormat - Total input files to process : 1
16:35:41.514 INFO  DAGScheduler - Registering RDD 5535 (mapToPair at SparkUtils.java:161) as input to shuffle 310
16:35:41.514 INFO  DAGScheduler - Got job 565 (collect at SparkUtils.java:205) with 1 output partitions
16:35:41.514 INFO  DAGScheduler - Final stage: ResultStage 1243 (collect at SparkUtils.java:205)
16:35:41.514 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1242)
16:35:41.514 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1242)
16:35:41.514 INFO  DAGScheduler - Submitting ShuffleMapStage 1242 (MapPartitionsRDD[5535] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:41.531 INFO  MemoryStore - Block broadcast_2003 stored as values in memory (estimated size 481.4 KiB, free 1907.2 MiB)
16:35:41.533 INFO  MemoryStore - Block broadcast_2003_piece0 stored as bytes in memory (estimated size 207.4 KiB, free 1907.0 MiB)
16:35:41.533 INFO  BlockManagerInfo - Added broadcast_2003_piece0 in memory on localhost:46629 (size: 207.4 KiB, free: 1917.8 MiB)
16:35:41.533 INFO  SparkContext - Created broadcast 2003 from broadcast at DAGScheduler.scala:1580
16:35:41.534 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1242 (MapPartitionsRDD[5535] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:41.534 INFO  TaskSchedulerImpl - Adding task set 1242.0 with 1 tasks resource profile 0
16:35:41.534 INFO  TaskSetManager - Starting task 0.0 in stage 1242.0 (TID 1078) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9757 bytes) 
16:35:41.534 INFO  Executor - Running task 0.0 in stage 1242.0 (TID 1078)
16:35:41.564 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam:0+5416
16:35:41.569 INFO  Executor - Finished task 0.0 in stage 1242.0 (TID 1078). 1148 bytes result sent to driver
16:35:41.569 INFO  TaskSetManager - Finished task 0.0 in stage 1242.0 (TID 1078) in 35 ms on localhost (executor driver) (1/1)
16:35:41.569 INFO  TaskSchedulerImpl - Removed TaskSet 1242.0, whose tasks have all completed, from pool 
16:35:41.570 INFO  DAGScheduler - ShuffleMapStage 1242 (mapToPair at SparkUtils.java:161) finished in 0.056 s
16:35:41.570 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.570 INFO  DAGScheduler - running: HashSet()
16:35:41.570 INFO  DAGScheduler - waiting: HashSet(ResultStage 1243)
16:35:41.570 INFO  DAGScheduler - failed: HashSet()
16:35:41.570 INFO  DAGScheduler - Submitting ResultStage 1243 (MapPartitionsRDD[5538] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:35:41.570 INFO  MemoryStore - Block broadcast_2004 stored as values in memory (estimated size 7.4 KiB, free 1907.0 MiB)
16:35:41.571 INFO  MemoryStore - Block broadcast_2004_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1907.0 MiB)
16:35:41.571 INFO  BlockManagerInfo - Added broadcast_2004_piece0 in memory on localhost:46629 (size: 4.1 KiB, free: 1917.8 MiB)
16:35:41.571 INFO  SparkContext - Created broadcast 2004 from broadcast at DAGScheduler.scala:1580
16:35:41.571 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1243 (MapPartitionsRDD[5538] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:35:41.571 INFO  TaskSchedulerImpl - Adding task set 1243.0 with 1 tasks resource profile 0
16:35:41.571 INFO  TaskSetManager - Starting task 0.0 in stage 1243.0 (TID 1079) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:41.572 INFO  Executor - Running task 0.0 in stage 1243.0 (TID 1079)
16:35:41.572 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.572 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.574 INFO  Executor - Finished task 0.0 in stage 1243.0 (TID 1079). 2039 bytes result sent to driver
16:35:41.574 INFO  TaskSetManager - Finished task 0.0 in stage 1243.0 (TID 1079) in 3 ms on localhost (executor driver) (1/1)
16:35:41.574 INFO  TaskSchedulerImpl - Removed TaskSet 1243.0, whose tasks have all completed, from pool 
16:35:41.574 INFO  DAGScheduler - ResultStage 1243 (collect at SparkUtils.java:205) finished in 0.004 s
16:35:41.574 INFO  DAGScheduler - Job 565 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:41.574 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1243: Stage finished
16:35:41.574 INFO  DAGScheduler - Job 565 finished: collect at SparkUtils.java:205, took 0.061376 s
16:35:41.578 INFO  MemoryStore - Block broadcast_2005 stored as values in memory (estimated size 392.0 B, free 1907.0 MiB)
16:35:41.579 INFO  MemoryStore - Block broadcast_2005_piece0 stored as bytes in memory (estimated size 33.0 B, free 1907.0 MiB)
16:35:41.579 INFO  BlockManagerInfo - Added broadcast_2005_piece0 in memory on localhost:46629 (size: 33.0 B, free: 1917.8 MiB)
16:35:41.579 INFO  SparkContext - Created broadcast 2005 from broadcast at MarkDuplicatesSparkUtils.java:126
16:35:41.579 INFO  MemoryStore - Block broadcast_2006 stored as values in memory (estimated size 144.0 B, free 1907.0 MiB)
16:35:41.579 INFO  MemoryStore - Block broadcast_2006_piece0 stored as bytes in memory (estimated size 28.0 B, free 1907.0 MiB)
16:35:41.579 INFO  BlockManagerInfo - Added broadcast_2006_piece0 in memory on localhost:46629 (size: 28.0 B, free: 1917.8 MiB)
16:35:41.580 INFO  SparkContext - Created broadcast 2006 from broadcast at MarkDuplicatesSparkUtils.java:127
16:35:41.592 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:35:41.592 INFO  DAGScheduler - Registering RDD 5547 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 313
16:35:41.593 INFO  DAGScheduler - Registering RDD 5551 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 312
16:35:41.593 INFO  DAGScheduler - Registering RDD 5555 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 311
16:35:41.593 INFO  DAGScheduler - Got job 566 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:35:41.593 INFO  DAGScheduler - Final stage: ResultStage 1248 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:35:41.593 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1247)
16:35:41.593 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1247)
16:35:41.593 INFO  DAGScheduler - Submitting ShuffleMapStage 1245 (MapPartitionsRDD[5547] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:35:41.594 INFO  MemoryStore - Block broadcast_2007 stored as values in memory (estimated size 45.2 KiB, free 1907.0 MiB)
16:35:41.594 INFO  MemoryStore - Block broadcast_2007_piece0 stored as bytes in memory (estimated size 17.1 KiB, free 1907.0 MiB)
16:35:41.594 INFO  BlockManagerInfo - Added broadcast_2007_piece0 in memory on localhost:46629 (size: 17.1 KiB, free: 1917.8 MiB)
16:35:41.594 INFO  SparkContext - Created broadcast 2007 from broadcast at DAGScheduler.scala:1580
16:35:41.594 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1245 (MapPartitionsRDD[5547] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:35:41.594 INFO  TaskSchedulerImpl - Adding task set 1245.0 with 1 tasks resource profile 0
16:35:41.595 INFO  TaskSetManager - Starting task 0.0 in stage 1245.0 (TID 1080) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
16:35:41.595 INFO  Executor - Running task 0.0 in stage 1245.0 (TID 1080)
16:35:41.596 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.596 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.599 INFO  Executor - Finished task 0.0 in stage 1245.0 (TID 1080). 1922 bytes result sent to driver
16:35:41.599 INFO  TaskSetManager - Finished task 0.0 in stage 1245.0 (TID 1080) in 4 ms on localhost (executor driver) (1/1)
16:35:41.599 INFO  TaskSchedulerImpl - Removed TaskSet 1245.0, whose tasks have all completed, from pool 
16:35:41.599 INFO  DAGScheduler - ShuffleMapStage 1245 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.006 s
16:35:41.599 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.599 INFO  DAGScheduler - running: HashSet()
16:35:41.599 INFO  DAGScheduler - waiting: HashSet(ResultStage 1248, ShuffleMapStage 1246, ShuffleMapStage 1247)
16:35:41.599 INFO  DAGScheduler - failed: HashSet()
16:35:41.599 INFO  DAGScheduler - Submitting ShuffleMapStage 1246 (MapPartitionsRDD[5551] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:35:41.600 INFO  MemoryStore - Block broadcast_2008 stored as values in memory (estimated size 49.3 KiB, free 1906.9 MiB)
16:35:41.601 INFO  MemoryStore - Block broadcast_2008_piece0 stored as bytes in memory (estimated size 19.3 KiB, free 1906.9 MiB)
16:35:41.601 INFO  BlockManagerInfo - Added broadcast_2008_piece0 in memory on localhost:46629 (size: 19.3 KiB, free: 1917.8 MiB)
16:35:41.601 INFO  SparkContext - Created broadcast 2008 from broadcast at DAGScheduler.scala:1580
16:35:41.601 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1246 (MapPartitionsRDD[5551] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:35:41.601 INFO  TaskSchedulerImpl - Adding task set 1246.0 with 1 tasks resource profile 0
16:35:41.601 INFO  TaskSetManager - Starting task 0.0 in stage 1246.0 (TID 1081) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
16:35:41.601 INFO  Executor - Running task 0.0 in stage 1246.0 (TID 1081)
16:35:41.603 INFO  ShuffleBlockFetcherIterator - Getting 1 (593.0 B) non-empty blocks including 1 (593.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.603 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.607 INFO  Executor - Finished task 0.0 in stage 1246.0 (TID 1081). 1922 bytes result sent to driver
16:35:41.607 INFO  TaskSetManager - Finished task 0.0 in stage 1246.0 (TID 1081) in 6 ms on localhost (executor driver) (1/1)
16:35:41.607 INFO  TaskSchedulerImpl - Removed TaskSet 1246.0, whose tasks have all completed, from pool 
16:35:41.607 INFO  DAGScheduler - ShuffleMapStage 1246 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.008 s
16:35:41.607 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.607 INFO  DAGScheduler - running: HashSet()
16:35:41.607 INFO  DAGScheduler - waiting: HashSet(ResultStage 1248, ShuffleMapStage 1247)
16:35:41.607 INFO  DAGScheduler - failed: HashSet()
16:35:41.607 INFO  DAGScheduler - Submitting ShuffleMapStage 1247 (MapPartitionsRDD[5555] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:35:41.608 INFO  MemoryStore - Block broadcast_2009 stored as values in memory (estimated size 40.0 KiB, free 1906.8 MiB)
16:35:41.609 INFO  MemoryStore - Block broadcast_2009_piece0 stored as bytes in memory (estimated size 14.5 KiB, free 1906.8 MiB)
16:35:41.609 INFO  BlockManagerInfo - Added broadcast_2009_piece0 in memory on localhost:46629 (size: 14.5 KiB, free: 1917.8 MiB)
16:35:41.609 INFO  SparkContext - Created broadcast 2009 from broadcast at DAGScheduler.scala:1580
16:35:41.609 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1247 (MapPartitionsRDD[5555] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:35:41.609 INFO  TaskSchedulerImpl - Adding task set 1247.0 with 1 tasks resource profile 0
16:35:41.609 INFO  TaskSetManager - Starting task 0.0 in stage 1247.0 (TID 1082) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:41.609 INFO  Executor - Running task 0.0 in stage 1247.0 (TID 1082)
16:35:41.611 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.611 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.612 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.612 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.613 INFO  Executor - Finished task 0.0 in stage 1247.0 (TID 1082). 1922 bytes result sent to driver
16:35:41.613 INFO  TaskSetManager - Finished task 0.0 in stage 1247.0 (TID 1082) in 4 ms on localhost (executor driver) (1/1)
16:35:41.613 INFO  TaskSchedulerImpl - Removed TaskSet 1247.0, whose tasks have all completed, from pool 
16:35:41.613 INFO  DAGScheduler - ShuffleMapStage 1247 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.005 s
16:35:41.614 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.614 INFO  DAGScheduler - running: HashSet()
16:35:41.614 INFO  DAGScheduler - waiting: HashSet(ResultStage 1248)
16:35:41.614 INFO  DAGScheduler - failed: HashSet()
16:35:41.614 INFO  DAGScheduler - Submitting ResultStage 1248 (MapPartitionsRDD[5557] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:35:41.614 INFO  MemoryStore - Block broadcast_2010 stored as values in memory (estimated size 41.2 KiB, free 1906.8 MiB)
16:35:41.615 INFO  MemoryStore - Block broadcast_2010_piece0 stored as bytes in memory (estimated size 15.3 KiB, free 1906.8 MiB)
16:35:41.615 INFO  BlockManagerInfo - Added broadcast_2010_piece0 in memory on localhost:46629 (size: 15.3 KiB, free: 1917.8 MiB)
16:35:41.615 INFO  SparkContext - Created broadcast 2010 from broadcast at DAGScheduler.scala:1580
16:35:41.615 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1248 (MapPartitionsRDD[5557] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:35:41.615 INFO  TaskSchedulerImpl - Adding task set 1248.0 with 1 tasks resource profile 0
16:35:41.616 INFO  TaskSetManager - Starting task 0.0 in stage 1248.0 (TID 1083) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:41.616 INFO  Executor - Running task 0.0 in stage 1248.0 (TID 1083)
16:35:41.617 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.617 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.618 INFO  Executor - Finished task 0.0 in stage 1248.0 (TID 1083). 1944 bytes result sent to driver
16:35:41.618 INFO  TaskSetManager - Finished task 0.0 in stage 1248.0 (TID 1083) in 3 ms on localhost (executor driver) (1/1)
16:35:41.618 INFO  TaskSchedulerImpl - Removed TaskSet 1248.0, whose tasks have all completed, from pool 
16:35:41.619 INFO  DAGScheduler - ResultStage 1248 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.004 s
16:35:41.619 INFO  DAGScheduler - Job 566 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:41.619 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1248: Stage finished
16:35:41.619 INFO  DAGScheduler - Job 566 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.026550 s
16:35:41.630 INFO  MemoryStore - Block broadcast_2011 stored as values in memory (estimated size 93.5 KiB, free 1906.7 MiB)
16:35:41.631 INFO  MemoryStore - Block broadcast_2011_piece0 stored as bytes in memory (estimated size 5.6 KiB, free 1906.7 MiB)
16:35:41.631 INFO  BlockManagerInfo - Added broadcast_2011_piece0 in memory on localhost:46629 (size: 5.6 KiB, free: 1917.7 MiB)
16:35:41.631 INFO  SparkContext - Created broadcast 2011 from broadcast at ReadsSparkSink.java:133
16:35:41.635 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:35:41.635 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:41.635 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:35:41.653 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:35:41.653 INFO  DAGScheduler - Registering RDD 5559 (mapToPair at SparkUtils.java:161) as input to shuffle 314
16:35:41.653 INFO  DAGScheduler - Got job 567 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:35:41.653 INFO  DAGScheduler - Final stage: ResultStage 1253 (runJob at SparkHadoopWriter.scala:83)
16:35:41.653 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1252)
16:35:41.653 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1252)
16:35:41.654 INFO  DAGScheduler - Submitting ShuffleMapStage 1252 (MapPartitionsRDD[5559] at mapToPair at SparkUtils.java:161), which has no missing parents
16:35:41.654 INFO  MemoryStore - Block broadcast_2012 stored as values in memory (estimated size 39.1 KiB, free 1906.6 MiB)
16:35:41.661 INFO  BlockManagerInfo - Removed broadcast_1996_piece0 on localhost:46629 in memory (size: 8.3 KiB, free: 1917.8 MiB)
16:35:41.661 INFO  MemoryStore - Block broadcast_2012_piece0 stored as bytes in memory (estimated size 14.1 KiB, free 1906.6 MiB)
16:35:41.661 INFO  BlockManagerInfo - Added broadcast_2012_piece0 in memory on localhost:46629 (size: 14.1 KiB, free: 1917.7 MiB)
16:35:41.662 INFO  SparkContext - Created broadcast 2012 from broadcast at DAGScheduler.scala:1580
16:35:41.662 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1252 (MapPartitionsRDD[5559] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:35:41.662 INFO  TaskSchedulerImpl - Adding task set 1252.0 with 1 tasks resource profile 0
16:35:41.662 INFO  BlockManagerInfo - Removed broadcast_1993_piece0 on localhost:46629 in memory (size: 21.0 B, free: 1917.7 MiB)
16:35:41.663 INFO  BlockManagerInfo - Removed broadcast_1964_piece0 on localhost:46629 in memory (size: 201.2 KiB, free: 1917.9 MiB)
16:35:41.663 INFO  TaskSetManager - Starting task 0.0 in stage 1252.0 (TID 1084) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
16:35:41.663 INFO  Executor - Running task 0.0 in stage 1252.0 (TID 1084)
16:35:41.663 INFO  BlockManagerInfo - Removed broadcast_1967_piece0 on localhost:46629 in memory (size: 31.0 B, free: 1917.9 MiB)
16:35:41.664 INFO  BlockManagerInfo - Removed broadcast_1976_piece0 on localhost:46629 in memory (size: 64.4 KiB, free: 1918.0 MiB)
16:35:41.664 INFO  BlockManagerInfo - Removed broadcast_1999_piece0 on localhost:46629 in memory (size: 7.8 KiB, free: 1918.0 MiB)
16:35:41.664 INFO  BlockManagerInfo - Removed broadcast_1982_piece0 on localhost:46629 in memory (size: 22.0 KiB, free: 1918.0 MiB)
16:35:41.665 INFO  BlockManagerInfo - Removed broadcast_1973_piece0 on localhost:46629 in memory (size: 8.0 KiB, free: 1918.0 MiB)
16:35:41.665 INFO  BlockManagerInfo - Removed broadcast_2000_piece0 on localhost:46629 in memory (size: 72.1 KiB, free: 1918.1 MiB)
16:35:41.665 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.665 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.666 INFO  BlockManagerInfo - Removed broadcast_1992_piece0 on localhost:46629 in memory (size: 24.0 B, free: 1918.1 MiB)
16:35:41.666 INFO  BlockManagerInfo - Removed broadcast_2010_piece0 on localhost:46629 in memory (size: 15.3 KiB, free: 1918.1 MiB)
16:35:41.667 INFO  BlockManagerInfo - Removed broadcast_2004_piece0 on localhost:46629 in memory (size: 4.1 KiB, free: 1918.1 MiB)
16:35:41.667 INFO  BlockManagerInfo - Removed broadcast_1968_piece0 on localhost:46629 in memory (size: 11.2 KiB, free: 1918.1 MiB)
16:35:41.667 INFO  BlockManagerInfo - Removed broadcast_1995_piece0 on localhost:46629 in memory (size: 13.1 KiB, free: 1918.1 MiB)
16:35:41.667 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.668 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.668 INFO  BlockManagerInfo - Removed broadcast_1978_piece0 on localhost:46629 in memory (size: 4.1 KiB, free: 1918.2 MiB)
16:35:41.668 INFO  BlockManagerInfo - Removed broadcast_1963_piece0 on localhost:46629 in memory (size: 64.3 KiB, free: 1918.2 MiB)
16:35:41.668 INFO  BlockManagerInfo - Removed broadcast_1962_piece0 on localhost:46629 in memory (size: 64.3 KiB, free: 1918.3 MiB)
16:35:41.669 INFO  BlockManagerInfo - Removed broadcast_1981_piece0 on localhost:46629 in memory (size: 19.8 KiB, free: 1918.3 MiB)
16:35:41.669 INFO  BlockManagerInfo - Removed broadcast_2001_piece0 on localhost:46629 in memory (size: 64.4 KiB, free: 1918.4 MiB)
16:35:41.669 INFO  Executor - Finished task 0.0 in stage 1252.0 (TID 1084). 1922 bytes result sent to driver
16:35:41.669 INFO  BlockManagerInfo - Removed broadcast_2003_piece0 on localhost:46629 in memory (size: 207.4 KiB, free: 1918.6 MiB)
16:35:41.669 INFO  TaskSetManager - Finished task 0.0 in stage 1252.0 (TID 1084) in 7 ms on localhost (executor driver) (1/1)
16:35:41.669 INFO  TaskSchedulerImpl - Removed TaskSet 1252.0, whose tasks have all completed, from pool 
16:35:41.669 INFO  DAGScheduler - ShuffleMapStage 1252 (mapToPair at SparkUtils.java:161) finished in 0.015 s
16:35:41.669 INFO  DAGScheduler - looking for newly runnable stages
16:35:41.669 INFO  DAGScheduler - running: HashSet()
16:35:41.669 INFO  DAGScheduler - waiting: HashSet(ResultStage 1253)
16:35:41.670 INFO  DAGScheduler - failed: HashSet()
16:35:41.670 INFO  BlockManagerInfo - Removed broadcast_1970_piece0 on localhost:46629 in memory (size: 8.5 KiB, free: 1918.6 MiB)
16:35:41.670 INFO  DAGScheduler - Submitting ResultStage 1253 (MapPartitionsRDD[5565] at saveAsTextFile at SamSink.java:65), which has no missing parents
16:35:41.670 INFO  BlockManagerInfo - Removed broadcast_1998_piece0 on localhost:46629 in memory (size: 500.0 B, free: 1918.6 MiB)
16:35:41.670 INFO  BlockManagerInfo - Removed broadcast_1994_piece0 on localhost:46629 in memory (size: 10.9 KiB, free: 1918.6 MiB)
16:35:41.671 INFO  BlockManagerInfo - Removed broadcast_1965_piece0 on localhost:46629 in memory (size: 4.1 KiB, free: 1918.6 MiB)
16:35:41.671 INFO  BlockManagerInfo - Removed broadcast_1997_piece0 on localhost:46629 in memory (size: 9.0 KiB, free: 1918.6 MiB)
16:35:41.671 INFO  BlockManagerInfo - Removed broadcast_1990_piece0 on localhost:46629 in memory (size: 201.1 KiB, free: 1918.8 MiB)
16:35:41.672 INFO  BlockManagerInfo - Removed broadcast_1969_piece0 on localhost:46629 in memory (size: 13.2 KiB, free: 1918.8 MiB)
16:35:41.672 INFO  BlockManagerInfo - Removed broadcast_1980_piece0 on localhost:46629 in memory (size: 209.0 B, free: 1918.8 MiB)
16:35:41.672 INFO  BlockManagerInfo - Removed broadcast_1971_piece0 on localhost:46629 in memory (size: 9.1 KiB, free: 1918.8 MiB)
16:35:41.673 INFO  BlockManagerInfo - Removed broadcast_1966_piece0 on localhost:46629 in memory (size: 19.0 B, free: 1918.8 MiB)
16:35:41.673 INFO  BlockManagerInfo - Removed broadcast_2007_piece0 on localhost:46629 in memory (size: 17.1 KiB, free: 1918.8 MiB)
16:35:41.673 INFO  BlockManagerInfo - Removed broadcast_2008_piece0 on localhost:46629 in memory (size: 19.3 KiB, free: 1918.8 MiB)
16:35:41.674 INFO  BlockManagerInfo - Removed broadcast_1989_piece0 on localhost:46629 in memory (size: 64.4 KiB, free: 1918.9 MiB)
16:35:41.675 INFO  BlockManagerInfo - Removed broadcast_1988_piece0 on localhost:46629 in memory (size: 64.4 KiB, free: 1919.0 MiB)
16:35:41.675 INFO  BlockManagerInfo - Removed broadcast_1977_piece0 on localhost:46629 in memory (size: 210.1 KiB, free: 1919.2 MiB)
16:35:41.675 INFO  BlockManagerInfo - Removed broadcast_1983_piece0 on localhost:46629 in memory (size: 17.3 KiB, free: 1919.2 MiB)
16:35:41.676 INFO  BlockManagerInfo - Removed broadcast_1986_piece0 on localhost:46629 in memory (size: 16.8 KiB, free: 1919.2 MiB)
16:35:41.676 INFO  BlockManagerInfo - Removed broadcast_1985_piece0 on localhost:46629 in memory (size: 7.9 KiB, free: 1919.2 MiB)
16:35:41.677 INFO  BlockManagerInfo - Removed broadcast_1987_piece0 on localhost:46629 in memory (size: 80.9 KiB, free: 1919.3 MiB)
16:35:41.677 INFO  BlockManagerInfo - Removed broadcast_2009_piece0 on localhost:46629 in memory (size: 14.5 KiB, free: 1919.3 MiB)
16:35:41.678 INFO  BlockManagerInfo - Removed broadcast_1984_piece0 on localhost:46629 in memory (size: 18.0 KiB, free: 1919.3 MiB)
16:35:41.679 INFO  BlockManagerInfo - Removed broadcast_1975_piece0 on localhost:46629 in memory (size: 64.4 KiB, free: 1919.4 MiB)
16:35:41.679 INFO  BlockManagerInfo - Removed broadcast_1991_piece0 on localhost:46629 in memory (size: 4.1 KiB, free: 1919.4 MiB)
16:35:41.679 INFO  BlockManagerInfo - Removed broadcast_1974_piece0 on localhost:46629 in memory (size: 72.0 KiB, free: 1919.5 MiB)
16:35:41.680 INFO  BlockManagerInfo - Removed broadcast_1979_piece0 on localhost:46629 in memory (size: 540.0 B, free: 1919.5 MiB)
16:35:41.680 INFO  MemoryStore - Block broadcast_2013 stored as values in memory (estimated size 184.3 KiB, free 1913.5 MiB)
16:35:41.680 INFO  BlockManagerInfo - Removed broadcast_1972_piece0 on localhost:46629 in memory (size: 663.0 B, free: 1919.5 MiB)
16:35:41.681 INFO  MemoryStore - Block broadcast_2013_piece0 stored as bytes in memory (estimated size 78.3 KiB, free 1913.4 MiB)
16:35:41.681 INFO  BlockManagerInfo - Added broadcast_2013_piece0 in memory on localhost:46629 (size: 78.3 KiB, free: 1919.4 MiB)
16:35:41.681 INFO  SparkContext - Created broadcast 2013 from broadcast at DAGScheduler.scala:1580
16:35:41.682 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1253 (MapPartitionsRDD[5565] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
16:35:41.682 INFO  TaskSchedulerImpl - Adding task set 1253.0 with 1 tasks resource profile 0
16:35:41.682 INFO  TaskSetManager - Starting task 0.0 in stage 1253.0 (TID 1085) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
16:35:41.682 INFO  Executor - Running task 0.0 in stage 1253.0 (TID 1085)
16:35:41.686 INFO  ShuffleBlockFetcherIterator - Getting 1 (3.9 KiB) non-empty blocks including 1 (3.9 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:35:41.686 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:35:41.688 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:35:41.688 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:35:41.688 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:35:41.701 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271635415922555087553304145_5565_m_000000_0' to file:/tmp/MarkDups14031394904210693017/MarkDups.sam.parts/_temporary/0/task_202505271635415922555087553304145_5565_m_000000
16:35:41.701 INFO  SparkHadoopMapRedUtil - attempt_202505271635415922555087553304145_5565_m_000000_0: Committed. Elapsed time: 0 ms.
16:35:41.701 INFO  Executor - Finished task 0.0 in stage 1253.0 (TID 1085). 1858 bytes result sent to driver
16:35:41.701 INFO  TaskSetManager - Finished task 0.0 in stage 1253.0 (TID 1085) in 19 ms on localhost (executor driver) (1/1)
16:35:41.701 INFO  TaskSchedulerImpl - Removed TaskSet 1253.0, whose tasks have all completed, from pool 
16:35:41.701 INFO  DAGScheduler - ResultStage 1253 (runJob at SparkHadoopWriter.scala:83) finished in 0.031 s
16:35:41.702 INFO  DAGScheduler - Job 567 is finished. Cancelling potential speculative or zombie tasks for this job
16:35:41.702 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1253: Stage finished
16:35:41.702 INFO  DAGScheduler - Job 567 finished: runJob at SparkHadoopWriter.scala:83, took 0.048937 s
16:35:41.702 INFO  SparkHadoopWriter - Start to commit write Job job_202505271635415922555087553304145_5565.
16:35:41.707 INFO  SparkHadoopWriter - Write Job job_202505271635415922555087553304145_5565 committed. Elapsed time: 4 ms.
16:35:41.714 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups14031394904210693017/MarkDups.sam
16:35:41.717 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups14031394904210693017/MarkDups.sam done
16:35:41.718 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:35:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
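[Annotation] The burst of "Removed broadcast_*_piece0" lines interleaved with stages 1252-1253 above is Spark's context cleaner reclaiming memory for broadcasts from earlier test cases that have gone out of scope; the executor's free memory climbs back from roughly 1917.7 MiB to 1919.5 MiB as it runs. Application code can also release a broadcast explicitly once it is no longer needed, sketched below (the broadcast here is the placeholder from the earlier sketch, not one of the numbered broadcasts in this log):

    import org.apache.spark.broadcast.Broadcast;

    static void releaseWhenDone(Broadcast<?> broadcast) {
        broadcast.unpersist(false);   // drop cached copies on executors; it can still be re-sent
        broadcast.destroy();          // then remove it entirely once no job will use it again
    }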
[May 27, 2025 at 4:35:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:43 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:43 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:43 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:46 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:46 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:46 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:46 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:47 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
16:35:47.477 ERROR Executor - Exception in task 0.0 in stage 1473.0 (TID 1232)
org.broadinstitute.hellbender.exceptions.UserException$ReadMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 is missing the read group (RG) tag, which is required by the GATK.  Please use http://gatkforums.broadinstitute.org/discussion/59/companion-utilities-replacereadgroups to fix this problem
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:64) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[main/:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[main/:?]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[main/:?]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[scala-library-2.13.14.jar:?]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[scala-library-2.13.14.jar:?]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
16:35:47.492 ERROR TaskSetManager - Task 0 in stage 1473.0 failed 1 times; aborting job
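[editor's note] The stack trace above is the expected failure path exercised by the negative test: a read with no RG tag reaches MarkDuplicatesSparkUtils.getLibraryForRead and the tool aborts with UserException$ReadMissingReadGroup. As a minimal sketch (not the GATK implementation), the library lookup behind this error can be pictured with plain htsjdk calls; the class name LibraryLookupSketch and the exception type used here are illustrative only.

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMReadGroupRecord;
    import htsjdk.samtools.SAMRecord;

    final class LibraryLookupSketch {
        // Resolve the library for a read from its RG tag, mirroring the two
        // failure modes seen in this log: a missing RG tag, and an RG id that
        // is not declared in the SAM/BAM header.
        static String libraryForRead(final SAMRecord read, final SAMFileHeader header) {
            final String rgId = read.getStringAttribute("RG");
            if (rgId == null) {
                throw new IllegalStateException(
                        "Read " + read.getReadName() + " is missing the read group (RG) tag");
            }
            final SAMReadGroupRecord rg = header.getReadGroup(rgId);
            if (rg == null) {
                throw new IllegalStateException(
                        "Read group " + rgId + " is not present in the file header");
            }
            // Fall back to the RG id if the read group has no LB field.
            return rg.getLibrary() != null ? rg.getLibrary() : rgId;
        }
    }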
[May 27, 2025 at 4:35:47 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
16:35:47.728 ERROR Executor - Exception in task 0.0 in stage 1480.0 (TID 1235)
org.broadinstitute.hellbender.exceptions.UserException$HeaderMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 contains an (RG) tag with the group NotADuplicateGroup which is not found in the file header.
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:66) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[main/:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[main/:?]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[main/:?]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[scala-library-2.13.14.jar:?]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[scala-library-2.13.14.jar:?]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
16:35:47.730 ERROR TaskSetManager - Task 0 in stage 1480.0 failed 1 times; aborting job
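[editor's note] This second trace is the companion negative case: the read carries an RG tag ("NotADuplicateGroup") that is absent from the header, triggering UserException$HeaderMissingReadGroup. A hedged sketch of how such a header could be repaired programmatically with htsjdk is shown below; the class name and the placeholder LB/SM/PL values are assumptions for illustration, not part of the test fixture.

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMReadGroupRecord;

    final class AddMissingReadGroupSketch {
        // Register a read group id in the header if it is not already declared,
        // so that reads tagged with that RG id can be processed.
        static void registerReadGroup(final SAMFileHeader header, final String rgId) {
            if (header.getReadGroup(rgId) == null) {
                final SAMReadGroupRecord rg = new SAMReadGroupRecord(rgId);
                rg.setLibrary(rgId);        // LB: reuse the id as a placeholder library
                rg.setSample("sample1");    // SM: hypothetical sample name
                rg.setPlatform("ILLUMINA"); // PL
                header.addReadGroup(rg);
            }
        }
    }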
[May 27, 2025 at 4:35:47 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:49 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:49 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:49 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:50 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:50 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:50 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:54 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:54 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:54 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:55 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:55 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:55 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:58 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:58 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:58 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:59 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:59 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:35:59 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:02 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:02 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:02 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:03 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:03 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:03 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:05 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:05 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:05 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664
[May 27, 2025 at 4:36:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1692401664