Class org.broadinstitute.hellbender.tools.spark.pipelines.MarkDuplicatesSparkIntegrationTest

Summary: 152 tests, 0 failures, 0 ignored — duration 1m3.03s — 100% successful

Tests

Test Duration Result
assertThatExpectedOutputUpdateToggleIsDisabled 0s passed
testAssertAssumeUnsortedFilesAreQueryGroupedFiles 0.580s passed
testAssertCorrectSortOrderMultipleBams 0.128s passed
testAssertCorrectSortOrderMultipleBamsOverriding 0.544s passed
testBulkFragmentsNoDuplicates 1.068s passed
testBulkFragmentsWithDuplicates 2.052s passed
testDifferentChromosomesInOppositeOrder 0.307s passed
testDuplicateDetectionDataProviderWithMetrics[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.chr1.1-1K.unmarkedDups.markDuplicate.metrics) 0.342s passed
testDuplicateDetectionDataProviderWithMetrics[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.optical_dupes.markDuplicate.metrics) 0.278s passed
testDuplicateDetectionDataProviderWithMetrics[2](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.inputSingleLibrarySolexa16404.metrics) 0.274s passed
testFlowModeFlag 1.632s passed
testHashCollisionHandling 1.113s passed
testMDOrder[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug.bam) 0.326s passed
testMDOrder[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug2.bam) 0.327s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[0](true, true, true) 0.320s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[1](true, false, true) 0.313s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[2](false, true, true) 0.315s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[3](true, true, false) 0.324s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[4](true, false, false) 0.314s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[5](false, true, false) 0.317s passed
testMappedFragmentAndMappedPairFirstOfPairNonPrimary 0.310s passed
testMappedFragmentAndMatePairFirstUnmapped 0.321s passed
testMappedFragmentAndMatePairSecondUnmapped 0.318s passed
testMappedPairAndMappedFragmentAndMatePairFirstUnmapped 0.310s passed
testMappedPairAndMappedFragmentAndMatePairSecondUnmapped 0.362s passed
testMappedPairAndMatePairFirstOppositeStrandSecondUnmapped 0.341s passed
testMappedPairAndMatePairFirstUnmapped 0.309s passed
testMappedPairAndMatePairSecondUnmapped 0.313s passed
testMappedPairWithFirstEndSamePositionAndOther 0.308s passed
testMappedPairWithSamePosition 0.308s passed
testMappedPairWithSamePositionSameCigar 0.307s passed
testMarkDuplicatesSparkDeletingDuplicateReads[0]([Ljava.io.File;@500debe6, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.407s passed
testMarkDuplicatesSparkDeletingDuplicateReads[1]([Ljava.io.File;@dee977f, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.390s passed
testMarkDuplicatesSparkDeletingDuplicateReads[2]([Ljava.io.File;@71959536, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.417s passed
testMarkDuplicatesSparkDeletingDuplicateReads[3]([Ljava.io.File;@4b1f7b65, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.344s passed
testMarkDuplicatesSparkDeletingDuplicateReads[4]([Ljava.io.File;@6e5aafa9, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.345s passed
testMarkDuplicatesSparkDeletingDuplicateReads[5]([Ljava.io.File;@49d280f2, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.278s passed
testMarkDuplicatesSparkDeletingDuplicateReads[6]([Ljava.io.File;@71e05bce, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.275s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[0]([Ljava.io.File;@79dc2d3c, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.365s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[1]([Ljava.io.File;@1e292bd2, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.413s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[2]([Ljava.io.File;@680627a3, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.392s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[3]([Ljava.io.File;@55fe1735, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.352s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[4]([Ljava.io.File;@7ae4d864, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.350s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[5]([Ljava.io.File;@9eed0b6, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.258s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[6]([Ljava.io.File;@1ce52f11, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.280s passed
testMarkDuplicatesSparkIntegrationTestLocal[0]([Ljava.io.File;@6ce646b7, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.364s passed
testMarkDuplicatesSparkIntegrationTestLocal[1]([Ljava.io.File;@25ac79a7, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.385s passed
testMarkDuplicatesSparkIntegrationTestLocal[2]([Ljava.io.File;@6d401002, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.403s passed
testMarkDuplicatesSparkIntegrationTestLocal[3]([Ljava.io.File;@105943c4, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.348s passed
testMarkDuplicatesSparkIntegrationTestLocal[4]([Ljava.io.File;@1af92ea8, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.347s passed
testMarkDuplicatesSparkIntegrationTestLocal[5]([Ljava.io.File;@24602374, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.292s passed
testMarkDuplicatesSparkIntegrationTestLocal[6]([Ljava.io.File;@3ac0895b, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.289s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[0]([Ljava.io.File;@1d0e4a4c, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.380s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[1]([Ljava.io.File;@6b7cc03b, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.377s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[2]([Ljava.io.File;@17d7b833, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.361s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[3]([Ljava.io.File;@713381e0, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.349s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[4]([Ljava.io.File;@13903467, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.340s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[5]([Ljava.io.File;@116aa12f, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.245s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[6]([Ljava.io.File;@4a7ef9ec, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.257s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[0]([Ljava.io.File;@4770b7ee, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.352s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[1]([Ljava.io.File;@4ed7d3cf, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.372s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[2]([Ljava.io.File;@2165ca9a, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.375s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[3]([Ljava.io.File;@60e2ce8f, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.332s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[4]([Ljava.io.File;@14071e95, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.329s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[5]([Ljava.io.File;@539421a0, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.261s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[6]([Ljava.io.File;@5af8680e, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.245s passed
testMatePairFirstUnmapped 0.319s passed
testMatePairSecondUnmapped 0.310s passed
testNoReadGroupInRead 0.238s passed
testNonExistantReadGroupInRead 0.227s passed
testNullOpticalDuplicates 0.336s passed
testOpticalDuplicateClusterOneEndSamePositionOneCluster 0.331s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicatesWithinPixelDistance 0.322s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.322s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.308s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.310s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.318s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.319s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.312s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.333s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.316s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.322s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.308s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.304s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.311s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.312s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.312s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.307s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.304s passed
testOpticalDuplicateClusterSamePositionOneOpticalDuplicatesWithinPixelDistance 0.314s passed
testOpticalDuplicateClustersAddingSecondEndFirstSameCoordinate 0.308s passed
testOpticalDuplicateFinding 0.311s passed
testOpticalDuplicateFindingPxelDistance 0.313s passed
testOpticalDuplicatesAndPCRDuplicatesOrientationDifference 0.311s passed
testOpticalDuplicatesDifferentReadGroups 0.319s passed
testOpticalDuplicatesTheSameReadGroup 0.310s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.312s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.306s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.318s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.311s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.310s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.325s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.315s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.322s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.306s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.314s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.314s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.317s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.310s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.315s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.317s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.303s passed
testPathologicalOrderingAtTheSamePosition 0.322s passed
testReadSameStartPositionOrientationOverride 0.320s passed
testReadsHaveSameStartPositionButDifferentChromosomeNonEquivalence 0.313s passed
testSecondEndIsBeforeFirstInCoordinate 0.319s passed
testSingleMappedFragment 0.314s passed
testSingleMappedFragmentAndSingleMappedPair 0.314s passed
testSingleMappedFragmentAndTwoMappedPairs 0.309s passed
testSingleMappedPair 0.314s passed
testSingleUnmappedFragment 0.309s passed
testSingleUnmappedPair 0.323s passed
testStackOverFlowPairSetSwap 0.314s passed
testSupplementaryReadUnmappedMate 0.311s passed
testThreeGroupsOnDifferentChromosomesOfThreeMappedPairs 0.308s passed
testThreeMappedPairs 0.309s passed
testThreeMappedPairsWithMatchingSecondMate 0.319s passed
testTwoGroupsOnDifferentChromosomesOfThreeMappedPairs 0.307s passed
testTwoGroupsOnDifferentChromosomesOfTwoFragments 0.308s passed
testTwoGroupsOnDifferentChromosomesOfTwoMappedPairs 0.317s passed
testTwoMappedFragments 0.322s passed
testTwoMappedPairWithSamePosition 0.314s passed
testTwoMappedPairWithSamePositionDifferentStrands 0.311s passed
testTwoMappedPairWithSamePositionDifferentStrands2 0.314s passed
testTwoMappedPairs 0.312s passed
testTwoMappedPairsAndMappedSecondaryFragment 0.306s passed
testTwoMappedPairsAndTerminalUnmappedFragment 0.317s passed
testTwoMappedPairsAndTerminalUnmappedPair 0.316s passed
testTwoMappedPairsMatesSoftClipped 0.306s passed
testTwoMappedPairsWithOppositeOrientations 0.307s passed
testTwoMappedPairsWithOppositeOrientationsNumberTwo 0.313s passed
testTwoMappedPairsWithSoftClipping 0.306s passed
testTwoMappedPairsWithSoftClippingBoth 0.306s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnly 0.310s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnlyNoMateCigar 0.313s passed
testTwoMappedPairsWithSupplementaryReads[0](true, true, true) 0.302s passed
testTwoMappedPairsWithSupplementaryReads[1](true, false, true) 0.312s passed
testTwoMappedPairsWithSupplementaryReads[2](false, true, true) 0.307s passed
testTwoMappedPairsWithSupplementaryReads[3](true, true, false) 0.305s passed
testTwoMappedPairsWithSupplementaryReads[4](true, false, false) 0.323s passed
testTwoMappedPairsWithSupplementaryReads[5](false, true, false) 0.304s passed
testTwoUnmappedFragments 0.301s passed

Standard error

[March 28, 2025 at 10:45:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:02 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:03 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
22:45:03.532 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:03.532 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
22:45:03.532 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
22:45:03.532 INFO  MarkDuplicatesSpark - Executing as runner@fv-az1690-153 on Linux v6.8.0-1021-azure amd64
22:45:03.532 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
22:45:03.532 INFO  MarkDuplicatesSpark - Start Date/Time: March 28, 2025 at 10:45:03 PM UTC
22:45:03.532 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:03.532 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:03.532 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
22:45:03.532 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
22:45:03.532 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
22:45:03.532 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
22:45:03.532 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
22:45:03.532 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
22:45:03.533 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
22:45:03.533 INFO  MarkDuplicatesSpark - Requester pays: disabled
22:45:03.533 INFO  MarkDuplicatesSpark - Initializing engine
22:45:03.533 INFO  MarkDuplicatesSpark - Done initializing engine
22:45:03.533 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
22:45:03.536 INFO  MemoryStore - Block broadcast_1337 stored as values in memory (estimated size 333.6 KiB, free 1903.5 MiB)
22:45:03.546 INFO  MemoryStore - Block broadcast_1337_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1903.4 MiB)
22:45:03.547 INFO  BlockManagerInfo - Added broadcast_1337_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1919.0 MiB)
22:45:03.547 INFO  SparkContext - Created broadcast 1337 from newAPIHadoopFile at PathSplitSource.java:96
22:45:03.569 INFO  MemoryStore - Block broadcast_1338 stored as values in memory (estimated size 333.6 KiB, free 1903.1 MiB)
22:45:03.576 INFO  MemoryStore - Block broadcast_1338_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1903.0 MiB)
22:45:03.576 INFO  BlockManagerInfo - Added broadcast_1338_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1918.9 MiB)
22:45:03.576 INFO  SparkContext - Created broadcast 1338 from newAPIHadoopFile at PathSplitSource.java:96
22:45:03.612 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
22:45:03.613 INFO  FileInputFormat - Total input files to process : 1
22:45:03.614 INFO  DAGScheduler - Registering RDD 3396 (mapToPair at SparkUtils.java:161) as input to shuffle 97
22:45:03.614 INFO  DAGScheduler - Got job 413 (collect at SparkUtils.java:205) with 1 output partitions
22:45:03.614 INFO  DAGScheduler - Final stage: ResultStage 721 (collect at SparkUtils.java:205)
22:45:03.614 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 720)
22:45:03.614 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 720)
22:45:03.615 INFO  DAGScheduler - Submitting ShuffleMapStage 720 (MapPartitionsRDD[3396] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:03.633 INFO  MemoryStore - Block broadcast_1339 stored as values in memory (estimated size 504.8 KiB, free 1902.5 MiB)
22:45:03.636 INFO  MemoryStore - Block broadcast_1339_piece0 stored as bytes in memory (estimated size 223.7 KiB, free 1902.3 MiB)
22:45:03.636 INFO  BlockManagerInfo - Added broadcast_1339_piece0 in memory on localhost:46727 (size: 223.7 KiB, free: 1918.7 MiB)
22:45:03.637 INFO  SparkContext - Created broadcast 1339 from broadcast at DAGScheduler.scala:1580
22:45:03.637 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 720 (MapPartitionsRDD[3396] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:03.637 INFO  TaskSchedulerImpl - Adding task set 720.0 with 1 tasks resource profile 0
22:45:03.638 INFO  TaskSetManager - Starting task 0.0 in stage 720.0 (TID 650) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9735 bytes) 
22:45:03.639 INFO  Executor - Running task 0.0 in stage 720.0 (TID 650)
22:45:03.672 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam:0+1507
22:45:03.676 INFO  Executor - Finished task 0.0 in stage 720.0 (TID 650). 1148 bytes result sent to driver
22:45:03.677 INFO  TaskSetManager - Finished task 0.0 in stage 720.0 (TID 650) in 39 ms on localhost (executor driver) (1/1)
22:45:03.677 INFO  TaskSchedulerImpl - Removed TaskSet 720.0, whose tasks have all completed, from pool 
22:45:03.677 INFO  DAGScheduler - ShuffleMapStage 720 (mapToPair at SparkUtils.java:161) finished in 0.062 s
22:45:03.677 INFO  DAGScheduler - looking for newly runnable stages
22:45:03.677 INFO  DAGScheduler - running: HashSet()
22:45:03.677 INFO  DAGScheduler - waiting: HashSet(ResultStage 721)
22:45:03.677 INFO  DAGScheduler - failed: HashSet()
22:45:03.678 INFO  DAGScheduler - Submitting ResultStage 721 (MapPartitionsRDD[3399] at mapPartitions at SparkUtils.java:188), which has no missing parents
22:45:03.678 INFO  MemoryStore - Block broadcast_1340 stored as values in memory (estimated size 7.4 KiB, free 1902.3 MiB)
22:45:03.679 INFO  MemoryStore - Block broadcast_1340_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1902.3 MiB)
22:45:03.679 INFO  BlockManagerInfo - Added broadcast_1340_piece0 in memory on localhost:46727 (size: 4.1 KiB, free: 1918.7 MiB)
22:45:03.679 INFO  SparkContext - Created broadcast 1340 from broadcast at DAGScheduler.scala:1580
22:45:03.680 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 721 (MapPartitionsRDD[3399] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
22:45:03.680 INFO  TaskSchedulerImpl - Adding task set 721.0 with 1 tasks resource profile 0
22:45:03.680 INFO  TaskSetManager - Starting task 0.0 in stage 721.0 (TID 651) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:03.681 INFO  Executor - Running task 0.0 in stage 721.0 (TID 651)
22:45:03.682 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:03.682 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:03.684 INFO  Executor - Finished task 0.0 in stage 721.0 (TID 651). 2238 bytes result sent to driver
22:45:03.684 INFO  TaskSetManager - Finished task 0.0 in stage 721.0 (TID 651) in 4 ms on localhost (executor driver) (1/1)
22:45:03.685 INFO  DAGScheduler - ResultStage 721 (collect at SparkUtils.java:205) finished in 0.007 s
22:45:03.685 INFO  TaskSchedulerImpl - Removed TaskSet 721.0, whose tasks have all completed, from pool 
22:45:03.685 INFO  DAGScheduler - Job 413 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:03.685 INFO  TaskSchedulerImpl - Killing all running tasks in stage 721: Stage finished
22:45:03.685 INFO  DAGScheduler - Job 413 finished: collect at SparkUtils.java:205, took 0.072807 s
22:45:03.691 INFO  MemoryStore - Block broadcast_1341 stored as values in memory (estimated size 1632.0 B, free 1902.3 MiB)
22:45:03.692 INFO  MemoryStore - Block broadcast_1341_piece0 stored as bytes in memory (estimated size 145.0 B, free 1902.3 MiB)
22:45:03.692 INFO  BlockManagerInfo - Added broadcast_1341_piece0 in memory on localhost:46727 (size: 145.0 B, free: 1918.7 MiB)
22:45:03.692 INFO  SparkContext - Created broadcast 1341 from broadcast at MarkDuplicatesSparkUtils.java:126
22:45:03.692 INFO  MemoryStore - Block broadcast_1342 stored as values in memory (estimated size 304.0 B, free 1902.3 MiB)
22:45:03.693 INFO  MemoryStore - Block broadcast_1342_piece0 stored as bytes in memory (estimated size 37.0 B, free 1902.3 MiB)
22:45:03.693 INFO  BlockManagerInfo - Added broadcast_1342_piece0 in memory on localhost:46727 (size: 37.0 B, free: 1918.7 MiB)
22:45:03.693 INFO  SparkContext - Created broadcast 1342 from broadcast at MarkDuplicatesSparkUtils.java:127
22:45:03.706 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
22:45:03.707 INFO  DAGScheduler - Registering RDD 3408 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 100
22:45:03.707 INFO  DAGScheduler - Registering RDD 3412 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 99
22:45:03.707 INFO  DAGScheduler - Registering RDD 3416 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 98
22:45:03.707 INFO  DAGScheduler - Got job 414 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
22:45:03.707 INFO  DAGScheduler - Final stage: ResultStage 726 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
22:45:03.707 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 725)
22:45:03.707 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 725)
22:45:03.707 INFO  DAGScheduler - Submitting ShuffleMapStage 723 (MapPartitionsRDD[3408] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
22:45:03.708 INFO  MemoryStore - Block broadcast_1343 stored as values in memory (estimated size 24.2 KiB, free 1902.3 MiB)
22:45:03.709 INFO  MemoryStore - Block broadcast_1343_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1902.3 MiB)
22:45:03.709 INFO  BlockManagerInfo - Added broadcast_1343_piece0 in memory on localhost:46727 (size: 12.6 KiB, free: 1918.7 MiB)
22:45:03.709 INFO  SparkContext - Created broadcast 1343 from broadcast at DAGScheduler.scala:1580
22:45:03.709 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 723 (MapPartitionsRDD[3408] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
22:45:03.709 INFO  TaskSchedulerImpl - Adding task set 723.0 with 1 tasks resource profile 0
22:45:03.710 INFO  TaskSetManager - Starting task 0.0 in stage 723.0 (TID 652) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
22:45:03.710 INFO  Executor - Running task 0.0 in stage 723.0 (TID 652)
22:45:03.712 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:03.712 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:03.715 INFO  Executor - Finished task 0.0 in stage 723.0 (TID 652). 1922 bytes result sent to driver
22:45:03.715 INFO  TaskSetManager - Finished task 0.0 in stage 723.0 (TID 652) in 5 ms on localhost (executor driver) (1/1)
22:45:03.715 INFO  TaskSchedulerImpl - Removed TaskSet 723.0, whose tasks have all completed, from pool 
22:45:03.715 INFO  DAGScheduler - ShuffleMapStage 723 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.007 s
22:45:03.715 INFO  DAGScheduler - looking for newly runnable stages
22:45:03.715 INFO  DAGScheduler - running: HashSet()
22:45:03.715 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 724, ShuffleMapStage 725, ResultStage 726)
22:45:03.715 INFO  DAGScheduler - failed: HashSet()
22:45:03.716 INFO  DAGScheduler - Submitting ShuffleMapStage 724 (MapPartitionsRDD[3412] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
22:45:03.716 INFO  MemoryStore - Block broadcast_1344 stored as values in memory (estimated size 28.3 KiB, free 1902.2 MiB)
22:45:03.717 INFO  MemoryStore - Block broadcast_1344_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1902.2 MiB)
22:45:03.717 INFO  BlockManagerInfo - Added broadcast_1344_piece0 in memory on localhost:46727 (size: 14.8 KiB, free: 1918.7 MiB)
22:45:03.717 INFO  SparkContext - Created broadcast 1344 from broadcast at DAGScheduler.scala:1580
22:45:03.717 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 724 (MapPartitionsRDD[3412] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
22:45:03.717 INFO  TaskSchedulerImpl - Adding task set 724.0 with 1 tasks resource profile 0
22:45:03.718 INFO  TaskSetManager - Starting task 0.0 in stage 724.0 (TID 653) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
22:45:03.718 INFO  Executor - Running task 0.0 in stage 724.0 (TID 653)
22:45:03.720 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:03.720 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:03.724 INFO  Executor - Finished task 0.0 in stage 724.0 (TID 653). 1922 bytes result sent to driver
22:45:03.724 INFO  TaskSetManager - Finished task 0.0 in stage 724.0 (TID 653) in 6 ms on localhost (executor driver) (1/1)
22:45:03.724 INFO  TaskSchedulerImpl - Removed TaskSet 724.0, whose tasks have all completed, from pool 
22:45:03.724 INFO  DAGScheduler - ShuffleMapStage 724 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.008 s
22:45:03.724 INFO  DAGScheduler - looking for newly runnable stages
22:45:03.724 INFO  DAGScheduler - running: HashSet()
22:45:03.724 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 725, ResultStage 726)
22:45:03.725 INFO  DAGScheduler - failed: HashSet()
22:45:03.725 INFO  DAGScheduler - Submitting ShuffleMapStage 725 (MapPartitionsRDD[3416] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
22:45:03.725 INFO  MemoryStore - Block broadcast_1345 stored as values in memory (estimated size 19.0 KiB, free 1902.2 MiB)
22:45:03.726 INFO  MemoryStore - Block broadcast_1345_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1902.2 MiB)
22:45:03.726 INFO  BlockManagerInfo - Added broadcast_1345_piece0 in memory on localhost:46727 (size: 9.7 KiB, free: 1918.7 MiB)
22:45:03.726 INFO  SparkContext - Created broadcast 1345 from broadcast at DAGScheduler.scala:1580
22:45:03.726 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 725 (MapPartitionsRDD[3416] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
22:45:03.726 INFO  TaskSchedulerImpl - Adding task set 725.0 with 1 tasks resource profile 0
22:45:03.727 INFO  TaskSetManager - Starting task 0.0 in stage 725.0 (TID 654) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:03.727 INFO  Executor - Running task 0.0 in stage 725.0 (TID 654)
22:45:03.729 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:03.729 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:03.730 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:03.731 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:03.732 INFO  Executor - Finished task 0.0 in stage 725.0 (TID 654). 1922 bytes result sent to driver
22:45:03.732 INFO  TaskSetManager - Finished task 0.0 in stage 725.0 (TID 654) in 5 ms on localhost (executor driver) (1/1)
22:45:03.732 INFO  TaskSchedulerImpl - Removed TaskSet 725.0, whose tasks have all completed, from pool 
22:45:03.732 INFO  DAGScheduler - ShuffleMapStage 725 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.007 s
22:45:03.732 INFO  DAGScheduler - looking for newly runnable stages
22:45:03.732 INFO  DAGScheduler - running: HashSet()
22:45:03.732 INFO  DAGScheduler - waiting: HashSet(ResultStage 726)
22:45:03.732 INFO  DAGScheduler - failed: HashSet()
22:45:03.733 INFO  DAGScheduler - Submitting ResultStage 726 (MapPartitionsRDD[3418] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
22:45:03.733 INFO  MemoryStore - Block broadcast_1346 stored as values in memory (estimated size 20.2 KiB, free 1902.2 MiB)
22:45:03.734 INFO  MemoryStore - Block broadcast_1346_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1902.2 MiB)
22:45:03.734 INFO  BlockManagerInfo - Added broadcast_1346_piece0 in memory on localhost:46727 (size: 10.6 KiB, free: 1918.7 MiB)
22:45:03.734 INFO  SparkContext - Created broadcast 1346 from broadcast at DAGScheduler.scala:1580
22:45:03.734 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 726 (MapPartitionsRDD[3418] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
22:45:03.734 INFO  TaskSchedulerImpl - Adding task set 726.0 with 1 tasks resource profile 0
22:45:03.734 INFO  TaskSetManager - Starting task 0.0 in stage 726.0 (TID 655) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:03.735 INFO  Executor - Running task 0.0 in stage 726.0 (TID 655)
22:45:03.736 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:03.736 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:03.738 INFO  Executor - Finished task 0.0 in stage 726.0 (TID 655). 1944 bytes result sent to driver
22:45:03.738 INFO  TaskSetManager - Finished task 0.0 in stage 726.0 (TID 655) in 4 ms on localhost (executor driver) (1/1)
22:45:03.738 INFO  TaskSchedulerImpl - Removed TaskSet 726.0, whose tasks have all completed, from pool 
22:45:03.738 INFO  DAGScheduler - ResultStage 726 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.005 s
22:45:03.738 INFO  DAGScheduler - Job 414 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:03.739 INFO  TaskSchedulerImpl - Killing all running tasks in stage 726: Stage finished
22:45:03.739 INFO  DAGScheduler - Job 414 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.032558 s
22:45:03.750 INFO  MemoryStore - Block broadcast_1347 stored as values in memory (estimated size 20.3 KiB, free 1902.2 MiB)
22:45:03.751 INFO  MemoryStore - Block broadcast_1347_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1902.1 MiB)
22:45:03.751 INFO  BlockManagerInfo - Added broadcast_1347_piece0 in memory on localhost:46727 (size: 1850.0 B, free: 1918.7 MiB)
22:45:03.751 INFO  SparkContext - Created broadcast 1347 from broadcast at ReadsSparkSink.java:133
22:45:03.752 INFO  MemoryStore - Block broadcast_1348 stored as values in memory (estimated size 20.4 KiB, free 1902.1 MiB)
22:45:03.752 INFO  MemoryStore - Block broadcast_1348_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1902.1 MiB)
22:45:03.752 INFO  BlockManagerInfo - Added broadcast_1348_piece0 in memory on localhost:46727 (size: 1850.0 B, free: 1918.7 MiB)
22:45:03.753 INFO  SparkContext - Created broadcast 1348 from broadcast at BamSink.java:76
22:45:03.755 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
22:45:03.755 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:03.755 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:03.777 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
22:45:03.777 INFO  DAGScheduler - Registering RDD 3420 (mapToPair at SparkUtils.java:161) as input to shuffle 101
22:45:03.777 INFO  DAGScheduler - Got job 415 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
22:45:03.777 INFO  DAGScheduler - Final stage: ResultStage 731 (runJob at SparkHadoopWriter.scala:83)
22:45:03.777 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 730)
22:45:03.777 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 730)
22:45:03.778 INFO  DAGScheduler - Submitting ShuffleMapStage 730 (MapPartitionsRDD[3420] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:03.778 INFO  MemoryStore - Block broadcast_1349 stored as values in memory (estimated size 18.1 KiB, free 1902.1 MiB)
22:45:03.779 INFO  MemoryStore - Block broadcast_1349_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1902.1 MiB)
22:45:03.779 INFO  BlockManagerInfo - Added broadcast_1349_piece0 in memory on localhost:46727 (size: 9.5 KiB, free: 1918.7 MiB)
22:45:03.779 INFO  SparkContext - Created broadcast 1349 from broadcast at DAGScheduler.scala:1580
22:45:03.779 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 730 (MapPartitionsRDD[3420] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:03.780 INFO  TaskSchedulerImpl - Adding task set 730.0 with 1 tasks resource profile 0
22:45:03.780 INFO  TaskSetManager - Starting task 0.0 in stage 730.0 (TID 656) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:03.780 INFO  Executor - Running task 0.0 in stage 730.0 (TID 656)
22:45:03.782 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:03.782 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:03.785 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:03.785 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:03.786 INFO  Executor - Finished task 0.0 in stage 730.0 (TID 656). 1922 bytes result sent to driver
22:45:03.787 INFO  TaskSetManager - Finished task 0.0 in stage 730.0 (TID 656) in 7 ms on localhost (executor driver) (1/1)
22:45:03.787 INFO  TaskSchedulerImpl - Removed TaskSet 730.0, whose tasks have all completed, from pool 
22:45:03.787 INFO  DAGScheduler - ShuffleMapStage 730 (mapToPair at SparkUtils.java:161) finished in 0.009 s
22:45:03.787 INFO  DAGScheduler - looking for newly runnable stages
22:45:03.787 INFO  DAGScheduler - running: HashSet()
22:45:03.787 INFO  DAGScheduler - waiting: HashSet(ResultStage 731)
22:45:03.787 INFO  DAGScheduler - failed: HashSet()
22:45:03.787 INFO  DAGScheduler - Submitting ResultStage 731 (MapPartitionsRDD[3425] at mapToPair at BamSink.java:91), which has no missing parents
22:45:03.794 INFO  MemoryStore - Block broadcast_1350 stored as values in memory (estimated size 178.5 KiB, free 1901.9 MiB)
22:45:03.795 INFO  MemoryStore - Block broadcast_1350_piece0 stored as bytes in memory (estimated size 80.6 KiB, free 1901.8 MiB)
22:45:03.796 INFO  BlockManagerInfo - Added broadcast_1350_piece0 in memory on localhost:46727 (size: 80.6 KiB, free: 1918.6 MiB)
22:45:03.796 INFO  SparkContext - Created broadcast 1350 from broadcast at DAGScheduler.scala:1580
22:45:03.796 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 731 (MapPartitionsRDD[3425] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
22:45:03.796 INFO  TaskSchedulerImpl - Adding task set 731.0 with 1 tasks resource profile 0
22:45:03.797 INFO  TaskSetManager - Starting task 0.0 in stage 731.0 (TID 657) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:03.797 INFO  Executor - Running task 0.0 in stage 731.0 (TID 657)
22:45:03.801 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:03.801 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:03.803 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
22:45:03.803 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:03.803 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:03.803 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
22:45:03.803 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:03.803 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:03.821 INFO  FileOutputCommitter - Saved output of task 'attempt_202503282245033315951754774271174_3425_r_000000_0' to file:/tmp/local116208583777069067979/markdups10978633683598076235.bam.parts/_temporary/0/task_202503282245033315951754774271174_3425_r_000000
22:45:03.822 INFO  SparkHadoopMapRedUtil - attempt_202503282245033315951754774271174_3425_r_000000_0: Committed. Elapsed time: 0 ms.
22:45:03.822 INFO  Executor - Finished task 0.0 in stage 731.0 (TID 657). 1858 bytes result sent to driver
22:45:03.822 INFO  TaskSetManager - Finished task 0.0 in stage 731.0 (TID 657) in 26 ms on localhost (executor driver) (1/1)
22:45:03.823 INFO  TaskSchedulerImpl - Removed TaskSet 731.0, whose tasks have all completed, from pool 
22:45:03.823 INFO  DAGScheduler - ResultStage 731 (runJob at SparkHadoopWriter.scala:83) finished in 0.035 s
22:45:03.823 INFO  DAGScheduler - Job 415 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:03.823 INFO  TaskSchedulerImpl - Killing all running tasks in stage 731: Stage finished
22:45:03.823 INFO  DAGScheduler - Job 415 finished: runJob at SparkHadoopWriter.scala:83, took 0.046386 s
22:45:03.823 INFO  SparkHadoopWriter - Start to commit write Job job_202503282245033315951754774271174_3425.
22:45:03.827 INFO  SparkHadoopWriter - Write Job job_202503282245033315951754774271174_3425 committed. Elapsed time: 4 ms.
22:45:03.838 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local116208583777069067979/markdups10978633683598076235.bam
22:45:03.841 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local116208583777069067979/markdups10978633683598076235.bam done
22:45:03.841 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local116208583777069067979/markdups10978633683598076235.bam.parts/ to /tmp/local116208583777069067979/markdups10978633683598076235.bam.sbi
22:45:03.846 INFO  IndexFileMerger - Done merging .sbi files
22:45:03.846 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local116208583777069067979/markdups10978633683598076235.bam.parts/ to /tmp/local116208583777069067979/markdups10978633683598076235.bam.bai
22:45:03.850 INFO  IndexFileMerger - Done merging .bai files
22:45:03.850 INFO  MarkDuplicatesSpark - Shutting down engine
[March 28, 2025 at 10:45:03 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
22:45:03.857 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:03.857 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
22:45:03.858 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
22:45:03.858 INFO  MarkDuplicatesSpark - Executing as runner@fv-az1690-153 on Linux v6.8.0-1021-azure amd64
22:45:03.858 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
22:45:03.858 INFO  MarkDuplicatesSpark - Start Date/Time: March 28, 2025 at 10:45:03 PM UTC
22:45:03.858 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:03.858 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:03.858 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
22:45:03.858 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
22:45:03.858 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
22:45:03.858 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
22:45:03.858 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
22:45:03.858 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
22:45:03.858 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
22:45:03.858 INFO  MarkDuplicatesSpark - Requester pays: disabled
22:45:03.858 INFO  MarkDuplicatesSpark - Initializing engine
22:45:03.858 INFO  MarkDuplicatesSpark - Done initializing engine
22:45:03.858 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
22:45:03.860 INFO  MemoryStore - Block broadcast_1351 stored as values in memory (estimated size 333.6 KiB, free 1901.5 MiB)
22:45:03.868 INFO  MemoryStore - Block broadcast_1351_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1901.5 MiB)
22:45:03.868 INFO  BlockManagerInfo - Added broadcast_1351_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1918.5 MiB)
22:45:03.868 INFO  SparkContext - Created broadcast 1351 from newAPIHadoopFile at PathSplitSource.java:96
22:45:03.890 INFO  MemoryStore - Block broadcast_1352 stored as values in memory (estimated size 333.6 KiB, free 1901.1 MiB)
22:45:03.897 INFO  MemoryStore - Block broadcast_1352_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1901.1 MiB)
22:45:03.897 INFO  BlockManagerInfo - Added broadcast_1352_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1918.4 MiB)
22:45:03.897 INFO  SparkContext - Created broadcast 1352 from newAPIHadoopFile at PathSplitSource.java:96
22:45:03.932 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
22:45:03.933 INFO  FileInputFormat - Total input files to process : 1
22:45:03.933 INFO  DAGScheduler - Registering RDD 3439 (mapToPair at SparkUtils.java:161) as input to shuffle 102
22:45:03.933 INFO  DAGScheduler - Got job 416 (collect at SparkUtils.java:205) with 1 output partitions
22:45:03.933 INFO  DAGScheduler - Final stage: ResultStage 733 (collect at SparkUtils.java:205)
22:45:03.933 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 732)
22:45:03.933 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 732)
22:45:03.933 INFO  DAGScheduler - Submitting ShuffleMapStage 732 (MapPartitionsRDD[3439] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:03.952 INFO  MemoryStore - Block broadcast_1353 stored as values in memory (estimated size 504.8 KiB, free 1900.6 MiB)
22:45:03.954 INFO  MemoryStore - Block broadcast_1353_piece0 stored as bytes in memory (estimated size 223.7 KiB, free 1900.3 MiB)
22:45:03.954 INFO  BlockManagerInfo - Added broadcast_1353_piece0 in memory on localhost:46727 (size: 223.7 KiB, free: 1918.2 MiB)
22:45:03.955 INFO  SparkContext - Created broadcast 1353 from broadcast at DAGScheduler.scala:1580
22:45:03.955 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 732 (MapPartitionsRDD[3439] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:03.955 INFO  TaskSchedulerImpl - Adding task set 732.0 with 1 tasks resource profile 0
22:45:03.955 INFO  TaskSetManager - Starting task 0.0 in stage 732.0 (TID 658) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9736 bytes) 
22:45:03.956 INFO  Executor - Running task 0.0 in stage 732.0 (TID 658)
22:45:03.988 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam:0+1856
22:45:03.992 INFO  Executor - Finished task 0.0 in stage 732.0 (TID 658). 1148 bytes result sent to driver
22:45:03.993 INFO  TaskSetManager - Finished task 0.0 in stage 732.0 (TID 658) in 38 ms on localhost (executor driver) (1/1)
22:45:03.993 INFO  TaskSchedulerImpl - Removed TaskSet 732.0, whose tasks have all completed, from pool 
22:45:03.993 INFO  DAGScheduler - ShuffleMapStage 732 (mapToPair at SparkUtils.java:161) finished in 0.059 s
22:45:03.993 INFO  DAGScheduler - looking for newly runnable stages
22:45:03.993 INFO  DAGScheduler - running: HashSet()
22:45:03.993 INFO  DAGScheduler - waiting: HashSet(ResultStage 733)
22:45:03.993 INFO  DAGScheduler - failed: HashSet()
22:45:03.993 INFO  DAGScheduler - Submitting ResultStage 733 (MapPartitionsRDD[3442] at mapPartitions at SparkUtils.java:188), which has no missing parents
22:45:03.994 INFO  MemoryStore - Block broadcast_1354 stored as values in memory (estimated size 7.4 KiB, free 1900.3 MiB)
22:45:04.000 INFO  BlockManagerInfo - Removed broadcast_1343_piece0 on localhost:46727 in memory (size: 12.6 KiB, free: 1918.2 MiB)
22:45:04.000 INFO  MemoryStore - Block broadcast_1354_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1900.4 MiB)
22:45:04.000 INFO  BlockManagerInfo - Added broadcast_1354_piece0 in memory on localhost:46727 (size: 4.1 KiB, free: 1918.2 MiB)
22:45:04.001 INFO  SparkContext - Created broadcast 1354 from broadcast at DAGScheduler.scala:1580
22:45:04.001 INFO  BlockManagerInfo - Removed broadcast_1339_piece0 on localhost:46727 in memory (size: 223.7 KiB, free: 1918.4 MiB)
22:45:04.001 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 733 (MapPartitionsRDD[3442] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
22:45:04.001 INFO  TaskSchedulerImpl - Adding task set 733.0 with 1 tasks resource profile 0
22:45:04.001 INFO  BlockManagerInfo - Removed broadcast_1351_piece0 on localhost:46727 in memory (size: 71.2 KiB, free: 1918.5 MiB)
22:45:04.002 INFO  BlockManagerInfo - Removed broadcast_1331_piece0 on localhost:46727 in memory (size: 228.2 KiB, free: 1918.7 MiB)
22:45:04.002 INFO  TaskSetManager - Starting task 0.0 in stage 733.0 (TID 659) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:04.002 INFO  Executor - Running task 0.0 in stage 733.0 (TID 659)
22:45:04.002 INFO  BlockManagerInfo - Removed broadcast_1332_piece0 on localhost:46727 in memory (size: 228.2 KiB, free: 1919.0 MiB)
22:45:04.002 INFO  BlockManagerInfo - Removed broadcast_1329_piece0 on localhost:46727 in memory (size: 19.0 B, free: 1919.0 MiB)
22:45:04.003 INFO  BlockManagerInfo - Removed broadcast_1327_piece0 on localhost:46727 in memory (size: 71.2 KiB, free: 1919.0 MiB)
22:45:04.004 INFO  BlockManagerInfo - Removed broadcast_1349_piece0 on localhost:46727 in memory (size: 9.5 KiB, free: 1919.0 MiB)
22:45:04.004 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:04.004 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:04.004 INFO  BlockManagerInfo - Removed broadcast_1346_piece0 on localhost:46727 in memory (size: 10.6 KiB, free: 1919.1 MiB)
22:45:04.005 INFO  BlockManagerInfo - Removed broadcast_1342_piece0 on localhost:46727 in memory (size: 37.0 B, free: 1919.1 MiB)
22:45:04.006 INFO  BlockManagerInfo - Removed broadcast_1330_piece0 on localhost:46727 in memory (size: 20.0 B, free: 1919.1 MiB)
22:45:04.006 INFO  BlockManagerInfo - Removed broadcast_1340_piece0 on localhost:46727 in memory (size: 4.1 KiB, free: 1919.1 MiB)
22:45:04.006 INFO  Executor - Finished task 0.0 in stage 733.0 (TID 659). 2538 bytes result sent to driver
22:45:04.007 INFO  TaskSetManager - Finished task 0.0 in stage 733.0 (TID 659) in 6 ms on localhost (executor driver) (1/1)
22:45:04.007 INFO  TaskSchedulerImpl - Removed TaskSet 733.0, whose tasks have all completed, from pool 
22:45:04.007 INFO  DAGScheduler - ResultStage 733 (collect at SparkUtils.java:205) finished in 0.013 s
22:45:04.007 INFO  DAGScheduler - Job 416 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:04.007 INFO  TaskSchedulerImpl - Killing all running tasks in stage 733: Stage finished
22:45:04.007 INFO  DAGScheduler - Job 416 finished: collect at SparkUtils.java:205, took 0.074869 s
22:45:04.007 INFO  BlockManagerInfo - Removed broadcast_1344_piece0 on localhost:46727 in memory (size: 14.8 KiB, free: 1919.1 MiB)
22:45:04.008 INFO  BlockManagerInfo - Removed broadcast_1345_piece0 on localhost:46727 in memory (size: 9.7 KiB, free: 1919.1 MiB)
22:45:04.008 INFO  BlockManagerInfo - Removed broadcast_1336_piece0 on localhost:46727 in memory (size: 331.1 KiB, free: 1919.4 MiB)
22:45:04.009 INFO  BlockManagerInfo - Removed broadcast_1337_piece0 on localhost:46727 in memory (size: 71.2 KiB, free: 1919.5 MiB)
22:45:04.010 INFO  BlockManagerInfo - Removed broadcast_1347_piece0 on localhost:46727 in memory (size: 1850.0 B, free: 1919.5 MiB)
22:45:04.010 INFO  BlockManagerInfo - Removed broadcast_1350_piece0 on localhost:46727 in memory (size: 80.6 KiB, free: 1919.6 MiB)
22:45:04.011 INFO  BlockManagerInfo - Removed broadcast_1338_piece0 on localhost:46727 in memory (size: 71.2 KiB, free: 1919.6 MiB)
22:45:04.012 INFO  BlockManagerInfo - Removed broadcast_1341_piece0 on localhost:46727 in memory (size: 145.0 B, free: 1919.6 MiB)
22:45:04.012 INFO  BlockManagerInfo - Removed broadcast_1348_piece0 on localhost:46727 in memory (size: 1850.0 B, free: 1919.6 MiB)
22:45:04.013 INFO  MemoryStore - Block broadcast_1355 stored as values in memory (estimated size 1632.0 B, free 1918.4 MiB)
22:45:04.014 INFO  MemoryStore - Block broadcast_1355_piece0 stored as bytes in memory (estimated size 145.0 B, free 1918.4 MiB)
22:45:04.014 INFO  BlockManagerInfo - Added broadcast_1355_piece0 in memory on localhost:46727 (size: 145.0 B, free: 1919.6 MiB)
22:45:04.014 INFO  SparkContext - Created broadcast 1355 from broadcast at MarkDuplicatesSparkUtils.java:126
22:45:04.014 INFO  MemoryStore - Block broadcast_1356 stored as values in memory (estimated size 304.0 B, free 1918.4 MiB)
22:45:04.015 INFO  MemoryStore - Block broadcast_1356_piece0 stored as bytes in memory (estimated size 37.0 B, free 1918.4 MiB)
22:45:04.015 INFO  BlockManagerInfo - Added broadcast_1356_piece0 in memory on localhost:46727 (size: 37.0 B, free: 1919.6 MiB)
22:45:04.015 INFO  SparkContext - Created broadcast 1356 from broadcast at MarkDuplicatesSparkUtils.java:127
22:45:04.027 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
22:45:04.028 INFO  DAGScheduler - Registering RDD 3451 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 105
22:45:04.028 INFO  DAGScheduler - Registering RDD 3455 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 104
22:45:04.028 INFO  DAGScheduler - Registering RDD 3459 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 103
22:45:04.028 INFO  DAGScheduler - Got job 417 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
22:45:04.028 INFO  DAGScheduler - Final stage: ResultStage 738 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
22:45:04.028 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 737)
22:45:04.028 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 737)
22:45:04.028 INFO  DAGScheduler - Submitting ShuffleMapStage 735 (MapPartitionsRDD[3451] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
22:45:04.029 INFO  MemoryStore - Block broadcast_1357 stored as values in memory (estimated size 24.2 KiB, free 1918.4 MiB)
22:45:04.030 INFO  MemoryStore - Block broadcast_1357_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1918.3 MiB)
22:45:04.030 INFO  BlockManagerInfo - Added broadcast_1357_piece0 in memory on localhost:46727 (size: 12.6 KiB, free: 1919.6 MiB)
22:45:04.030 INFO  SparkContext - Created broadcast 1357 from broadcast at DAGScheduler.scala:1580
22:45:04.030 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 735 (MapPartitionsRDD[3451] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
22:45:04.030 INFO  TaskSchedulerImpl - Adding task set 735.0 with 1 tasks resource profile 0
22:45:04.031 INFO  TaskSetManager - Starting task 0.0 in stage 735.0 (TID 660) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
22:45:04.031 INFO  Executor - Running task 0.0 in stage 735.0 (TID 660)
22:45:04.032 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:04.033 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:04.035 INFO  Executor - Finished task 0.0 in stage 735.0 (TID 660). 1922 bytes result sent to driver
22:45:04.036 INFO  TaskSetManager - Finished task 0.0 in stage 735.0 (TID 660) in 6 ms on localhost (executor driver) (1/1)
22:45:04.036 INFO  TaskSchedulerImpl - Removed TaskSet 735.0, whose tasks have all completed, from pool 
22:45:04.036 INFO  DAGScheduler - ShuffleMapStage 735 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.007 s
22:45:04.036 INFO  DAGScheduler - looking for newly runnable stages
22:45:04.036 INFO  DAGScheduler - running: HashSet()
22:45:04.036 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 736, ShuffleMapStage 737, ResultStage 738)
22:45:04.036 INFO  DAGScheduler - failed: HashSet()
22:45:04.036 INFO  DAGScheduler - Submitting ShuffleMapStage 736 (MapPartitionsRDD[3455] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
22:45:04.037 INFO  MemoryStore - Block broadcast_1358 stored as values in memory (estimated size 28.3 KiB, free 1918.3 MiB)
22:45:04.038 INFO  MemoryStore - Block broadcast_1358_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1918.3 MiB)
22:45:04.038 INFO  BlockManagerInfo - Added broadcast_1358_piece0 in memory on localhost:46727 (size: 14.8 KiB, free: 1919.6 MiB)
22:45:04.038 INFO  SparkContext - Created broadcast 1358 from broadcast at DAGScheduler.scala:1580
22:45:04.038 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 736 (MapPartitionsRDD[3455] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
22:45:04.038 INFO  TaskSchedulerImpl - Adding task set 736.0 with 1 tasks resource profile 0
22:45:04.038 INFO  TaskSetManager - Starting task 0.0 in stage 736.0 (TID 661) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
22:45:04.039 INFO  Executor - Running task 0.0 in stage 736.0 (TID 661)
22:45:04.040 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:04.040 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:04.044 INFO  Executor - Finished task 0.0 in stage 736.0 (TID 661). 1922 bytes result sent to driver
22:45:04.045 INFO  TaskSetManager - Finished task 0.0 in stage 736.0 (TID 661) in 7 ms on localhost (executor driver) (1/1)
22:45:04.045 INFO  TaskSchedulerImpl - Removed TaskSet 736.0, whose tasks have all completed, from pool 
22:45:04.045 INFO  DAGScheduler - ShuffleMapStage 736 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.009 s
22:45:04.045 INFO  DAGScheduler - looking for newly runnable stages
22:45:04.045 INFO  DAGScheduler - running: HashSet()
22:45:04.045 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 737, ResultStage 738)
22:45:04.045 INFO  DAGScheduler - failed: HashSet()
22:45:04.045 INFO  DAGScheduler - Submitting ShuffleMapStage 737 (MapPartitionsRDD[3459] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
22:45:04.046 INFO  MemoryStore - Block broadcast_1359 stored as values in memory (estimated size 19.0 KiB, free 1918.3 MiB)
22:45:04.046 INFO  MemoryStore - Block broadcast_1359_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1918.3 MiB)
22:45:04.046 INFO  BlockManagerInfo - Added broadcast_1359_piece0 in memory on localhost:46727 (size: 9.7 KiB, free: 1919.6 MiB)
22:45:04.047 INFO  SparkContext - Created broadcast 1359 from broadcast at DAGScheduler.scala:1580
22:45:04.047 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 737 (MapPartitionsRDD[3459] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
22:45:04.047 INFO  TaskSchedulerImpl - Adding task set 737.0 with 1 tasks resource profile 0
22:45:04.047 INFO  TaskSetManager - Starting task 0.0 in stage 737.0 (TID 662) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:04.047 INFO  Executor - Running task 0.0 in stage 737.0 (TID 662)
22:45:04.049 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:04.049 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:04.051 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:04.051 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:04.052 INFO  Executor - Finished task 0.0 in stage 737.0 (TID 662). 1879 bytes result sent to driver
22:45:04.052 INFO  TaskSetManager - Finished task 0.0 in stage 737.0 (TID 662) in 5 ms on localhost (executor driver) (1/1)
22:45:04.052 INFO  TaskSchedulerImpl - Removed TaskSet 737.0, whose tasks have all completed, from pool 
22:45:04.053 INFO  DAGScheduler - ShuffleMapStage 737 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.008 s
22:45:04.053 INFO  DAGScheduler - looking for newly runnable stages
22:45:04.053 INFO  DAGScheduler - running: HashSet()
22:45:04.053 INFO  DAGScheduler - waiting: HashSet(ResultStage 738)
22:45:04.053 INFO  DAGScheduler - failed: HashSet()
22:45:04.053 INFO  DAGScheduler - Submitting ResultStage 738 (MapPartitionsRDD[3461] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
22:45:04.054 INFO  MemoryStore - Block broadcast_1360 stored as values in memory (estimated size 20.2 KiB, free 1918.3 MiB)
22:45:04.054 INFO  MemoryStore - Block broadcast_1360_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1918.2 MiB)
22:45:04.054 INFO  BlockManagerInfo - Added broadcast_1360_piece0 in memory on localhost:46727 (size: 10.6 KiB, free: 1919.6 MiB)
22:45:04.054 INFO  SparkContext - Created broadcast 1360 from broadcast at DAGScheduler.scala:1580
22:45:04.055 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 738 (MapPartitionsRDD[3461] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
22:45:04.055 INFO  TaskSchedulerImpl - Adding task set 738.0 with 1 tasks resource profile 0
22:45:04.055 INFO  TaskSetManager - Starting task 0.0 in stage 738.0 (TID 663) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:04.055 INFO  Executor - Running task 0.0 in stage 738.0 (TID 663)
22:45:04.057 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:04.057 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:04.058 INFO  Executor - Finished task 0.0 in stage 738.0 (TID 663). 2053 bytes result sent to driver
22:45:04.059 INFO  TaskSetManager - Finished task 0.0 in stage 738.0 (TID 663) in 4 ms on localhost (executor driver) (1/1)
22:45:04.059 INFO  TaskSchedulerImpl - Removed TaskSet 738.0, whose tasks have all completed, from pool 
22:45:04.059 INFO  DAGScheduler - ResultStage 738 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.006 s
22:45:04.059 INFO  DAGScheduler - Job 417 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:04.059 INFO  TaskSchedulerImpl - Killing all running tasks in stage 738: Stage finished
22:45:04.059 INFO  DAGScheduler - Job 417 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.031993 s
22:45:04.071 INFO  MemoryStore - Block broadcast_1361 stored as values in memory (estimated size 20.3 KiB, free 1918.2 MiB)
22:45:04.071 INFO  MemoryStore - Block broadcast_1361_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1918.2 MiB)
22:45:04.071 INFO  BlockManagerInfo - Added broadcast_1361_piece0 in memory on localhost:46727 (size: 1850.0 B, free: 1919.6 MiB)
22:45:04.072 INFO  SparkContext - Created broadcast 1361 from broadcast at ReadsSparkSink.java:133
22:45:04.072 INFO  MemoryStore - Block broadcast_1362 stored as values in memory (estimated size 20.4 KiB, free 1918.2 MiB)
22:45:04.073 INFO  MemoryStore - Block broadcast_1362_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1918.2 MiB)
22:45:04.073 INFO  BlockManagerInfo - Added broadcast_1362_piece0 in memory on localhost:46727 (size: 1850.0 B, free: 1919.6 MiB)
22:45:04.073 INFO  SparkContext - Created broadcast 1362 from broadcast at BamSink.java:76
22:45:04.075 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
22:45:04.075 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:04.075 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:04.092 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
22:45:04.093 INFO  DAGScheduler - Registering RDD 3463 (mapToPair at SparkUtils.java:161) as input to shuffle 106
22:45:04.093 INFO  DAGScheduler - Got job 418 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
22:45:04.093 INFO  DAGScheduler - Final stage: ResultStage 743 (runJob at SparkHadoopWriter.scala:83)
22:45:04.093 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 742)
22:45:04.093 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 742)
22:45:04.094 INFO  DAGScheduler - Submitting ShuffleMapStage 742 (MapPartitionsRDD[3463] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:04.095 INFO  MemoryStore - Block broadcast_1363 stored as values in memory (estimated size 18.1 KiB, free 1918.2 MiB)
22:45:04.095 INFO  MemoryStore - Block broadcast_1363_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1918.2 MiB)
22:45:04.095 INFO  BlockManagerInfo - Added broadcast_1363_piece0 in memory on localhost:46727 (size: 9.5 KiB, free: 1919.6 MiB)
22:45:04.095 INFO  SparkContext - Created broadcast 1363 from broadcast at DAGScheduler.scala:1580
22:45:04.096 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 742 (MapPartitionsRDD[3463] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:04.096 INFO  TaskSchedulerImpl - Adding task set 742.0 with 1 tasks resource profile 0
22:45:04.096 INFO  TaskSetManager - Starting task 0.0 in stage 742.0 (TID 664) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:04.097 INFO  Executor - Running task 0.0 in stage 742.0 (TID 664)
22:45:04.099 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:04.099 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:04.101 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:04.102 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:04.103 INFO  Executor - Finished task 0.0 in stage 742.0 (TID 664). 1922 bytes result sent to driver
22:45:04.103 INFO  TaskSetManager - Finished task 0.0 in stage 742.0 (TID 664) in 7 ms on localhost (executor driver) (1/1)
22:45:04.103 INFO  TaskSchedulerImpl - Removed TaskSet 742.0, whose tasks have all completed, from pool 
22:45:04.104 INFO  DAGScheduler - ShuffleMapStage 742 (mapToPair at SparkUtils.java:161) finished in 0.010 s
22:45:04.104 INFO  DAGScheduler - looking for newly runnable stages
22:45:04.104 INFO  DAGScheduler - running: HashSet()
22:45:04.104 INFO  DAGScheduler - waiting: HashSet(ResultStage 743)
22:45:04.104 INFO  DAGScheduler - failed: HashSet()
22:45:04.104 INFO  DAGScheduler - Submitting ResultStage 743 (MapPartitionsRDD[3468] at mapToPair at BamSink.java:91), which has no missing parents
22:45:04.115 INFO  MemoryStore - Block broadcast_1364 stored as values in memory (estimated size 178.5 KiB, free 1918.0 MiB)
22:45:04.117 INFO  MemoryStore - Block broadcast_1364_piece0 stored as bytes in memory (estimated size 80.6 KiB, free 1917.9 MiB)
22:45:04.117 INFO  BlockManagerInfo - Added broadcast_1364_piece0 in memory on localhost:46727 (size: 80.6 KiB, free: 1919.5 MiB)
22:45:04.117 INFO  SparkContext - Created broadcast 1364 from broadcast at DAGScheduler.scala:1580
22:45:04.118 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 743 (MapPartitionsRDD[3468] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
22:45:04.118 INFO  TaskSchedulerImpl - Adding task set 743.0 with 1 tasks resource profile 0
22:45:04.118 INFO  TaskSetManager - Starting task 0.0 in stage 743.0 (TID 665) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:04.118 INFO  Executor - Running task 0.0 in stage 743.0 (TID 665)
22:45:04.125 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:04.125 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:04.128 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
22:45:04.128 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:04.128 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:04.128 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
22:45:04.129 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:04.129 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:04.148 INFO  FileOutputCommitter - Saved output of task 'attempt_202503282245041558006898313311215_3468_r_000000_0' to file:/tmp/local116208583777069067979/markdups14861317100839807663.bam.parts/_temporary/0/task_202503282245041558006898313311215_3468_r_000000
22:45:04.148 INFO  SparkHadoopMapRedUtil - attempt_202503282245041558006898313311215_3468_r_000000_0: Committed. Elapsed time: 0 ms.
22:45:04.148 INFO  Executor - Finished task 0.0 in stage 743.0 (TID 665). 1858 bytes result sent to driver
22:45:04.149 INFO  TaskSetManager - Finished task 0.0 in stage 743.0 (TID 665) in 31 ms on localhost (executor driver) (1/1)
22:45:04.149 INFO  TaskSchedulerImpl - Removed TaskSet 743.0, whose tasks have all completed, from pool 
22:45:04.149 INFO  DAGScheduler - ResultStage 743 (runJob at SparkHadoopWriter.scala:83) finished in 0.045 s
22:45:04.149 INFO  DAGScheduler - Job 418 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:04.149 INFO  TaskSchedulerImpl - Killing all running tasks in stage 743: Stage finished
22:45:04.149 INFO  DAGScheduler - Job 418 finished: runJob at SparkHadoopWriter.scala:83, took 0.056534 s
22:45:04.149 INFO  SparkHadoopWriter - Start to commit write Job job_202503282245041558006898313311215_3468.
22:45:04.154 INFO  SparkHadoopWriter - Write Job job_202503282245041558006898313311215_3468 committed. Elapsed time: 4 ms.
22:45:04.165 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local116208583777069067979/markdups14861317100839807663.bam
22:45:04.169 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local116208583777069067979/markdups14861317100839807663.bam done
22:45:04.169 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local116208583777069067979/markdups14861317100839807663.bam.parts/ to /tmp/local116208583777069067979/markdups14861317100839807663.bam.sbi
22:45:04.174 INFO  IndexFileMerger - Done merging .sbi files
22:45:04.174 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local116208583777069067979/markdups14861317100839807663.bam.parts/ to /tmp/local116208583777069067979/markdups14861317100839807663.bam.bai
22:45:04.178 INFO  IndexFileMerger - Done merging .bai files
22:45:04.178 INFO  MarkDuplicatesSpark - Shutting down engine
[March 28, 2025 at 10:45:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:05 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:05 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:13 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:13 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:19 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:19 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:20 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:27 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.03 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
22:45:30.193 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:30.193 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
22:45:30.193 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
22:45:30.193 INFO  MarkDuplicatesSpark - Executing as runner@fv-az1690-153 on Linux v6.8.0-1021-azure amd64
22:45:30.193 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
22:45:30.193 INFO  MarkDuplicatesSpark - Start Date/Time: March 28, 2025 at 10:45:30 PM UTC
22:45:30.193 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:30.193 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:30.193 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
22:45:30.193 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
22:45:30.193 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
22:45:30.193 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
22:45:30.193 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
22:45:30.193 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
22:45:30.193 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
22:45:30.193 INFO  MarkDuplicatesSpark - Requester pays: disabled
22:45:30.194 INFO  MarkDuplicatesSpark - Initializing engine
22:45:30.194 INFO  MarkDuplicatesSpark - Done initializing engine
22:45:30.194 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
22:45:30.196 INFO  MemoryStore - Block broadcast_1941 stored as values in memory (estimated size 333.6 KiB, free 1916.6 MiB)
22:45:30.203 INFO  MemoryStore - Block broadcast_1941_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1916.5 MiB)
22:45:30.203 INFO  BlockManagerInfo - Added broadcast_1941_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1919.2 MiB)
22:45:30.203 INFO  SparkContext - Created broadcast 1941 from newAPIHadoopFile at PathSplitSource.java:96
22:45:30.226 INFO  MemoryStore - Block broadcast_1942 stored as values in memory (estimated size 333.6 KiB, free 1916.2 MiB)
22:45:30.233 INFO  MemoryStore - Block broadcast_1942_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1916.1 MiB)
22:45:30.233 INFO  BlockManagerInfo - Added broadcast_1942_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1919.1 MiB)
22:45:30.233 INFO  SparkContext - Created broadcast 1942 from newAPIHadoopFile at PathSplitSource.java:96
22:45:30.268 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
22:45:30.268 INFO  FileInputFormat - Total input files to process : 1
22:45:30.269 INFO  DAGScheduler - Registering RDD 5360 (mapToPair at SparkUtils.java:161) as input to shuffle 297
22:45:30.269 INFO  DAGScheduler - Got job 549 (collect at SparkUtils.java:205) with 1 output partitions
22:45:30.269 INFO  DAGScheduler - Final stage: ResultStage 1205 (collect at SparkUtils.java:205)
22:45:30.269 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1204)
22:45:30.269 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1204)
22:45:30.269 INFO  DAGScheduler - Submitting ShuffleMapStage 1204 (MapPartitionsRDD[5360] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:30.294 INFO  MemoryStore - Block broadcast_1943 stored as values in memory (estimated size 538.1 KiB, free 1915.6 MiB)
22:45:30.297 INFO  MemoryStore - Block broadcast_1943_piece0 stored as bytes in memory (estimated size 231.2 KiB, free 1915.4 MiB)
22:45:30.297 INFO  BlockManagerInfo - Added broadcast_1943_piece0 in memory on localhost:46727 (size: 231.2 KiB, free: 1918.9 MiB)
22:45:30.297 INFO  SparkContext - Created broadcast 1943 from broadcast at DAGScheduler.scala:1580
22:45:30.298 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1204 (MapPartitionsRDD[5360] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:30.298 INFO  TaskSchedulerImpl - Adding task set 1204.0 with 1 tasks resource profile 0
22:45:30.298 INFO  TaskSetManager - Starting task 0.0 in stage 1204.0 (TID 1036) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9758 bytes) 
22:45:30.298 INFO  Executor - Running task 0.0 in stage 1204.0 (TID 1036)
22:45:30.343 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam:0+13412
22:45:30.350 INFO  Executor - Finished task 0.0 in stage 1204.0 (TID 1036). 1148 bytes result sent to driver
22:45:30.350 INFO  TaskSetManager - Finished task 0.0 in stage 1204.0 (TID 1036) in 52 ms on localhost (executor driver) (1/1)
22:45:30.350 INFO  TaskSchedulerImpl - Removed TaskSet 1204.0, whose tasks have all completed, from pool 
22:45:30.350 INFO  DAGScheduler - ShuffleMapStage 1204 (mapToPair at SparkUtils.java:161) finished in 0.081 s
22:45:30.350 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.351 INFO  DAGScheduler - running: HashSet()
22:45:30.351 INFO  DAGScheduler - waiting: HashSet(ResultStage 1205)
22:45:30.351 INFO  DAGScheduler - failed: HashSet()
22:45:30.351 INFO  DAGScheduler - Submitting ResultStage 1205 (MapPartitionsRDD[5363] at mapPartitions at SparkUtils.java:188), which has no missing parents
22:45:30.351 INFO  MemoryStore - Block broadcast_1944 stored as values in memory (estimated size 7.4 KiB, free 1915.3 MiB)
22:45:30.352 INFO  MemoryStore - Block broadcast_1944_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1915.3 MiB)
22:45:30.352 INFO  BlockManagerInfo - Added broadcast_1944_piece0 in memory on localhost:46727 (size: 4.1 KiB, free: 1918.9 MiB)
22:45:30.352 INFO  SparkContext - Created broadcast 1944 from broadcast at DAGScheduler.scala:1580
22:45:30.352 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1205 (MapPartitionsRDD[5363] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
22:45:30.352 INFO  TaskSchedulerImpl - Adding task set 1205.0 with 1 tasks resource profile 0
22:45:30.353 INFO  TaskSetManager - Starting task 0.0 in stage 1205.0 (TID 1037) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:30.353 INFO  Executor - Running task 0.0 in stage 1205.0 (TID 1037)
22:45:30.354 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.354 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.358 INFO  Executor - Finished task 0.0 in stage 1205.0 (TID 1037). 2329 bytes result sent to driver
22:45:30.358 INFO  TaskSetManager - Finished task 0.0 in stage 1205.0 (TID 1037) in 6 ms on localhost (executor driver) (1/1)
22:45:30.358 INFO  TaskSchedulerImpl - Removed TaskSet 1205.0, whose tasks have all completed, from pool 
22:45:30.358 INFO  DAGScheduler - ResultStage 1205 (collect at SparkUtils.java:205) finished in 0.007 s
22:45:30.358 INFO  DAGScheduler - Job 549 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:30.358 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1205: Stage finished
22:45:30.358 INFO  DAGScheduler - Job 549 finished: collect at SparkUtils.java:205, took 0.090248 s
22:45:30.364 INFO  MemoryStore - Block broadcast_1945 stored as values in memory (estimated size 7.8 KiB, free 1915.3 MiB)
22:45:30.369 INFO  MemoryStore - Block broadcast_1945_piece0 stored as bytes in memory (estimated size 540.0 B, free 1915.3 MiB)
22:45:30.369 INFO  BlockManagerInfo - Added broadcast_1945_piece0 in memory on localhost:46727 (size: 540.0 B, free: 1918.9 MiB)
22:45:30.369 INFO  BlockManagerInfo - Removed broadcast_1930_piece0 on localhost:46727 in memory (size: 222.3 KiB, free: 1919.1 MiB)
22:45:30.369 INFO  SparkContext - Created broadcast 1945 from broadcast at MarkDuplicatesSparkUtils.java:126
22:45:30.370 INFO  MemoryStore - Block broadcast_1946 stored as values in memory (estimated size 2.6 KiB, free 1916.0 MiB)
22:45:30.370 INFO  MemoryStore - Block broadcast_1946_piece0 stored as bytes in memory (estimated size 209.0 B, free 1916.1 MiB)
22:45:30.370 INFO  BlockManagerInfo - Removed broadcast_1929_piece0 on localhost:46727 in memory (size: 71.1 KiB, free: 1919.2 MiB)
22:45:30.370 INFO  BlockManagerInfo - Added broadcast_1946_piece0 in memory on localhost:46727 (size: 209.0 B, free: 1919.2 MiB)
22:45:30.370 INFO  BlockManagerInfo - Removed broadcast_1933_piece0 on localhost:46727 in memory (size: 31.0 B, free: 1919.2 MiB)
22:45:30.370 INFO  SparkContext - Created broadcast 1946 from broadcast at MarkDuplicatesSparkUtils.java:127
22:45:30.371 INFO  BlockManagerInfo - Removed broadcast_1944_piece0 on localhost:46727 in memory (size: 4.1 KiB, free: 1919.2 MiB)
22:45:30.372 INFO  BlockManagerInfo - Removed broadcast_1943_piece0 on localhost:46727 in memory (size: 231.2 KiB, free: 1919.4 MiB)
22:45:30.373 INFO  BlockManagerInfo - Removed broadcast_1931_piece0 on localhost:46727 in memory (size: 4.1 KiB, free: 1919.4 MiB)
22:45:30.374 INFO  BlockManagerInfo - Removed broadcast_1941_piece0 on localhost:46727 in memory (size: 71.2 KiB, free: 1919.5 MiB)
22:45:30.374 INFO  BlockManagerInfo - Removed broadcast_1934_piece0 on localhost:46727 in memory (size: 11.2 KiB, free: 1919.5 MiB)
22:45:30.375 INFO  BlockManagerInfo - Removed broadcast_1938_piece0 on localhost:46727 in memory (size: 663.0 B, free: 1919.5 MiB)
22:45:30.375 INFO  BlockManagerInfo - Removed broadcast_1928_piece0 on localhost:46727 in memory (size: 71.1 KiB, free: 1919.6 MiB)
22:45:30.376 INFO  BlockManagerInfo - Removed broadcast_1935_piece0 on localhost:46727 in memory (size: 13.2 KiB, free: 1919.6 MiB)
22:45:30.376 INFO  BlockManagerInfo - Removed broadcast_1940_piece0 on localhost:46727 in memory (size: 79.2 KiB, free: 1919.7 MiB)
22:45:30.377 INFO  BlockManagerInfo - Removed broadcast_1937_piece0 on localhost:46727 in memory (size: 9.1 KiB, free: 1919.7 MiB)
22:45:30.377 INFO  BlockManagerInfo - Removed broadcast_1932_piece0 on localhost:46727 in memory (size: 19.0 B, free: 1919.7 MiB)
22:45:30.378 INFO  BlockManagerInfo - Removed broadcast_1936_piece0 on localhost:46727 in memory (size: 8.5 KiB, free: 1919.7 MiB)
22:45:30.378 INFO  BlockManagerInfo - Removed broadcast_1939_piece0 on localhost:46727 in memory (size: 8.0 KiB, free: 1919.7 MiB)
22:45:30.386 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
22:45:30.387 INFO  DAGScheduler - Registering RDD 5372 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 300
22:45:30.387 INFO  DAGScheduler - Registering RDD 5376 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 299
22:45:30.387 INFO  DAGScheduler - Registering RDD 5380 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 298
22:45:30.387 INFO  DAGScheduler - Got job 550 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
22:45:30.387 INFO  DAGScheduler - Final stage: ResultStage 1210 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
22:45:30.387 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1209)
22:45:30.387 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1209)
22:45:30.387 INFO  DAGScheduler - Submitting ShuffleMapStage 1207 (MapPartitionsRDD[5372] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
22:45:30.388 INFO  MemoryStore - Block broadcast_1947 stored as values in memory (estimated size 57.6 KiB, free 1918.4 MiB)
22:45:30.389 INFO  MemoryStore - Block broadcast_1947_piece0 stored as bytes in memory (estimated size 19.8 KiB, free 1918.3 MiB)
22:45:30.389 INFO  BlockManagerInfo - Added broadcast_1947_piece0 in memory on localhost:46727 (size: 19.8 KiB, free: 1919.7 MiB)
22:45:30.389 INFO  SparkContext - Created broadcast 1947 from broadcast at DAGScheduler.scala:1580
22:45:30.390 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1207 (MapPartitionsRDD[5372] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
22:45:30.390 INFO  TaskSchedulerImpl - Adding task set 1207.0 with 1 tasks resource profile 0
22:45:30.390 INFO  TaskSetManager - Starting task 0.0 in stage 1207.0 (TID 1038) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
22:45:30.390 INFO  Executor - Running task 0.0 in stage 1207.0 (TID 1038)
22:45:30.393 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.393 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.398 INFO  Executor - Finished task 0.0 in stage 1207.0 (TID 1038). 1922 bytes result sent to driver
22:45:30.398 INFO  TaskSetManager - Finished task 0.0 in stage 1207.0 (TID 1038) in 8 ms on localhost (executor driver) (1/1)
22:45:30.398 INFO  TaskSchedulerImpl - Removed TaskSet 1207.0, whose tasks have all completed, from pool 
22:45:30.398 INFO  DAGScheduler - ShuffleMapStage 1207 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.011 s
22:45:30.398 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.398 INFO  DAGScheduler - running: HashSet()
22:45:30.398 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1208, ShuffleMapStage 1209, ResultStage 1210)
22:45:30.398 INFO  DAGScheduler - failed: HashSet()
22:45:30.399 INFO  DAGScheduler - Submitting ShuffleMapStage 1208 (MapPartitionsRDD[5376] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
22:45:30.400 INFO  MemoryStore - Block broadcast_1948 stored as values in memory (estimated size 61.7 KiB, free 1918.3 MiB)
22:45:30.400 INFO  MemoryStore - Block broadcast_1948_piece0 stored as bytes in memory (estimated size 22.0 KiB, free 1918.2 MiB)
22:45:30.400 INFO  BlockManagerInfo - Added broadcast_1948_piece0 in memory on localhost:46727 (size: 22.0 KiB, free: 1919.6 MiB)
22:45:30.401 INFO  SparkContext - Created broadcast 1948 from broadcast at DAGScheduler.scala:1580
22:45:30.401 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1208 (MapPartitionsRDD[5376] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
22:45:30.401 INFO  TaskSchedulerImpl - Adding task set 1208.0 with 1 tasks resource profile 0
22:45:30.401 INFO  TaskSetManager - Starting task 0.0 in stage 1208.0 (TID 1039) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
22:45:30.401 INFO  Executor - Running task 0.0 in stage 1208.0 (TID 1039)
22:45:30.403 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.0 KiB) non-empty blocks including 1 (2.0 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.404 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.411 INFO  Executor - Finished task 0.0 in stage 1208.0 (TID 1039). 1922 bytes result sent to driver
22:45:30.412 INFO  TaskSetManager - Finished task 0.0 in stage 1208.0 (TID 1039) in 11 ms on localhost (executor driver) (1/1)
22:45:30.412 INFO  TaskSchedulerImpl - Removed TaskSet 1208.0, whose tasks have all completed, from pool 
22:45:30.412 INFO  DAGScheduler - ShuffleMapStage 1208 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.013 s
22:45:30.412 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.412 INFO  DAGScheduler - running: HashSet()
22:45:30.412 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1209, ResultStage 1210)
22:45:30.412 INFO  DAGScheduler - failed: HashSet()
22:45:30.412 INFO  DAGScheduler - Submitting ShuffleMapStage 1209 (MapPartitionsRDD[5380] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
22:45:30.413 INFO  MemoryStore - Block broadcast_1949 stored as values in memory (estimated size 52.4 KiB, free 1918.2 MiB)
22:45:30.414 INFO  MemoryStore - Block broadcast_1949_piece0 stored as bytes in memory (estimated size 17.3 KiB, free 1918.2 MiB)
22:45:30.414 INFO  BlockManagerInfo - Added broadcast_1949_piece0 in memory on localhost:46727 (size: 17.3 KiB, free: 1919.6 MiB)
22:45:30.414 INFO  SparkContext - Created broadcast 1949 from broadcast at DAGScheduler.scala:1580
22:45:30.414 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1209 (MapPartitionsRDD[5380] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
22:45:30.414 INFO  TaskSchedulerImpl - Adding task set 1209.0 with 1 tasks resource profile 0
22:45:30.415 INFO  TaskSetManager - Starting task 0.0 in stage 1209.0 (TID 1040) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:30.415 INFO  Executor - Running task 0.0 in stage 1209.0 (TID 1040)
22:45:30.417 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.417 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.421 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.421 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.423 INFO  Executor - Finished task 0.0 in stage 1209.0 (TID 1040). 1922 bytes result sent to driver
22:45:30.423 INFO  TaskSetManager - Finished task 0.0 in stage 1209.0 (TID 1040) in 8 ms on localhost (executor driver) (1/1)
22:45:30.423 INFO  TaskSchedulerImpl - Removed TaskSet 1209.0, whose tasks have all completed, from pool 
22:45:30.423 INFO  DAGScheduler - ShuffleMapStage 1209 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.011 s
22:45:30.423 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.423 INFO  DAGScheduler - running: HashSet()
22:45:30.423 INFO  DAGScheduler - waiting: HashSet(ResultStage 1210)
22:45:30.423 INFO  DAGScheduler - failed: HashSet()
22:45:30.423 INFO  DAGScheduler - Submitting ResultStage 1210 (MapPartitionsRDD[5382] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
22:45:30.424 INFO  MemoryStore - Block broadcast_1950 stored as values in memory (estimated size 53.6 KiB, free 1918.1 MiB)
22:45:30.425 INFO  MemoryStore - Block broadcast_1950_piece0 stored as bytes in memory (estimated size 18.0 KiB, free 1918.1 MiB)
22:45:30.425 INFO  BlockManagerInfo - Added broadcast_1950_piece0 in memory on localhost:46727 (size: 18.0 KiB, free: 1919.6 MiB)
22:45:30.425 INFO  SparkContext - Created broadcast 1950 from broadcast at DAGScheduler.scala:1580
22:45:30.425 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1210 (MapPartitionsRDD[5382] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
22:45:30.425 INFO  TaskSchedulerImpl - Adding task set 1210.0 with 1 tasks resource profile 0
22:45:30.426 INFO  TaskSetManager - Starting task 0.0 in stage 1210.0 (TID 1041) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:30.426 INFO  Executor - Running task 0.0 in stage 1210.0 (TID 1041)
22:45:30.428 INFO  ShuffleBlockFetcherIterator - Getting 1 (251.0 B) non-empty blocks including 1 (251.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.428 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.430 INFO  Executor - Finished task 0.0 in stage 1210.0 (TID 1041). 2371 bytes result sent to driver
22:45:30.430 INFO  TaskSetManager - Finished task 0.0 in stage 1210.0 (TID 1041) in 4 ms on localhost (executor driver) (1/1)
22:45:30.430 INFO  TaskSchedulerImpl - Removed TaskSet 1210.0, whose tasks have all completed, from pool 
22:45:30.430 INFO  DAGScheduler - ResultStage 1210 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.007 s
22:45:30.430 INFO  DAGScheduler - Job 550 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:30.430 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1210: Stage finished
22:45:30.430 INFO  DAGScheduler - Job 550 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.043746 s
22:45:30.448 INFO  MemoryStore - Block broadcast_1951 stored as values in memory (estimated size 179.4 KiB, free 1917.9 MiB)
22:45:30.449 INFO  MemoryStore - Block broadcast_1951_piece0 stored as bytes in memory (estimated size 7.9 KiB, free 1917.9 MiB)
22:45:30.449 INFO  BlockManagerInfo - Added broadcast_1951_piece0 in memory on localhost:46727 (size: 7.9 KiB, free: 1919.6 MiB)
22:45:30.449 INFO  SparkContext - Created broadcast 1951 from broadcast at ReadsSparkSink.java:133
22:45:30.453 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
22:45:30.453 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:30.453 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:30.475 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
22:45:30.475 INFO  DAGScheduler - Registering RDD 5384 (mapToPair at SparkUtils.java:161) as input to shuffle 301
22:45:30.475 INFO  DAGScheduler - Got job 551 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
22:45:30.476 INFO  DAGScheduler - Final stage: ResultStage 1215 (runJob at SparkHadoopWriter.scala:83)
22:45:30.476 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1214)
22:45:30.476 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1214)
22:45:30.476 INFO  DAGScheduler - Submitting ShuffleMapStage 1214 (MapPartitionsRDD[5384] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:30.477 INFO  MemoryStore - Block broadcast_1952 stored as values in memory (estimated size 51.5 KiB, free 1917.9 MiB)
22:45:30.477 INFO  MemoryStore - Block broadcast_1952_piece0 stored as bytes in memory (estimated size 16.8 KiB, free 1917.9 MiB)
22:45:30.477 INFO  BlockManagerInfo - Added broadcast_1952_piece0 in memory on localhost:46727 (size: 16.8 KiB, free: 1919.6 MiB)
22:45:30.477 INFO  SparkContext - Created broadcast 1952 from broadcast at DAGScheduler.scala:1580
22:45:30.478 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1214 (MapPartitionsRDD[5384] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:30.478 INFO  TaskSchedulerImpl - Adding task set 1214.0 with 1 tasks resource profile 0
22:45:30.478 INFO  TaskSetManager - Starting task 0.0 in stage 1214.0 (TID 1042) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:30.478 INFO  Executor - Running task 0.0 in stage 1214.0 (TID 1042)
22:45:30.480 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.480 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.483 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.483 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.484 INFO  Executor - Finished task 0.0 in stage 1214.0 (TID 1042). 1922 bytes result sent to driver
22:45:30.484 INFO  TaskSetManager - Finished task 0.0 in stage 1214.0 (TID 1042) in 6 ms on localhost (executor driver) (1/1)
22:45:30.484 INFO  TaskSchedulerImpl - Removed TaskSet 1214.0, whose tasks have all completed, from pool 
22:45:30.485 INFO  DAGScheduler - ShuffleMapStage 1214 (mapToPair at SparkUtils.java:161) finished in 0.009 s
22:45:30.485 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.485 INFO  DAGScheduler - running: HashSet()
22:45:30.485 INFO  DAGScheduler - waiting: HashSet(ResultStage 1215)
22:45:30.485 INFO  DAGScheduler - failed: HashSet()
22:45:30.485 INFO  DAGScheduler - Submitting ResultStage 1215 (MapPartitionsRDD[5390] at saveAsTextFile at SamSink.java:65), which has no missing parents
22:45:30.492 INFO  MemoryStore - Block broadcast_1953 stored as values in memory (estimated size 211.5 KiB, free 1917.7 MiB)
22:45:30.493 INFO  MemoryStore - Block broadcast_1953_piece0 stored as bytes in memory (estimated size 87.9 KiB, free 1917.6 MiB)
22:45:30.493 INFO  BlockManagerInfo - Added broadcast_1953_piece0 in memory on localhost:46727 (size: 87.9 KiB, free: 1919.5 MiB)
22:45:30.494 INFO  SparkContext - Created broadcast 1953 from broadcast at DAGScheduler.scala:1580
22:45:30.494 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1215 (MapPartitionsRDD[5390] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
22:45:30.494 INFO  TaskSchedulerImpl - Adding task set 1215.0 with 1 tasks resource profile 0
22:45:30.494 INFO  TaskSetManager - Starting task 0.0 in stage 1215.0 (TID 1043) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:30.494 INFO  Executor - Running task 0.0 in stage 1215.0 (TID 1043)
22:45:30.498 INFO  ShuffleBlockFetcherIterator - Getting 1 (14.8 KiB) non-empty blocks including 1 (14.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.498 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.501 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
22:45:30.501 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:30.501 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:30.513 INFO  FileOutputCommitter - Saved output of task 'attempt_2025032822453024266865725139105_5390_m_000000_0' to file:/tmp/MarkDups10268209968030767790/MarkDups.sam.parts/_temporary/0/task_2025032822453024266865725139105_5390_m_000000
22:45:30.513 INFO  SparkHadoopMapRedUtil - attempt_2025032822453024266865725139105_5390_m_000000_0: Committed. Elapsed time: 0 ms.
22:45:30.513 INFO  Executor - Finished task 0.0 in stage 1215.0 (TID 1043). 1858 bytes result sent to driver
22:45:30.513 INFO  TaskSetManager - Finished task 0.0 in stage 1215.0 (TID 1043) in 19 ms on localhost (executor driver) (1/1)
22:45:30.513 INFO  TaskSchedulerImpl - Removed TaskSet 1215.0, whose tasks have all completed, from pool 
22:45:30.513 INFO  DAGScheduler - ResultStage 1215 (runJob at SparkHadoopWriter.scala:83) finished in 0.028 s
22:45:30.514 INFO  DAGScheduler - Job 551 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:30.514 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1215: Stage finished
22:45:30.514 INFO  DAGScheduler - Job 551 finished: runJob at SparkHadoopWriter.scala:83, took 0.038625 s
22:45:30.514 INFO  SparkHadoopWriter - Start to commit write Job job_2025032822453024266865725139105_5390.
22:45:30.518 INFO  SparkHadoopWriter - Write Job job_2025032822453024266865725139105_5390 committed. Elapsed time: 4 ms.
22:45:30.526 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups10268209968030767790/MarkDups.sam
22:45:30.530 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups10268209968030767790/MarkDups.sam done
22:45:30.530 INFO  MarkDuplicatesSpark - Shutting down engine
[March 28, 2025 at 10:45:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1048576000
22:45:30.537 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:30.537 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
22:45:30.537 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
22:45:30.537 INFO  MarkDuplicatesSpark - Executing as runner@fv-az1690-153 on Linux v6.8.0-1021-azure amd64
22:45:30.537 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
22:45:30.537 INFO  MarkDuplicatesSpark - Start Date/Time: March 28, 2025 at 10:45:30 PM UTC
22:45:30.537 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:30.537 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:30.537 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
22:45:30.537 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
22:45:30.537 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
22:45:30.537 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
22:45:30.537 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
22:45:30.537 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
22:45:30.537 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
22:45:30.537 INFO  MarkDuplicatesSpark - Requester pays: disabled
22:45:30.537 INFO  MarkDuplicatesSpark - Initializing engine
22:45:30.537 INFO  MarkDuplicatesSpark - Done initializing engine
22:45:30.537 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
22:45:30.540 INFO  MemoryStore - Block broadcast_1954 stored as values in memory (estimated size 333.6 KiB, free 1917.2 MiB)
22:45:30.552 INFO  MemoryStore - Block broadcast_1954_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1917.2 MiB)
22:45:30.552 INFO  BlockManagerInfo - Added broadcast_1954_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1919.4 MiB)
22:45:30.552 INFO  SparkContext - Created broadcast 1954 from newAPIHadoopFile at PathSplitSource.java:96
22:45:30.579 INFO  MemoryStore - Block broadcast_1955 stored as values in memory (estimated size 333.6 KiB, free 1916.8 MiB)
22:45:30.586 INFO  MemoryStore - Block broadcast_1955_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1916.8 MiB)
22:45:30.586 INFO  BlockManagerInfo - Added broadcast_1955_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1919.4 MiB)
22:45:30.586 INFO  SparkContext - Created broadcast 1955 from newAPIHadoopFile at PathSplitSource.java:96
22:45:30.621 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
22:45:30.621 INFO  FileInputFormat - Total input files to process : 1
22:45:30.622 INFO  DAGScheduler - Registering RDD 5404 (mapToPair at SparkUtils.java:161) as input to shuffle 302
22:45:30.622 INFO  DAGScheduler - Got job 552 (collect at SparkUtils.java:205) with 1 output partitions
22:45:30.622 INFO  DAGScheduler - Final stage: ResultStage 1217 (collect at SparkUtils.java:205)
22:45:30.622 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1216)
22:45:30.622 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1216)
22:45:30.622 INFO  DAGScheduler - Submitting ShuffleMapStage 1216 (MapPartitionsRDD[5404] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:30.642 INFO  MemoryStore - Block broadcast_1956 stored as values in memory (estimated size 501.7 KiB, free 1916.3 MiB)
22:45:30.645 INFO  MemoryStore - Block broadcast_1956_piece0 stored as bytes in memory (estimated size 222.2 KiB, free 1916.1 MiB)
22:45:30.645 INFO  BlockManagerInfo - Added broadcast_1956_piece0 in memory on localhost:46727 (size: 222.2 KiB, free: 1919.1 MiB)
22:45:30.645 INFO  SparkContext - Created broadcast 1956 from broadcast at DAGScheduler.scala:1580
22:45:30.645 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1216 (MapPartitionsRDD[5404] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:30.645 INFO  TaskSchedulerImpl - Adding task set 1216.0 with 1 tasks resource profile 0
22:45:30.646 INFO  TaskSetManager - Starting task 0.0 in stage 1216.0 (TID 1044) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9738 bytes) 
22:45:30.646 INFO  Executor - Running task 0.0 in stage 1216.0 (TID 1044)
22:45:30.677 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam:0+678
22:45:30.681 INFO  Executor - Finished task 0.0 in stage 1216.0 (TID 1044). 1148 bytes result sent to driver
22:45:30.681 INFO  TaskSetManager - Finished task 0.0 in stage 1216.0 (TID 1044) in 36 ms on localhost (executor driver) (1/1)
22:45:30.681 INFO  TaskSchedulerImpl - Removed TaskSet 1216.0, whose tasks have all completed, from pool 
22:45:30.681 INFO  DAGScheduler - ShuffleMapStage 1216 (mapToPair at SparkUtils.java:161) finished in 0.059 s
22:45:30.681 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.681 INFO  DAGScheduler - running: HashSet()
22:45:30.681 INFO  DAGScheduler - waiting: HashSet(ResultStage 1217)
22:45:30.681 INFO  DAGScheduler - failed: HashSet()
22:45:30.682 INFO  DAGScheduler - Submitting ResultStage 1217 (MapPartitionsRDD[5407] at mapPartitions at SparkUtils.java:188), which has no missing parents
22:45:30.682 INFO  MemoryStore - Block broadcast_1957 stored as values in memory (estimated size 7.4 KiB, free 1916.1 MiB)
22:45:30.682 INFO  MemoryStore - Block broadcast_1957_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1916.1 MiB)
22:45:30.682 INFO  BlockManagerInfo - Added broadcast_1957_piece0 in memory on localhost:46727 (size: 4.1 KiB, free: 1919.1 MiB)
22:45:30.683 INFO  SparkContext - Created broadcast 1957 from broadcast at DAGScheduler.scala:1580
22:45:30.683 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1217 (MapPartitionsRDD[5407] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
22:45:30.683 INFO  TaskSchedulerImpl - Adding task set 1217.0 with 1 tasks resource profile 0
22:45:30.683 INFO  TaskSetManager - Starting task 0.0 in stage 1217.0 (TID 1045) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:30.683 INFO  Executor - Running task 0.0 in stage 1217.0 (TID 1045)
22:45:30.684 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.684 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.685 INFO  Executor - Finished task 0.0 in stage 1217.0 (TID 1045). 2269 bytes result sent to driver
22:45:30.685 INFO  TaskSetManager - Finished task 0.0 in stage 1217.0 (TID 1045) in 2 ms on localhost (executor driver) (1/1)
22:45:30.685 INFO  TaskSchedulerImpl - Removed TaskSet 1217.0, whose tasks have all completed, from pool 
22:45:30.685 INFO  DAGScheduler - ResultStage 1217 (collect at SparkUtils.java:205) finished in 0.003 s
22:45:30.686 INFO  DAGScheduler - Job 552 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:30.686 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1217: Stage finished
22:45:30.686 INFO  DAGScheduler - Job 552 finished: collect at SparkUtils.java:205, took 0.064719 s
22:45:30.690 INFO  MemoryStore - Block broadcast_1958 stored as values in memory (estimated size 136.0 B, free 1916.1 MiB)
22:45:30.690 INFO  MemoryStore - Block broadcast_1958_piece0 stored as bytes in memory (estimated size 24.0 B, free 1916.1 MiB)
22:45:30.690 INFO  BlockManagerInfo - Added broadcast_1958_piece0 in memory on localhost:46727 (size: 24.0 B, free: 1919.1 MiB)
22:45:30.690 INFO  SparkContext - Created broadcast 1958 from broadcast at MarkDuplicatesSparkUtils.java:126
22:45:30.690 INFO  MemoryStore - Block broadcast_1959 stored as values in memory (estimated size 136.0 B, free 1916.1 MiB)
22:45:30.691 INFO  MemoryStore - Block broadcast_1959_piece0 stored as bytes in memory (estimated size 21.0 B, free 1916.1 MiB)
22:45:30.691 INFO  BlockManagerInfo - Added broadcast_1959_piece0 in memory on localhost:46727 (size: 21.0 B, free: 1919.1 MiB)
22:45:30.691 INFO  SparkContext - Created broadcast 1959 from broadcast at MarkDuplicatesSparkUtils.java:127
22:45:30.702 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
22:45:30.702 INFO  DAGScheduler - Registering RDD 5416 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 305
22:45:30.703 INFO  DAGScheduler - Registering RDD 5420 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 304
22:45:30.703 INFO  DAGScheduler - Registering RDD 5424 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 303
22:45:30.703 INFO  DAGScheduler - Got job 553 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
22:45:30.703 INFO  DAGScheduler - Final stage: ResultStage 1222 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
22:45:30.703 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1221)
22:45:30.703 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1221)
22:45:30.703 INFO  DAGScheduler - Submitting ShuffleMapStage 1219 (MapPartitionsRDD[5416] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
22:45:30.703 INFO  MemoryStore - Block broadcast_1960 stored as values in memory (estimated size 21.1 KiB, free 1916.0 MiB)
22:45:30.707 INFO  BlockManagerInfo - Removed broadcast_1945_piece0 on localhost:46727 in memory (size: 540.0 B, free: 1919.1 MiB)
22:45:30.707 INFO  MemoryStore - Block broadcast_1960_piece0 stored as bytes in memory (estimated size 10.9 KiB, free 1916.0 MiB)
22:45:30.707 INFO  BlockManagerInfo - Added broadcast_1960_piece0 in memory on localhost:46727 (size: 10.9 KiB, free: 1919.1 MiB)
22:45:30.707 INFO  SparkContext - Created broadcast 1960 from broadcast at DAGScheduler.scala:1580
22:45:30.707 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1219 (MapPartitionsRDD[5416] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
22:45:30.707 INFO  BlockManagerInfo - Removed broadcast_1946_piece0 on localhost:46727 in memory (size: 209.0 B, free: 1919.1 MiB)
22:45:30.707 INFO  TaskSchedulerImpl - Adding task set 1219.0 with 1 tasks resource profile 0
22:45:30.708 INFO  BlockManagerInfo - Removed broadcast_1954_piece0 on localhost:46727 in memory (size: 71.2 KiB, free: 1919.2 MiB)
22:45:30.708 INFO  TaskSetManager - Starting task 0.0 in stage 1219.0 (TID 1046) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
22:45:30.708 INFO  BlockManagerInfo - Removed broadcast_1956_piece0 on localhost:46727 in memory (size: 222.2 KiB, free: 1919.4 MiB)
22:45:30.708 INFO  Executor - Running task 0.0 in stage 1219.0 (TID 1046)
22:45:30.709 INFO  BlockManagerInfo - Removed broadcast_1953_piece0 on localhost:46727 in memory (size: 87.9 KiB, free: 1919.5 MiB)
22:45:30.709 INFO  BlockManagerInfo - Removed broadcast_1952_piece0 on localhost:46727 in memory (size: 16.8 KiB, free: 1919.5 MiB)
22:45:30.710 INFO  BlockManagerInfo - Removed broadcast_1942_piece0 on localhost:46727 in memory (size: 71.2 KiB, free: 1919.6 MiB)
22:45:30.710 INFO  BlockManagerInfo - Removed broadcast_1949_piece0 on localhost:46727 in memory (size: 17.3 KiB, free: 1919.6 MiB)
22:45:30.710 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.710 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.711 INFO  BlockManagerInfo - Removed broadcast_1947_piece0 on localhost:46727 in memory (size: 19.8 KiB, free: 1919.6 MiB)
22:45:30.711 INFO  BlockManagerInfo - Removed broadcast_1948_piece0 on localhost:46727 in memory (size: 22.0 KiB, free: 1919.6 MiB)
22:45:30.711 INFO  BlockManagerInfo - Removed broadcast_1951_piece0 on localhost:46727 in memory (size: 7.9 KiB, free: 1919.7 MiB)
22:45:30.712 INFO  BlockManagerInfo - Removed broadcast_1957_piece0 on localhost:46727 in memory (size: 4.1 KiB, free: 1919.7 MiB)
22:45:30.712 INFO  BlockManagerInfo - Removed broadcast_1950_piece0 on localhost:46727 in memory (size: 18.0 KiB, free: 1919.7 MiB)
22:45:30.713 INFO  Executor - Finished task 0.0 in stage 1219.0 (TID 1046). 1922 bytes result sent to driver
22:45:30.713 INFO  TaskSetManager - Finished task 0.0 in stage 1219.0 (TID 1046) in 5 ms on localhost (executor driver) (1/1)
22:45:30.713 INFO  TaskSchedulerImpl - Removed TaskSet 1219.0, whose tasks have all completed, from pool 
22:45:30.713 INFO  DAGScheduler - ShuffleMapStage 1219 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.010 s
22:45:30.713 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.713 INFO  DAGScheduler - running: HashSet()
22:45:30.713 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1220, ShuffleMapStage 1221, ResultStage 1222)
22:45:30.713 INFO  DAGScheduler - failed: HashSet()
22:45:30.714 INFO  DAGScheduler - Submitting ShuffleMapStage 1220 (MapPartitionsRDD[5420] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
22:45:30.714 INFO  MemoryStore - Block broadcast_1961 stored as values in memory (estimated size 25.2 KiB, free 1918.4 MiB)
22:45:30.715 INFO  MemoryStore - Block broadcast_1961_piece0 stored as bytes in memory (estimated size 13.1 KiB, free 1918.3 MiB)
22:45:30.715 INFO  BlockManagerInfo - Added broadcast_1961_piece0 in memory on localhost:46727 (size: 13.1 KiB, free: 1919.7 MiB)
22:45:30.715 INFO  SparkContext - Created broadcast 1961 from broadcast at DAGScheduler.scala:1580
22:45:30.715 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1220 (MapPartitionsRDD[5420] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
22:45:30.715 INFO  TaskSchedulerImpl - Adding task set 1220.0 with 1 tasks resource profile 0
22:45:30.715 INFO  TaskSetManager - Starting task 0.0 in stage 1220.0 (TID 1047) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
22:45:30.715 INFO  Executor - Running task 0.0 in stage 1220.0 (TID 1047)
22:45:30.716 INFO  ShuffleBlockFetcherIterator - Getting 1 (106.0 B) non-empty blocks including 1 (106.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.716 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.720 INFO  Executor - Finished task 0.0 in stage 1220.0 (TID 1047). 1879 bytes result sent to driver
22:45:30.720 INFO  TaskSetManager - Finished task 0.0 in stage 1220.0 (TID 1047) in 5 ms on localhost (executor driver) (1/1)
22:45:30.720 INFO  TaskSchedulerImpl - Removed TaskSet 1220.0, whose tasks have all completed, from pool 
22:45:30.720 INFO  DAGScheduler - ShuffleMapStage 1220 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.006 s
22:45:30.720 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.720 INFO  DAGScheduler - running: HashSet()
22:45:30.720 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1221, ResultStage 1222)
22:45:30.720 INFO  DAGScheduler - failed: HashSet()
22:45:30.720 INFO  DAGScheduler - Submitting ShuffleMapStage 1221 (MapPartitionsRDD[5424] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
22:45:30.721 INFO  MemoryStore - Block broadcast_1962 stored as values in memory (estimated size 15.9 KiB, free 1918.3 MiB)
22:45:30.721 INFO  MemoryStore - Block broadcast_1962_piece0 stored as bytes in memory (estimated size 8.3 KiB, free 1918.3 MiB)
22:45:30.721 INFO  BlockManagerInfo - Added broadcast_1962_piece0 in memory on localhost:46727 (size: 8.3 KiB, free: 1919.7 MiB)
22:45:30.721 INFO  SparkContext - Created broadcast 1962 from broadcast at DAGScheduler.scala:1580
22:45:30.721 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1221 (MapPartitionsRDD[5424] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
22:45:30.721 INFO  TaskSchedulerImpl - Adding task set 1221.0 with 1 tasks resource profile 0
22:45:30.721 INFO  TaskSetManager - Starting task 0.0 in stage 1221.0 (TID 1048) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:30.722 INFO  Executor - Running task 0.0 in stage 1221.0 (TID 1048)
22:45:30.723 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.723 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.724 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.724 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.724 INFO  Executor - Finished task 0.0 in stage 1221.0 (TID 1048). 1879 bytes result sent to driver
22:45:30.725 INFO  TaskSetManager - Finished task 0.0 in stage 1221.0 (TID 1048) in 4 ms on localhost (executor driver) (1/1)
22:45:30.725 INFO  TaskSchedulerImpl - Removed TaskSet 1221.0, whose tasks have all completed, from pool 
22:45:30.725 INFO  DAGScheduler - ShuffleMapStage 1221 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.005 s
22:45:30.725 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.725 INFO  DAGScheduler - running: HashSet()
22:45:30.725 INFO  DAGScheduler - waiting: HashSet(ResultStage 1222)
22:45:30.725 INFO  DAGScheduler - failed: HashSet()
22:45:30.725 INFO  DAGScheduler - Submitting ResultStage 1222 (MapPartitionsRDD[5426] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
22:45:30.725 INFO  MemoryStore - Block broadcast_1963 stored as values in memory (estimated size 17.1 KiB, free 1918.3 MiB)
22:45:30.726 INFO  MemoryStore - Block broadcast_1963_piece0 stored as bytes in memory (estimated size 9.0 KiB, free 1918.3 MiB)
22:45:30.726 INFO  BlockManagerInfo - Added broadcast_1963_piece0 in memory on localhost:46727 (size: 9.0 KiB, free: 1919.6 MiB)
22:45:30.726 INFO  SparkContext - Created broadcast 1963 from broadcast at DAGScheduler.scala:1580
22:45:30.726 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1222 (MapPartitionsRDD[5426] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
22:45:30.726 INFO  TaskSchedulerImpl - Adding task set 1222.0 with 1 tasks resource profile 0
22:45:30.726 INFO  TaskSetManager - Starting task 0.0 in stage 1222.0 (TID 1049) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:30.726 INFO  Executor - Running task 0.0 in stage 1222.0 (TID 1049)
22:45:30.727 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.727 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.728 INFO  Executor - Finished task 0.0 in stage 1222.0 (TID 1049). 1887 bytes result sent to driver
22:45:30.728 INFO  TaskSetManager - Finished task 0.0 in stage 1222.0 (TID 1049) in 2 ms on localhost (executor driver) (1/1)
22:45:30.729 INFO  TaskSchedulerImpl - Removed TaskSet 1222.0, whose tasks have all completed, from pool 
22:45:30.729 INFO  DAGScheduler - ResultStage 1222 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.004 s
22:45:30.729 INFO  DAGScheduler - Job 553 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:30.729 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1222: Stage finished
22:45:30.729 INFO  DAGScheduler - Job 553 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.026610 s
22:45:30.740 INFO  MemoryStore - Block broadcast_1964 stored as values in memory (estimated size 5.6 KiB, free 1918.3 MiB)
22:45:30.740 INFO  MemoryStore - Block broadcast_1964_piece0 stored as bytes in memory (estimated size 500.0 B, free 1918.3 MiB)
22:45:30.740 INFO  BlockManagerInfo - Added broadcast_1964_piece0 in memory on localhost:46727 (size: 500.0 B, free: 1919.6 MiB)
22:45:30.740 INFO  SparkContext - Created broadcast 1964 from broadcast at ReadsSparkSink.java:133
22:45:30.743 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
22:45:30.743 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:30.743 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:30.760 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
22:45:30.760 INFO  DAGScheduler - Registering RDD 5428 (mapToPair at SparkUtils.java:161) as input to shuffle 306
22:45:30.761 INFO  DAGScheduler - Got job 554 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
22:45:30.761 INFO  DAGScheduler - Final stage: ResultStage 1227 (runJob at SparkHadoopWriter.scala:83)
22:45:30.761 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1226)
22:45:30.761 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1226)
22:45:30.761 INFO  DAGScheduler - Submitting ShuffleMapStage 1226 (MapPartitionsRDD[5428] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:30.761 INFO  MemoryStore - Block broadcast_1965 stored as values in memory (estimated size 15.0 KiB, free 1918.3 MiB)
22:45:30.762 INFO  MemoryStore - Block broadcast_1965_piece0 stored as bytes in memory (estimated size 7.8 KiB, free 1918.3 MiB)
22:45:30.762 INFO  BlockManagerInfo - Added broadcast_1965_piece0 in memory on localhost:46727 (size: 7.8 KiB, free: 1919.6 MiB)
22:45:30.762 INFO  SparkContext - Created broadcast 1965 from broadcast at DAGScheduler.scala:1580
22:45:30.762 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1226 (MapPartitionsRDD[5428] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:30.762 INFO  TaskSchedulerImpl - Adding task set 1226.0 with 1 tasks resource profile 0
22:45:30.762 INFO  TaskSetManager - Starting task 0.0 in stage 1226.0 (TID 1050) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:30.762 INFO  Executor - Running task 0.0 in stage 1226.0 (TID 1050)
22:45:30.764 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.764 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.765 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.765 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.766 INFO  Executor - Finished task 0.0 in stage 1226.0 (TID 1050). 1922 bytes result sent to driver
22:45:30.766 INFO  TaskSetManager - Finished task 0.0 in stage 1226.0 (TID 1050) in 4 ms on localhost (executor driver) (1/1)
22:45:30.766 INFO  TaskSchedulerImpl - Removed TaskSet 1226.0, whose tasks have all completed, from pool 
22:45:30.767 INFO  DAGScheduler - ShuffleMapStage 1226 (mapToPair at SparkUtils.java:161) finished in 0.006 s
22:45:30.767 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.767 INFO  DAGScheduler - running: HashSet()
22:45:30.767 INFO  DAGScheduler - waiting: HashSet(ResultStage 1227)
22:45:30.767 INFO  DAGScheduler - failed: HashSet()
22:45:30.767 INFO  DAGScheduler - Submitting ResultStage 1227 (MapPartitionsRDD[5434] at saveAsTextFile at SamSink.java:65), which has no missing parents
22:45:30.773 INFO  MemoryStore - Block broadcast_1966 stored as values in memory (estimated size 175.0 KiB, free 1918.1 MiB)
22:45:30.774 INFO  MemoryStore - Block broadcast_1966_piece0 stored as bytes in memory (estimated size 78.9 KiB, free 1918.0 MiB)
22:45:30.774 INFO  BlockManagerInfo - Added broadcast_1966_piece0 in memory on localhost:46727 (size: 78.9 KiB, free: 1919.6 MiB)
22:45:30.775 INFO  SparkContext - Created broadcast 1966 from broadcast at DAGScheduler.scala:1580
22:45:30.775 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1227 (MapPartitionsRDD[5434] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
22:45:30.775 INFO  TaskSchedulerImpl - Adding task set 1227.0 with 1 tasks resource profile 0
22:45:30.775 INFO  TaskSetManager - Starting task 0.0 in stage 1227.0 (TID 1051) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:30.775 INFO  Executor - Running task 0.0 in stage 1227.0 (TID 1051)
22:45:30.779 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.779 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.780 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
22:45:30.780 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:30.780 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:30.792 INFO  FileOutputCommitter - Saved output of task 'attempt_202503282245306655135476055341653_5434_m_000000_0' to file:/tmp/MarkDups1955833690336527186/MarkDups.sam.parts/_temporary/0/task_202503282245306655135476055341653_5434_m_000000
22:45:30.792 INFO  SparkHadoopMapRedUtil - attempt_202503282245306655135476055341653_5434_m_000000_0: Committed. Elapsed time: 0 ms.
22:45:30.792 INFO  Executor - Finished task 0.0 in stage 1227.0 (TID 1051). 1858 bytes result sent to driver
22:45:30.792 INFO  TaskSetManager - Finished task 0.0 in stage 1227.0 (TID 1051) in 17 ms on localhost (executor driver) (1/1)
22:45:30.792 INFO  TaskSchedulerImpl - Removed TaskSet 1227.0, whose tasks have all completed, from pool 
22:45:30.792 INFO  DAGScheduler - ResultStage 1227 (runJob at SparkHadoopWriter.scala:83) finished in 0.025 s
22:45:30.792 INFO  DAGScheduler - Job 554 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:30.792 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1227: Stage finished
22:45:30.792 INFO  DAGScheduler - Job 554 finished: runJob at SparkHadoopWriter.scala:83, took 0.032302 s
22:45:30.793 INFO  SparkHadoopWriter - Start to commit write Job job_202503282245306655135476055341653_5434.
22:45:30.797 INFO  SparkHadoopWriter - Write Job job_202503282245306655135476055341653_5434 committed. Elapsed time: 4 ms.
22:45:30.804 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups1955833690336527186/MarkDups.sam
22:45:30.808 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups1955833690336527186/MarkDups.sam done
22:45:30.808 INFO  MarkDuplicatesSpark - Shutting down engine
[March 28, 2025 at 10:45:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
22:45:30.814 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:30.814 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
22:45:30.814 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
22:45:30.814 INFO  MarkDuplicatesSpark - Executing as runner@fv-az1690-153 on Linux v6.8.0-1021-azure amd64
22:45:30.814 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
22:45:30.814 INFO  MarkDuplicatesSpark - Start Date/Time: March 28, 2025 at 10:45:30 PM UTC
22:45:30.814 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:30.814 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
22:45:30.814 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
22:45:30.814 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
22:45:30.814 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
22:45:30.814 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
22:45:30.814 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
22:45:30.814 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
22:45:30.814 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
22:45:30.814 INFO  MarkDuplicatesSpark - Requester pays: disabled
22:45:30.814 INFO  MarkDuplicatesSpark - Initializing engine
22:45:30.814 INFO  MarkDuplicatesSpark - Done initializing engine
22:45:30.814 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
22:45:30.816 INFO  MemoryStore - Block broadcast_1967 stored as values in memory (estimated size 333.6 KiB, free 1917.7 MiB)
22:45:30.823 INFO  MemoryStore - Block broadcast_1967_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1917.6 MiB)
22:45:30.823 INFO  BlockManagerInfo - Added broadcast_1967_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1919.5 MiB)
22:45:30.824 INFO  SparkContext - Created broadcast 1967 from newAPIHadoopFile at PathSplitSource.java:96
22:45:30.847 INFO  MemoryStore - Block broadcast_1968 stored as values in memory (estimated size 333.6 KiB, free 1917.3 MiB)
22:45:30.854 INFO  MemoryStore - Block broadcast_1968_piece0 stored as bytes in memory (estimated size 71.2 KiB, free 1917.2 MiB)
22:45:30.854 INFO  BlockManagerInfo - Added broadcast_1968_piece0 in memory on localhost:46727 (size: 71.2 KiB, free: 1919.4 MiB)
22:45:30.854 INFO  SparkContext - Created broadcast 1968 from newAPIHadoopFile at PathSplitSource.java:96
22:45:30.889 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
22:45:30.890 INFO  FileInputFormat - Total input files to process : 1
22:45:30.890 INFO  DAGScheduler - Registering RDD 5448 (mapToPair at SparkUtils.java:161) as input to shuffle 307
22:45:30.890 INFO  DAGScheduler - Got job 555 (collect at SparkUtils.java:205) with 1 output partitions
22:45:30.890 INFO  DAGScheduler - Final stage: ResultStage 1229 (collect at SparkUtils.java:205)
22:45:30.890 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1228)
22:45:30.890 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1228)
22:45:30.890 INFO  DAGScheduler - Submitting ShuffleMapStage 1228 (MapPartitionsRDD[5448] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:30.909 INFO  MemoryStore - Block broadcast_1969 stored as values in memory (estimated size 525.7 KiB, free 1916.7 MiB)
22:45:30.911 INFO  MemoryStore - Block broadcast_1969_piece0 stored as bytes in memory (estimated size 228.4 KiB, free 1916.5 MiB)
22:45:30.912 INFO  BlockManagerInfo - Added broadcast_1969_piece0 in memory on localhost:46727 (size: 228.4 KiB, free: 1919.2 MiB)
22:45:30.912 INFO  SparkContext - Created broadcast 1969 from broadcast at DAGScheduler.scala:1580
22:45:30.912 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1228 (MapPartitionsRDD[5448] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:30.912 INFO  TaskSchedulerImpl - Adding task set 1228.0 with 1 tasks resource profile 0
22:45:30.912 INFO  TaskSetManager - Starting task 0.0 in stage 1228.0 (TID 1052) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9757 bytes) 
22:45:30.912 INFO  Executor - Running task 0.0 in stage 1228.0 (TID 1052)
22:45:30.944 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam:0+5416
22:45:30.949 INFO  Executor - Finished task 0.0 in stage 1228.0 (TID 1052). 1148 bytes result sent to driver
22:45:30.949 INFO  TaskSetManager - Finished task 0.0 in stage 1228.0 (TID 1052) in 37 ms on localhost (executor driver) (1/1)
22:45:30.949 INFO  TaskSchedulerImpl - Removed TaskSet 1228.0, whose tasks have all completed, from pool 
22:45:30.950 INFO  DAGScheduler - ShuffleMapStage 1228 (mapToPair at SparkUtils.java:161) finished in 0.060 s
22:45:30.950 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.950 INFO  DAGScheduler - running: HashSet()
22:45:30.950 INFO  DAGScheduler - waiting: HashSet(ResultStage 1229)
22:45:30.950 INFO  DAGScheduler - failed: HashSet()
22:45:30.950 INFO  DAGScheduler - Submitting ResultStage 1229 (MapPartitionsRDD[5451] at mapPartitions at SparkUtils.java:188), which has no missing parents
22:45:30.950 INFO  MemoryStore - Block broadcast_1970 stored as values in memory (estimated size 7.4 KiB, free 1916.5 MiB)
22:45:30.951 INFO  MemoryStore - Block broadcast_1970_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1916.5 MiB)
22:45:30.951 INFO  BlockManagerInfo - Added broadcast_1970_piece0 in memory on localhost:46727 (size: 4.1 KiB, free: 1919.2 MiB)
22:45:30.951 INFO  SparkContext - Created broadcast 1970 from broadcast at DAGScheduler.scala:1580
22:45:30.951 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1229 (MapPartitionsRDD[5451] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
22:45:30.951 INFO  TaskSchedulerImpl - Adding task set 1229.0 with 1 tasks resource profile 0
22:45:30.951 INFO  TaskSetManager - Starting task 0.0 in stage 1229.0 (TID 1053) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:30.952 INFO  Executor - Running task 0.0 in stage 1229.0 (TID 1053)
22:45:30.952 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.952 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.954 INFO  Executor - Finished task 0.0 in stage 1229.0 (TID 1053). 2039 bytes result sent to driver
22:45:30.954 INFO  TaskSetManager - Finished task 0.0 in stage 1229.0 (TID 1053) in 3 ms on localhost (executor driver) (1/1)
22:45:30.954 INFO  TaskSchedulerImpl - Removed TaskSet 1229.0, whose tasks have all completed, from pool 
22:45:30.954 INFO  DAGScheduler - ResultStage 1229 (collect at SparkUtils.java:205) finished in 0.004 s
22:45:30.954 INFO  DAGScheduler - Job 555 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:30.954 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1229: Stage finished
22:45:30.954 INFO  DAGScheduler - Job 555 finished: collect at SparkUtils.java:205, took 0.065065 s
22:45:30.958 INFO  MemoryStore - Block broadcast_1971 stored as values in memory (estimated size 392.0 B, free 1916.5 MiB)
22:45:30.958 INFO  MemoryStore - Block broadcast_1971_piece0 stored as bytes in memory (estimated size 33.0 B, free 1916.5 MiB)
22:45:30.959 INFO  BlockManagerInfo - Added broadcast_1971_piece0 in memory on localhost:46727 (size: 33.0 B, free: 1919.2 MiB)
22:45:30.959 INFO  SparkContext - Created broadcast 1971 from broadcast at MarkDuplicatesSparkUtils.java:126
22:45:30.959 INFO  MemoryStore - Block broadcast_1972 stored as values in memory (estimated size 144.0 B, free 1916.5 MiB)
22:45:30.959 INFO  MemoryStore - Block broadcast_1972_piece0 stored as bytes in memory (estimated size 28.0 B, free 1916.5 MiB)
22:45:30.959 INFO  BlockManagerInfo - Added broadcast_1972_piece0 in memory on localhost:46727 (size: 28.0 B, free: 1919.2 MiB)
22:45:30.959 INFO  SparkContext - Created broadcast 1972 from broadcast at MarkDuplicatesSparkUtils.java:127
22:45:30.972 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
22:45:30.972 INFO  DAGScheduler - Registering RDD 5460 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 310
22:45:30.972 INFO  DAGScheduler - Registering RDD 5464 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 309
22:45:30.972 INFO  DAGScheduler - Registering RDD 5468 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 308
22:45:30.972 INFO  DAGScheduler - Got job 556 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
22:45:30.972 INFO  DAGScheduler - Final stage: ResultStage 1234 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
22:45:30.972 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1233)
22:45:30.972 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1233)
22:45:30.973 INFO  DAGScheduler - Submitting ShuffleMapStage 1231 (MapPartitionsRDD[5460] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
22:45:30.973 INFO  MemoryStore - Block broadcast_1973 stored as values in memory (estimated size 45.2 KiB, free 1916.4 MiB)
22:45:30.977 INFO  MemoryStore - Block broadcast_1973_piece0 stored as bytes in memory (estimated size 17.1 KiB, free 1916.4 MiB)
22:45:30.977 INFO  BlockManagerInfo - Added broadcast_1973_piece0 in memory on localhost:46727 (size: 17.1 KiB, free: 1919.2 MiB)
22:45:30.977 INFO  SparkContext - Created broadcast 1973 from broadcast at DAGScheduler.scala:1580
22:45:30.977 INFO  BlockManagerInfo - Removed broadcast_1967_piece0 on localhost:46727 in memory (size: 71.2 KiB, free: 1919.2 MiB)
22:45:30.977 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1231 (MapPartitionsRDD[5460] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
22:45:30.977 INFO  TaskSchedulerImpl - Adding task set 1231.0 with 1 tasks resource profile 0
22:45:30.978 INFO  BlockManagerInfo - Removed broadcast_1963_piece0 on localhost:46727 in memory (size: 9.0 KiB, free: 1919.3 MiB)
22:45:30.978 INFO  BlockManagerInfo - Removed broadcast_1966_piece0 on localhost:46727 in memory (size: 78.9 KiB, free: 1919.3 MiB)
22:45:30.978 INFO  TaskSetManager - Starting task 0.0 in stage 1231.0 (TID 1054) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
22:45:30.978 INFO  Executor - Running task 0.0 in stage 1231.0 (TID 1054)
22:45:30.978 INFO  BlockManagerInfo - Removed broadcast_1962_piece0 on localhost:46727 in memory (size: 8.3 KiB, free: 1919.3 MiB)
22:45:30.979 INFO  BlockManagerInfo - Removed broadcast_1958_piece0 on localhost:46727 in memory (size: 24.0 B, free: 1919.3 MiB)
22:45:30.979 INFO  BlockManagerInfo - Removed broadcast_1970_piece0 on localhost:46727 in memory (size: 4.1 KiB, free: 1919.3 MiB)
22:45:30.980 INFO  BlockManagerInfo - Removed broadcast_1969_piece0 on localhost:46727 in memory (size: 228.4 KiB, free: 1919.6 MiB)
22:45:30.980 INFO  BlockManagerInfo - Removed broadcast_1955_piece0 on localhost:46727 in memory (size: 71.2 KiB, free: 1919.6 MiB)
22:45:30.980 INFO  BlockManagerInfo - Removed broadcast_1961_piece0 on localhost:46727 in memory (size: 13.1 KiB, free: 1919.7 MiB)
22:45:30.980 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.981 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.981 INFO  BlockManagerInfo - Removed broadcast_1965_piece0 on localhost:46727 in memory (size: 7.8 KiB, free: 1919.7 MiB)
22:45:30.981 INFO  BlockManagerInfo - Removed broadcast_1960_piece0 on localhost:46727 in memory (size: 10.9 KiB, free: 1919.7 MiB)
22:45:30.981 INFO  BlockManagerInfo - Removed broadcast_1964_piece0 on localhost:46727 in memory (size: 500.0 B, free: 1919.7 MiB)
22:45:30.982 INFO  BlockManagerInfo - Removed broadcast_1959_piece0 on localhost:46727 in memory (size: 21.0 B, free: 1919.7 MiB)
22:45:30.983 INFO  Executor - Finished task 0.0 in stage 1231.0 (TID 1054). 1922 bytes result sent to driver
22:45:30.983 INFO  TaskSetManager - Finished task 0.0 in stage 1231.0 (TID 1054) in 5 ms on localhost (executor driver) (1/1)
22:45:30.984 INFO  TaskSchedulerImpl - Removed TaskSet 1231.0, whose tasks have all completed, from pool 
22:45:30.984 INFO  DAGScheduler - ShuffleMapStage 1231 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.011 s
22:45:30.984 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.984 INFO  DAGScheduler - running: HashSet()
22:45:30.984 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1232, ShuffleMapStage 1233, ResultStage 1234)
22:45:30.984 INFO  DAGScheduler - failed: HashSet()
22:45:30.984 INFO  DAGScheduler - Submitting ShuffleMapStage 1232 (MapPartitionsRDD[5464] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
22:45:30.985 INFO  MemoryStore - Block broadcast_1974 stored as values in memory (estimated size 49.3 KiB, free 1918.3 MiB)
22:45:30.985 INFO  MemoryStore - Block broadcast_1974_piece0 stored as bytes in memory (estimated size 19.3 KiB, free 1918.3 MiB)
22:45:30.985 INFO  BlockManagerInfo - Added broadcast_1974_piece0 in memory on localhost:46727 (size: 19.3 KiB, free: 1919.7 MiB)
22:45:30.985 INFO  SparkContext - Created broadcast 1974 from broadcast at DAGScheduler.scala:1580
22:45:30.986 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1232 (MapPartitionsRDD[5464] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
22:45:30.986 INFO  TaskSchedulerImpl - Adding task set 1232.0 with 1 tasks resource profile 0
22:45:30.986 INFO  TaskSetManager - Starting task 0.0 in stage 1232.0 (TID 1055) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
22:45:30.986 INFO  Executor - Running task 0.0 in stage 1232.0 (TID 1055)
22:45:30.987 INFO  ShuffleBlockFetcherIterator - Getting 1 (593.0 B) non-empty blocks including 1 (593.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.987 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.992 INFO  Executor - Finished task 0.0 in stage 1232.0 (TID 1055). 1922 bytes result sent to driver
22:45:30.992 INFO  TaskSetManager - Finished task 0.0 in stage 1232.0 (TID 1055) in 6 ms on localhost (executor driver) (1/1)
22:45:30.992 INFO  TaskSchedulerImpl - Removed TaskSet 1232.0, whose tasks have all completed, from pool 
22:45:30.992 INFO  DAGScheduler - ShuffleMapStage 1232 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.008 s
22:45:30.992 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.992 INFO  DAGScheduler - running: HashSet()
22:45:30.992 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1233, ResultStage 1234)
22:45:30.992 INFO  DAGScheduler - failed: HashSet()
22:45:30.992 INFO  DAGScheduler - Submitting ShuffleMapStage 1233 (MapPartitionsRDD[5468] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
22:45:30.993 INFO  MemoryStore - Block broadcast_1975 stored as values in memory (estimated size 40.0 KiB, free 1918.2 MiB)
22:45:30.993 INFO  MemoryStore - Block broadcast_1975_piece0 stored as bytes in memory (estimated size 14.5 KiB, free 1918.2 MiB)
22:45:30.993 INFO  BlockManagerInfo - Added broadcast_1975_piece0 in memory on localhost:46727 (size: 14.5 KiB, free: 1919.6 MiB)
22:45:30.994 INFO  SparkContext - Created broadcast 1975 from broadcast at DAGScheduler.scala:1580
22:45:30.994 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1233 (MapPartitionsRDD[5468] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
22:45:30.994 INFO  TaskSchedulerImpl - Adding task set 1233.0 with 1 tasks resource profile 0
22:45:30.994 INFO  TaskSetManager - Starting task 0.0 in stage 1233.0 (TID 1056) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:30.994 INFO  Executor - Running task 0.0 in stage 1233.0 (TID 1056)
22:45:30.995 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.995 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.997 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:30.997 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:30.998 INFO  Executor - Finished task 0.0 in stage 1233.0 (TID 1056). 1879 bytes result sent to driver
22:45:30.998 INFO  TaskSetManager - Finished task 0.0 in stage 1233.0 (TID 1056) in 4 ms on localhost (executor driver) (1/1)
22:45:30.998 INFO  TaskSchedulerImpl - Removed TaskSet 1233.0, whose tasks have all completed, from pool 
22:45:30.998 INFO  DAGScheduler - ShuffleMapStage 1233 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.006 s
22:45:30.998 INFO  DAGScheduler - looking for newly runnable stages
22:45:30.998 INFO  DAGScheduler - running: HashSet()
22:45:30.998 INFO  DAGScheduler - waiting: HashSet(ResultStage 1234)
22:45:30.998 INFO  DAGScheduler - failed: HashSet()
22:45:30.998 INFO  DAGScheduler - Submitting ResultStage 1234 (MapPartitionsRDD[5470] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
22:45:30.999 INFO  MemoryStore - Block broadcast_1976 stored as values in memory (estimated size 41.2 KiB, free 1918.2 MiB)
22:45:30.999 INFO  MemoryStore - Block broadcast_1976_piece0 stored as bytes in memory (estimated size 15.3 KiB, free 1918.2 MiB)
22:45:30.999 INFO  BlockManagerInfo - Added broadcast_1976_piece0 in memory on localhost:46727 (size: 15.3 KiB, free: 1919.6 MiB)
22:45:30.999 INFO  SparkContext - Created broadcast 1976 from broadcast at DAGScheduler.scala:1580
22:45:31.000 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1234 (MapPartitionsRDD[5470] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
22:45:31.000 INFO  TaskSchedulerImpl - Adding task set 1234.0 with 1 tasks resource profile 0
22:45:31.000 INFO  TaskSetManager - Starting task 0.0 in stage 1234.0 (TID 1057) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:31.000 INFO  Executor - Running task 0.0 in stage 1234.0 (TID 1057)
22:45:31.001 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:31.001 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:31.002 INFO  Executor - Finished task 0.0 in stage 1234.0 (TID 1057). 1901 bytes result sent to driver
22:45:31.002 INFO  TaskSetManager - Finished task 0.0 in stage 1234.0 (TID 1057) in 2 ms on localhost (executor driver) (1/1)
22:45:31.002 INFO  TaskSchedulerImpl - Removed TaskSet 1234.0, whose tasks have all completed, from pool 
22:45:31.002 INFO  DAGScheduler - ResultStage 1234 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.004 s
22:45:31.003 INFO  DAGScheduler - Job 556 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:31.003 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1234: Stage finished
22:45:31.003 INFO  DAGScheduler - Job 556 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.030857 s
22:45:31.014 INFO  MemoryStore - Block broadcast_1977 stored as values in memory (estimated size 93.5 KiB, free 1918.1 MiB)
22:45:31.015 INFO  MemoryStore - Block broadcast_1977_piece0 stored as bytes in memory (estimated size 5.6 KiB, free 1918.1 MiB)
22:45:31.015 INFO  BlockManagerInfo - Added broadcast_1977_piece0 in memory on localhost:46727 (size: 5.6 KiB, free: 1919.6 MiB)
22:45:31.015 INFO  SparkContext - Created broadcast 1977 from broadcast at ReadsSparkSink.java:133
22:45:31.018 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
22:45:31.018 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:31.018 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:31.035 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
22:45:31.035 INFO  DAGScheduler - Registering RDD 5472 (mapToPair at SparkUtils.java:161) as input to shuffle 311
22:45:31.035 INFO  DAGScheduler - Got job 557 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
22:45:31.035 INFO  DAGScheduler - Final stage: ResultStage 1239 (runJob at SparkHadoopWriter.scala:83)
22:45:31.035 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1238)
22:45:31.035 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1238)
22:45:31.035 INFO  DAGScheduler - Submitting ShuffleMapStage 1238 (MapPartitionsRDD[5472] at mapToPair at SparkUtils.java:161), which has no missing parents
22:45:31.036 INFO  MemoryStore - Block broadcast_1978 stored as values in memory (estimated size 39.1 KiB, free 1918.0 MiB)
22:45:31.037 INFO  MemoryStore - Block broadcast_1978_piece0 stored as bytes in memory (estimated size 14.1 KiB, free 1918.0 MiB)
22:45:31.037 INFO  BlockManagerInfo - Added broadcast_1978_piece0 in memory on localhost:46727 (size: 14.1 KiB, free: 1919.6 MiB)
22:45:31.037 INFO  SparkContext - Created broadcast 1978 from broadcast at DAGScheduler.scala:1580
22:45:31.037 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1238 (MapPartitionsRDD[5472] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
22:45:31.037 INFO  TaskSchedulerImpl - Adding task set 1238.0 with 1 tasks resource profile 0
22:45:31.037 INFO  TaskSetManager - Starting task 0.0 in stage 1238.0 (TID 1058) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
22:45:31.037 INFO  Executor - Running task 0.0 in stage 1238.0 (TID 1058)
22:45:31.039 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:31.039 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:31.041 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:31.041 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:31.042 INFO  Executor - Finished task 0.0 in stage 1238.0 (TID 1058). 1922 bytes result sent to driver
22:45:31.042 INFO  TaskSetManager - Finished task 0.0 in stage 1238.0 (TID 1058) in 5 ms on localhost (executor driver) (1/1)
22:45:31.042 INFO  TaskSchedulerImpl - Removed TaskSet 1238.0, whose tasks have all completed, from pool 
22:45:31.042 INFO  DAGScheduler - ShuffleMapStage 1238 (mapToPair at SparkUtils.java:161) finished in 0.006 s
22:45:31.042 INFO  DAGScheduler - looking for newly runnable stages
22:45:31.042 INFO  DAGScheduler - running: HashSet()
22:45:31.042 INFO  DAGScheduler - waiting: HashSet(ResultStage 1239)
22:45:31.042 INFO  DAGScheduler - failed: HashSet()
22:45:31.042 INFO  DAGScheduler - Submitting ResultStage 1239 (MapPartitionsRDD[5478] at saveAsTextFile at SamSink.java:65), which has no missing parents
22:45:31.049 INFO  MemoryStore - Block broadcast_1979 stored as values in memory (estimated size 199.1 KiB, free 1917.8 MiB)
22:45:31.050 INFO  MemoryStore - Block broadcast_1979_piece0 stored as bytes in memory (estimated size 85.2 KiB, free 1917.8 MiB)
22:45:31.050 INFO  BlockManagerInfo - Added broadcast_1979_piece0 in memory on localhost:46727 (size: 85.2 KiB, free: 1919.5 MiB)
22:45:31.050 INFO  SparkContext - Created broadcast 1979 from broadcast at DAGScheduler.scala:1580
22:45:31.050 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1239 (MapPartitionsRDD[5478] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
22:45:31.050 INFO  TaskSchedulerImpl - Adding task set 1239.0 with 1 tasks resource profile 0
22:45:31.050 INFO  TaskSetManager - Starting task 0.0 in stage 1239.0 (TID 1059) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
22:45:31.051 INFO  Executor - Running task 0.0 in stage 1239.0 (TID 1059)
22:45:31.054 INFO  ShuffleBlockFetcherIterator - Getting 1 (3.9 KiB) non-empty blocks including 1 (3.9 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
22:45:31.054 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
22:45:31.056 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
22:45:31.056 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
22:45:31.056 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
22:45:31.066 INFO  FileOutputCommitter - Saved output of task 'attempt_202503282245313818999248098393404_5478_m_000000_0' to file:/tmp/MarkDups2303330361025175584/MarkDups.sam.parts/_temporary/0/task_202503282245313818999248098393404_5478_m_000000
22:45:31.066 INFO  SparkHadoopMapRedUtil - attempt_202503282245313818999248098393404_5478_m_000000_0: Committed. Elapsed time: 0 ms.
22:45:31.067 INFO  Executor - Finished task 0.0 in stage 1239.0 (TID 1059). 1858 bytes result sent to driver
22:45:31.067 INFO  TaskSetManager - Finished task 0.0 in stage 1239.0 (TID 1059) in 17 ms on localhost (executor driver) (1/1)
22:45:31.067 INFO  TaskSchedulerImpl - Removed TaskSet 1239.0, whose tasks have all completed, from pool 
22:45:31.067 INFO  DAGScheduler - ResultStage 1239 (runJob at SparkHadoopWriter.scala:83) finished in 0.025 s
22:45:31.067 INFO  DAGScheduler - Job 557 is finished. Cancelling potential speculative or zombie tasks for this job
22:45:31.067 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1239: Stage finished
22:45:31.067 INFO  DAGScheduler - Job 557 finished: runJob at SparkHadoopWriter.scala:83, took 0.032263 s
22:45:31.067 INFO  SparkHadoopWriter - Start to commit write Job job_202503282245313818999248098393404_5478.
22:45:31.071 INFO  SparkHadoopWriter - Write Job job_202503282245313818999248098393404_5478 committed. Elapsed time: 4 ms.
22:45:31.078 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups2303330361025175584/MarkDups.sam
22:45:31.082 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups2303330361025175584/MarkDups.sam done
22:45:31.082 INFO  MarkDuplicatesSpark - Shutting down engine
[March 28, 2025 at 10:45:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
22:45:36.984 ERROR Executor - Exception in task 0.0 in stage 1459.0 (TID 1206)
org.broadinstitute.hellbender.exceptions.UserException$ReadMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 is missing the read group (RG) tag, which is required by the GATK.  Please use http://gatkforums.broadinstitute.org/discussion/59/companion-utilities-replacereadgroups to fix this problem
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:64) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[main/:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[main/:?]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[main/:?]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[scala-library-2.13.14.jar:?]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[scala-library-2.13.14.jar:?]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
22:45:36.999 ERROR TaskSetManager - Task 0 in stage 1459.0 failed 1 times; aborting job
[March 28, 2025 at 10:45:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
22:45:37.227 ERROR Executor - Exception in task 0.0 in stage 1466.0 (TID 1209)
org.broadinstitute.hellbender.exceptions.UserException$HeaderMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 contains an (RG) tag with the group NotADuplicateGroup which is not found in the file header.
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:66) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[main/:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[main/:?]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[main/:?]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[scala-library-2.13.14.jar:?]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[scala-library-2.13.14.jar:?]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
22:45:37.229 ERROR TaskSetManager - Task 0 in stage 1466.0 failed 1 times; aborting job
[March 28, 2025 at 10:45:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:39 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:39 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:39 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:40 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:40 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:40 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:40 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:43 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:43 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:43 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:46 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:46 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:46 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:46 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:47 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:47 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:47 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:49 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:49 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:49 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:50 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:50 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:50 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:54 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:54 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:54 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:55 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:55 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:55 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:58 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:58 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:58 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:59 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:59 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:45:59 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:46:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:46:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:46:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:46:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:46:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:46:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:46:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:46:02 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000
[March 28, 2025 at 10:46:02 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1048576000