Class org.broadinstitute.hellbender.tools.spark.pipelines.MarkDuplicatesSparkIntegrationTest

152 tests, 0 failures, 0 ignored, 1m2.20s duration, 100% successful

Tests

Test Duration Result
assertThatExpectedOutputUpdateToggleIsDisabled 0s passed
testAssertAssumeUnsortedFilesAreQueryGroupedFiles 0.555s passed
testAssertCorrectSortOrderMultipleBams 0.138s passed
testAssertCorrectSortOrderMultipleBamsOverriding 0.504s passed
testBulkFragmentsNoDuplicates 0.891s passed
testBulkFragmentsWithDuplicates 2.066s passed
testDifferentChromosomesInOppositeOrder 0.307s passed
testDuplicateDetectionDataProviderWithMetrics[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.chr1.1-1K.unmarkedDups.markDuplicate.metrics) 0.288s passed
testDuplicateDetectionDataProviderWithMetrics[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.optical_dupes.markDuplicate.metrics) 0.270s passed
testDuplicateDetectionDataProviderWithMetrics[2](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.inputSingleLibrarySolexa16404.metrics) 0.260s passed
testFlowModeFlag 1.611s passed
testHashCollisionHandling 1.061s passed
testMDOrder[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug.bam) 0.321s passed
testMDOrder[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug2.bam) 0.305s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[0](true, true, true) 0.308s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[1](true, false, true) 0.327s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[2](false, true, true) 0.311s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[3](true, true, false) 0.317s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[4](true, false, false) 0.316s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[5](false, true, false) 0.328s passed
testMappedFragmentAndMappedPairFirstOfPairNonPrimary 0.308s passed
testMappedFragmentAndMatePairFirstUnmapped 0.314s passed
testMappedFragmentAndMatePairSecondUnmapped 0.318s passed
testMappedPairAndMappedFragmentAndMatePairFirstUnmapped 0.305s passed
testMappedPairAndMappedFragmentAndMatePairSecondUnmapped 0.337s passed
testMappedPairAndMatePairFirstOppositeStrandSecondUnmapped 0.310s passed
testMappedPairAndMatePairFirstUnmapped 0.315s passed
testMappedPairAndMatePairSecondUnmapped 0.310s passed
testMappedPairWithFirstEndSamePositionAndOther 0.307s passed
testMappedPairWithSamePosition 0.317s passed
testMappedPairWithSamePositionSameCigar 0.307s passed
testMarkDuplicatesSparkDeletingDuplicateReads[0]([Ljava.io.File;@a06e823, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.402s passed
testMarkDuplicatesSparkDeletingDuplicateReads[1]([Ljava.io.File;@2bbbbfa, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.451s passed
testMarkDuplicatesSparkDeletingDuplicateReads[2]([Ljava.io.File;@487dbb36, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.385s passed
testMarkDuplicatesSparkDeletingDuplicateReads[3]([Ljava.io.File;@4f2d9dda, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.351s passed
testMarkDuplicatesSparkDeletingDuplicateReads[4]([Ljava.io.File;@1e395bc7, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.339s passed
testMarkDuplicatesSparkDeletingDuplicateReads[5]([Ljava.io.File;@2dacc640, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.201s passed
testMarkDuplicatesSparkDeletingDuplicateReads[6]([Ljava.io.File;@11686d9c, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.160s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[0]([Ljava.io.File;@fba30b7, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.393s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[1]([Ljava.io.File;@bca653c, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.389s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[2]([Ljava.io.File;@f0616c8, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.389s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[3]([Ljava.io.File;@5672d6db, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.351s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[4]([Ljava.io.File;@58bfe9ca, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.334s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[5]([Ljava.io.File;@1c9e0395, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.208s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[6]([Ljava.io.File;@5f2a581f, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.249s passed
testMarkDuplicatesSparkIntegrationTestLocal[0]([Ljava.io.File;@2c14686f, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.362s passed
testMarkDuplicatesSparkIntegrationTestLocal[1]([Ljava.io.File;@513070cb, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.378s passed
testMarkDuplicatesSparkIntegrationTestLocal[2]([Ljava.io.File;@453d1917, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.397s passed
testMarkDuplicatesSparkIntegrationTestLocal[3]([Ljava.io.File;@2f1017cc, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.351s passed
testMarkDuplicatesSparkIntegrationTestLocal[4]([Ljava.io.File;@1e1a0274, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.372s passed
testMarkDuplicatesSparkIntegrationTestLocal[5]([Ljava.io.File;@61e6b05c, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.178s passed
testMarkDuplicatesSparkIntegrationTestLocal[6]([Ljava.io.File;@31cb7793, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.217s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[0]([Ljava.io.File;@7ccadfe0, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.347s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[1]([Ljava.io.File;@107a2c45, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.372s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[2]([Ljava.io.File;@6d33d7b5, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.396s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[3]([Ljava.io.File;@35c79a5b, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.346s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[4]([Ljava.io.File;@20e32503, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.329s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[5]([Ljava.io.File;@53735917, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.155s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[6]([Ljava.io.File;@71fe12f1, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.190s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[0]([Ljava.io.File;@7d9536b3, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.354s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[1]([Ljava.io.File;@4e8fd367, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.371s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[2]([Ljava.io.File;@36701a17, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.387s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[3]([Ljava.io.File;@30b95bc6, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.327s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[4]([Ljava.io.File;@4ca84d6d, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.332s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[5]([Ljava.io.File;@1637c9f7, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.193s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[6]([Ljava.io.File;@4e468941, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.201s passed
testMatePairFirstUnmapped 0.306s passed
testMatePairSecondUnmapped 0.321s passed
testNoReadGroupInRead 0.242s passed
testNonExistantReadGroupInRead 0.228s passed
testNullOpticalDuplicates 0.329s passed
testOpticalDuplicateClusterOneEndSamePositionOneCluster 0.328s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicatesWithinPixelDistance 0.303s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.340s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.313s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.315s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.324s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.314s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.309s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.325s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.310s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.305s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.330s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.322s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.311s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.316s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.306s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.307s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.315s passed
testOpticalDuplicateClusterSamePositionOneOpticalDuplicatesWithinPixelDistance 0.314s passed
testOpticalDuplicateClustersAddingSecondEndFirstSameCoordinate 0.329s passed
testOpticalDuplicateFinding 0.348s passed
testOpticalDuplicateFindingPxelDistance 0.324s passed
testOpticalDuplicatesAndPCRDuplicatesOrientationDifference 0.330s passed
testOpticalDuplicatesDifferentReadGroups 0.324s passed
testOpticalDuplicatesTheSameReadGroup 0.304s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.323s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.338s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.307s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.311s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.348s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.323s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.310s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.309s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.307s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.314s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.305s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.307s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.312s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.323s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.320s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.313s passed
testPathologicalOrderingAtTheSamePosition 0.330s passed
testReadSameStartPositionOrientationOverride 0.325s passed
testReadsHaveSameStartPositionButDifferentChromosomeNonEquivalence 0.306s passed
testSecondEndIsBeforeFirstInCoordinate 0.351s passed
testSingleMappedFragment 0.314s passed
testSingleMappedFragmentAndSingleMappedPair 0.305s passed
testSingleMappedFragmentAndTwoMappedPairs 0.326s passed
testSingleMappedPair 0.304s passed
testSingleUnmappedFragment 0.302s passed
testSingleUnmappedPair 0.309s passed
testStackOverFlowPairSetSwap 0.321s passed
testSupplementaryReadUnmappedMate 0.319s passed
testThreeGroupsOnDifferentChromosomesOfThreeMappedPairs 0.321s passed
testThreeMappedPairs 0.314s passed
testThreeMappedPairsWithMatchingSecondMate 0.306s passed
testTwoGroupsOnDifferentChromosomesOfThreeMappedPairs 0.310s passed
testTwoGroupsOnDifferentChromosomesOfTwoFragments 0.305s passed
testTwoGroupsOnDifferentChromosomesOfTwoMappedPairs 0.316s passed
testTwoMappedFragments 0.319s passed
testTwoMappedPairWithSamePosition 0.308s passed
testTwoMappedPairWithSamePositionDifferentStrands 0.305s passed
testTwoMappedPairWithSamePositionDifferentStrands2 0.321s passed
testTwoMappedPairs 0.306s passed
testTwoMappedPairsAndMappedSecondaryFragment 0.322s passed
testTwoMappedPairsAndTerminalUnmappedFragment 0.314s passed
testTwoMappedPairsAndTerminalUnmappedPair 0.307s passed
testTwoMappedPairsMatesSoftClipped 0.302s passed
testTwoMappedPairsWithOppositeOrientations 0.331s passed
testTwoMappedPairsWithOppositeOrientationsNumberTwo 0.307s passed
testTwoMappedPairsWithSoftClipping 0.305s passed
testTwoMappedPairsWithSoftClippingBoth 0.325s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnly 0.301s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnlyNoMateCigar 0.302s passed
testTwoMappedPairsWithSupplementaryReads[0](true, true, true) 0.323s passed
testTwoMappedPairsWithSupplementaryReads[1](true, false, true) 0.317s passed
testTwoMappedPairsWithSupplementaryReads[2](false, true, true) 0.320s passed
testTwoMappedPairsWithSupplementaryReads[3](true, true, false) 0.336s passed
testTwoMappedPairsWithSupplementaryReads[4](true, false, false) 0.335s passed
testTwoMappedPairsWithSupplementaryReads[5](false, true, false) 0.329s passed
testTwoUnmappedFragments 0.334s passed
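Note on the parameterized rows above: the bracketed index and argument dump (e.g. "[Ljava.io.File;@2bbbbfa, 90, 6, {Solexa-16404=[...]}") is simply TestNG's default rendering of the data-provider arguments; a File[] parameter has no overridden toString(), so it prints as "[Ljava.io.File;@<hashcode>". The following is a minimal, illustrative Java sketch of a test with that shape, not the actual GATK test source: the class name, data-provider name, and runMarkDuplicatesSpark helper are hypothetical, and the sample row merely echoes values that appear in the table above.

    import java.io.File;
    import java.util.Map;
    import org.testng.annotations.DataProvider;
    import org.testng.annotations.Test;

    public class MarkDuplicatesSparkExampleTest {

        // Each row: input BAM(s), expected total reads, expected duplicate reads,
        // and a per-library map of expected metric values (illustrative only).
        @DataProvider(name = "markDuplicatesSparkData")
        public Object[][] markDuplicatesSparkData() {
            return new Object[][] {
                { new File[] { new File("optical_dupes.bam") }, 4, 2,
                  Map.of("mylib", new double[] {0, 2, 0, 0, 1, 1, 0.5, 0}) },
            };
        }

        @Test(dataProvider = "markDuplicatesSparkData")
        public void testMarkDuplicatesSparkIntegrationTestLocal(
                final File[] inputs, final int totalReads, final int duplicateReads,
                final Map<String, double[]> expectedMetricsByLibrary) {
            // Hypothetical helper standing in for the real tool invocation and
            // assertions performed by the GATK integration test.
            runMarkDuplicatesSpark(inputs, totalReads, duplicateReads, expectedMetricsByLibrary);
        }

        private void runMarkDuplicatesSpark(final File[] inputs, final int totalReads,
                final int duplicateReads, final Map<String, double[]> expectedMetricsByLibrary) {
            // Placeholder body for this sketch; the real test runs MarkDuplicatesSpark
            // on the inputs and compares the output BAM and metrics against the expectations.
        }
    }

In the actual suite the per-library arrays presumably encode expected duplication-metrics fields; their exact ordering is not documented in this report, so it is left opaque in the sketch.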

Standard error

[June 2, 2025 at 2:21:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
14:21:38.878 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:21:38.878 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
14:21:38.878 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
14:21:38.878 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
14:21:38.878 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
14:21:38.878 INFO  MarkDuplicatesSpark - Start Date/Time: June 2, 2025 at 2:21:38 PM UTC
14:21:38.878 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:21:38.878 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:21:38.878 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
14:21:38.878 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
14:21:38.879 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
14:21:38.879 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
14:21:38.879 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
14:21:38.879 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
14:21:38.879 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
14:21:38.879 INFO  MarkDuplicatesSpark - Requester pays: disabled
14:21:38.879 INFO  MarkDuplicatesSpark - Initializing engine
14:21:38.879 INFO  MarkDuplicatesSpark - Done initializing engine
14:21:38.879 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
14:21:38.882 INFO  MemoryStore - Block broadcast_1371 stored as values in memory (estimated size 306.3 KiB, free 1900.9 MiB)
14:21:38.892 INFO  MemoryStore - Block broadcast_1371_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1900.8 MiB)
14:21:38.892 INFO  BlockManagerInfo - Added broadcast_1371_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1918.4 MiB)
14:21:38.892 INFO  SparkContext - Created broadcast 1371 from newAPIHadoopFile at PathSplitSource.java:96
14:21:38.912 INFO  MemoryStore - Block broadcast_1372 stored as values in memory (estimated size 306.3 KiB, free 1900.5 MiB)
14:21:38.918 INFO  MemoryStore - Block broadcast_1372_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1900.5 MiB)
14:21:38.919 INFO  BlockManagerInfo - Added broadcast_1372_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1918.4 MiB)
14:21:38.919 INFO  SparkContext - Created broadcast 1372 from newAPIHadoopFile at PathSplitSource.java:96
14:21:38.951 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
14:21:38.952 INFO  FileInputFormat - Total input files to process : 1
14:21:38.952 INFO  DAGScheduler - Registering RDD 3483 (mapToPair at SparkUtils.java:161) as input to shuffle 100
14:21:38.953 INFO  DAGScheduler - Got job 423 (collect at SparkUtils.java:205) with 1 output partitions
14:21:38.953 INFO  DAGScheduler - Final stage: ResultStage 735 (collect at SparkUtils.java:205)
14:21:38.953 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 734)
14:21:38.953 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 734)
14:21:38.953 INFO  DAGScheduler - Submitting ShuffleMapStage 734 (MapPartitionsRDD[3483] at mapToPair at SparkUtils.java:161), which has no missing parents
14:21:38.971 INFO  MemoryStore - Block broadcast_1373 stored as values in memory (estimated size 460.4 KiB, free 1900.0 MiB)
14:21:38.973 INFO  MemoryStore - Block broadcast_1373_piece0 stored as bytes in memory (estimated size 202.5 KiB, free 1899.8 MiB)
14:21:38.974 INFO  BlockManagerInfo - Added broadcast_1373_piece0 in memory on localhost:39377 (size: 202.5 KiB, free: 1918.2 MiB)
14:21:38.974 INFO  SparkContext - Created broadcast 1373 from broadcast at DAGScheduler.scala:1580
14:21:38.975 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 734 (MapPartitionsRDD[3483] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:21:38.975 INFO  TaskSchedulerImpl - Adding task set 734.0 with 1 tasks resource profile 0
14:21:38.975 INFO  TaskSetManager - Starting task 0.0 in stage 734.0 (TID 676) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9735 bytes) 
14:21:38.976 INFO  Executor - Running task 0.0 in stage 734.0 (TID 676)
14:21:39.007 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam:0+1507
14:21:39.012 INFO  Executor - Finished task 0.0 in stage 734.0 (TID 676). 1148 bytes result sent to driver
14:21:39.012 INFO  TaskSetManager - Finished task 0.0 in stage 734.0 (TID 676) in 37 ms on localhost (executor driver) (1/1)
14:21:39.012 INFO  TaskSchedulerImpl - Removed TaskSet 734.0, whose tasks have all completed, from pool 
14:21:39.013 INFO  DAGScheduler - ShuffleMapStage 734 (mapToPair at SparkUtils.java:161) finished in 0.059 s
14:21:39.013 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.013 INFO  DAGScheduler - running: HashSet()
14:21:39.013 INFO  DAGScheduler - waiting: HashSet(ResultStage 735)
14:21:39.013 INFO  DAGScheduler - failed: HashSet()
14:21:39.013 INFO  DAGScheduler - Submitting ResultStage 735 (MapPartitionsRDD[3486] at mapPartitions at SparkUtils.java:188), which has no missing parents
14:21:39.014 INFO  MemoryStore - Block broadcast_1374 stored as values in memory (estimated size 7.4 KiB, free 1899.8 MiB)
14:21:39.014 INFO  MemoryStore - Block broadcast_1374_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1899.8 MiB)
14:21:39.015 INFO  BlockManagerInfo - Added broadcast_1374_piece0 in memory on localhost:39377 (size: 4.1 KiB, free: 1918.2 MiB)
14:21:39.015 INFO  SparkContext - Created broadcast 1374 from broadcast at DAGScheduler.scala:1580
14:21:39.015 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 735 (MapPartitionsRDD[3486] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
14:21:39.015 INFO  TaskSchedulerImpl - Adding task set 735.0 with 1 tasks resource profile 0
14:21:39.016 INFO  TaskSetManager - Starting task 0.0 in stage 735.0 (TID 677) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:21:39.016 INFO  Executor - Running task 0.0 in stage 735.0 (TID 677)
14:21:39.017 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.018 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.020 INFO  Executor - Finished task 0.0 in stage 735.0 (TID 677). 2238 bytes result sent to driver
14:21:39.020 INFO  TaskSetManager - Finished task 0.0 in stage 735.0 (TID 677) in 4 ms on localhost (executor driver) (1/1)
14:21:39.020 INFO  TaskSchedulerImpl - Removed TaskSet 735.0, whose tasks have all completed, from pool 
14:21:39.020 INFO  DAGScheduler - ResultStage 735 (collect at SparkUtils.java:205) finished in 0.006 s
14:21:39.020 INFO  DAGScheduler - Job 423 is finished. Cancelling potential speculative or zombie tasks for this job
14:21:39.021 INFO  TaskSchedulerImpl - Killing all running tasks in stage 735: Stage finished
14:21:39.021 INFO  DAGScheduler - Job 423 finished: collect at SparkUtils.java:205, took 0.069624 s
14:21:39.025 INFO  MemoryStore - Block broadcast_1375 stored as values in memory (estimated size 1632.0 B, free 1899.8 MiB)
14:21:39.026 INFO  MemoryStore - Block broadcast_1375_piece0 stored as bytes in memory (estimated size 145.0 B, free 1899.8 MiB)
14:21:39.026 INFO  BlockManagerInfo - Added broadcast_1375_piece0 in memory on localhost:39377 (size: 145.0 B, free: 1918.2 MiB)
14:21:39.026 INFO  SparkContext - Created broadcast 1375 from broadcast at MarkDuplicatesSparkUtils.java:126
14:21:39.027 INFO  MemoryStore - Block broadcast_1376 stored as values in memory (estimated size 304.0 B, free 1899.8 MiB)
14:21:39.027 INFO  MemoryStore - Block broadcast_1376_piece0 stored as bytes in memory (estimated size 37.0 B, free 1899.8 MiB)
14:21:39.027 INFO  BlockManagerInfo - Added broadcast_1376_piece0 in memory on localhost:39377 (size: 37.0 B, free: 1918.2 MiB)
14:21:39.028 INFO  SparkContext - Created broadcast 1376 from broadcast at MarkDuplicatesSparkUtils.java:127
14:21:39.041 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
14:21:39.041 INFO  DAGScheduler - Registering RDD 3495 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 103
14:21:39.042 INFO  DAGScheduler - Registering RDD 3499 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 102
14:21:39.042 INFO  DAGScheduler - Registering RDD 3503 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 101
14:21:39.042 INFO  DAGScheduler - Got job 424 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
14:21:39.042 INFO  DAGScheduler - Final stage: ResultStage 740 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
14:21:39.042 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 739)
14:21:39.042 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 739)
14:21:39.042 INFO  DAGScheduler - Submitting ShuffleMapStage 737 (MapPartitionsRDD[3495] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
14:21:39.043 INFO  MemoryStore - Block broadcast_1377 stored as values in memory (estimated size 24.2 KiB, free 1899.8 MiB)
14:21:39.044 INFO  MemoryStore - Block broadcast_1377_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1899.8 MiB)
14:21:39.044 INFO  BlockManagerInfo - Added broadcast_1377_piece0 in memory on localhost:39377 (size: 12.6 KiB, free: 1918.2 MiB)
14:21:39.044 INFO  SparkContext - Created broadcast 1377 from broadcast at DAGScheduler.scala:1580
14:21:39.044 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 737 (MapPartitionsRDD[3495] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
14:21:39.044 INFO  TaskSchedulerImpl - Adding task set 737.0 with 1 tasks resource profile 0
14:21:39.045 INFO  TaskSetManager - Starting task 0.0 in stage 737.0 (TID 678) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
14:21:39.045 INFO  Executor - Running task 0.0 in stage 737.0 (TID 678)
14:21:39.047 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.047 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.050 INFO  Executor - Finished task 0.0 in stage 737.0 (TID 678). 1922 bytes result sent to driver
14:21:39.050 INFO  TaskSetManager - Finished task 0.0 in stage 737.0 (TID 678) in 5 ms on localhost (executor driver) (1/1)
14:21:39.050 INFO  TaskSchedulerImpl - Removed TaskSet 737.0, whose tasks have all completed, from pool 
14:21:39.050 INFO  DAGScheduler - ShuffleMapStage 737 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.008 s
14:21:39.050 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.051 INFO  DAGScheduler - running: HashSet()
14:21:39.051 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 738, ShuffleMapStage 739, ResultStage 740)
14:21:39.051 INFO  DAGScheduler - failed: HashSet()
14:21:39.051 INFO  DAGScheduler - Submitting ShuffleMapStage 738 (MapPartitionsRDD[3499] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
14:21:39.052 INFO  MemoryStore - Block broadcast_1378 stored as values in memory (estimated size 28.3 KiB, free 1899.7 MiB)
14:21:39.052 INFO  MemoryStore - Block broadcast_1378_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1899.7 MiB)
14:21:39.052 INFO  BlockManagerInfo - Added broadcast_1378_piece0 in memory on localhost:39377 (size: 14.8 KiB, free: 1918.1 MiB)
14:21:39.053 INFO  SparkContext - Created broadcast 1378 from broadcast at DAGScheduler.scala:1580
14:21:39.053 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 738 (MapPartitionsRDD[3499] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
14:21:39.053 INFO  TaskSchedulerImpl - Adding task set 738.0 with 1 tasks resource profile 0
14:21:39.053 INFO  TaskSetManager - Starting task 0.0 in stage 738.0 (TID 679) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
14:21:39.054 INFO  Executor - Running task 0.0 in stage 738.0 (TID 679)
14:21:39.056 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.056 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.060 INFO  Executor - Finished task 0.0 in stage 738.0 (TID 679). 1922 bytes result sent to driver
14:21:39.061 INFO  TaskSetManager - Finished task 0.0 in stage 738.0 (TID 679) in 8 ms on localhost (executor driver) (1/1)
14:21:39.061 INFO  TaskSchedulerImpl - Removed TaskSet 738.0, whose tasks have all completed, from pool 
14:21:39.061 INFO  DAGScheduler - ShuffleMapStage 738 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.010 s
14:21:39.061 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.061 INFO  DAGScheduler - running: HashSet()
14:21:39.061 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 739, ResultStage 740)
14:21:39.061 INFO  DAGScheduler - failed: HashSet()
14:21:39.061 INFO  DAGScheduler - Submitting ShuffleMapStage 739 (MapPartitionsRDD[3503] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
14:21:39.062 INFO  MemoryStore - Block broadcast_1379 stored as values in memory (estimated size 19.0 KiB, free 1899.7 MiB)
14:21:39.063 INFO  MemoryStore - Block broadcast_1379_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1899.7 MiB)
14:21:39.063 INFO  BlockManagerInfo - Added broadcast_1379_piece0 in memory on localhost:39377 (size: 9.7 KiB, free: 1918.1 MiB)
14:21:39.063 INFO  SparkContext - Created broadcast 1379 from broadcast at DAGScheduler.scala:1580
14:21:39.064 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 739 (MapPartitionsRDD[3503] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
14:21:39.064 INFO  TaskSchedulerImpl - Adding task set 739.0 with 1 tasks resource profile 0
14:21:39.064 INFO  TaskSetManager - Starting task 0.0 in stage 739.0 (TID 680) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:21:39.065 INFO  Executor - Running task 0.0 in stage 739.0 (TID 680)
14:21:39.067 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.067 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.069 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.070 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.071 INFO  Executor - Finished task 0.0 in stage 739.0 (TID 680). 1922 bytes result sent to driver
14:21:39.071 INFO  TaskSetManager - Finished task 0.0 in stage 739.0 (TID 680) in 7 ms on localhost (executor driver) (1/1)
14:21:39.072 INFO  TaskSchedulerImpl - Removed TaskSet 739.0, whose tasks have all completed, from pool 
14:21:39.072 INFO  DAGScheduler - ShuffleMapStage 739 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.010 s
14:21:39.072 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.072 INFO  DAGScheduler - running: HashSet()
14:21:39.072 INFO  DAGScheduler - waiting: HashSet(ResultStage 740)
14:21:39.072 INFO  DAGScheduler - failed: HashSet()
14:21:39.072 INFO  DAGScheduler - Submitting ResultStage 740 (MapPartitionsRDD[3505] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
14:21:39.073 INFO  MemoryStore - Block broadcast_1380 stored as values in memory (estimated size 20.2 KiB, free 1899.7 MiB)
14:21:39.074 INFO  MemoryStore - Block broadcast_1380_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1899.7 MiB)
14:21:39.074 INFO  BlockManagerInfo - Added broadcast_1380_piece0 in memory on localhost:39377 (size: 10.6 KiB, free: 1918.1 MiB)
14:21:39.074 INFO  SparkContext - Created broadcast 1380 from broadcast at DAGScheduler.scala:1580
14:21:39.074 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 740 (MapPartitionsRDD[3505] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
14:21:39.074 INFO  TaskSchedulerImpl - Adding task set 740.0 with 1 tasks resource profile 0
14:21:39.075 INFO  TaskSetManager - Starting task 0.0 in stage 740.0 (TID 681) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:21:39.075 INFO  Executor - Running task 0.0 in stage 740.0 (TID 681)
14:21:39.077 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.077 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.079 INFO  Executor - Finished task 0.0 in stage 740.0 (TID 681). 1944 bytes result sent to driver
14:21:39.080 INFO  TaskSetManager - Finished task 0.0 in stage 740.0 (TID 681) in 5 ms on localhost (executor driver) (1/1)
14:21:39.080 INFO  TaskSchedulerImpl - Removed TaskSet 740.0, whose tasks have all completed, from pool 
14:21:39.080 INFO  DAGScheduler - ResultStage 740 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.008 s
14:21:39.080 INFO  DAGScheduler - Job 424 is finished. Cancelling potential speculative or zombie tasks for this job
14:21:39.080 INFO  TaskSchedulerImpl - Killing all running tasks in stage 740: Stage finished
14:21:39.080 INFO  DAGScheduler - Job 424 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.039544 s
14:21:39.095 INFO  MemoryStore - Block broadcast_1381 stored as values in memory (estimated size 20.3 KiB, free 1899.6 MiB)
14:21:39.096 INFO  MemoryStore - Block broadcast_1381_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1899.6 MiB)
14:21:39.096 INFO  BlockManagerInfo - Added broadcast_1381_piece0 in memory on localhost:39377 (size: 1850.0 B, free: 1918.1 MiB)
14:21:39.096 INFO  SparkContext - Created broadcast 1381 from broadcast at ReadsSparkSink.java:133
14:21:39.097 INFO  MemoryStore - Block broadcast_1382 stored as values in memory (estimated size 20.4 KiB, free 1899.6 MiB)
14:21:39.097 INFO  MemoryStore - Block broadcast_1382_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1899.6 MiB)
14:21:39.098 INFO  BlockManagerInfo - Added broadcast_1382_piece0 in memory on localhost:39377 (size: 1850.0 B, free: 1918.1 MiB)
14:21:39.098 INFO  SparkContext - Created broadcast 1382 from broadcast at BamSink.java:76
14:21:39.100 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
14:21:39.100 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:21:39.100 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:21:39.117 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
14:21:39.118 INFO  DAGScheduler - Registering RDD 3507 (mapToPair at SparkUtils.java:161) as input to shuffle 104
14:21:39.118 INFO  DAGScheduler - Got job 425 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
14:21:39.118 INFO  DAGScheduler - Final stage: ResultStage 745 (runJob at SparkHadoopWriter.scala:83)
14:21:39.118 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 744)
14:21:39.118 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 744)
14:21:39.118 INFO  DAGScheduler - Submitting ShuffleMapStage 744 (MapPartitionsRDD[3507] at mapToPair at SparkUtils.java:161), which has no missing parents
14:21:39.119 INFO  MemoryStore - Block broadcast_1383 stored as values in memory (estimated size 18.1 KiB, free 1899.6 MiB)
14:21:39.120 INFO  MemoryStore - Block broadcast_1383_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1899.6 MiB)
14:21:39.120 INFO  BlockManagerInfo - Added broadcast_1383_piece0 in memory on localhost:39377 (size: 9.5 KiB, free: 1918.1 MiB)
14:21:39.120 INFO  SparkContext - Created broadcast 1383 from broadcast at DAGScheduler.scala:1580
14:21:39.120 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 744 (MapPartitionsRDD[3507] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:21:39.121 INFO  TaskSchedulerImpl - Adding task set 744.0 with 1 tasks resource profile 0
14:21:39.121 INFO  TaskSetManager - Starting task 0.0 in stage 744.0 (TID 682) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:21:39.121 INFO  Executor - Running task 0.0 in stage 744.0 (TID 682)
14:21:39.124 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.124 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.126 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.126 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.128 INFO  Executor - Finished task 0.0 in stage 744.0 (TID 682). 1922 bytes result sent to driver
14:21:39.129 INFO  TaskSetManager - Finished task 0.0 in stage 744.0 (TID 682) in 7 ms on localhost (executor driver) (1/1)
14:21:39.129 INFO  TaskSchedulerImpl - Removed TaskSet 744.0, whose tasks have all completed, from pool 
14:21:39.129 INFO  DAGScheduler - ShuffleMapStage 744 (mapToPair at SparkUtils.java:161) finished in 0.010 s
14:21:39.129 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.129 INFO  DAGScheduler - running: HashSet()
14:21:39.129 INFO  DAGScheduler - waiting: HashSet(ResultStage 745)
14:21:39.129 INFO  DAGScheduler - failed: HashSet()
14:21:39.129 INFO  DAGScheduler - Submitting ResultStage 745 (MapPartitionsRDD[3512] at mapToPair at BamSink.java:91), which has no missing parents
14:21:39.136 INFO  MemoryStore - Block broadcast_1384 stored as values in memory (estimated size 163.7 KiB, free 1899.4 MiB)
14:21:39.137 INFO  MemoryStore - Block broadcast_1384_piece0 stored as bytes in memory (estimated size 73.9 KiB, free 1899.4 MiB)
14:21:39.137 INFO  BlockManagerInfo - Added broadcast_1384_piece0 in memory on localhost:39377 (size: 73.9 KiB, free: 1918.0 MiB)
14:21:39.137 INFO  SparkContext - Created broadcast 1384 from broadcast at DAGScheduler.scala:1580
14:21:39.137 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 745 (MapPartitionsRDD[3512] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
14:21:39.137 INFO  TaskSchedulerImpl - Adding task set 745.0 with 1 tasks resource profile 0
14:21:39.138 INFO  TaskSetManager - Starting task 0.0 in stage 745.0 (TID 683) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:21:39.138 INFO  Executor - Running task 0.0 in stage 745.0 (TID 683)
14:21:39.142 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.142 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.144 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
14:21:39.144 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:21:39.144 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:21:39.144 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
14:21:39.145 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:21:39.145 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:21:39.162 INFO  FileOutputCommitter - Saved output of task 'attempt_202506021421395397550322698833496_3512_r_000000_0' to file:/tmp/local111906758591768419528/markdups17062186998966481336.bam.parts/_temporary/0/task_202506021421395397550322698833496_3512_r_000000
14:21:39.162 INFO  SparkHadoopMapRedUtil - attempt_202506021421395397550322698833496_3512_r_000000_0: Committed. Elapsed time: 0 ms.
14:21:39.163 INFO  Executor - Finished task 0.0 in stage 745.0 (TID 683). 1858 bytes result sent to driver
14:21:39.163 INFO  TaskSetManager - Finished task 0.0 in stage 745.0 (TID 683) in 25 ms on localhost (executor driver) (1/1)
14:21:39.163 INFO  TaskSchedulerImpl - Removed TaskSet 745.0, whose tasks have all completed, from pool 
14:21:39.163 INFO  DAGScheduler - ResultStage 745 (runJob at SparkHadoopWriter.scala:83) finished in 0.034 s
14:21:39.164 INFO  DAGScheduler - Job 425 is finished. Cancelling potential speculative or zombie tasks for this job
14:21:39.164 INFO  TaskSchedulerImpl - Killing all running tasks in stage 745: Stage finished
14:21:39.164 INFO  DAGScheduler - Job 425 finished: runJob at SparkHadoopWriter.scala:83, took 0.046670 s
14:21:39.164 INFO  SparkHadoopWriter - Start to commit write Job job_202506021421395397550322698833496_3512.
14:21:39.169 INFO  SparkHadoopWriter - Write Job job_202506021421395397550322698833496_3512 committed. Elapsed time: 5 ms.
14:21:39.179 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local111906758591768419528/markdups17062186998966481336.bam
14:21:39.183 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local111906758591768419528/markdups17062186998966481336.bam done
14:21:39.183 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local111906758591768419528/markdups17062186998966481336.bam.parts/ to /tmp/local111906758591768419528/markdups17062186998966481336.bam.sbi
14:21:39.187 INFO  IndexFileMerger - Done merging .sbi files
14:21:39.187 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local111906758591768419528/markdups17062186998966481336.bam.parts/ to /tmp/local111906758591768419528/markdups17062186998966481336.bam.bai
14:21:39.191 INFO  IndexFileMerger - Done merging .bai files
14:21:39.191 INFO  MarkDuplicatesSpark - Shutting down engine
[June 2, 2025 at 2:21:39 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
14:21:39.200 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:21:39.200 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
14:21:39.201 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
14:21:39.201 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
14:21:39.201 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
14:21:39.201 INFO  MarkDuplicatesSpark - Start Date/Time: June 2, 2025 at 2:21:39 PM UTC
14:21:39.201 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:21:39.201 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:21:39.201 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
14:21:39.201 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
14:21:39.201 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
14:21:39.201 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
14:21:39.201 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
14:21:39.201 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
14:21:39.201 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
14:21:39.201 INFO  MarkDuplicatesSpark - Requester pays: disabled
14:21:39.201 INFO  MarkDuplicatesSpark - Initializing engine
14:21:39.201 INFO  MarkDuplicatesSpark - Done initializing engine
14:21:39.201 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
14:21:39.204 INFO  MemoryStore - Block broadcast_1385 stored as values in memory (estimated size 306.3 KiB, free 1899.1 MiB)
14:21:39.210 INFO  MemoryStore - Block broadcast_1385_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1899.0 MiB)
14:21:39.211 INFO  BlockManagerInfo - Added broadcast_1385_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1918.0 MiB)
14:21:39.211 INFO  SparkContext - Created broadcast 1385 from newAPIHadoopFile at PathSplitSource.java:96
14:21:39.230 INFO  MemoryStore - Block broadcast_1386 stored as values in memory (estimated size 306.3 KiB, free 1898.7 MiB)
14:21:39.237 INFO  MemoryStore - Block broadcast_1386_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1898.6 MiB)
14:21:39.237 INFO  BlockManagerInfo - Added broadcast_1386_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1917.9 MiB)
14:21:39.237 INFO  SparkContext - Created broadcast 1386 from newAPIHadoopFile at PathSplitSource.java:96
14:21:39.269 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
14:21:39.270 INFO  FileInputFormat - Total input files to process : 1
14:21:39.270 INFO  DAGScheduler - Registering RDD 3526 (mapToPair at SparkUtils.java:161) as input to shuffle 105
14:21:39.270 INFO  DAGScheduler - Got job 426 (collect at SparkUtils.java:205) with 1 output partitions
14:21:39.270 INFO  DAGScheduler - Final stage: ResultStage 747 (collect at SparkUtils.java:205)
14:21:39.270 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 746)
14:21:39.271 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 746)
14:21:39.271 INFO  DAGScheduler - Submitting ShuffleMapStage 746 (MapPartitionsRDD[3526] at mapToPair at SparkUtils.java:161), which has no missing parents
14:21:39.288 INFO  MemoryStore - Block broadcast_1387 stored as values in memory (estimated size 460.4 KiB, free 1898.2 MiB)
14:21:39.290 INFO  MemoryStore - Block broadcast_1387_piece0 stored as bytes in memory (estimated size 202.5 KiB, free 1898.0 MiB)
14:21:39.290 INFO  BlockManagerInfo - Added broadcast_1387_piece0 in memory on localhost:39377 (size: 202.5 KiB, free: 1917.7 MiB)
14:21:39.290 INFO  SparkContext - Created broadcast 1387 from broadcast at DAGScheduler.scala:1580
14:21:39.291 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 746 (MapPartitionsRDD[3526] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:21:39.291 INFO  TaskSchedulerImpl - Adding task set 746.0 with 1 tasks resource profile 0
14:21:39.291 INFO  TaskSetManager - Starting task 0.0 in stage 746.0 (TID 684) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9736 bytes) 
14:21:39.291 INFO  Executor - Running task 0.0 in stage 746.0 (TID 684)
14:21:39.320 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam:0+1856
14:21:39.325 INFO  Executor - Finished task 0.0 in stage 746.0 (TID 684). 1148 bytes result sent to driver
14:21:39.325 INFO  TaskSetManager - Finished task 0.0 in stage 746.0 (TID 684) in 34 ms on localhost (executor driver) (1/1)
14:21:39.326 INFO  TaskSchedulerImpl - Removed TaskSet 746.0, whose tasks have all completed, from pool 
14:21:39.326 INFO  DAGScheduler - ShuffleMapStage 746 (mapToPair at SparkUtils.java:161) finished in 0.055 s
14:21:39.326 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.326 INFO  DAGScheduler - running: HashSet()
14:21:39.326 INFO  DAGScheduler - waiting: HashSet(ResultStage 747)
14:21:39.326 INFO  DAGScheduler - failed: HashSet()
14:21:39.326 INFO  DAGScheduler - Submitting ResultStage 747 (MapPartitionsRDD[3529] at mapPartitions at SparkUtils.java:188), which has no missing parents
14:21:39.327 INFO  MemoryStore - Block broadcast_1388 stored as values in memory (estimated size 7.4 KiB, free 1898.0 MiB)
14:21:39.327 INFO  MemoryStore - Block broadcast_1388_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1898.0 MiB)
14:21:39.327 INFO  BlockManagerInfo - Added broadcast_1388_piece0 in memory on localhost:39377 (size: 4.1 KiB, free: 1917.7 MiB)
14:21:39.328 INFO  SparkContext - Created broadcast 1388 from broadcast at DAGScheduler.scala:1580
14:21:39.328 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 747 (MapPartitionsRDD[3529] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
14:21:39.328 INFO  TaskSchedulerImpl - Adding task set 747.0 with 1 tasks resource profile 0
14:21:39.328 INFO  TaskSetManager - Starting task 0.0 in stage 747.0 (TID 685) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:21:39.329 INFO  Executor - Running task 0.0 in stage 747.0 (TID 685)
14:21:39.330 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.330 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.332 INFO  Executor - Finished task 0.0 in stage 747.0 (TID 685). 2495 bytes result sent to driver
14:21:39.333 INFO  TaskSetManager - Finished task 0.0 in stage 747.0 (TID 685) in 5 ms on localhost (executor driver) (1/1)
14:21:39.333 INFO  TaskSchedulerImpl - Removed TaskSet 747.0, whose tasks have all completed, from pool 
14:21:39.333 INFO  DAGScheduler - ResultStage 747 (collect at SparkUtils.java:205) finished in 0.007 s
14:21:39.333 INFO  DAGScheduler - Job 426 is finished. Cancelling potential speculative or zombie tasks for this job
14:21:39.333 INFO  TaskSchedulerImpl - Killing all running tasks in stage 747: Stage finished
14:21:39.333 INFO  DAGScheduler - Job 426 finished: collect at SparkUtils.java:205, took 0.063884 s
14:21:39.339 INFO  MemoryStore - Block broadcast_1389 stored as values in memory (estimated size 1632.0 B, free 1898.0 MiB)
14:21:39.340 INFO  MemoryStore - Block broadcast_1389_piece0 stored as bytes in memory (estimated size 145.0 B, free 1898.0 MiB)
14:21:39.340 INFO  BlockManagerInfo - Added broadcast_1389_piece0 in memory on localhost:39377 (size: 145.0 B, free: 1917.7 MiB)
14:21:39.340 INFO  SparkContext - Created broadcast 1389 from broadcast at MarkDuplicatesSparkUtils.java:126
14:21:39.340 INFO  MemoryStore - Block broadcast_1390 stored as values in memory (estimated size 304.0 B, free 1898.0 MiB)
14:21:39.341 INFO  MemoryStore - Block broadcast_1390_piece0 stored as bytes in memory (estimated size 37.0 B, free 1898.0 MiB)
14:21:39.341 INFO  BlockManagerInfo - Added broadcast_1390_piece0 in memory on localhost:39377 (size: 37.0 B, free: 1917.7 MiB)
14:21:39.341 INFO  SparkContext - Created broadcast 1390 from broadcast at MarkDuplicatesSparkUtils.java:127
14:21:39.353 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
14:21:39.354 INFO  DAGScheduler - Registering RDD 3538 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 108
14:21:39.354 INFO  DAGScheduler - Registering RDD 3542 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 107
14:21:39.354 INFO  DAGScheduler - Registering RDD 3546 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 106
14:21:39.354 INFO  DAGScheduler - Got job 427 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
14:21:39.354 INFO  DAGScheduler - Final stage: ResultStage 752 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
14:21:39.354 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 751)
14:21:39.354 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 751)
14:21:39.355 INFO  DAGScheduler - Submitting ShuffleMapStage 749 (MapPartitionsRDD[3538] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
14:21:39.355 INFO  MemoryStore - Block broadcast_1391 stored as values in memory (estimated size 24.2 KiB, free 1897.9 MiB)
14:21:39.356 INFO  MemoryStore - Block broadcast_1391_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1897.9 MiB)
14:21:39.356 INFO  BlockManagerInfo - Added broadcast_1391_piece0 in memory on localhost:39377 (size: 12.6 KiB, free: 1917.7 MiB)
14:21:39.356 INFO  SparkContext - Created broadcast 1391 from broadcast at DAGScheduler.scala:1580
14:21:39.357 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 749 (MapPartitionsRDD[3538] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
14:21:39.357 INFO  TaskSchedulerImpl - Adding task set 749.0 with 1 tasks resource profile 0
14:21:39.357 INFO  TaskSetManager - Starting task 0.0 in stage 749.0 (TID 686) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
14:21:39.357 INFO  Executor - Running task 0.0 in stage 749.0 (TID 686)
14:21:39.359 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.359 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.362 INFO  Executor - Finished task 0.0 in stage 749.0 (TID 686). 1922 bytes result sent to driver
14:21:39.362 INFO  TaskSetManager - Finished task 0.0 in stage 749.0 (TID 686) in 5 ms on localhost (executor driver) (1/1)
14:21:39.362 INFO  TaskSchedulerImpl - Removed TaskSet 749.0, whose tasks have all completed, from pool 
14:21:39.362 INFO  DAGScheduler - ShuffleMapStage 749 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.007 s
14:21:39.363 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.363 INFO  DAGScheduler - running: HashSet()
14:21:39.363 INFO  DAGScheduler - waiting: HashSet(ResultStage 752, ShuffleMapStage 750, ShuffleMapStage 751)
14:21:39.363 INFO  DAGScheduler - failed: HashSet()
14:21:39.363 INFO  DAGScheduler - Submitting ShuffleMapStage 750 (MapPartitionsRDD[3542] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
14:21:39.364 INFO  MemoryStore - Block broadcast_1392 stored as values in memory (estimated size 28.3 KiB, free 1897.9 MiB)
14:21:39.364 INFO  MemoryStore - Block broadcast_1392_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1897.9 MiB)
14:21:39.364 INFO  BlockManagerInfo - Added broadcast_1392_piece0 in memory on localhost:39377 (size: 14.8 KiB, free: 1917.7 MiB)
14:21:39.364 INFO  SparkContext - Created broadcast 1392 from broadcast at DAGScheduler.scala:1580
14:21:39.365 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 750 (MapPartitionsRDD[3542] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
14:21:39.365 INFO  TaskSchedulerImpl - Adding task set 750.0 with 1 tasks resource profile 0
14:21:39.365 INFO  TaskSetManager - Starting task 0.0 in stage 750.0 (TID 687) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
14:21:39.365 INFO  Executor - Running task 0.0 in stage 750.0 (TID 687)
14:21:39.367 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.367 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.371 INFO  Executor - Finished task 0.0 in stage 750.0 (TID 687). 1922 bytes result sent to driver
14:21:39.371 INFO  TaskSetManager - Finished task 0.0 in stage 750.0 (TID 687) in 6 ms on localhost (executor driver) (1/1)
14:21:39.371 INFO  TaskSchedulerImpl - Removed TaskSet 750.0, whose tasks have all completed, from pool 
14:21:39.372 INFO  DAGScheduler - ShuffleMapStage 750 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.008 s
14:21:39.372 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.372 INFO  DAGScheduler - running: HashSet()
14:21:39.372 INFO  DAGScheduler - waiting: HashSet(ResultStage 752, ShuffleMapStage 751)
14:21:39.372 INFO  DAGScheduler - failed: HashSet()
14:21:39.372 INFO  DAGScheduler - Submitting ShuffleMapStage 751 (MapPartitionsRDD[3546] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
14:21:39.372 INFO  MemoryStore - Block broadcast_1393 stored as values in memory (estimated size 19.0 KiB, free 1897.9 MiB)
14:21:39.373 INFO  MemoryStore - Block broadcast_1393_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1897.9 MiB)
14:21:39.373 INFO  BlockManagerInfo - Added broadcast_1393_piece0 in memory on localhost:39377 (size: 9.7 KiB, free: 1917.7 MiB)
14:21:39.373 INFO  SparkContext - Created broadcast 1393 from broadcast at DAGScheduler.scala:1580
14:21:39.373 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 751 (MapPartitionsRDD[3546] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
14:21:39.374 INFO  TaskSchedulerImpl - Adding task set 751.0 with 1 tasks resource profile 0
14:21:39.374 INFO  TaskSetManager - Starting task 0.0 in stage 751.0 (TID 688) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:21:39.374 INFO  Executor - Running task 0.0 in stage 751.0 (TID 688)
14:21:39.376 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.376 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.378 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.378 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.379 INFO  Executor - Finished task 0.0 in stage 751.0 (TID 688). 1922 bytes result sent to driver
14:21:39.380 INFO  TaskSetManager - Finished task 0.0 in stage 751.0 (TID 688) in 6 ms on localhost (executor driver) (1/1)
14:21:39.380 INFO  TaskSchedulerImpl - Removed TaskSet 751.0, whose tasks have all completed, from pool 
14:21:39.380 INFO  DAGScheduler - ShuffleMapStage 751 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.008 s
14:21:39.380 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.380 INFO  DAGScheduler - running: HashSet()
14:21:39.380 INFO  DAGScheduler - waiting: HashSet(ResultStage 752)
14:21:39.380 INFO  DAGScheduler - failed: HashSet()
14:21:39.380 INFO  DAGScheduler - Submitting ResultStage 752 (MapPartitionsRDD[3548] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
14:21:39.381 INFO  MemoryStore - Block broadcast_1394 stored as values in memory (estimated size 20.2 KiB, free 1897.8 MiB)
14:21:39.382 INFO  MemoryStore - Block broadcast_1394_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1897.8 MiB)
14:21:39.382 INFO  BlockManagerInfo - Added broadcast_1394_piece0 in memory on localhost:39377 (size: 10.6 KiB, free: 1917.7 MiB)
14:21:39.382 INFO  SparkContext - Created broadcast 1394 from broadcast at DAGScheduler.scala:1580
14:21:39.382 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 752 (MapPartitionsRDD[3548] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
14:21:39.382 INFO  TaskSchedulerImpl - Adding task set 752.0 with 1 tasks resource profile 0
14:21:39.383 INFO  TaskSetManager - Starting task 0.0 in stage 752.0 (TID 689) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:21:39.383 INFO  Executor - Running task 0.0 in stage 752.0 (TID 689)
14:21:39.384 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.384 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.387 INFO  Executor - Finished task 0.0 in stage 752.0 (TID 689). 2053 bytes result sent to driver
14:21:39.387 INFO  TaskSetManager - Finished task 0.0 in stage 752.0 (TID 689) in 4 ms on localhost (executor driver) (1/1)
14:21:39.387 INFO  TaskSchedulerImpl - Removed TaskSet 752.0, whose tasks have all completed, from pool 
14:21:39.387 INFO  DAGScheduler - ResultStage 752 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.006 s
14:21:39.387 INFO  DAGScheduler - Job 427 is finished. Cancelling potential speculative or zombie tasks for this job
14:21:39.387 INFO  TaskSchedulerImpl - Killing all running tasks in stage 752: Stage finished
14:21:39.388 INFO  DAGScheduler - Job 427 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.034322 s
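Job 427 ends in a collectAsMap action (MarkDuplicatesSparkUtils.java:542): a small keyed result -- presumably the per-library duplicate metrics -- is aggregated on the executors and returned to the driver as a java.util.Map. A minimal stand-alone sketch of that aggregate-then-collectAsMap pattern, using hypothetical library names and counts rather than real metrics, might be:

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import scala.Tuple2;

    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;

    // Sketch only: per-key aggregation followed by collectAsMap, the action named in the
    // "collectAsMap at MarkDuplicatesSparkUtils.java:542" job above. Not GATK code.
    public final class CollectAsMapSketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("sketch").setMaster("local[1]");
            try (JavaSparkContext sc = new JavaSparkContext(conf)) {
                // Hypothetical (library, count) pairs standing in for real duplicate metrics.
                List<Tuple2<String, Long>> pairs = Arrays.asList(
                        new Tuple2<>("libraryA", 1L),
                        new Tuple2<>("libraryA", 2L),
                        new Tuple2<>("libraryB", 1L));
                JavaPairRDD<String, Long> perLibrary = sc.parallelizePairs(pairs);

                // The reduce runs through a shuffle; collectAsMap ships the small map to the driver.
                Map<String, Long> totals = perLibrary.reduceByKey(Long::sum).collectAsMap();
                totals.forEach((library, count) -> System.out.println(library + " -> " + count));
            }
        }
    }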
14:21:39.398 INFO  MemoryStore - Block broadcast_1395 stored as values in memory (estimated size 20.3 KiB, free 1897.8 MiB)
14:21:39.399 INFO  MemoryStore - Block broadcast_1395_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1897.8 MiB)
14:21:39.399 INFO  BlockManagerInfo - Added broadcast_1395_piece0 in memory on localhost:39377 (size: 1850.0 B, free: 1917.7 MiB)
14:21:39.399 INFO  SparkContext - Created broadcast 1395 from broadcast at ReadsSparkSink.java:133
14:21:39.400 INFO  MemoryStore - Block broadcast_1396 stored as values in memory (estimated size 20.4 KiB, free 1897.8 MiB)
14:21:39.400 INFO  MemoryStore - Block broadcast_1396_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1897.8 MiB)
14:21:39.400 INFO  BlockManagerInfo - Added broadcast_1396_piece0 in memory on localhost:39377 (size: 1850.0 B, free: 1917.7 MiB)
14:21:39.400 INFO  SparkContext - Created broadcast 1396 from broadcast at BamSink.java:76
14:21:39.402 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
14:21:39.402 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:21:39.402 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
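The committer lines above show the classic FileOutputCommitter with commit algorithm version 1. For reference only (this is standard Hadoop configuration, not something the GATK test is shown setting here), the algorithm version is controlled by a Hadoop property; requesting the v2 commit algorithm would look like this:

    import org.apache.hadoop.conf.Configuration;

    // Sketch only: the commit algorithm logged above ("Algorithm version is 1") is governed by
    // a standard Hadoop property; this is how a job could request the v2 commit algorithm.
    public final class CommitterConfigSketch {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            conf.set("mapreduce.fileoutputcommitter.algorithm.version", "2");
            System.out.println(conf.get("mapreduce.fileoutputcommitter.algorithm.version"));
        }
    }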
14:21:39.419 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
14:21:39.420 INFO  DAGScheduler - Registering RDD 3550 (mapToPair at SparkUtils.java:161) as input to shuffle 109
14:21:39.420 INFO  DAGScheduler - Got job 428 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
14:21:39.420 INFO  DAGScheduler - Final stage: ResultStage 757 (runJob at SparkHadoopWriter.scala:83)
14:21:39.420 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 756)
14:21:39.420 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 756)
14:21:39.421 INFO  DAGScheduler - Submitting ShuffleMapStage 756 (MapPartitionsRDD[3550] at mapToPair at SparkUtils.java:161), which has no missing parents
14:21:39.421 INFO  MemoryStore - Block broadcast_1397 stored as values in memory (estimated size 18.1 KiB, free 1897.8 MiB)
14:21:39.422 INFO  MemoryStore - Block broadcast_1397_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1897.8 MiB)
14:21:39.422 INFO  BlockManagerInfo - Added broadcast_1397_piece0 in memory on localhost:39377 (size: 9.5 KiB, free: 1917.7 MiB)
14:21:39.422 INFO  SparkContext - Created broadcast 1397 from broadcast at DAGScheduler.scala:1580
14:21:39.423 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 756 (MapPartitionsRDD[3550] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:21:39.423 INFO  TaskSchedulerImpl - Adding task set 756.0 with 1 tasks resource profile 0
14:21:39.423 INFO  TaskSetManager - Starting task 0.0 in stage 756.0 (TID 690) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:21:39.424 INFO  Executor - Running task 0.0 in stage 756.0 (TID 690)
14:21:39.426 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.426 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.428 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.428 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.429 INFO  Executor - Finished task 0.0 in stage 756.0 (TID 690). 1922 bytes result sent to driver
14:21:39.430 INFO  TaskSetManager - Finished task 0.0 in stage 756.0 (TID 690) in 7 ms on localhost (executor driver) (1/1)
14:21:39.430 INFO  TaskSchedulerImpl - Removed TaskSet 756.0, whose tasks have all completed, from pool 
14:21:39.430 INFO  DAGScheduler - ShuffleMapStage 756 (mapToPair at SparkUtils.java:161) finished in 0.009 s
14:21:39.430 INFO  DAGScheduler - looking for newly runnable stages
14:21:39.430 INFO  DAGScheduler - running: HashSet()
14:21:39.430 INFO  DAGScheduler - waiting: HashSet(ResultStage 757)
14:21:39.430 INFO  DAGScheduler - failed: HashSet()
14:21:39.431 INFO  DAGScheduler - Submitting ResultStage 757 (MapPartitionsRDD[3555] at mapToPair at BamSink.java:91), which has no missing parents
14:21:39.437 INFO  MemoryStore - Block broadcast_1398 stored as values in memory (estimated size 163.7 KiB, free 1897.6 MiB)
14:21:39.438 INFO  MemoryStore - Block broadcast_1398_piece0 stored as bytes in memory (estimated size 73.9 KiB, free 1897.5 MiB)
14:21:39.438 INFO  BlockManagerInfo - Added broadcast_1398_piece0 in memory on localhost:39377 (size: 73.9 KiB, free: 1917.6 MiB)
14:21:39.439 INFO  SparkContext - Created broadcast 1398 from broadcast at DAGScheduler.scala:1580
14:21:39.439 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 757 (MapPartitionsRDD[3555] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
14:21:39.439 INFO  TaskSchedulerImpl - Adding task set 757.0 with 1 tasks resource profile 0
14:21:39.439 INFO  TaskSetManager - Starting task 0.0 in stage 757.0 (TID 691) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:21:39.440 INFO  Executor - Running task 0.0 in stage 757.0 (TID 691)
14:21:39.443 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:21:39.443 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:21:39.446 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
14:21:39.446 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:21:39.446 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:21:39.446 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
14:21:39.446 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:21:39.446 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:21:39.466 INFO  FileOutputCommitter - Saved output of task 'attempt_202506021421398630812061104446954_3555_r_000000_0' to file:/tmp/local111906758591768419528/markdups1685090483179678625.bam.parts/_temporary/0/task_202506021421398630812061104446954_3555_r_000000
14:21:39.466 INFO  SparkHadoopMapRedUtil - attempt_202506021421398630812061104446954_3555_r_000000_0: Committed. Elapsed time: 0 ms.
14:21:39.467 INFO  Executor - Finished task 0.0 in stage 757.0 (TID 691). 1858 bytes result sent to driver
14:21:39.467 INFO  TaskSetManager - Finished task 0.0 in stage 757.0 (TID 691) in 28 ms on localhost (executor driver) (1/1)
14:21:39.467 INFO  TaskSchedulerImpl - Removed TaskSet 757.0, whose tasks have all completed, from pool 
14:21:39.468 INFO  DAGScheduler - ResultStage 757 (runJob at SparkHadoopWriter.scala:83) finished in 0.037 s
14:21:39.468 INFO  DAGScheduler - Job 428 is finished. Cancelling potential speculative or zombie tasks for this job
14:21:39.468 INFO  TaskSchedulerImpl - Killing all running tasks in stage 757: Stage finished
14:21:39.468 INFO  DAGScheduler - Job 428 finished: runJob at SparkHadoopWriter.scala:83, took 0.048279 s
14:21:39.468 INFO  SparkHadoopWriter - Start to commit write Job job_202506021421398630812061104446954_3555.
14:21:39.473 INFO  SparkHadoopWriter - Write Job job_202506021421398630812061104446954_3555 committed. Elapsed time: 4 ms.
14:21:39.484 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local111906758591768419528/markdups1685090483179678625.bam
14:21:39.488 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local111906758591768419528/markdups1685090483179678625.bam done
14:21:39.488 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local111906758591768419528/markdups1685090483179678625.bam.parts/ to /tmp/local111906758591768419528/markdups1685090483179678625.bam.sbi
14:21:39.493 INFO  IndexFileMerger - Done merging .sbi files
14:21:39.493 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local111906758591768419528/markdups1685090483179678625.bam.parts/ to /tmp/local111906758591768419528/markdups1685090483179678625.bam.bai
14:21:39.497 INFO  IndexFileMerger - Done merging .bai files
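The write path visible in this block: each task commits its output under the temporary .bam.parts/ directory, the write job is committed, the parts are concatenated into the final BAM, and the per-part .sbi/.bai index files are merged. Purely to illustrate the generic concatenate-the-parts step (the real merger must also handle BAM-specific details such as the header block and BGZF terminator, which this ignores), and assuming hypothetical part-file names matching part-*, a plain-Java sketch would be:

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    // Sketch only: append numbered part files in order into one output file. This illustrates
    // the "Concatenating N parts" step in the log; it is not the GATK/Disq implementation.
    public final class ConcatenatePartsSketch {
        public static void concatenate(Path partsDir, Path output) throws IOException {
            List<Path> parts = new ArrayList<>();
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(partsDir, "part-*")) {
                stream.forEach(parts::add);
            }
            // part-00000, part-00001, ... sort lexicographically into task order (hypothetical naming).
            parts.sort(Comparator.naturalOrder());

            try (OutputStream out = Files.newOutputStream(output)) {
                for (Path part : parts) {
                    Files.copy(part, out); // append this part's bytes to the output
                }
            }
        }

        public static void main(String[] args) throws IOException {
            concatenate(Paths.get(args[0]), Paths.get(args[1]));
        }
    }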
14:21:39.497 INFO  MarkDuplicatesSpark - Shutting down engine
[June 2, 2025 at 2:21:39 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:39 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:40 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:40 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:47 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:49 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:49 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:49 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:50 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:54 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:54 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:55 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:58 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:58 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:21:59 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:22:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:22:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:22:02 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=2273312768
[June 2, 2025 at 2:22:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.03 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
14:22:04.606 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:22:04.606 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
14:22:04.606 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
14:22:04.606 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
14:22:04.606 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
14:22:04.606 INFO  MarkDuplicatesSpark - Start Date/Time: June 2, 2025 at 2:22:04 PM UTC
14:22:04.606 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:22:04.606 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:22:04.606 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
14:22:04.606 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
14:22:04.606 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
14:22:04.606 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
14:22:04.606 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
14:22:04.606 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
14:22:04.606 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
14:22:04.606 INFO  MarkDuplicatesSpark - Requester pays: disabled
14:22:04.606 INFO  MarkDuplicatesSpark - Initializing engine
14:22:04.606 INFO  MarkDuplicatesSpark - Done initializing engine
14:22:04.607 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
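The tool reports that Spark verbosity was set to INFO via the --spark-verbosity argument. On a plain JavaSparkContext the comparable driver-side call is setLogLevel; the following is only a generic illustration, not the GATK wiring:

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;

    // Sketch only: setting the Spark log level on a JavaSparkContext, analogous in spirit to
    // the --spark-verbosity INFO setting reported above.
    public final class SparkVerbositySketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("sketch").setMaster("local[1]");
            try (JavaSparkContext sc = new JavaSparkContext(conf)) {
                sc.setLogLevel("INFO"); // valid levels include ALL, DEBUG, INFO, WARN, ERROR, OFF
            }
        }
    }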
14:22:04.609 INFO  MemoryStore - Block broadcast_1975 stored as values in memory (estimated size 306.3 KiB, free 1914.1 MiB)
14:22:04.615 INFO  MemoryStore - Block broadcast_1975_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1914.0 MiB)
14:22:04.616 INFO  BlockManagerInfo - Added broadcast_1975_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1918.7 MiB)
14:22:04.616 INFO  SparkContext - Created broadcast 1975 from newAPIHadoopFile at PathSplitSource.java:96
14:22:04.636 INFO  MemoryStore - Block broadcast_1976 stored as values in memory (estimated size 306.3 KiB, free 1913.7 MiB)
14:22:04.643 INFO  MemoryStore - Block broadcast_1976_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1913.6 MiB)
14:22:04.643 INFO  BlockManagerInfo - Added broadcast_1976_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1918.6 MiB)
14:22:04.643 INFO  SparkContext - Created broadcast 1976 from newAPIHadoopFile at PathSplitSource.java:96
14:22:04.676 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
14:22:04.677 INFO  FileInputFormat - Total input files to process : 1
14:22:04.677 INFO  DAGScheduler - Registering RDD 5447 (mapToPair at SparkUtils.java:161) as input to shuffle 300
14:22:04.677 INFO  DAGScheduler - Got job 559 (collect at SparkUtils.java:205) with 1 output partitions
14:22:04.677 INFO  DAGScheduler - Final stage: ResultStage 1219 (collect at SparkUtils.java:205)
14:22:04.678 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1218)
14:22:04.678 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1218)
14:22:04.678 INFO  DAGScheduler - Submitting ShuffleMapStage 1218 (MapPartitionsRDD[5447] at mapToPair at SparkUtils.java:161), which has no missing parents
14:22:04.696 INFO  MemoryStore - Block broadcast_1977 stored as values in memory (estimated size 493.8 KiB, free 1913.2 MiB)
14:22:04.699 INFO  MemoryStore - Block broadcast_1977_piece0 stored as bytes in memory (estimated size 210.1 KiB, free 1913.0 MiB)
14:22:04.699 INFO  BlockManagerInfo - Added broadcast_1977_piece0 in memory on localhost:39377 (size: 210.1 KiB, free: 1918.4 MiB)
14:22:04.699 INFO  SparkContext - Created broadcast 1977 from broadcast at DAGScheduler.scala:1580
14:22:04.699 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1218 (MapPartitionsRDD[5447] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:22:04.699 INFO  TaskSchedulerImpl - Adding task set 1218.0 with 1 tasks resource profile 0
14:22:04.700 INFO  TaskSetManager - Starting task 0.0 in stage 1218.0 (TID 1062) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9758 bytes) 
14:22:04.700 INFO  Executor - Running task 0.0 in stage 1218.0 (TID 1062)
14:22:04.730 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam:0+13412
14:22:04.735 INFO  Executor - Finished task 0.0 in stage 1218.0 (TID 1062). 1148 bytes result sent to driver
14:22:04.736 INFO  TaskSetManager - Finished task 0.0 in stage 1218.0 (TID 1062) in 36 ms on localhost (executor driver) (1/1)
14:22:04.736 INFO  TaskSchedulerImpl - Removed TaskSet 1218.0, whose tasks have all completed, from pool 
14:22:04.736 INFO  DAGScheduler - ShuffleMapStage 1218 (mapToPair at SparkUtils.java:161) finished in 0.058 s
14:22:04.736 INFO  DAGScheduler - looking for newly runnable stages
14:22:04.736 INFO  DAGScheduler - running: HashSet()
14:22:04.736 INFO  DAGScheduler - waiting: HashSet(ResultStage 1219)
14:22:04.736 INFO  DAGScheduler - failed: HashSet()
14:22:04.736 INFO  DAGScheduler - Submitting ResultStage 1219 (MapPartitionsRDD[5450] at mapPartitions at SparkUtils.java:188), which has no missing parents
14:22:04.737 INFO  MemoryStore - Block broadcast_1978 stored as values in memory (estimated size 7.4 KiB, free 1912.9 MiB)
14:22:04.737 INFO  MemoryStore - Block broadcast_1978_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1912.9 MiB)
14:22:04.737 INFO  BlockManagerInfo - Added broadcast_1978_piece0 in memory on localhost:39377 (size: 4.1 KiB, free: 1918.4 MiB)
14:22:04.738 INFO  SparkContext - Created broadcast 1978 from broadcast at DAGScheduler.scala:1580
14:22:04.738 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1219 (MapPartitionsRDD[5450] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
14:22:04.738 INFO  TaskSchedulerImpl - Adding task set 1219.0 with 1 tasks resource profile 0
14:22:04.738 INFO  TaskSetManager - Starting task 0.0 in stage 1219.0 (TID 1063) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:22:04.738 INFO  Executor - Running task 0.0 in stage 1219.0 (TID 1063)
14:22:04.739 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:04.739 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:04.742 INFO  Executor - Finished task 0.0 in stage 1219.0 (TID 1063). 2329 bytes result sent to driver
14:22:04.742 INFO  TaskSetManager - Finished task 0.0 in stage 1219.0 (TID 1063) in 4 ms on localhost (executor driver) (1/1)
14:22:04.742 INFO  TaskSchedulerImpl - Removed TaskSet 1219.0, whose tasks have all completed, from pool 
14:22:04.742 INFO  DAGScheduler - ResultStage 1219 (collect at SparkUtils.java:205) finished in 0.006 s
14:22:04.742 INFO  DAGScheduler - Job 559 is finished. Cancelling potential speculative or zombie tasks for this job
14:22:04.743 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1219: Stage finished
14:22:04.743 INFO  DAGScheduler - Job 559 finished: collect at SparkUtils.java:205, took 0.066135 s
14:22:04.747 INFO  MemoryStore - Block broadcast_1979 stored as values in memory (estimated size 7.8 KiB, free 1912.9 MiB)
14:22:04.747 INFO  MemoryStore - Block broadcast_1979_piece0 stored as bytes in memory (estimated size 540.0 B, free 1912.9 MiB)
14:22:04.747 INFO  BlockManagerInfo - Added broadcast_1979_piece0 in memory on localhost:39377 (size: 540.0 B, free: 1918.4 MiB)
14:22:04.748 INFO  SparkContext - Created broadcast 1979 from broadcast at MarkDuplicatesSparkUtils.java:126
14:22:04.748 INFO  MemoryStore - Block broadcast_1980 stored as values in memory (estimated size 2.6 KiB, free 1912.9 MiB)
14:22:04.748 INFO  MemoryStore - Block broadcast_1980_piece0 stored as bytes in memory (estimated size 209.0 B, free 1912.9 MiB)
14:22:04.748 INFO  BlockManagerInfo - Added broadcast_1980_piece0 in memory on localhost:39377 (size: 209.0 B, free: 1918.4 MiB)
14:22:04.748 INFO  SparkContext - Created broadcast 1980 from broadcast at MarkDuplicatesSparkUtils.java:127
14:22:04.762 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
14:22:04.762 INFO  DAGScheduler - Registering RDD 5459 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 303
14:22:04.762 INFO  DAGScheduler - Registering RDD 5463 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 302
14:22:04.763 INFO  DAGScheduler - Registering RDD 5467 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 301
14:22:04.763 INFO  DAGScheduler - Got job 560 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
14:22:04.763 INFO  DAGScheduler - Final stage: ResultStage 1224 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
14:22:04.763 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1223)
14:22:04.763 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1223)
14:22:04.763 INFO  DAGScheduler - Submitting ShuffleMapStage 1221 (MapPartitionsRDD[5459] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
14:22:04.764 INFO  MemoryStore - Block broadcast_1981 stored as values in memory (estimated size 57.6 KiB, free 1912.9 MiB)
14:22:04.764 INFO  MemoryStore - Block broadcast_1981_piece0 stored as bytes in memory (estimated size 19.8 KiB, free 1912.9 MiB)
14:22:04.765 INFO  BlockManagerInfo - Added broadcast_1981_piece0 in memory on localhost:39377 (size: 19.8 KiB, free: 1918.4 MiB)
14:22:04.765 INFO  SparkContext - Created broadcast 1981 from broadcast at DAGScheduler.scala:1580
14:22:04.765 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1221 (MapPartitionsRDD[5459] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
14:22:04.765 INFO  TaskSchedulerImpl - Adding task set 1221.0 with 1 tasks resource profile 0
14:22:04.765 INFO  TaskSetManager - Starting task 0.0 in stage 1221.0 (TID 1064) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
14:22:04.766 INFO  Executor - Running task 0.0 in stage 1221.0 (TID 1064)
14:22:04.767 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:04.767 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:04.771 INFO  Executor - Finished task 0.0 in stage 1221.0 (TID 1064). 1922 bytes result sent to driver
14:22:04.771 INFO  TaskSetManager - Finished task 0.0 in stage 1221.0 (TID 1064) in 6 ms on localhost (executor driver) (1/1)
14:22:04.771 INFO  TaskSchedulerImpl - Removed TaskSet 1221.0, whose tasks have all completed, from pool 
14:22:04.772 INFO  DAGScheduler - ShuffleMapStage 1221 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.009 s
14:22:04.772 INFO  DAGScheduler - looking for newly runnable stages
14:22:04.772 INFO  DAGScheduler - running: HashSet()
14:22:04.772 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1222, ShuffleMapStage 1223, ResultStage 1224)
14:22:04.772 INFO  DAGScheduler - failed: HashSet()
14:22:04.772 INFO  DAGScheduler - Submitting ShuffleMapStage 1222 (MapPartitionsRDD[5463] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
14:22:04.774 INFO  MemoryStore - Block broadcast_1982 stored as values in memory (estimated size 61.7 KiB, free 1912.8 MiB)
14:22:04.774 INFO  MemoryStore - Block broadcast_1982_piece0 stored as bytes in memory (estimated size 22.0 KiB, free 1912.8 MiB)
14:22:04.774 INFO  BlockManagerInfo - Added broadcast_1982_piece0 in memory on localhost:39377 (size: 22.0 KiB, free: 1918.3 MiB)
14:22:04.775 INFO  SparkContext - Created broadcast 1982 from broadcast at DAGScheduler.scala:1580
14:22:04.775 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1222 (MapPartitionsRDD[5463] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
14:22:04.775 INFO  TaskSchedulerImpl - Adding task set 1222.0 with 1 tasks resource profile 0
14:22:04.775 INFO  TaskSetManager - Starting task 0.0 in stage 1222.0 (TID 1065) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
14:22:04.775 INFO  Executor - Running task 0.0 in stage 1222.0 (TID 1065)
14:22:04.777 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.0 KiB) non-empty blocks including 1 (2.0 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:04.777 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:04.783 INFO  Executor - Finished task 0.0 in stage 1222.0 (TID 1065). 1922 bytes result sent to driver
14:22:04.783 INFO  TaskSetManager - Finished task 0.0 in stage 1222.0 (TID 1065) in 8 ms on localhost (executor driver) (1/1)
14:22:04.783 INFO  TaskSchedulerImpl - Removed TaskSet 1222.0, whose tasks have all completed, from pool 
14:22:04.783 INFO  DAGScheduler - ShuffleMapStage 1222 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.011 s
14:22:04.784 INFO  DAGScheduler - looking for newly runnable stages
14:22:04.784 INFO  DAGScheduler - running: HashSet()
14:22:04.784 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1223, ResultStage 1224)
14:22:04.784 INFO  DAGScheduler - failed: HashSet()
14:22:04.784 INFO  DAGScheduler - Submitting ShuffleMapStage 1223 (MapPartitionsRDD[5467] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
14:22:04.785 INFO  MemoryStore - Block broadcast_1983 stored as values in memory (estimated size 52.4 KiB, free 1912.7 MiB)
14:22:04.785 INFO  MemoryStore - Block broadcast_1983_piece0 stored as bytes in memory (estimated size 17.3 KiB, free 1912.7 MiB)
14:22:04.785 INFO  BlockManagerInfo - Added broadcast_1983_piece0 in memory on localhost:39377 (size: 17.3 KiB, free: 1918.3 MiB)
14:22:04.786 INFO  SparkContext - Created broadcast 1983 from broadcast at DAGScheduler.scala:1580
14:22:04.786 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1223 (MapPartitionsRDD[5467] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
14:22:04.786 INFO  TaskSchedulerImpl - Adding task set 1223.0 with 1 tasks resource profile 0
14:22:04.786 INFO  TaskSetManager - Starting task 0.0 in stage 1223.0 (TID 1066) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:22:04.786 INFO  Executor - Running task 0.0 in stage 1223.0 (TID 1066)
14:22:04.788 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:04.788 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:04.791 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:04.791 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:04.792 INFO  Executor - Finished task 0.0 in stage 1223.0 (TID 1066). 1922 bytes result sent to driver
14:22:04.792 INFO  TaskSetManager - Finished task 0.0 in stage 1223.0 (TID 1066) in 6 ms on localhost (executor driver) (1/1)
14:22:04.792 INFO  TaskSchedulerImpl - Removed TaskSet 1223.0, whose tasks have all completed, from pool 
14:22:04.793 INFO  DAGScheduler - ShuffleMapStage 1223 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.008 s
14:22:04.793 INFO  DAGScheduler - looking for newly runnable stages
14:22:04.793 INFO  DAGScheduler - running: HashSet()
14:22:04.793 INFO  DAGScheduler - waiting: HashSet(ResultStage 1224)
14:22:04.793 INFO  DAGScheduler - failed: HashSet()
14:22:04.793 INFO  DAGScheduler - Submitting ResultStage 1224 (MapPartitionsRDD[5469] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
14:22:04.794 INFO  MemoryStore - Block broadcast_1984 stored as values in memory (estimated size 53.6 KiB, free 1912.7 MiB)
14:22:04.795 INFO  MemoryStore - Block broadcast_1984_piece0 stored as bytes in memory (estimated size 18.0 KiB, free 1912.6 MiB)
14:22:04.795 INFO  BlockManagerInfo - Added broadcast_1984_piece0 in memory on localhost:39377 (size: 18.0 KiB, free: 1918.3 MiB)
14:22:04.795 INFO  SparkContext - Created broadcast 1984 from broadcast at DAGScheduler.scala:1580
14:22:04.795 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1224 (MapPartitionsRDD[5469] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
14:22:04.795 INFO  TaskSchedulerImpl - Adding task set 1224.0 with 1 tasks resource profile 0
14:22:04.796 INFO  TaskSetManager - Starting task 0.0 in stage 1224.0 (TID 1067) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:22:04.796 INFO  Executor - Running task 0.0 in stage 1224.0 (TID 1067)
14:22:04.797 INFO  ShuffleBlockFetcherIterator - Getting 1 (251.0 B) non-empty blocks including 1 (251.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:04.797 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:04.799 INFO  Executor - Finished task 0.0 in stage 1224.0 (TID 1067). 2371 bytes result sent to driver
14:22:04.799 INFO  TaskSetManager - Finished task 0.0 in stage 1224.0 (TID 1067) in 4 ms on localhost (executor driver) (1/1)
14:22:04.799 INFO  TaskSchedulerImpl - Removed TaskSet 1224.0, whose tasks have all completed, from pool 
14:22:04.799 INFO  DAGScheduler - ResultStage 1224 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.006 s
14:22:04.799 INFO  DAGScheduler - Job 560 is finished. Cancelling potential speculative or zombie tasks for this job
14:22:04.799 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1224: Stage finished
14:22:04.799 INFO  DAGScheduler - Job 560 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.037376 s
14:22:04.811 INFO  MemoryStore - Block broadcast_1985 stored as values in memory (estimated size 179.4 KiB, free 1912.5 MiB)
14:22:04.812 INFO  MemoryStore - Block broadcast_1985_piece0 stored as bytes in memory (estimated size 7.9 KiB, free 1912.5 MiB)
14:22:04.812 INFO  BlockManagerInfo - Added broadcast_1985_piece0 in memory on localhost:39377 (size: 7.9 KiB, free: 1918.3 MiB)
14:22:04.812 INFO  SparkContext - Created broadcast 1985 from broadcast at ReadsSparkSink.java:133
14:22:04.815 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
14:22:04.815 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:22:04.815 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:22:04.831 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
14:22:04.832 INFO  DAGScheduler - Registering RDD 5471 (mapToPair at SparkUtils.java:161) as input to shuffle 304
14:22:04.832 INFO  DAGScheduler - Got job 561 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
14:22:04.832 INFO  DAGScheduler - Final stage: ResultStage 1229 (runJob at SparkHadoopWriter.scala:83)
14:22:04.832 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1228)
14:22:04.832 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1228)
14:22:04.832 INFO  DAGScheduler - Submitting ShuffleMapStage 1228 (MapPartitionsRDD[5471] at mapToPair at SparkUtils.java:161), which has no missing parents
14:22:04.833 INFO  MemoryStore - Block broadcast_1986 stored as values in memory (estimated size 51.5 KiB, free 1912.4 MiB)
14:22:04.834 INFO  MemoryStore - Block broadcast_1986_piece0 stored as bytes in memory (estimated size 16.8 KiB, free 1912.4 MiB)
14:22:04.834 INFO  BlockManagerInfo - Added broadcast_1986_piece0 in memory on localhost:39377 (size: 16.8 KiB, free: 1918.3 MiB)
14:22:04.834 INFO  SparkContext - Created broadcast 1986 from broadcast at DAGScheduler.scala:1580
14:22:04.834 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1228 (MapPartitionsRDD[5471] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:22:04.834 INFO  TaskSchedulerImpl - Adding task set 1228.0 with 1 tasks resource profile 0
14:22:04.835 INFO  TaskSetManager - Starting task 0.0 in stage 1228.0 (TID 1068) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:22:04.835 INFO  Executor - Running task 0.0 in stage 1228.0 (TID 1068)
14:22:04.837 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:04.837 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:04.841 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:04.841 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:04.843 INFO  Executor - Finished task 0.0 in stage 1228.0 (TID 1068). 1922 bytes result sent to driver
14:22:04.843 INFO  TaskSetManager - Finished task 0.0 in stage 1228.0 (TID 1068) in 8 ms on localhost (executor driver) (1/1)
14:22:04.843 INFO  TaskSchedulerImpl - Removed TaskSet 1228.0, whose tasks have all completed, from pool 
14:22:04.843 INFO  DAGScheduler - ShuffleMapStage 1228 (mapToPair at SparkUtils.java:161) finished in 0.010 s
14:22:04.843 INFO  DAGScheduler - looking for newly runnable stages
14:22:04.843 INFO  DAGScheduler - running: HashSet()
14:22:04.843 INFO  DAGScheduler - waiting: HashSet(ResultStage 1229)
14:22:04.843 INFO  DAGScheduler - failed: HashSet()
14:22:04.843 INFO  DAGScheduler - Submitting ResultStage 1229 (MapPartitionsRDD[5477] at saveAsTextFile at SamSink.java:65), which has no missing parents
14:22:04.850 INFO  MemoryStore - Block broadcast_1987 stored as values in memory (estimated size 196.7 KiB, free 1912.2 MiB)
14:22:04.851 INFO  MemoryStore - Block broadcast_1987_piece0 stored as bytes in memory (estimated size 80.9 KiB, free 1912.1 MiB)
14:22:04.851 INFO  BlockManagerInfo - Added broadcast_1987_piece0 in memory on localhost:39377 (size: 80.9 KiB, free: 1918.2 MiB)
14:22:04.851 INFO  SparkContext - Created broadcast 1987 from broadcast at DAGScheduler.scala:1580
14:22:04.852 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1229 (MapPartitionsRDD[5477] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
14:22:04.852 INFO  TaskSchedulerImpl - Adding task set 1229.0 with 1 tasks resource profile 0
14:22:04.852 INFO  TaskSetManager - Starting task 0.0 in stage 1229.0 (TID 1069) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:22:04.852 INFO  Executor - Running task 0.0 in stage 1229.0 (TID 1069)
14:22:04.856 INFO  ShuffleBlockFetcherIterator - Getting 1 (14.8 KiB) non-empty blocks including 1 (14.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:04.856 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:04.859 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
14:22:04.859 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:22:04.859 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:22:04.871 INFO  FileOutputCommitter - Saved output of task 'attempt_20250602142204673264414251009065_5477_m_000000_0' to file:/tmp/MarkDups2062205494209103614/MarkDups.sam.parts/_temporary/0/task_20250602142204673264414251009065_5477_m_000000
14:22:04.871 INFO  SparkHadoopMapRedUtil - attempt_20250602142204673264414251009065_5477_m_000000_0: Committed. Elapsed time: 0 ms.
14:22:04.872 INFO  Executor - Finished task 0.0 in stage 1229.0 (TID 1069). 1858 bytes result sent to driver
14:22:04.872 INFO  TaskSetManager - Finished task 0.0 in stage 1229.0 (TID 1069) in 20 ms on localhost (executor driver) (1/1)
14:22:04.872 INFO  TaskSchedulerImpl - Removed TaskSet 1229.0, whose tasks have all completed, from pool 
14:22:04.872 INFO  DAGScheduler - ResultStage 1229 (runJob at SparkHadoopWriter.scala:83) finished in 0.028 s
14:22:04.872 INFO  DAGScheduler - Job 561 is finished. Cancelling potential speculative or zombie tasks for this job
14:22:04.872 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1229: Stage finished
14:22:04.872 INFO  DAGScheduler - Job 561 finished: runJob at SparkHadoopWriter.scala:83, took 0.040740 s
14:22:04.872 INFO  SparkHadoopWriter - Start to commit write Job job_20250602142204673264414251009065_5477.
14:22:04.877 INFO  SparkHadoopWriter - Write Job job_20250602142204673264414251009065_5477 committed. Elapsed time: 4 ms.
14:22:04.884 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups2062205494209103614/MarkDups.sam
14:22:04.888 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups2062205494209103614/MarkDups.sam done
14:22:04.888 INFO  MarkDuplicatesSpark - Shutting down engine
[June 2, 2025 at 2:22:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
14:22:04.894 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:22:04.894 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
14:22:04.894 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
14:22:04.894 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
14:22:04.894 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
14:22:04.894 INFO  MarkDuplicatesSpark - Start Date/Time: June 2, 2025 at 2:22:04 PM UTC
14:22:04.894 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:22:04.894 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:22:04.894 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
14:22:04.894 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
14:22:04.894 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
14:22:04.894 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
14:22:04.894 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
14:22:04.895 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
14:22:04.895 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
14:22:04.895 INFO  MarkDuplicatesSpark - Requester pays: disabled
14:22:04.895 INFO  MarkDuplicatesSpark - Initializing engine
14:22:04.895 INFO  MarkDuplicatesSpark - Done initializing engine
14:22:04.895 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
14:22:04.897 INFO  MemoryStore - Block broadcast_1988 stored as values in memory (estimated size 306.3 KiB, free 1911.8 MiB)
14:22:04.905 INFO  MemoryStore - Block broadcast_1988_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1911.8 MiB)
14:22:04.905 INFO  BlockManagerInfo - Added broadcast_1988_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1918.1 MiB)
14:22:04.906 INFO  SparkContext - Created broadcast 1988 from newAPIHadoopFile at PathSplitSource.java:96
14:22:04.925 INFO  MemoryStore - Block broadcast_1989 stored as values in memory (estimated size 306.3 KiB, free 1911.5 MiB)
14:22:04.932 INFO  MemoryStore - Block broadcast_1989_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1911.4 MiB)
14:22:04.932 INFO  BlockManagerInfo - Added broadcast_1989_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1918.1 MiB)
14:22:04.932 INFO  SparkContext - Created broadcast 1989 from newAPIHadoopFile at PathSplitSource.java:96
14:22:04.964 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
14:22:04.964 INFO  FileInputFormat - Total input files to process : 1
14:22:04.965 INFO  DAGScheduler - Registering RDD 5491 (mapToPair at SparkUtils.java:161) as input to shuffle 305
14:22:04.965 INFO  DAGScheduler - Got job 562 (collect at SparkUtils.java:205) with 1 output partitions
14:22:04.965 INFO  DAGScheduler - Final stage: ResultStage 1231 (collect at SparkUtils.java:205)
14:22:04.965 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1230)
14:22:04.965 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1230)
14:22:04.965 INFO  DAGScheduler - Submitting ShuffleMapStage 1230 (MapPartitionsRDD[5491] at mapToPair at SparkUtils.java:161), which has no missing parents
14:22:04.982 INFO  MemoryStore - Block broadcast_1990 stored as values in memory (estimated size 457.4 KiB, free 1910.9 MiB)
14:22:04.984 INFO  MemoryStore - Block broadcast_1990_piece0 stored as bytes in memory (estimated size 201.1 KiB, free 1910.8 MiB)
14:22:04.985 INFO  BlockManagerInfo - Added broadcast_1990_piece0 in memory on localhost:39377 (size: 201.1 KiB, free: 1917.9 MiB)
14:22:04.985 INFO  SparkContext - Created broadcast 1990 from broadcast at DAGScheduler.scala:1580
14:22:04.985 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1230 (MapPartitionsRDD[5491] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:22:04.985 INFO  TaskSchedulerImpl - Adding task set 1230.0 with 1 tasks resource profile 0
14:22:04.985 INFO  TaskSetManager - Starting task 0.0 in stage 1230.0 (TID 1070) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9738 bytes) 
14:22:04.985 INFO  Executor - Running task 0.0 in stage 1230.0 (TID 1070)
14:22:05.015 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam:0+678
14:22:05.019 INFO  Executor - Finished task 0.0 in stage 1230.0 (TID 1070). 1148 bytes result sent to driver
14:22:05.019 INFO  TaskSetManager - Finished task 0.0 in stage 1230.0 (TID 1070) in 34 ms on localhost (executor driver) (1/1)
14:22:05.019 INFO  TaskSchedulerImpl - Removed TaskSet 1230.0, whose tasks have all completed, from pool 
14:22:05.019 INFO  DAGScheduler - ShuffleMapStage 1230 (mapToPair at SparkUtils.java:161) finished in 0.054 s
14:22:05.019 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.019 INFO  DAGScheduler - running: HashSet()
14:22:05.019 INFO  DAGScheduler - waiting: HashSet(ResultStage 1231)
14:22:05.019 INFO  DAGScheduler - failed: HashSet()
14:22:05.019 INFO  DAGScheduler - Submitting ResultStage 1231 (MapPartitionsRDD[5494] at mapPartitions at SparkUtils.java:188), which has no missing parents
14:22:05.020 INFO  MemoryStore - Block broadcast_1991 stored as values in memory (estimated size 7.4 KiB, free 1910.7 MiB)
14:22:05.020 INFO  MemoryStore - Block broadcast_1991_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1910.7 MiB)
14:22:05.020 INFO  BlockManagerInfo - Added broadcast_1991_piece0 in memory on localhost:39377 (size: 4.1 KiB, free: 1917.9 MiB)
14:22:05.021 INFO  SparkContext - Created broadcast 1991 from broadcast at DAGScheduler.scala:1580
14:22:05.021 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1231 (MapPartitionsRDD[5494] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
14:22:05.021 INFO  TaskSchedulerImpl - Adding task set 1231.0 with 1 tasks resource profile 0
14:22:05.021 INFO  TaskSetManager - Starting task 0.0 in stage 1231.0 (TID 1071) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:22:05.021 INFO  Executor - Running task 0.0 in stage 1231.0 (TID 1071)
14:22:05.022 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.022 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.024 INFO  Executor - Finished task 0.0 in stage 1231.0 (TID 1071). 2269 bytes result sent to driver
14:22:05.024 INFO  TaskSetManager - Finished task 0.0 in stage 1231.0 (TID 1071) in 3 ms on localhost (executor driver) (1/1)
14:22:05.024 INFO  TaskSchedulerImpl - Removed TaskSet 1231.0, whose tasks have all completed, from pool 
14:22:05.024 INFO  DAGScheduler - ResultStage 1231 (collect at SparkUtils.java:205) finished in 0.004 s
14:22:05.024 INFO  DAGScheduler - Job 562 is finished. Cancelling potential speculative or zombie tasks for this job
14:22:05.024 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1231: Stage finished
14:22:05.024 INFO  DAGScheduler - Job 562 finished: collect at SparkUtils.java:205, took 0.060470 s
14:22:05.028 INFO  MemoryStore - Block broadcast_1992 stored as values in memory (estimated size 136.0 B, free 1910.7 MiB)
14:22:05.029 INFO  MemoryStore - Block broadcast_1992_piece0 stored as bytes in memory (estimated size 24.0 B, free 1910.7 MiB)
14:22:05.029 INFO  BlockManagerInfo - Added broadcast_1992_piece0 in memory on localhost:39377 (size: 24.0 B, free: 1917.9 MiB)
14:22:05.029 INFO  SparkContext - Created broadcast 1992 from broadcast at MarkDuplicatesSparkUtils.java:126
14:22:05.029 INFO  MemoryStore - Block broadcast_1993 stored as values in memory (estimated size 136.0 B, free 1910.7 MiB)
14:22:05.030 INFO  MemoryStore - Block broadcast_1993_piece0 stored as bytes in memory (estimated size 21.0 B, free 1910.7 MiB)
14:22:05.030 INFO  BlockManagerInfo - Added broadcast_1993_piece0 in memory on localhost:39377 (size: 21.0 B, free: 1917.9 MiB)
14:22:05.030 INFO  SparkContext - Created broadcast 1993 from broadcast at MarkDuplicatesSparkUtils.java:127
14:22:05.041 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
14:22:05.041 INFO  DAGScheduler - Registering RDD 5503 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 308
14:22:05.042 INFO  DAGScheduler - Registering RDD 5507 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 307
14:22:05.042 INFO  DAGScheduler - Registering RDD 5511 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 306
14:22:05.042 INFO  DAGScheduler - Got job 563 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
14:22:05.042 INFO  DAGScheduler - Final stage: ResultStage 1236 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
14:22:05.042 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1235)
14:22:05.042 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1235)
14:22:05.042 INFO  DAGScheduler - Submitting ShuffleMapStage 1233 (MapPartitionsRDD[5503] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
14:22:05.043 INFO  MemoryStore - Block broadcast_1994 stored as values in memory (estimated size 21.1 KiB, free 1910.7 MiB)
14:22:05.043 INFO  MemoryStore - Block broadcast_1994_piece0 stored as bytes in memory (estimated size 10.9 KiB, free 1910.7 MiB)
14:22:05.043 INFO  BlockManagerInfo - Added broadcast_1994_piece0 in memory on localhost:39377 (size: 10.9 KiB, free: 1917.9 MiB)
14:22:05.043 INFO  SparkContext - Created broadcast 1994 from broadcast at DAGScheduler.scala:1580
14:22:05.044 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1233 (MapPartitionsRDD[5503] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
14:22:05.044 INFO  TaskSchedulerImpl - Adding task set 1233.0 with 1 tasks resource profile 0
14:22:05.044 INFO  TaskSetManager - Starting task 0.0 in stage 1233.0 (TID 1072) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
14:22:05.044 INFO  Executor - Running task 0.0 in stage 1233.0 (TID 1072)
14:22:05.046 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.046 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.048 INFO  Executor - Finished task 0.0 in stage 1233.0 (TID 1072). 1922 bytes result sent to driver
14:22:05.048 INFO  TaskSetManager - Finished task 0.0 in stage 1233.0 (TID 1072) in 4 ms on localhost (executor driver) (1/1)
14:22:05.048 INFO  TaskSchedulerImpl - Removed TaskSet 1233.0, whose tasks have all completed, from pool 
14:22:05.048 INFO  DAGScheduler - ShuffleMapStage 1233 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.006 s
14:22:05.048 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.048 INFO  DAGScheduler - running: HashSet()
14:22:05.048 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1234, ShuffleMapStage 1235, ResultStage 1236)
14:22:05.049 INFO  DAGScheduler - failed: HashSet()
14:22:05.049 INFO  DAGScheduler - Submitting ShuffleMapStage 1234 (MapPartitionsRDD[5507] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
14:22:05.049 INFO  MemoryStore - Block broadcast_1995 stored as values in memory (estimated size 25.2 KiB, free 1910.7 MiB)
14:22:05.050 INFO  MemoryStore - Block broadcast_1995_piece0 stored as bytes in memory (estimated size 13.1 KiB, free 1910.7 MiB)
14:22:05.050 INFO  BlockManagerInfo - Added broadcast_1995_piece0 in memory on localhost:39377 (size: 13.1 KiB, free: 1917.9 MiB)
14:22:05.050 INFO  SparkContext - Created broadcast 1995 from broadcast at DAGScheduler.scala:1580
14:22:05.050 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1234 (MapPartitionsRDD[5507] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
14:22:05.050 INFO  TaskSchedulerImpl - Adding task set 1234.0 with 1 tasks resource profile 0
14:22:05.050 INFO  TaskSetManager - Starting task 0.0 in stage 1234.0 (TID 1073) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
14:22:05.051 INFO  Executor - Running task 0.0 in stage 1234.0 (TID 1073)
14:22:05.052 INFO  ShuffleBlockFetcherIterator - Getting 1 (106.0 B) non-empty blocks including 1 (106.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.052 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.055 INFO  Executor - Finished task 0.0 in stage 1234.0 (TID 1073). 1922 bytes result sent to driver
14:22:05.056 INFO  TaskSetManager - Finished task 0.0 in stage 1234.0 (TID 1073) in 6 ms on localhost (executor driver) (1/1)
14:22:05.056 INFO  TaskSchedulerImpl - Removed TaskSet 1234.0, whose tasks have all completed, from pool 
14:22:05.056 INFO  DAGScheduler - ShuffleMapStage 1234 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.007 s
14:22:05.056 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.056 INFO  DAGScheduler - running: HashSet()
14:22:05.056 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1235, ResultStage 1236)
14:22:05.056 INFO  DAGScheduler - failed: HashSet()
14:22:05.056 INFO  DAGScheduler - Submitting ShuffleMapStage 1235 (MapPartitionsRDD[5511] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
14:22:05.057 INFO  MemoryStore - Block broadcast_1996 stored as values in memory (estimated size 15.9 KiB, free 1910.7 MiB)
14:22:05.057 INFO  MemoryStore - Block broadcast_1996_piece0 stored as bytes in memory (estimated size 8.3 KiB, free 1910.6 MiB)
14:22:05.057 INFO  BlockManagerInfo - Added broadcast_1996_piece0 in memory on localhost:39377 (size: 8.3 KiB, free: 1917.8 MiB)
14:22:05.057 INFO  SparkContext - Created broadcast 1996 from broadcast at DAGScheduler.scala:1580
14:22:05.057 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1235 (MapPartitionsRDD[5511] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
14:22:05.057 INFO  TaskSchedulerImpl - Adding task set 1235.0 with 1 tasks resource profile 0
14:22:05.058 INFO  TaskSetManager - Starting task 0.0 in stage 1235.0 (TID 1074) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:22:05.058 INFO  Executor - Running task 0.0 in stage 1235.0 (TID 1074)
14:22:05.059 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.059 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.061 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.061 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.062 INFO  Executor - Finished task 0.0 in stage 1235.0 (TID 1074). 1879 bytes result sent to driver
14:22:05.062 INFO  TaskSetManager - Finished task 0.0 in stage 1235.0 (TID 1074) in 4 ms on localhost (executor driver) (1/1)
14:22:05.063 INFO  TaskSchedulerImpl - Removed TaskSet 1235.0, whose tasks have all completed, from pool 
14:22:05.063 INFO  DAGScheduler - ShuffleMapStage 1235 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.007 s
14:22:05.063 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.063 INFO  DAGScheduler - running: HashSet()
14:22:05.063 INFO  DAGScheduler - waiting: HashSet(ResultStage 1236)
14:22:05.063 INFO  DAGScheduler - failed: HashSet()
14:22:05.063 INFO  DAGScheduler - Submitting ResultStage 1236 (MapPartitionsRDD[5513] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
14:22:05.063 INFO  MemoryStore - Block broadcast_1997 stored as values in memory (estimated size 17.1 KiB, free 1910.6 MiB)
14:22:05.064 INFO  MemoryStore - Block broadcast_1997_piece0 stored as bytes in memory (estimated size 9.0 KiB, free 1910.6 MiB)
14:22:05.064 INFO  BlockManagerInfo - Added broadcast_1997_piece0 in memory on localhost:39377 (size: 9.0 KiB, free: 1917.8 MiB)
14:22:05.064 INFO  SparkContext - Created broadcast 1997 from broadcast at DAGScheduler.scala:1580
14:22:05.064 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1236 (MapPartitionsRDD[5513] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
14:22:05.064 INFO  TaskSchedulerImpl - Adding task set 1236.0 with 1 tasks resource profile 0
14:22:05.065 INFO  TaskSetManager - Starting task 0.0 in stage 1236.0 (TID 1075) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:22:05.065 INFO  Executor - Running task 0.0 in stage 1236.0 (TID 1075)
14:22:05.067 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.067 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.074 INFO  Executor - Finished task 0.0 in stage 1236.0 (TID 1075). 2016 bytes result sent to driver
14:22:05.074 INFO  BlockManagerInfo - Removed broadcast_1973_piece0 on localhost:39377 in memory (size: 8.0 KiB, free: 1917.8 MiB)
14:22:05.074 INFO  TaskSetManager - Finished task 0.0 in stage 1236.0 (TID 1075) in 9 ms on localhost (executor driver) (1/1)
14:22:05.074 INFO  TaskSchedulerImpl - Removed TaskSet 1236.0, whose tasks have all completed, from pool 
14:22:05.075 INFO  DAGScheduler - ResultStage 1236 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.011 s
14:22:05.075 INFO  DAGScheduler - Job 563 is finished. Cancelling potential speculative or zombie tasks for this job
14:22:05.075 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1236: Stage finished
14:22:05.075 INFO  BlockManagerInfo - Removed broadcast_1961_piece0 on localhost:39377 in memory (size: 72.1 KiB, free: 1917.9 MiB)
14:22:05.075 INFO  DAGScheduler - Job 563 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.033865 s
14:22:05.075 INFO  BlockManagerInfo - Removed broadcast_1971_piece0 on localhost:39377 in memory (size: 9.1 KiB, free: 1917.9 MiB)
14:22:05.076 INFO  BlockManagerInfo - Removed broadcast_1970_piece0 on localhost:39377 in memory (size: 8.5 KiB, free: 1917.9 MiB)
14:22:05.076 INFO  BlockManagerInfo - Removed broadcast_1995_piece0 on localhost:39377 in memory (size: 13.1 KiB, free: 1917.9 MiB)
14:22:05.077 INFO  BlockManagerInfo - Removed broadcast_1966_piece0 on localhost:39377 in memory (size: 19.0 B, free: 1917.9 MiB)
14:22:05.077 INFO  BlockManagerInfo - Removed broadcast_1983_piece0 on localhost:39377 in memory (size: 17.3 KiB, free: 1918.0 MiB)
14:22:05.078 INFO  BlockManagerInfo - Removed broadcast_1959_piece0 on localhost:39377 in memory (size: 663.0 B, free: 1918.0 MiB)
14:22:05.078 INFO  BlockManagerInfo - Removed broadcast_1982_piece0 on localhost:39377 in memory (size: 22.0 KiB, free: 1918.0 MiB)
14:22:05.079 INFO  BlockManagerInfo - Removed broadcast_1991_piece0 on localhost:39377 in memory (size: 4.1 KiB, free: 1918.0 MiB)
14:22:05.079 INFO  BlockManagerInfo - Removed broadcast_1986_piece0 on localhost:39377 in memory (size: 16.8 KiB, free: 1918.0 MiB)
14:22:05.079 INFO  BlockManagerInfo - Removed broadcast_1990_piece0 on localhost:39377 in memory (size: 201.1 KiB, free: 1918.2 MiB)
14:22:05.080 INFO  BlockManagerInfo - Removed broadcast_1980_piece0 on localhost:39377 in memory (size: 209.0 B, free: 1918.2 MiB)
14:22:05.080 INFO  BlockManagerInfo - Removed broadcast_1958_piece0 on localhost:39377 in memory (size: 9.1 KiB, free: 1918.2 MiB)
14:22:05.080 INFO  BlockManagerInfo - Removed broadcast_1967_piece0 on localhost:39377 in memory (size: 31.0 B, free: 1918.2 MiB)
14:22:05.081 INFO  BlockManagerInfo - Removed broadcast_1977_piece0 on localhost:39377 in memory (size: 210.1 KiB, free: 1918.4 MiB)
14:22:05.081 INFO  BlockManagerInfo - Removed broadcast_1968_piece0 on localhost:39377 in memory (size: 11.2 KiB, free: 1918.4 MiB)
14:22:05.081 INFO  BlockManagerInfo - Removed broadcast_1960_piece0 on localhost:39377 in memory (size: 8.0 KiB, free: 1918.4 MiB)
14:22:05.082 INFO  BlockManagerInfo - Removed broadcast_1988_piece0 on localhost:39377 in memory (size: 64.4 KiB, free: 1918.5 MiB)
14:22:05.082 INFO  BlockManagerInfo - Removed broadcast_1984_piece0 on localhost:39377 in memory (size: 18.0 KiB, free: 1918.5 MiB)
14:22:05.082 INFO  BlockManagerInfo - Removed broadcast_1976_piece0 on localhost:39377 in memory (size: 64.4 KiB, free: 1918.6 MiB)
14:22:05.083 INFO  BlockManagerInfo - Removed broadcast_1987_piece0 on localhost:39377 in memory (size: 80.9 KiB, free: 1918.7 MiB)
14:22:05.083 INFO  BlockManagerInfo - Removed broadcast_1975_piece0 on localhost:39377 in memory (size: 64.4 KiB, free: 1918.7 MiB)
14:22:05.083 INFO  BlockManagerInfo - Removed broadcast_1979_piece0 on localhost:39377 in memory (size: 540.0 B, free: 1918.7 MiB)
14:22:05.084 INFO  BlockManagerInfo - Removed broadcast_1964_piece0 on localhost:39377 in memory (size: 201.2 KiB, free: 1918.9 MiB)
14:22:05.084 INFO  BlockManagerInfo - Removed broadcast_1957_piece0 on localhost:39377 in memory (size: 8.5 KiB, free: 1918.9 MiB)
14:22:05.084 INFO  BlockManagerInfo - Removed broadcast_1965_piece0 on localhost:39377 in memory (size: 4.1 KiB, free: 1918.9 MiB)
14:22:05.084 INFO  BlockManagerInfo - Removed broadcast_1953_piece0 on localhost:39377 in memory (size: 19.0 B, free: 1918.9 MiB)
14:22:05.085 INFO  BlockManagerInfo - Removed broadcast_1969_piece0 on localhost:39377 in memory (size: 13.2 KiB, free: 1918.9 MiB)
14:22:05.085 INFO  BlockManagerInfo - Removed broadcast_1996_piece0 on localhost:39377 in memory (size: 8.3 KiB, free: 1918.9 MiB)
14:22:05.085 INFO  BlockManagerInfo - Removed broadcast_1963_piece0 on localhost:39377 in memory (size: 64.3 KiB, free: 1919.0 MiB)
14:22:05.086 INFO  BlockManagerInfo - Removed broadcast_1981_piece0 on localhost:39377 in memory (size: 19.8 KiB, free: 1919.0 MiB)
14:22:05.086 INFO  BlockManagerInfo - Removed broadcast_1978_piece0 on localhost:39377 in memory (size: 4.1 KiB, free: 1919.0 MiB)
14:22:05.086 INFO  BlockManagerInfo - Removed broadcast_1962_piece0 on localhost:39377 in memory (size: 64.3 KiB, free: 1919.1 MiB)
14:22:05.087 INFO  BlockManagerInfo - Removed broadcast_1954_piece0 on localhost:39377 in memory (size: 31.0 B, free: 1919.1 MiB)
14:22:05.087 INFO  BlockManagerInfo - Removed broadcast_1972_piece0 on localhost:39377 in memory (size: 663.0 B, free: 1919.1 MiB)
14:22:05.087 INFO  BlockManagerInfo - Removed broadcast_1985_piece0 on localhost:39377 in memory (size: 7.9 KiB, free: 1919.1 MiB)
14:22:05.088 INFO  BlockManagerInfo - Removed broadcast_1974_piece0 on localhost:39377 in memory (size: 72.1 KiB, free: 1919.2 MiB)
14:22:05.088 INFO  BlockManagerInfo - Removed broadcast_1950_piece0 on localhost:39377 in memory (size: 64.3 KiB, free: 1919.2 MiB)
14:22:05.089 INFO  BlockManagerInfo - Removed broadcast_1994_piece0 on localhost:39377 in memory (size: 10.9 KiB, free: 1919.2 MiB)
14:22:05.090 INFO  MemoryStore - Block broadcast_1998 stored as values in memory (estimated size 5.6 KiB, free 1916.4 MiB)
14:22:05.090 INFO  MemoryStore - Block broadcast_1998_piece0 stored as bytes in memory (estimated size 500.0 B, free 1916.4 MiB)
14:22:05.090 INFO  BlockManagerInfo - Added broadcast_1998_piece0 in memory on localhost:39377 (size: 500.0 B, free: 1919.2 MiB)
14:22:05.090 INFO  SparkContext - Created broadcast 1998 from broadcast at ReadsSparkSink.java:133
14:22:05.094 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
14:22:05.094 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:22:05.094 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:22:05.110 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
14:22:05.111 INFO  DAGScheduler - Registering RDD 5515 (mapToPair at SparkUtils.java:161) as input to shuffle 309
14:22:05.111 INFO  DAGScheduler - Got job 564 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
14:22:05.111 INFO  DAGScheduler - Final stage: ResultStage 1241 (runJob at SparkHadoopWriter.scala:83)
14:22:05.111 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1240)
14:22:05.111 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1240)
14:22:05.111 INFO  DAGScheduler - Submitting ShuffleMapStage 1240 (MapPartitionsRDD[5515] at mapToPair at SparkUtils.java:161), which has no missing parents
14:22:05.112 INFO  MemoryStore - Block broadcast_1999 stored as values in memory (estimated size 15.0 KiB, free 1916.4 MiB)
14:22:05.112 INFO  MemoryStore - Block broadcast_1999_piece0 stored as bytes in memory (estimated size 7.8 KiB, free 1916.4 MiB)
14:22:05.112 INFO  BlockManagerInfo - Added broadcast_1999_piece0 in memory on localhost:39377 (size: 7.8 KiB, free: 1919.2 MiB)
14:22:05.112 INFO  SparkContext - Created broadcast 1999 from broadcast at DAGScheduler.scala:1580
14:22:05.112 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1240 (MapPartitionsRDD[5515] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:22:05.113 INFO  TaskSchedulerImpl - Adding task set 1240.0 with 1 tasks resource profile 0
14:22:05.113 INFO  TaskSetManager - Starting task 0.0 in stage 1240.0 (TID 1076) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:22:05.113 INFO  Executor - Running task 0.0 in stage 1240.0 (TID 1076)
14:22:05.114 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.115 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.116 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.116 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.117 INFO  Executor - Finished task 0.0 in stage 1240.0 (TID 1076). 1922 bytes result sent to driver
14:22:05.117 INFO  TaskSetManager - Finished task 0.0 in stage 1240.0 (TID 1076) in 4 ms on localhost (executor driver) (1/1)
14:22:05.117 INFO  TaskSchedulerImpl - Removed TaskSet 1240.0, whose tasks have all completed, from pool 
14:22:05.117 INFO  DAGScheduler - ShuffleMapStage 1240 (mapToPair at SparkUtils.java:161) finished in 0.006 s
14:22:05.117 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.117 INFO  DAGScheduler - running: HashSet()
14:22:05.117 INFO  DAGScheduler - waiting: HashSet(ResultStage 1241)
14:22:05.117 INFO  DAGScheduler - failed: HashSet()
14:22:05.117 INFO  DAGScheduler - Submitting ResultStage 1241 (MapPartitionsRDD[5521] at saveAsTextFile at SamSink.java:65), which has no missing parents
14:22:05.124 INFO  MemoryStore - Block broadcast_2000 stored as values in memory (estimated size 160.3 KiB, free 1916.2 MiB)
14:22:05.125 INFO  MemoryStore - Block broadcast_2000_piece0 stored as bytes in memory (estimated size 72.1 KiB, free 1916.2 MiB)
14:22:05.125 INFO  BlockManagerInfo - Added broadcast_2000_piece0 in memory on localhost:39377 (size: 72.1 KiB, free: 1919.2 MiB)
14:22:05.125 INFO  SparkContext - Created broadcast 2000 from broadcast at DAGScheduler.scala:1580
14:22:05.125 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1241 (MapPartitionsRDD[5521] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
14:22:05.125 INFO  TaskSchedulerImpl - Adding task set 1241.0 with 1 tasks resource profile 0
14:22:05.125 INFO  TaskSetManager - Starting task 0.0 in stage 1241.0 (TID 1077) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:22:05.126 INFO  Executor - Running task 0.0 in stage 1241.0 (TID 1077)
14:22:05.129 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.129 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.130 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
14:22:05.130 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:22:05.130 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:22:05.142 INFO  FileOutputCommitter - Saved output of task 'attempt_202506021422053171065650908227962_5521_m_000000_0' to file:/tmp/MarkDups1657138809007107585/MarkDups.sam.parts/_temporary/0/task_202506021422053171065650908227962_5521_m_000000
14:22:05.142 INFO  SparkHadoopMapRedUtil - attempt_202506021422053171065650908227962_5521_m_000000_0: Committed. Elapsed time: 0 ms.
14:22:05.142 INFO  Executor - Finished task 0.0 in stage 1241.0 (TID 1077). 1858 bytes result sent to driver
14:22:05.142 INFO  TaskSetManager - Finished task 0.0 in stage 1241.0 (TID 1077) in 17 ms on localhost (executor driver) (1/1)
14:22:05.142 INFO  TaskSchedulerImpl - Removed TaskSet 1241.0, whose tasks have all completed, from pool 
14:22:05.142 INFO  DAGScheduler - ResultStage 1241 (runJob at SparkHadoopWriter.scala:83) finished in 0.024 s
14:22:05.142 INFO  DAGScheduler - Job 564 is finished. Cancelling potential speculative or zombie tasks for this job
14:22:05.142 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1241: Stage finished
14:22:05.143 INFO  DAGScheduler - Job 564 finished: runJob at SparkHadoopWriter.scala:83, took 0.032186 s
14:22:05.143 INFO  SparkHadoopWriter - Start to commit write Job job_202506021422053171065650908227962_5521.
14:22:05.147 INFO  SparkHadoopWriter - Write Job job_202506021422053171065650908227962_5521 committed. Elapsed time: 4 ms.
14:22:05.154 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups1657138809007107585/MarkDups.sam
14:22:05.158 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups1657138809007107585/MarkDups.sam done
14:22:05.158 INFO  MarkDuplicatesSpark - Shutting down engine
[June 2, 2025 at 2:22:05 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
14:22:05.164 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:22:05.164 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
14:22:05.164 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
14:22:05.164 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
14:22:05.164 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
14:22:05.164 INFO  MarkDuplicatesSpark - Start Date/Time: June 2, 2025 at 2:22:05 PM UTC
14:22:05.164 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:22:05.164 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
14:22:05.164 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
14:22:05.164 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
14:22:05.164 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
14:22:05.164 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
14:22:05.164 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
14:22:05.164 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
14:22:05.164 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
14:22:05.164 INFO  MarkDuplicatesSpark - Requester pays: disabled
14:22:05.164 INFO  MarkDuplicatesSpark - Initializing engine
14:22:05.164 INFO  MarkDuplicatesSpark - Done initializing engine
14:22:05.164 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
14:22:05.166 INFO  MemoryStore - Block broadcast_2001 stored as values in memory (estimated size 306.3 KiB, free 1915.9 MiB)
14:22:05.173 INFO  MemoryStore - Block broadcast_2001_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1915.8 MiB)
14:22:05.173 INFO  BlockManagerInfo - Added broadcast_2001_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1919.1 MiB)
14:22:05.173 INFO  SparkContext - Created broadcast 2001 from newAPIHadoopFile at PathSplitSource.java:96
14:22:05.193 INFO  MemoryStore - Block broadcast_2002 stored as values in memory (estimated size 306.3 KiB, free 1915.5 MiB)
14:22:05.200 INFO  MemoryStore - Block broadcast_2002_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1915.4 MiB)
14:22:05.200 INFO  BlockManagerInfo - Added broadcast_2002_piece0 in memory on localhost:39377 (size: 64.4 KiB, free: 1919.0 MiB)
14:22:05.200 INFO  SparkContext - Created broadcast 2002 from newAPIHadoopFile at PathSplitSource.java:96
14:22:05.232 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
14:22:05.233 INFO  FileInputFormat - Total input files to process : 1
14:22:05.233 INFO  DAGScheduler - Registering RDD 5535 (mapToPair at SparkUtils.java:161) as input to shuffle 310
14:22:05.233 INFO  DAGScheduler - Got job 565 (collect at SparkUtils.java:205) with 1 output partitions
14:22:05.233 INFO  DAGScheduler - Final stage: ResultStage 1243 (collect at SparkUtils.java:205)
14:22:05.233 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1242)
14:22:05.233 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1242)
14:22:05.233 INFO  DAGScheduler - Submitting ShuffleMapStage 1242 (MapPartitionsRDD[5535] at mapToPair at SparkUtils.java:161), which has no missing parents
14:22:05.251 INFO  MemoryStore - Block broadcast_2003 stored as values in memory (estimated size 481.4 KiB, free 1915.0 MiB)
14:22:05.253 INFO  MemoryStore - Block broadcast_2003_piece0 stored as bytes in memory (estimated size 207.4 KiB, free 1914.8 MiB)
14:22:05.253 INFO  BlockManagerInfo - Added broadcast_2003_piece0 in memory on localhost:39377 (size: 207.4 KiB, free: 1918.8 MiB)
14:22:05.254 INFO  SparkContext - Created broadcast 2003 from broadcast at DAGScheduler.scala:1580
14:22:05.254 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1242 (MapPartitionsRDD[5535] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:22:05.254 INFO  TaskSchedulerImpl - Adding task set 1242.0 with 1 tasks resource profile 0
14:22:05.254 INFO  TaskSetManager - Starting task 0.0 in stage 1242.0 (TID 1078) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9757 bytes) 
14:22:05.254 INFO  Executor - Running task 0.0 in stage 1242.0 (TID 1078)
14:22:05.283 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam:0+5416
14:22:05.288 INFO  Executor - Finished task 0.0 in stage 1242.0 (TID 1078). 1148 bytes result sent to driver
14:22:05.288 INFO  TaskSetManager - Finished task 0.0 in stage 1242.0 (TID 1078) in 34 ms on localhost (executor driver) (1/1)
14:22:05.288 INFO  TaskSchedulerImpl - Removed TaskSet 1242.0, whose tasks have all completed, from pool 
14:22:05.288 INFO  DAGScheduler - ShuffleMapStage 1242 (mapToPair at SparkUtils.java:161) finished in 0.054 s
14:22:05.288 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.288 INFO  DAGScheduler - running: HashSet()
14:22:05.288 INFO  DAGScheduler - waiting: HashSet(ResultStage 1243)
14:22:05.288 INFO  DAGScheduler - failed: HashSet()
14:22:05.289 INFO  DAGScheduler - Submitting ResultStage 1243 (MapPartitionsRDD[5538] at mapPartitions at SparkUtils.java:188), which has no missing parents
14:22:05.289 INFO  MemoryStore - Block broadcast_2004 stored as values in memory (estimated size 7.4 KiB, free 1914.8 MiB)
14:22:05.289 INFO  MemoryStore - Block broadcast_2004_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1914.8 MiB)
14:22:05.289 INFO  BlockManagerInfo - Added broadcast_2004_piece0 in memory on localhost:39377 (size: 4.1 KiB, free: 1918.8 MiB)
14:22:05.290 INFO  SparkContext - Created broadcast 2004 from broadcast at DAGScheduler.scala:1580
14:22:05.290 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1243 (MapPartitionsRDD[5538] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
14:22:05.290 INFO  TaskSchedulerImpl - Adding task set 1243.0 with 1 tasks resource profile 0
14:22:05.290 INFO  TaskSetManager - Starting task 0.0 in stage 1243.0 (TID 1079) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:22:05.290 INFO  Executor - Running task 0.0 in stage 1243.0 (TID 1079)
14:22:05.291 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.291 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.293 INFO  Executor - Finished task 0.0 in stage 1243.0 (TID 1079). 2039 bytes result sent to driver
14:22:05.293 INFO  TaskSetManager - Finished task 0.0 in stage 1243.0 (TID 1079) in 3 ms on localhost (executor driver) (1/1)
14:22:05.293 INFO  TaskSchedulerImpl - Removed TaskSet 1243.0, whose tasks have all completed, from pool 
14:22:05.293 INFO  DAGScheduler - ResultStage 1243 (collect at SparkUtils.java:205) finished in 0.004 s
14:22:05.293 INFO  DAGScheduler - Job 565 is finished. Cancelling potential speculative or zombie tasks for this job
14:22:05.293 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1243: Stage finished
14:22:05.293 INFO  DAGScheduler - Job 565 finished: collect at SparkUtils.java:205, took 0.060845 s
14:22:05.298 INFO  MemoryStore - Block broadcast_2005 stored as values in memory (estimated size 392.0 B, free 1914.8 MiB)
14:22:05.298 INFO  MemoryStore - Block broadcast_2005_piece0 stored as bytes in memory (estimated size 33.0 B, free 1914.8 MiB)
14:22:05.298 INFO  BlockManagerInfo - Added broadcast_2005_piece0 in memory on localhost:39377 (size: 33.0 B, free: 1918.8 MiB)
14:22:05.298 INFO  SparkContext - Created broadcast 2005 from broadcast at MarkDuplicatesSparkUtils.java:126
14:22:05.299 INFO  MemoryStore - Block broadcast_2006 stored as values in memory (estimated size 144.0 B, free 1914.8 MiB)
14:22:05.299 INFO  MemoryStore - Block broadcast_2006_piece0 stored as bytes in memory (estimated size 28.0 B, free 1914.8 MiB)
14:22:05.299 INFO  BlockManagerInfo - Added broadcast_2006_piece0 in memory on localhost:39377 (size: 28.0 B, free: 1918.8 MiB)
14:22:05.299 INFO  SparkContext - Created broadcast 2006 from broadcast at MarkDuplicatesSparkUtils.java:127
14:22:05.311 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
14:22:05.312 INFO  DAGScheduler - Registering RDD 5547 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 313
14:22:05.312 INFO  DAGScheduler - Registering RDD 5551 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 312
14:22:05.312 INFO  DAGScheduler - Registering RDD 5555 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 311
14:22:05.312 INFO  DAGScheduler - Got job 566 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
14:22:05.312 INFO  DAGScheduler - Final stage: ResultStage 1248 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
14:22:05.312 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1247)
14:22:05.312 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1247)
14:22:05.312 INFO  DAGScheduler - Submitting ShuffleMapStage 1245 (MapPartitionsRDD[5547] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
14:22:05.313 INFO  MemoryStore - Block broadcast_2007 stored as values in memory (estimated size 45.2 KiB, free 1914.7 MiB)
14:22:05.314 INFO  MemoryStore - Block broadcast_2007_piece0 stored as bytes in memory (estimated size 17.1 KiB, free 1914.7 MiB)
14:22:05.314 INFO  BlockManagerInfo - Added broadcast_2007_piece0 in memory on localhost:39377 (size: 17.1 KiB, free: 1918.8 MiB)
14:22:05.314 INFO  SparkContext - Created broadcast 2007 from broadcast at DAGScheduler.scala:1580
14:22:05.314 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1245 (MapPartitionsRDD[5547] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
14:22:05.314 INFO  TaskSchedulerImpl - Adding task set 1245.0 with 1 tasks resource profile 0
14:22:05.314 INFO  TaskSetManager - Starting task 0.0 in stage 1245.0 (TID 1080) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
14:22:05.315 INFO  Executor - Running task 0.0 in stage 1245.0 (TID 1080)
14:22:05.316 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.316 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.319 INFO  Executor - Finished task 0.0 in stage 1245.0 (TID 1080). 1922 bytes result sent to driver
14:22:05.319 INFO  TaskSetManager - Finished task 0.0 in stage 1245.0 (TID 1080) in 5 ms on localhost (executor driver) (1/1)
14:22:05.319 INFO  TaskSchedulerImpl - Removed TaskSet 1245.0, whose tasks have all completed, from pool 
14:22:05.319 INFO  DAGScheduler - ShuffleMapStage 1245 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.006 s
14:22:05.319 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.319 INFO  DAGScheduler - running: HashSet()
14:22:05.319 INFO  DAGScheduler - waiting: HashSet(ResultStage 1248, ShuffleMapStage 1246, ShuffleMapStage 1247)
14:22:05.319 INFO  DAGScheduler - failed: HashSet()
14:22:05.319 INFO  DAGScheduler - Submitting ShuffleMapStage 1246 (MapPartitionsRDD[5551] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
14:22:05.320 INFO  MemoryStore - Block broadcast_2008 stored as values in memory (estimated size 49.3 KiB, free 1914.6 MiB)
14:22:05.321 INFO  MemoryStore - Block broadcast_2008_piece0 stored as bytes in memory (estimated size 19.3 KiB, free 1914.6 MiB)
14:22:05.321 INFO  BlockManagerInfo - Added broadcast_2008_piece0 in memory on localhost:39377 (size: 19.3 KiB, free: 1918.8 MiB)
14:22:05.321 INFO  SparkContext - Created broadcast 2008 from broadcast at DAGScheduler.scala:1580
14:22:05.321 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1246 (MapPartitionsRDD[5551] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
14:22:05.321 INFO  TaskSchedulerImpl - Adding task set 1246.0 with 1 tasks resource profile 0
14:22:05.321 INFO  TaskSetManager - Starting task 0.0 in stage 1246.0 (TID 1081) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
14:22:05.321 INFO  Executor - Running task 0.0 in stage 1246.0 (TID 1081)
14:22:05.323 INFO  ShuffleBlockFetcherIterator - Getting 1 (593.0 B) non-empty blocks including 1 (593.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.323 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.327 INFO  Executor - Finished task 0.0 in stage 1246.0 (TID 1081). 1922 bytes result sent to driver
14:22:05.328 INFO  TaskSetManager - Finished task 0.0 in stage 1246.0 (TID 1081) in 7 ms on localhost (executor driver) (1/1)
14:22:05.328 INFO  TaskSchedulerImpl - Removed TaskSet 1246.0, whose tasks have all completed, from pool 
14:22:05.328 INFO  DAGScheduler - ShuffleMapStage 1246 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.009 s
14:22:05.328 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.328 INFO  DAGScheduler - running: HashSet()
14:22:05.328 INFO  DAGScheduler - waiting: HashSet(ResultStage 1248, ShuffleMapStage 1247)
14:22:05.328 INFO  DAGScheduler - failed: HashSet()
14:22:05.328 INFO  DAGScheduler - Submitting ShuffleMapStage 1247 (MapPartitionsRDD[5555] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
14:22:05.329 INFO  MemoryStore - Block broadcast_2009 stored as values in memory (estimated size 40.0 KiB, free 1914.6 MiB)
14:22:05.329 INFO  MemoryStore - Block broadcast_2009_piece0 stored as bytes in memory (estimated size 14.5 KiB, free 1914.6 MiB)
14:22:05.329 INFO  BlockManagerInfo - Added broadcast_2009_piece0 in memory on localhost:39377 (size: 14.5 KiB, free: 1918.8 MiB)
14:22:05.329 INFO  SparkContext - Created broadcast 2009 from broadcast at DAGScheduler.scala:1580
14:22:05.330 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1247 (MapPartitionsRDD[5555] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
14:22:05.330 INFO  TaskSchedulerImpl - Adding task set 1247.0 with 1 tasks resource profile 0
14:22:05.330 INFO  TaskSetManager - Starting task 0.0 in stage 1247.0 (TID 1082) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:22:05.330 INFO  Executor - Running task 0.0 in stage 1247.0 (TID 1082)
14:22:05.332 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.332 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.333 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.333 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.334 INFO  Executor - Finished task 0.0 in stage 1247.0 (TID 1082). 1922 bytes result sent to driver
14:22:05.334 INFO  TaskSetManager - Finished task 0.0 in stage 1247.0 (TID 1082) in 4 ms on localhost (executor driver) (1/1)
14:22:05.334 INFO  TaskSchedulerImpl - Removed TaskSet 1247.0, whose tasks have all completed, from pool 
14:22:05.334 INFO  DAGScheduler - ShuffleMapStage 1247 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.006 s
14:22:05.334 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.335 INFO  DAGScheduler - running: HashSet()
14:22:05.335 INFO  DAGScheduler - waiting: HashSet(ResultStage 1248)
14:22:05.335 INFO  DAGScheduler - failed: HashSet()
14:22:05.335 INFO  DAGScheduler - Submitting ResultStage 1248 (MapPartitionsRDD[5557] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
14:22:05.335 INFO  MemoryStore - Block broadcast_2010 stored as values in memory (estimated size 41.2 KiB, free 1914.5 MiB)
14:22:05.336 INFO  MemoryStore - Block broadcast_2010_piece0 stored as bytes in memory (estimated size 15.3 KiB, free 1914.5 MiB)
14:22:05.336 INFO  BlockManagerInfo - Added broadcast_2010_piece0 in memory on localhost:39377 (size: 15.3 KiB, free: 1918.8 MiB)
14:22:05.336 INFO  SparkContext - Created broadcast 2010 from broadcast at DAGScheduler.scala:1580
14:22:05.336 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1248 (MapPartitionsRDD[5557] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
14:22:05.336 INFO  TaskSchedulerImpl - Adding task set 1248.0 with 1 tasks resource profile 0
14:22:05.337 INFO  TaskSetManager - Starting task 0.0 in stage 1248.0 (TID 1083) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:22:05.337 INFO  Executor - Running task 0.0 in stage 1248.0 (TID 1083)
14:22:05.338 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.338 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.340 INFO  Executor - Finished task 0.0 in stage 1248.0 (TID 1083). 1944 bytes result sent to driver
14:22:05.340 INFO  TaskSetManager - Finished task 0.0 in stage 1248.0 (TID 1083) in 3 ms on localhost (executor driver) (1/1)
14:22:05.340 INFO  TaskSchedulerImpl - Removed TaskSet 1248.0, whose tasks have all completed, from pool 
14:22:05.340 INFO  DAGScheduler - ResultStage 1248 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.005 s
14:22:05.340 INFO  DAGScheduler - Job 566 is finished. Cancelling potential speculative or zombie tasks for this job
14:22:05.340 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1248: Stage finished
14:22:05.340 INFO  DAGScheduler - Job 566 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.028497 s
14:22:05.351 INFO  MemoryStore - Block broadcast_2011 stored as values in memory (estimated size 93.5 KiB, free 1914.4 MiB)
14:22:05.351 INFO  MemoryStore - Block broadcast_2011_piece0 stored as bytes in memory (estimated size 5.6 KiB, free 1914.4 MiB)
14:22:05.352 INFO  BlockManagerInfo - Added broadcast_2011_piece0 in memory on localhost:39377 (size: 5.6 KiB, free: 1918.8 MiB)
14:22:05.352 INFO  SparkContext - Created broadcast 2011 from broadcast at ReadsSparkSink.java:133
14:22:05.355 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
14:22:05.355 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:22:05.355 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:22:05.371 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
14:22:05.371 INFO  DAGScheduler - Registering RDD 5559 (mapToPair at SparkUtils.java:161) as input to shuffle 314
14:22:05.371 INFO  DAGScheduler - Got job 567 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
14:22:05.371 INFO  DAGScheduler - Final stage: ResultStage 1253 (runJob at SparkHadoopWriter.scala:83)
14:22:05.371 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1252)
14:22:05.371 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1252)
14:22:05.372 INFO  DAGScheduler - Submitting ShuffleMapStage 1252 (MapPartitionsRDD[5559] at mapToPair at SparkUtils.java:161), which has no missing parents
14:22:05.372 INFO  MemoryStore - Block broadcast_2012 stored as values in memory (estimated size 39.1 KiB, free 1914.4 MiB)
14:22:05.373 INFO  MemoryStore - Block broadcast_2012_piece0 stored as bytes in memory (estimated size 14.1 KiB, free 1914.4 MiB)
14:22:05.373 INFO  BlockManagerInfo - Added broadcast_2012_piece0 in memory on localhost:39377 (size: 14.1 KiB, free: 1918.8 MiB)
14:22:05.373 INFO  SparkContext - Created broadcast 2012 from broadcast at DAGScheduler.scala:1580
14:22:05.373 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1252 (MapPartitionsRDD[5559] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
14:22:05.373 INFO  TaskSchedulerImpl - Adding task set 1252.0 with 1 tasks resource profile 0
14:22:05.374 INFO  TaskSetManager - Starting task 0.0 in stage 1252.0 (TID 1084) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
14:22:05.374 INFO  Executor - Running task 0.0 in stage 1252.0 (TID 1084)
14:22:05.375 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.375 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.377 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.377 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.378 INFO  Executor - Finished task 0.0 in stage 1252.0 (TID 1084). 1922 bytes result sent to driver
14:22:05.378 INFO  TaskSetManager - Finished task 0.0 in stage 1252.0 (TID 1084) in 5 ms on localhost (executor driver) (1/1)
14:22:05.378 INFO  TaskSchedulerImpl - Removed TaskSet 1252.0, whose tasks have all completed, from pool 
14:22:05.378 INFO  DAGScheduler - ShuffleMapStage 1252 (mapToPair at SparkUtils.java:161) finished in 0.006 s
14:22:05.378 INFO  DAGScheduler - looking for newly runnable stages
14:22:05.379 INFO  DAGScheduler - running: HashSet()
14:22:05.379 INFO  DAGScheduler - waiting: HashSet(ResultStage 1253)
14:22:05.379 INFO  DAGScheduler - failed: HashSet()
14:22:05.379 INFO  DAGScheduler - Submitting ResultStage 1253 (MapPartitionsRDD[5565] at saveAsTextFile at SamSink.java:65), which has no missing parents
14:22:05.385 INFO  MemoryStore - Block broadcast_2013 stored as values in memory (estimated size 184.3 KiB, free 1914.2 MiB)
14:22:05.386 INFO  MemoryStore - Block broadcast_2013_piece0 stored as bytes in memory (estimated size 78.3 KiB, free 1914.1 MiB)
14:22:05.386 INFO  BlockManagerInfo - Added broadcast_2013_piece0 in memory on localhost:39377 (size: 78.3 KiB, free: 1918.7 MiB)
14:22:05.386 INFO  SparkContext - Created broadcast 2013 from broadcast at DAGScheduler.scala:1580
14:22:05.386 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1253 (MapPartitionsRDD[5565] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
14:22:05.386 INFO  TaskSchedulerImpl - Adding task set 1253.0 with 1 tasks resource profile 0
14:22:05.387 INFO  TaskSetManager - Starting task 0.0 in stage 1253.0 (TID 1085) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
14:22:05.387 INFO  Executor - Running task 0.0 in stage 1253.0 (TID 1085)
14:22:05.390 INFO  ShuffleBlockFetcherIterator - Getting 1 (3.9 KiB) non-empty blocks including 1 (3.9 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
14:22:05.390 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
14:22:05.392 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
14:22:05.392 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
14:22:05.392 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
14:22:05.402 INFO  FileOutputCommitter - Saved output of task 'attempt_202506021422057038634515689138248_5565_m_000000_0' to file:/tmp/MarkDups6715684135323250328/MarkDups.sam.parts/_temporary/0/task_202506021422057038634515689138248_5565_m_000000
14:22:05.402 INFO  SparkHadoopMapRedUtil - attempt_202506021422057038634515689138248_5565_m_000000_0: Committed. Elapsed time: 0 ms.
14:22:05.403 INFO  Executor - Finished task 0.0 in stage 1253.0 (TID 1085). 1858 bytes result sent to driver
14:22:05.403 INFO  TaskSetManager - Finished task 0.0 in stage 1253.0 (TID 1085) in 16 ms on localhost (executor driver) (1/1)
14:22:05.403 INFO  TaskSchedulerImpl - Removed TaskSet 1253.0, whose tasks have all completed, from pool 
14:22:05.403 INFO  DAGScheduler - ResultStage 1253 (runJob at SparkHadoopWriter.scala:83) finished in 0.024 s
14:22:05.403 INFO  DAGScheduler - Job 567 is finished. Cancelling potential speculative or zombie tasks for this job
14:22:05.403 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1253: Stage finished
14:22:05.403 INFO  DAGScheduler - Job 567 finished: runJob at SparkHadoopWriter.scala:83, took 0.032327 s
14:22:05.403 INFO  SparkHadoopWriter - Start to commit write Job job_202506021422057038634515689138248_5565.
14:22:05.407 INFO  SparkHadoopWriter - Write Job job_202506021422057038634515689138248_5565 committed. Elapsed time: 4 ms.
14:22:05.415 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups6715684135323250328/MarkDups.sam
14:22:05.418 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups6715684135323250328/MarkDups.sam done
14:22:05.418 INFO  MarkDuplicatesSpark - Shutting down engine
[June 2, 2025 at 2:22:05 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:05 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
14:22:11.290 ERROR Executor - Exception in task 0.0 in stage 1473.0 (TID 1232)
org.broadinstitute.hellbender.exceptions.UserException$ReadMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 is missing the read group (RG) tag, which is required by the GATK.  Please use http://gatkforums.broadinstitute.org/discussion/59/companion-utilities-replacereadgroups to fix this problem
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:64) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[main/:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[main/:?]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[main/:?]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[scala-library-2.13.14.jar:?]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[scala-library-2.13.14.jar:?]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
14:22:11.306 ERROR TaskSetManager - Task 0 in stage 1473.0 failed 1 times; aborting job
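
[Editor's note] The ReadMissingReadGroup failure above is the expected outcome of a negative test: the input read carries no RG tag, so MarkDuplicatesSpark cannot resolve a library for it. Outside of a test, the usual remedy is the read-group utility the error message points to. The sketch below shows the equivalent operation at the htsjdk level; it is a minimal illustration under assumptions, not GATK code, and the file names and read-group values (input.bam, output.bam, rg1, lib1, sample1, unit1) are placeholders.

    import htsjdk.samtools.*;

    import java.io.File;

    /** Minimal sketch: attach a single read group to every read, mirroring what the
     *  AddOrReplaceReadGroups companion utility does. Paths and RG fields below are
     *  placeholders, not values from this test run. */
    public class AddReadGroupSketch {
        public static void main(String[] args) throws Exception {
            try (SamReader reader = SamReaderFactory.makeDefault().open(new File("input.bam"))) {
                // Build the read group that every read will be assigned to.
                SAMReadGroupRecord rg = new SAMReadGroupRecord("rg1");
                rg.setLibrary("lib1");        // LB: MarkDuplicates groups reads by library
                rg.setSample("sample1");      // SM
                rg.setPlatform("ILLUMINA");   // PL
                rg.setPlatformUnit("unit1");  // PU

                // Register the read group in the header before writing.
                SAMFileHeader header = reader.getFileHeader();
                header.addReadGroup(rg);

                try (SAMFileWriter writer =
                             new SAMFileWriterFactory().makeSAMOrBAMWriter(header, true, new File("output.bam"))) {
                    for (SAMRecord rec : reader) {
                        rec.setAttribute("RG", rg.getReadGroupId());  // tag each read with RG:Z:rg1
                        writer.addAlignment(rec);
                    }
                }
            }
        }
    }

With every read tagged and the group declared in the header, the library lookup that aborted stage 1473.0 above would succeed.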
[June 2, 2025 at 2:22:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
14:22:11.536 ERROR Executor - Exception in task 0.0 in stage 1480.0 (TID 1235)
org.broadinstitute.hellbender.exceptions.UserException$HeaderMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 contains an (RG) tag with the group NotADuplicateGroup which is not found in the file header.
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:66) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[main/:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[main/:?]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[main/:?]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[scala-library-2.13.14.jar:?]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[scala-library-2.13.14.jar:?]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
14:22:11.538 ERROR TaskSetManager - Task 0 in stage 1480.0 failed 1 times; aborting job
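
[Editor's note] The HeaderMissingReadGroup failure is the complementary negative case: the read does carry an RG tag, but the group it names (NotADuplicateGroup) is absent from the file header, so the library lookup in MarkDuplicatesSparkUtils.getLibraryForRead fails. The sketch below illustrates the two error paths seen in these stack traces at the htsjdk level; it is a hedged stand-in, not the actual GATK implementation, and the plain IllegalStateException and the "Unknown Library" fallback are assumptions made for the example.

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMReadGroupRecord;
    import htsjdk.samtools.SAMRecord;

    /** Sketch of the library lookup exercised by these two negative tests: a read with
     *  no RG tag, and a read whose RG id is missing from the header, must both be
     *  rejected before duplicate marking can group reads by library. */
    public class LibraryLookupSketch {

        static String libraryForRead(final SAMRecord read, final SAMFileHeader header) {
            final String rgId = (String) read.getAttribute("RG");
            if (rgId == null) {
                // Corresponds to UserException$ReadMissingReadGroup in the log above.
                throw new IllegalStateException(
                        "Read " + read.getReadName() + " is missing the read group (RG) tag");
            }
            final SAMReadGroupRecord rg = header.getReadGroup(rgId);
            if (rg == null) {
                // Corresponds to UserException$HeaderMissingReadGroup in the log above.
                throw new IllegalStateException(
                        "Read group " + rgId + " is not present in the file header");
            }
            // Duplicate marking groups reads by library; the fallback for an unset LB
            // field is an assumption of this sketch.
            return rg.getLibrary() != null ? rg.getLibrary() : "Unknown Library";
        }
    }

Both checks fire before any duplicate scoring happens, which is why the jobs in stages 1473.0 and 1480.0 abort on the first offending read.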
[June 2, 2025 at 2:22:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:13 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:13 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:13 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:19 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:19 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:19 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:20 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:20 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:20 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:24 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:24 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:24 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:27 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:27 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:27 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:28 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:28 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:28 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680
[June 2, 2025 at 2:22:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=2285895680