Class org.broadinstitute.hellbender.tools.spark.pipelines.MarkDuplicatesSparkIntegrationTest

Summary: 152 tests, 0 failures, 0 ignored — 100% successful (duration: 1m 0.40s)

Tests

Test Duration Result
assertThatExpectedOutputUpdateToggleIsDisabled 0s passed
testAssertAssumeUnsortedFilesAreQueryGroupedFiles 0.543s passed
testAssertCorrectSortOrderMultipleBams 0.143s passed
testAssertCorrectSortOrderMultipleBamsOverriding 0.487s passed
testBulkFragmentsNoDuplicates 1.163s passed
testBulkFragmentsWithDuplicates 2.107s passed
testDifferentChromosomesInOppositeOrder 0.301s passed
testDuplicateDetectionDataProviderWithMetrics[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.chr1.1-1K.unmarkedDups.markDuplicate.metrics) 0.287s passed
testDuplicateDetectionDataProviderWithMetrics[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.optical_dupes.markDuplicate.metrics) 0.244s passed
testDuplicateDetectionDataProviderWithMetrics[2](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.inputSingleLibrarySolexa16404.metrics) 0.273s passed
testFlowModeFlag 1.599s passed
testHashCollisionHandling 1.041s passed
testMDOrder[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug.bam) 0.308s passed
testMDOrder[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug2.bam) 0.288s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[0](true, true, true) 0.301s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[1](true, false, true) 0.300s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[2](false, true, true) 0.312s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[3](true, true, false) 0.298s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[4](true, false, false) 0.309s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[5](false, true, false) 0.314s passed
testMappedFragmentAndMappedPairFirstOfPairNonPrimary 0.296s passed
testMappedFragmentAndMatePairFirstUnmapped 0.305s passed
testMappedFragmentAndMatePairSecondUnmapped 0.299s passed
testMappedPairAndMappedFragmentAndMatePairFirstUnmapped 0.310s passed
testMappedPairAndMappedFragmentAndMatePairSecondUnmapped 0.321s passed
testMappedPairAndMatePairFirstOppositeStrandSecondUnmapped 0.308s passed
testMappedPairAndMatePairFirstUnmapped 0.295s passed
testMappedPairAndMatePairSecondUnmapped 0.305s passed
testMappedPairWithFirstEndSamePositionAndOther 0.298s passed
testMappedPairWithSamePosition 0.303s passed
testMappedPairWithSamePositionSameCigar 0.302s passed
testMarkDuplicatesSparkDeletingDuplicateReads[0]([Ljava.io.File;@5fa4715a, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.371s passed
testMarkDuplicatesSparkDeletingDuplicateReads[1]([Ljava.io.File;@598c2478, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.415s passed
testMarkDuplicatesSparkDeletingDuplicateReads[2]([Ljava.io.File;@191781f1, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.369s passed
testMarkDuplicatesSparkDeletingDuplicateReads[3]([Ljava.io.File;@2cb96ded, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.330s passed
testMarkDuplicatesSparkDeletingDuplicateReads[4]([Ljava.io.File;@11cc685d, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.327s passed
testMarkDuplicatesSparkDeletingDuplicateReads[5]([Ljava.io.File;@67009546, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.165s passed
testMarkDuplicatesSparkDeletingDuplicateReads[6]([Ljava.io.File;@5f3dc21b, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.221s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[0]([Ljava.io.File;@54f1dfe2, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.343s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[1]([Ljava.io.File;@36db3fe0, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.420s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[2]([Ljava.io.File;@5857a478, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.397s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[3]([Ljava.io.File;@5bb944d6, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.340s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[4]([Ljava.io.File;@6faf862b, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.332s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[5]([Ljava.io.File;@762dcca7, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.211s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[6]([Ljava.io.File;@36c4bf6c, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.193s passed
testMarkDuplicatesSparkIntegrationTestLocal[0]([Ljava.io.File;@2ae20e03, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.351s passed
testMarkDuplicatesSparkIntegrationTestLocal[1]([Ljava.io.File;@7c5229a3, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.374s passed
testMarkDuplicatesSparkIntegrationTestLocal[2]([Ljava.io.File;@5ca6d775, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.349s passed
testMarkDuplicatesSparkIntegrationTestLocal[3]([Ljava.io.File;@336e5f8c, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.317s passed
testMarkDuplicatesSparkIntegrationTestLocal[4]([Ljava.io.File;@3887a7df, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.345s passed
testMarkDuplicatesSparkIntegrationTestLocal[5]([Ljava.io.File;@4175d7e6, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.171s passed
testMarkDuplicatesSparkIntegrationTestLocal[6]([Ljava.io.File;@19f8b18c, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.229s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[0]([Ljava.io.File;@29ed7e4, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.358s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[1]([Ljava.io.File;@2479abc6, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.348s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[2]([Ljava.io.File;@449a46fe, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.360s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[3]([Ljava.io.File;@4ef3e539, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.326s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[4]([Ljava.io.File;@6132f47, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.330s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[5]([Ljava.io.File;@686cc2fa, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.136s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[6]([Ljava.io.File;@58a5bd42, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.132s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[0]([Ljava.io.File;@166ab4a1, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.335s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[1]([Ljava.io.File;@391d203a, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.356s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[2]([Ljava.io.File;@1dc42dd2, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.364s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[3]([Ljava.io.File;@7149bfce, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.317s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[4]([Ljava.io.File;@79676663, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.323s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[5]([Ljava.io.File;@4ce6ef5b, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.181s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[6]([Ljava.io.File;@55ac36dd, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.133s passed
testMatePairFirstUnmapped 0.297s passed
testMatePairSecondUnmapped 0.302s passed
testNoReadGroupInRead 0.246s passed
testNonExistantReadGroupInRead 0.248s passed
testNullOpticalDuplicates 0.318s passed
testOpticalDuplicateClusterOneEndSamePositionOneCluster 0.318s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicatesWithinPixelDistance 0.299s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.324s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.294s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.302s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.292s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.298s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.301s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.308s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.307s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.303s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.303s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.322s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.303s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.300s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.325s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.304s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.302s passed
testOpticalDuplicateClusterSamePositionOneOpticalDuplicatesWithinPixelDistance 0.306s passed
testOpticalDuplicateClustersAddingSecondEndFirstSameCoordinate 0.300s passed
testOpticalDuplicateFinding 0.295s passed
testOpticalDuplicateFindingPxelDistance 0.306s passed
testOpticalDuplicatesAndPCRDuplicatesOrientationDifference 0.296s passed
testOpticalDuplicatesDifferentReadGroups 0.298s passed
testOpticalDuplicatesTheSameReadGroup 0.301s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.319s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.294s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.296s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.291s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.306s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.294s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.291s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.302s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.299s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.314s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.297s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.297s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.304s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.307s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.306s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.317s passed
testPathologicalOrderingAtTheSamePosition 0.332s passed
testReadSameStartPositionOrientationOverride 0.293s passed
testReadsHaveSameStartPositionButDifferentChromosomeNonEquivalence 0.293s passed
testSecondEndIsBeforeFirstInCoordinate 0.308s passed
testSingleMappedFragment 0.305s passed
testSingleMappedFragmentAndSingleMappedPair 0.294s passed
testSingleMappedFragmentAndTwoMappedPairs 0.308s passed
testSingleMappedPair 0.301s passed
testSingleUnmappedFragment 0.293s passed
testSingleUnmappedPair 0.296s passed
testStackOverFlowPairSetSwap 0.309s passed
testSupplementaryReadUnmappedMate 0.292s passed
testThreeGroupsOnDifferentChromosomesOfThreeMappedPairs 0.300s passed
testThreeMappedPairs 0.307s passed
testThreeMappedPairsWithMatchingSecondMate 0.294s passed
testTwoGroupsOnDifferentChromosomesOfThreeMappedPairs 0.294s passed
testTwoGroupsOnDifferentChromosomesOfTwoFragments 0.315s passed
testTwoGroupsOnDifferentChromosomesOfTwoMappedPairs 0.321s passed
testTwoMappedFragments 0.317s passed
testTwoMappedPairWithSamePosition 0.308s passed
testTwoMappedPairWithSamePositionDifferentStrands 0.295s passed
testTwoMappedPairWithSamePositionDifferentStrands2 0.295s passed
testTwoMappedPairs 0.307s passed
testTwoMappedPairsAndMappedSecondaryFragment 0.306s passed
testTwoMappedPairsAndTerminalUnmappedFragment 0.293s passed
testTwoMappedPairsAndTerminalUnmappedPair 0.296s passed
testTwoMappedPairsMatesSoftClipped 0.301s passed
testTwoMappedPairsWithOppositeOrientations 0.295s passed
testTwoMappedPairsWithOppositeOrientationsNumberTwo 0.290s passed
testTwoMappedPairsWithSoftClipping 0.309s passed
testTwoMappedPairsWithSoftClippingBoth 0.294s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnly 0.292s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnlyNoMateCigar 0.299s passed
testTwoMappedPairsWithSupplementaryReads[0](true, true, true) 0.291s passed
testTwoMappedPairsWithSupplementaryReads[1](true, false, true) 0.291s passed
testTwoMappedPairsWithSupplementaryReads[2](false, true, true) 0.301s passed
testTwoMappedPairsWithSupplementaryReads[3](true, true, false) 0.298s passed
testTwoMappedPairsWithSupplementaryReads[4](true, false, false) 0.291s passed
testTwoMappedPairsWithSupplementaryReads[5](false, true, false) 0.288s passed
testTwoUnmappedFragments 0.303s passed

Standard error

[May 27, 2025 at 4:49:44 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:44 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:45 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:46 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:47 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
16:49:47.668 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:49:47.669 INFO  MarkDuplicatesSpark - The Genome Analysis Toolkit (GATK) v113e5d7-SNAPSHOT
16:49:47.669 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:49:47.669 INFO  MarkDuplicatesSpark - Executing as root@1c4c36ffc4f6 on Linux v6.11.0-1014-azure amd64
16:49:47.669 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.12+7-Ubuntu-1ubuntu222.04
16:49:47.669 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:49:47 PM GMT
16:49:47.669 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:49:47.669 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:49:47.669 INFO  MarkDuplicatesSpark - HTSJDK Version: test_cram_wiring-SNAPSHOT
16:49:47.669 INFO  MarkDuplicatesSpark - Picard Version: 3.4.0
16:49:47.669 INFO  MarkDuplicatesSpark - Built for Spark Version: 3.5.0
16:49:47.669 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:49:47.669 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:49:47.669 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:49:47.669 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:49:47.670 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:49:47.670 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:49:47.670 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:49:47.670 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:49:47.670 INFO  MarkDuplicatesSpark - Initializing engine
16:49:47.670 INFO  MarkDuplicatesSpark - Done initializing engine
16:49:47.670 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:49:47.672 INFO  MemoryStore - Block broadcast_1337 stored as values in memory (estimated size 305.5 KiB, free 1903.3 MiB)
16:49:47.679 INFO  MemoryStore - Block broadcast_1337_piece0 stored as bytes in memory (estimated size 64.0 KiB, free 1903.2 MiB)
16:49:47.679 INFO  BlockManagerInfo - Added broadcast_1337_piece0 in memory on localhost:37833 (size: 64.0 KiB, free: 1918.9 MiB)
16:49:47.679 INFO  SparkContext - Created broadcast 1337 from newAPIHadoopFile at PathSplitSource.java:96
16:49:47.700 INFO  MemoryStore - Block broadcast_1338 stored as values in memory (estimated size 305.5 KiB, free 1902.9 MiB)
16:49:47.711 INFO  MemoryStore - Block broadcast_1338_piece0 stored as bytes in memory (estimated size 64.0 KiB, free 1902.9 MiB)
16:49:47.711 INFO  BlockManagerInfo - Added broadcast_1338_piece0 in memory on localhost:37833 (size: 64.0 KiB, free: 1918.9 MiB)
16:49:47.712 INFO  SparkContext - Created broadcast 1338 from newAPIHadoopFile at PathSplitSource.java:96
16:49:47.754 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:49:47.754 INFO  FileInputFormat - Total input files to process : 1
16:49:47.755 INFO  DAGScheduler - Registering RDD 3396 (mapToPair at SparkUtils.java:161) as input to shuffle 97
16:49:47.755 INFO  DAGScheduler - Got job 413 (collect at SparkUtils.java:205) with 1 output partitions
16:49:47.755 INFO  DAGScheduler - Final stage: ResultStage 721 (collect at SparkUtils.java:205)
16:49:47.756 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 720)
16:49:47.756 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 720)
16:49:47.756 INFO  DAGScheduler - Submitting ShuffleMapStage 720 (MapPartitionsRDD[3396] at mapToPair at SparkUtils.java:161), which has no missing parents
16:49:47.773 INFO  MemoryStore - Block broadcast_1339 stored as values in memory (estimated size 459.2 KiB, free 1902.4 MiB)
16:49:47.775 INFO  MemoryStore - Block broadcast_1339_piece0 stored as bytes in memory (estimated size 201.4 KiB, free 1902.2 MiB)
16:49:47.776 INFO  BlockManagerInfo - Added broadcast_1339_piece0 in memory on localhost:37833 (size: 201.4 KiB, free: 1918.7 MiB)
16:49:47.776 INFO  SparkContext - Created broadcast 1339 from broadcast at DAGScheduler.scala:1580
16:49:47.776 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 720 (MapPartitionsRDD[3396] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:49:47.777 INFO  TaskSchedulerImpl - Adding task set 720.0 with 1 tasks resource profile 0
16:49:47.777 INFO  TaskSetManager - Starting task 0.0 in stage 720.0 (TID 650) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9650 bytes) 
16:49:47.778 INFO  Executor - Running task 0.0 in stage 720.0 (TID 650)
16:49:47.807 INFO  NewHadoopRDD - Input split: file:/gatkCloneMountPoint/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam:0+1507
16:49:47.811 INFO  Executor - Finished task 0.0 in stage 720.0 (TID 650). 1148 bytes result sent to driver
16:49:47.812 INFO  TaskSetManager - Finished task 0.0 in stage 720.0 (TID 650) in 35 ms on localhost (executor driver) (1/1)
16:49:47.812 INFO  TaskSchedulerImpl - Removed TaskSet 720.0, whose tasks have all completed, from pool 
16:49:47.812 INFO  DAGScheduler - ShuffleMapStage 720 (mapToPair at SparkUtils.java:161) finished in 0.056 s
16:49:47.812 INFO  DAGScheduler - looking for newly runnable stages
16:49:47.812 INFO  DAGScheduler - running: HashSet()
16:49:47.812 INFO  DAGScheduler - waiting: HashSet(ResultStage 721)
16:49:47.812 INFO  DAGScheduler - failed: HashSet()
16:49:47.813 INFO  DAGScheduler - Submitting ResultStage 721 (MapPartitionsRDD[3399] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:49:47.813 INFO  MemoryStore - Block broadcast_1340 stored as values in memory (estimated size 7.4 KiB, free 1902.2 MiB)
16:49:47.814 INFO  MemoryStore - Block broadcast_1340_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1902.2 MiB)
16:49:47.814 INFO  BlockManagerInfo - Added broadcast_1340_piece0 in memory on localhost:37833 (size: 4.1 KiB, free: 1918.7 MiB)
16:49:47.814 INFO  SparkContext - Created broadcast 1340 from broadcast at DAGScheduler.scala:1580
16:49:47.814 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 721 (MapPartitionsRDD[3399] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:49:47.815 INFO  TaskSchedulerImpl - Adding task set 721.0 with 1 tasks resource profile 0
16:49:47.815 INFO  TaskSetManager - Starting task 0.0 in stage 721.0 (TID 651) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:49:47.816 INFO  Executor - Running task 0.0 in stage 721.0 (TID 651)
16:49:47.817 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:47.817 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:47.820 INFO  Executor - Finished task 0.0 in stage 721.0 (TID 651). 2238 bytes result sent to driver
16:49:47.820 INFO  TaskSetManager - Finished task 0.0 in stage 721.0 (TID 651) in 5 ms on localhost (executor driver) (1/1)
16:49:47.820 INFO  DAGScheduler - ResultStage 721 (collect at SparkUtils.java:205) finished in 0.007 s
16:49:47.820 INFO  DAGScheduler - Job 413 is finished. Cancelling potential speculative or zombie tasks for this job
16:49:47.820 INFO  TaskSchedulerImpl - Removed TaskSet 721.0, whose tasks have all completed, from pool 
16:49:47.821 INFO  TaskSchedulerImpl - Killing all running tasks in stage 721: Stage finished
16:49:47.821 INFO  DAGScheduler - Job 413 finished: collect at SparkUtils.java:205, took 0.066947 s
16:49:47.827 INFO  MemoryStore - Block broadcast_1341 stored as values in memory (estimated size 1632.0 B, free 1902.2 MiB)
16:49:47.827 INFO  MemoryStore - Block broadcast_1341_piece0 stored as bytes in memory (estimated size 145.0 B, free 1902.2 MiB)
16:49:47.828 INFO  BlockManagerInfo - Added broadcast_1341_piece0 in memory on localhost:37833 (size: 145.0 B, free: 1918.7 MiB)
16:49:47.828 INFO  SparkContext - Created broadcast 1341 from broadcast at MarkDuplicatesSparkUtils.java:126
16:49:47.828 INFO  MemoryStore - Block broadcast_1342 stored as values in memory (estimated size 304.0 B, free 1902.2 MiB)
16:49:47.828 INFO  MemoryStore - Block broadcast_1342_piece0 stored as bytes in memory (estimated size 37.0 B, free 1902.2 MiB)
16:49:47.829 INFO  BlockManagerInfo - Added broadcast_1342_piece0 in memory on localhost:37833 (size: 37.0 B, free: 1918.7 MiB)
16:49:47.829 INFO  SparkContext - Created broadcast 1342 from broadcast at MarkDuplicatesSparkUtils.java:127
16:49:47.843 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:49:47.843 INFO  DAGScheduler - Registering RDD 3408 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 100
16:49:47.844 INFO  DAGScheduler - Registering RDD 3412 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 99
16:49:47.844 INFO  DAGScheduler - Registering RDD 3416 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 98
16:49:47.844 INFO  DAGScheduler - Got job 414 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:49:47.844 INFO  DAGScheduler - Final stage: ResultStage 726 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:49:47.844 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 725)
16:49:47.844 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 725)
16:49:47.844 INFO  DAGScheduler - Submitting ShuffleMapStage 723 (MapPartitionsRDD[3408] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:49:47.845 INFO  MemoryStore - Block broadcast_1343 stored as values in memory (estimated size 24.2 KiB, free 1902.2 MiB)
16:49:47.846 INFO  MemoryStore - Block broadcast_1343_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1902.2 MiB)
16:49:47.846 INFO  BlockManagerInfo - Added broadcast_1343_piece0 in memory on localhost:37833 (size: 12.6 KiB, free: 1918.6 MiB)
16:49:47.846 INFO  SparkContext - Created broadcast 1343 from broadcast at DAGScheduler.scala:1580
16:49:47.846 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 723 (MapPartitionsRDD[3408] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:49:47.847 INFO  TaskSchedulerImpl - Adding task set 723.0 with 1 tasks resource profile 0
16:49:47.847 INFO  TaskSetManager - Starting task 0.0 in stage 723.0 (TID 652) (localhost, executor driver, partition 0, NODE_LOCAL, 9813 bytes) 
16:49:47.848 INFO  Executor - Running task 0.0 in stage 723.0 (TID 652)
16:49:47.849 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:47.849 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:47.852 INFO  Executor - Finished task 0.0 in stage 723.0 (TID 652). 1922 bytes result sent to driver
16:49:47.853 INFO  TaskSetManager - Finished task 0.0 in stage 723.0 (TID 652) in 6 ms on localhost (executor driver) (1/1)
16:49:47.853 INFO  TaskSchedulerImpl - Removed TaskSet 723.0, whose tasks have all completed, from pool 
16:49:47.853 INFO  DAGScheduler - ShuffleMapStage 723 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.008 s
16:49:47.853 INFO  DAGScheduler - looking for newly runnable stages
16:49:47.853 INFO  DAGScheduler - running: HashSet()
16:49:47.853 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 724, ShuffleMapStage 725, ResultStage 726)
16:49:47.853 INFO  DAGScheduler - failed: HashSet()
16:49:47.853 INFO  DAGScheduler - Submitting ShuffleMapStage 724 (MapPartitionsRDD[3412] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:49:47.854 INFO  MemoryStore - Block broadcast_1344 stored as values in memory (estimated size 28.3 KiB, free 1902.1 MiB)
16:49:47.855 INFO  MemoryStore - Block broadcast_1344_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1902.1 MiB)
16:49:47.855 INFO  BlockManagerInfo - Added broadcast_1344_piece0 in memory on localhost:37833 (size: 14.8 KiB, free: 1918.6 MiB)
16:49:47.855 INFO  SparkContext - Created broadcast 1344 from broadcast at DAGScheduler.scala:1580
16:49:47.855 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 724 (MapPartitionsRDD[3412] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:49:47.855 INFO  TaskSchedulerImpl - Adding task set 724.0 with 1 tasks resource profile 0
16:49:47.856 INFO  TaskSetManager - Starting task 0.0 in stage 724.0 (TID 653) (localhost, executor driver, partition 0, NODE_LOCAL, 9288 bytes) 
16:49:47.856 INFO  Executor - Running task 0.0 in stage 724.0 (TID 653)
16:49:47.858 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:47.858 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:47.861 INFO  Executor - Finished task 0.0 in stage 724.0 (TID 653). 1922 bytes result sent to driver
16:49:47.861 INFO  TaskSetManager - Finished task 0.0 in stage 724.0 (TID 653) in 5 ms on localhost (executor driver) (1/1)
16:49:47.861 INFO  DAGScheduler - ShuffleMapStage 724 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.007 s
16:49:47.861 INFO  TaskSchedulerImpl - Removed TaskSet 724.0, whose tasks have all completed, from pool 
16:49:47.862 INFO  DAGScheduler - looking for newly runnable stages
16:49:47.862 INFO  DAGScheduler - running: HashSet()
16:49:47.862 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 725, ResultStage 726)
16:49:47.862 INFO  DAGScheduler - failed: HashSet()
16:49:47.862 INFO  DAGScheduler - Submitting ShuffleMapStage 725 (MapPartitionsRDD[3416] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:49:47.863 INFO  MemoryStore - Block broadcast_1345 stored as values in memory (estimated size 19.0 KiB, free 1902.1 MiB)
16:49:47.863 INFO  MemoryStore - Block broadcast_1345_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1902.1 MiB)
16:49:47.863 INFO  BlockManagerInfo - Added broadcast_1345_piece0 in memory on localhost:37833 (size: 9.7 KiB, free: 1918.6 MiB)
16:49:47.863 INFO  SparkContext - Created broadcast 1345 from broadcast at DAGScheduler.scala:1580
16:49:47.864 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 725 (MapPartitionsRDD[3416] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:49:47.864 INFO  TaskSchedulerImpl - Adding task set 725.0 with 1 tasks resource profile 0
16:49:47.864 INFO  TaskSetManager - Starting task 0.0 in stage 725.0 (TID 654) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:49:47.864 INFO  Executor - Running task 0.0 in stage 725.0 (TID 654)
16:49:47.866 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:47.866 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:47.868 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:47.868 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:47.870 INFO  Executor - Finished task 0.0 in stage 725.0 (TID 654). 1922 bytes result sent to driver
16:49:47.870 INFO  TaskSetManager - Finished task 0.0 in stage 725.0 (TID 654) in 6 ms on localhost (executor driver) (1/1)
16:49:47.870 INFO  TaskSchedulerImpl - Removed TaskSet 725.0, whose tasks have all completed, from pool 
16:49:47.870 INFO  DAGScheduler - ShuffleMapStage 725 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.008 s
16:49:47.870 INFO  DAGScheduler - looking for newly runnable stages
16:49:47.870 INFO  DAGScheduler - running: HashSet()
16:49:47.870 INFO  DAGScheduler - waiting: HashSet(ResultStage 726)
16:49:47.870 INFO  DAGScheduler - failed: HashSet()
16:49:47.871 INFO  DAGScheduler - Submitting ResultStage 726 (MapPartitionsRDD[3418] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:49:47.871 INFO  MemoryStore - Block broadcast_1346 stored as values in memory (estimated size 20.2 KiB, free 1902.1 MiB)
16:49:47.872 INFO  MemoryStore - Block broadcast_1346_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1902.1 MiB)
16:49:47.872 INFO  BlockManagerInfo - Added broadcast_1346_piece0 in memory on localhost:37833 (size: 10.6 KiB, free: 1918.6 MiB)
16:49:47.872 INFO  SparkContext - Created broadcast 1346 from broadcast at DAGScheduler.scala:1580
16:49:47.872 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 726 (MapPartitionsRDD[3418] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:49:47.872 INFO  TaskSchedulerImpl - Adding task set 726.0 with 1 tasks resource profile 0
16:49:47.873 INFO  TaskSetManager - Starting task 0.0 in stage 726.0 (TID 655) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:49:47.873 INFO  Executor - Running task 0.0 in stage 726.0 (TID 655)
16:49:47.875 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:47.875 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:47.876 INFO  Executor - Finished task 0.0 in stage 726.0 (TID 655). 1944 bytes result sent to driver
16:49:47.877 INFO  TaskSetManager - Finished task 0.0 in stage 726.0 (TID 655) in 4 ms on localhost (executor driver) (1/1)
16:49:47.877 INFO  TaskSchedulerImpl - Removed TaskSet 726.0, whose tasks have all completed, from pool 
16:49:47.877 INFO  DAGScheduler - ResultStage 726 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.006 s
16:49:47.877 INFO  DAGScheduler - Job 414 is finished. Cancelling potential speculative or zombie tasks for this job
16:49:47.877 INFO  TaskSchedulerImpl - Killing all running tasks in stage 726: Stage finished
16:49:47.877 INFO  DAGScheduler - Job 414 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.034288 s
16:49:47.887 INFO  MemoryStore - Block broadcast_1347 stored as values in memory (estimated size 20.3 KiB, free 1902.0 MiB)
16:49:47.888 INFO  MemoryStore - Block broadcast_1347_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1902.0 MiB)
16:49:47.888 INFO  BlockManagerInfo - Added broadcast_1347_piece0 in memory on localhost:37833 (size: 1850.0 B, free: 1918.6 MiB)
16:49:47.888 INFO  SparkContext - Created broadcast 1347 from broadcast at ReadsSparkSink.java:133
16:49:47.889 INFO  MemoryStore - Block broadcast_1348 stored as values in memory (estimated size 20.4 KiB, free 1902.0 MiB)
16:49:47.889 INFO  MemoryStore - Block broadcast_1348_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1902.0 MiB)
16:49:47.889 INFO  BlockManagerInfo - Added broadcast_1348_piece0 in memory on localhost:37833 (size: 1850.0 B, free: 1918.6 MiB)
16:49:47.890 INFO  SparkContext - Created broadcast 1348 from broadcast at BamSink.java:76
16:49:47.891 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:49:47.891 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:49:47.891 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:49:47.903 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:49:47.904 INFO  DAGScheduler - Registering RDD 3420 (mapToPair at SparkUtils.java:161) as input to shuffle 101
16:49:47.904 INFO  DAGScheduler - Got job 415 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:49:47.904 INFO  DAGScheduler - Final stage: ResultStage 731 (runJob at SparkHadoopWriter.scala:83)
16:49:47.904 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 730)
16:49:47.904 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 730)
16:49:47.904 INFO  DAGScheduler - Submitting ShuffleMapStage 730 (MapPartitionsRDD[3420] at mapToPair at SparkUtils.java:161), which has no missing parents
16:49:47.905 INFO  MemoryStore - Block broadcast_1349 stored as values in memory (estimated size 18.1 KiB, free 1902.0 MiB)
16:49:47.906 INFO  MemoryStore - Block broadcast_1349_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1902.0 MiB)
16:49:47.906 INFO  BlockManagerInfo - Added broadcast_1349_piece0 in memory on localhost:37833 (size: 9.5 KiB, free: 1918.6 MiB)
16:49:47.906 INFO  SparkContext - Created broadcast 1349 from broadcast at DAGScheduler.scala:1580
16:49:47.906 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 730 (MapPartitionsRDD[3420] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:49:47.907 INFO  TaskSchedulerImpl - Adding task set 730.0 with 1 tasks resource profile 0
16:49:47.907 INFO  TaskSetManager - Starting task 0.0 in stage 730.0 (TID 656) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:49:47.907 INFO  Executor - Running task 0.0 in stage 730.0 (TID 656)
16:49:47.909 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:47.909 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:47.911 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:47.911 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:47.913 INFO  Executor - Finished task 0.0 in stage 730.0 (TID 656). 1922 bytes result sent to driver
16:49:47.914 INFO  TaskSetManager - Finished task 0.0 in stage 730.0 (TID 656) in 6 ms on localhost (executor driver) (1/1)
16:49:47.914 INFO  TaskSchedulerImpl - Removed TaskSet 730.0, whose tasks have all completed, from pool 
16:49:47.914 INFO  DAGScheduler - ShuffleMapStage 730 (mapToPair at SparkUtils.java:161) finished in 0.010 s
16:49:47.914 INFO  DAGScheduler - looking for newly runnable stages
16:49:47.914 INFO  DAGScheduler - running: HashSet()
16:49:47.914 INFO  DAGScheduler - waiting: HashSet(ResultStage 731)
16:49:47.914 INFO  DAGScheduler - failed: HashSet()
16:49:47.914 INFO  DAGScheduler - Submitting ResultStage 731 (MapPartitionsRDD[3425] at mapToPair at BamSink.java:91), which has no missing parents
16:49:47.922 INFO  MemoryStore - Block broadcast_1350 stored as values in memory (estimated size 163.3 KiB, free 1901.8 MiB)
16:49:47.923 INFO  MemoryStore - Block broadcast_1350_piece0 stored as bytes in memory (estimated size 73.4 KiB, free 1901.8 MiB)
16:49:47.923 INFO  BlockManagerInfo - Added broadcast_1350_piece0 in memory on localhost:37833 (size: 73.4 KiB, free: 1918.5 MiB)
16:49:47.923 INFO  SparkContext - Created broadcast 1350 from broadcast at DAGScheduler.scala:1580
16:49:47.923 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 731 (MapPartitionsRDD[3425] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
16:49:47.923 INFO  TaskSchedulerImpl - Adding task set 731.0 with 1 tasks resource profile 0
16:49:47.924 INFO  TaskSetManager - Starting task 0.0 in stage 731.0 (TID 657) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:49:47.924 INFO  Executor - Running task 0.0 in stage 731.0 (TID 657)
16:49:47.928 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:47.928 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:47.930 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:49:47.930 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:49:47.930 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:49:47.930 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:49:47.930 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:49:47.930 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:49:47.941 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271649472111973589959680634_3425_r_000000_0' to file:/tmp/local17251393156006128657/markdups9602616445028402802.bam.parts/_temporary/0/task_202505271649472111973589959680634_3425_r_000000
16:49:47.941 INFO  SparkHadoopMapRedUtil - attempt_202505271649472111973589959680634_3425_r_000000_0: Committed. Elapsed time: 0 ms.
16:49:47.941 INFO  Executor - Finished task 0.0 in stage 731.0 (TID 657). 1858 bytes result sent to driver
16:49:47.942 INFO  TaskSetManager - Finished task 0.0 in stage 731.0 (TID 657) in 18 ms on localhost (executor driver) (1/1)
16:49:47.942 INFO  TaskSchedulerImpl - Removed TaskSet 731.0, whose tasks have all completed, from pool 
16:49:47.942 INFO  DAGScheduler - ResultStage 731 (runJob at SparkHadoopWriter.scala:83) finished in 0.027 s
16:49:47.942 INFO  DAGScheduler - Job 415 is finished. Cancelling potential speculative or zombie tasks for this job
16:49:47.942 INFO  TaskSchedulerImpl - Killing all running tasks in stage 731: Stage finished
16:49:47.942 INFO  DAGScheduler - Job 415 finished: runJob at SparkHadoopWriter.scala:83, took 0.038975 s
16:49:47.942 INFO  SparkHadoopWriter - Start to commit write Job job_202505271649472111973589959680634_3425.
16:49:47.945 INFO  SparkHadoopWriter - Write Job job_202505271649472111973589959680634_3425 committed. Elapsed time: 2 ms.
16:49:47.952 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local17251393156006128657/markdups9602616445028402802.bam
16:49:47.954 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local17251393156006128657/markdups9602616445028402802.bam done
16:49:47.954 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local17251393156006128657/markdups9602616445028402802.bam.parts/ to /tmp/local17251393156006128657/markdups9602616445028402802.bam.sbi
16:49:47.957 INFO  IndexFileMerger - Done merging .sbi files
16:49:47.957 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local17251393156006128657/markdups9602616445028402802.bam.parts/ to /tmp/local17251393156006128657/markdups9602616445028402802.bam.bai
16:49:47.960 INFO  IndexFileMerger - Done merging .bai files
16:49:47.960 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:49:47 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
16:49:47.977 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:49:47.977 INFO  MarkDuplicatesSpark - The Genome Analysis Toolkit (GATK) v113e5d7-SNAPSHOT
16:49:47.977 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:49:47.977 INFO  MarkDuplicatesSpark - Executing as root@1c4c36ffc4f6 on Linux v6.11.0-1014-azure amd64
16:49:47.977 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.12+7-Ubuntu-1ubuntu222.04
16:49:47.977 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:49:47 PM GMT
16:49:47.977 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:49:47.977 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:49:47.977 INFO  MarkDuplicatesSpark - HTSJDK Version: test_cram_wiring-SNAPSHOT
16:49:47.977 INFO  MarkDuplicatesSpark - Picard Version: 3.4.0
16:49:47.977 INFO  MarkDuplicatesSpark - Built for Spark Version: 3.5.0
16:49:47.977 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:49:47.977 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:49:47.977 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:49:47.977 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:49:47.977 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:49:47.978 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:49:47.978 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:49:47.978 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:49:47.978 INFO  MarkDuplicatesSpark - Initializing engine
16:49:47.978 INFO  MarkDuplicatesSpark - Done initializing engine
16:49:47.978 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:49:47.980 INFO  MemoryStore - Block broadcast_1351 stored as values in memory (estimated size 305.5 KiB, free 1901.5 MiB)
16:49:47.986 INFO  MemoryStore - Block broadcast_1351_piece0 stored as bytes in memory (estimated size 64.0 KiB, free 1901.4 MiB)
16:49:47.987 INFO  BlockManagerInfo - Added broadcast_1351_piece0 in memory on localhost:37833 (size: 64.0 KiB, free: 1918.5 MiB)
16:49:47.987 INFO  SparkContext - Created broadcast 1351 from newAPIHadoopFile at PathSplitSource.java:96
16:49:48.007 INFO  MemoryStore - Block broadcast_1352 stored as values in memory (estimated size 305.5 KiB, free 1901.1 MiB)
16:49:48.014 INFO  MemoryStore - Block broadcast_1352_piece0 stored as bytes in memory (estimated size 64.0 KiB, free 1901.0 MiB)
16:49:48.014 INFO  BlockManagerInfo - Added broadcast_1352_piece0 in memory on localhost:37833 (size: 64.0 KiB, free: 1918.4 MiB)
16:49:48.014 INFO  SparkContext - Created broadcast 1352 from newAPIHadoopFile at PathSplitSource.java:96
16:49:48.045 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:49:48.046 INFO  FileInputFormat - Total input files to process : 1
16:49:48.046 INFO  DAGScheduler - Registering RDD 3439 (mapToPair at SparkUtils.java:161) as input to shuffle 102
16:49:48.046 INFO  DAGScheduler - Got job 416 (collect at SparkUtils.java:205) with 1 output partitions
16:49:48.046 INFO  DAGScheduler - Final stage: ResultStage 733 (collect at SparkUtils.java:205)
16:49:48.047 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 732)
16:49:48.047 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 732)
16:49:48.047 INFO  DAGScheduler - Submitting ShuffleMapStage 732 (MapPartitionsRDD[3439] at mapToPair at SparkUtils.java:161), which has no missing parents
16:49:48.064 INFO  MemoryStore - Block broadcast_1353 stored as values in memory (estimated size 459.2 KiB, free 1900.6 MiB)
16:49:48.067 INFO  MemoryStore - Block broadcast_1353_piece0 stored as bytes in memory (estimated size 201.4 KiB, free 1900.4 MiB)
16:49:48.067 INFO  BlockManagerInfo - Added broadcast_1353_piece0 in memory on localhost:37833 (size: 201.4 KiB, free: 1918.2 MiB)
16:49:48.067 INFO  SparkContext - Created broadcast 1353 from broadcast at DAGScheduler.scala:1580
16:49:48.067 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 732 (MapPartitionsRDD[3439] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:49:48.067 INFO  TaskSchedulerImpl - Adding task set 732.0 with 1 tasks resource profile 0
16:49:48.068 INFO  TaskSetManager - Starting task 0.0 in stage 732.0 (TID 658) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9651 bytes) 
16:49:48.068 INFO  Executor - Running task 0.0 in stage 732.0 (TID 658)
16:49:48.097 INFO  NewHadoopRDD - Input split: file:/gatkCloneMountPoint/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam:0+1856
16:49:48.102 INFO  Executor - Finished task 0.0 in stage 732.0 (TID 658). 1148 bytes result sent to driver
16:49:48.102 INFO  TaskSetManager - Finished task 0.0 in stage 732.0 (TID 658) in 34 ms on localhost (executor driver) (1/1)
16:49:48.102 INFO  TaskSchedulerImpl - Removed TaskSet 732.0, whose tasks have all completed, from pool 
16:49:48.102 INFO  DAGScheduler - ShuffleMapStage 732 (mapToPair at SparkUtils.java:161) finished in 0.055 s
16:49:48.102 INFO  DAGScheduler - looking for newly runnable stages
16:49:48.102 INFO  DAGScheduler - running: HashSet()
16:49:48.102 INFO  DAGScheduler - waiting: HashSet(ResultStage 733)
16:49:48.102 INFO  DAGScheduler - failed: HashSet()
16:49:48.103 INFO  DAGScheduler - Submitting ResultStage 733 (MapPartitionsRDD[3442] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:49:48.103 INFO  MemoryStore - Block broadcast_1354 stored as values in memory (estimated size 7.4 KiB, free 1900.4 MiB)
16:49:48.104 INFO  MemoryStore - Block broadcast_1354_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1900.4 MiB)
16:49:48.104 INFO  BlockManagerInfo - Added broadcast_1354_piece0 in memory on localhost:37833 (size: 4.1 KiB, free: 1918.2 MiB)
16:49:48.104 INFO  SparkContext - Created broadcast 1354 from broadcast at DAGScheduler.scala:1580
16:49:48.104 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 733 (MapPartitionsRDD[3442] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:49:48.104 INFO  TaskSchedulerImpl - Adding task set 733.0 with 1 tasks resource profile 0
16:49:48.104 INFO  TaskSetManager - Starting task 0.0 in stage 733.0 (TID 659) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:49:48.105 INFO  Executor - Running task 0.0 in stage 733.0 (TID 659)
16:49:48.106 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:48.106 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:48.108 INFO  Executor - Finished task 0.0 in stage 733.0 (TID 659). 2495 bytes result sent to driver
16:49:48.108 INFO  TaskSetManager - Finished task 0.0 in stage 733.0 (TID 659) in 4 ms on localhost (executor driver) (1/1)
16:49:48.108 INFO  TaskSchedulerImpl - Removed TaskSet 733.0, whose tasks have all completed, from pool 
16:49:48.108 INFO  DAGScheduler - ResultStage 733 (collect at SparkUtils.java:205) finished in 0.005 s
16:49:48.108 INFO  DAGScheduler - Job 416 is finished. Cancelling potential speculative or zombie tasks for this job
16:49:48.108 INFO  TaskSchedulerImpl - Killing all running tasks in stage 733: Stage finished
16:49:48.109 INFO  DAGScheduler - Job 416 finished: collect at SparkUtils.java:205, took 0.063005 s
16:49:48.113 INFO  MemoryStore - Block broadcast_1355 stored as values in memory (estimated size 1632.0 B, free 1900.4 MiB)
16:49:48.113 INFO  MemoryStore - Block broadcast_1355_piece0 stored as bytes in memory (estimated size 145.0 B, free 1900.4 MiB)
16:49:48.113 INFO  BlockManagerInfo - Added broadcast_1355_piece0 in memory on localhost:37833 (size: 145.0 B, free: 1918.2 MiB)
16:49:48.113 INFO  SparkContext - Created broadcast 1355 from broadcast at MarkDuplicatesSparkUtils.java:126
16:49:48.114 INFO  MemoryStore - Block broadcast_1356 stored as values in memory (estimated size 304.0 B, free 1900.4 MiB)
16:49:48.114 INFO  MemoryStore - Block broadcast_1356_piece0 stored as bytes in memory (estimated size 37.0 B, free 1900.4 MiB)
16:49:48.114 INFO  BlockManagerInfo - Added broadcast_1356_piece0 in memory on localhost:37833 (size: 37.0 B, free: 1918.2 MiB)
16:49:48.114 INFO  SparkContext - Created broadcast 1356 from broadcast at MarkDuplicatesSparkUtils.java:127
16:49:48.126 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:49:48.127 INFO  DAGScheduler - Registering RDD 3451 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 105
16:49:48.127 INFO  DAGScheduler - Registering RDD 3455 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 104
16:49:48.127 INFO  DAGScheduler - Registering RDD 3459 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 103
16:49:48.127 INFO  DAGScheduler - Got job 417 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:49:48.127 INFO  DAGScheduler - Final stage: ResultStage 738 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:49:48.128 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 737)
16:49:48.128 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 737)
16:49:48.128 INFO  DAGScheduler - Submitting ShuffleMapStage 735 (MapPartitionsRDD[3451] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:49:48.129 INFO  MemoryStore - Block broadcast_1357 stored as values in memory (estimated size 24.2 KiB, free 1900.4 MiB)
16:49:48.129 INFO  MemoryStore - Block broadcast_1357_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1900.3 MiB)
16:49:48.130 INFO  BlockManagerInfo - Added broadcast_1357_piece0 in memory on localhost:37833 (size: 12.6 KiB, free: 1918.2 MiB)
16:49:48.130 INFO  SparkContext - Created broadcast 1357 from broadcast at DAGScheduler.scala:1580
16:49:48.130 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 735 (MapPartitionsRDD[3451] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:49:48.130 INFO  TaskSchedulerImpl - Adding task set 735.0 with 1 tasks resource profile 0
16:49:48.131 INFO  TaskSetManager - Starting task 0.0 in stage 735.0 (TID 660) (localhost, executor driver, partition 0, NODE_LOCAL, 9813 bytes) 
16:49:48.131 INFO  Executor - Running task 0.0 in stage 735.0 (TID 660)
16:49:48.133 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:48.133 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:48.136 INFO  Executor - Finished task 0.0 in stage 735.0 (TID 660). 1922 bytes result sent to driver
16:49:48.136 INFO  TaskSetManager - Finished task 0.0 in stage 735.0 (TID 660) in 6 ms on localhost (executor driver) (1/1)
16:49:48.137 INFO  TaskSchedulerImpl - Removed TaskSet 735.0, whose tasks have all completed, from pool 
16:49:48.137 INFO  DAGScheduler - ShuffleMapStage 735 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.009 s
16:49:48.137 INFO  DAGScheduler - looking for newly runnable stages
16:49:48.137 INFO  DAGScheduler - running: HashSet()
16:49:48.137 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 736, ShuffleMapStage 737, ResultStage 738)
16:49:48.137 INFO  DAGScheduler - failed: HashSet()
16:49:48.137 INFO  DAGScheduler - Submitting ShuffleMapStage 736 (MapPartitionsRDD[3455] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:49:48.138 INFO  MemoryStore - Block broadcast_1358 stored as values in memory (estimated size 28.3 KiB, free 1900.3 MiB)
16:49:48.139 INFO  MemoryStore - Block broadcast_1358_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1900.3 MiB)
16:49:48.139 INFO  BlockManagerInfo - Added broadcast_1358_piece0 in memory on localhost:37833 (size: 14.8 KiB, free: 1918.2 MiB)
16:49:48.139 INFO  SparkContext - Created broadcast 1358 from broadcast at DAGScheduler.scala:1580
16:49:48.139 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 736 (MapPartitionsRDD[3455] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:49:48.139 INFO  TaskSchedulerImpl - Adding task set 736.0 with 1 tasks resource profile 0
16:49:48.140 INFO  TaskSetManager - Starting task 0.0 in stage 736.0 (TID 661) (localhost, executor driver, partition 0, NODE_LOCAL, 9288 bytes) 
16:49:48.140 INFO  Executor - Running task 0.0 in stage 736.0 (TID 661)
16:49:48.142 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:48.142 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:48.146 INFO  Executor - Finished task 0.0 in stage 736.0 (TID 661). 1922 bytes result sent to driver
16:49:48.146 INFO  TaskSetManager - Finished task 0.0 in stage 736.0 (TID 661) in 6 ms on localhost (executor driver) (1/1)
16:49:48.146 INFO  TaskSchedulerImpl - Removed TaskSet 736.0, whose tasks have all completed, from pool 
16:49:48.146 INFO  DAGScheduler - ShuffleMapStage 736 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.009 s
16:49:48.146 INFO  DAGScheduler - looking for newly runnable stages
16:49:48.146 INFO  DAGScheduler - running: HashSet()
16:49:48.147 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 737, ResultStage 738)
16:49:48.147 INFO  DAGScheduler - failed: HashSet()
16:49:48.147 INFO  DAGScheduler - Submitting ShuffleMapStage 737 (MapPartitionsRDD[3459] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:49:48.147 INFO  MemoryStore - Block broadcast_1359 stored as values in memory (estimated size 19.0 KiB, free 1900.3 MiB)
16:49:48.148 INFO  MemoryStore - Block broadcast_1359_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1900.3 MiB)
16:49:48.148 INFO  BlockManagerInfo - Added broadcast_1359_piece0 in memory on localhost:37833 (size: 9.7 KiB, free: 1918.2 MiB)
16:49:48.148 INFO  SparkContext - Created broadcast 1359 from broadcast at DAGScheduler.scala:1580
16:49:48.149 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 737 (MapPartitionsRDD[3459] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:49:48.149 INFO  TaskSchedulerImpl - Adding task set 737.0 with 1 tasks resource profile 0
16:49:48.149 INFO  TaskSetManager - Starting task 0.0 in stage 737.0 (TID 662) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:49:48.150 INFO  Executor - Running task 0.0 in stage 737.0 (TID 662)
16:49:48.152 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:48.153 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 1 ms
16:49:48.156 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:48.157 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:48.158 INFO  Executor - Finished task 0.0 in stage 737.0 (TID 662). 1922 bytes result sent to driver
16:49:48.159 INFO  TaskSetManager - Finished task 0.0 in stage 737.0 (TID 662) in 10 ms on localhost (executor driver) (1/1)
16:49:48.159 INFO  TaskSchedulerImpl - Removed TaskSet 737.0, whose tasks have all completed, from pool 
16:49:48.159 INFO  DAGScheduler - ShuffleMapStage 737 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.012 s
16:49:48.159 INFO  DAGScheduler - looking for newly runnable stages
16:49:48.159 INFO  DAGScheduler - running: HashSet()
16:49:48.159 INFO  DAGScheduler - waiting: HashSet(ResultStage 738)
16:49:48.159 INFO  DAGScheduler - failed: HashSet()
16:49:48.159 INFO  DAGScheduler - Submitting ResultStage 738 (MapPartitionsRDD[3461] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:49:48.160 INFO  MemoryStore - Block broadcast_1360 stored as values in memory (estimated size 20.2 KiB, free 1900.3 MiB)
16:49:48.161 INFO  MemoryStore - Block broadcast_1360_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1900.2 MiB)
16:49:48.161 INFO  BlockManagerInfo - Added broadcast_1360_piece0 in memory on localhost:37833 (size: 10.6 KiB, free: 1918.1 MiB)
16:49:48.161 INFO  SparkContext - Created broadcast 1360 from broadcast at DAGScheduler.scala:1580
16:49:48.161 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 738 (MapPartitionsRDD[3461] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:49:48.161 INFO  TaskSchedulerImpl - Adding task set 738.0 with 1 tasks resource profile 0
16:49:48.162 INFO  TaskSetManager - Starting task 0.0 in stage 738.0 (TID 663) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:49:48.162 INFO  Executor - Running task 0.0 in stage 738.0 (TID 663)
16:49:48.163 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:48.163 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:48.165 INFO  Executor - Finished task 0.0 in stage 738.0 (TID 663). 2053 bytes result sent to driver
16:49:48.166 INFO  TaskSetManager - Finished task 0.0 in stage 738.0 (TID 663) in 5 ms on localhost (executor driver) (1/1)
16:49:48.166 INFO  TaskSchedulerImpl - Removed TaskSet 738.0, whose tasks have all completed, from pool 
16:49:48.166 INFO  DAGScheduler - ResultStage 738 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.006 s
16:49:48.166 INFO  DAGScheduler - Job 417 is finished. Cancelling potential speculative or zombie tasks for this job
16:49:48.166 INFO  TaskSchedulerImpl - Killing all running tasks in stage 738: Stage finished
16:49:48.166 INFO  DAGScheduler - Job 417 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.039847 s
16:49:48.177 INFO  MemoryStore - Block broadcast_1361 stored as values in memory (estimated size 20.3 KiB, free 1900.2 MiB)
16:49:48.178 INFO  MemoryStore - Block broadcast_1361_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1900.2 MiB)
16:49:48.178 INFO  BlockManagerInfo - Added broadcast_1361_piece0 in memory on localhost:37833 (size: 1850.0 B, free: 1918.1 MiB)
16:49:48.178 INFO  SparkContext - Created broadcast 1361 from broadcast at ReadsSparkSink.java:133
16:49:48.179 INFO  MemoryStore - Block broadcast_1362 stored as values in memory (estimated size 20.4 KiB, free 1900.2 MiB)
16:49:48.179 INFO  MemoryStore - Block broadcast_1362_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1900.2 MiB)
16:49:48.179 INFO  BlockManagerInfo - Added broadcast_1362_piece0 in memory on localhost:37833 (size: 1850.0 B, free: 1918.1 MiB)
16:49:48.180 INFO  SparkContext - Created broadcast 1362 from broadcast at BamSink.java:76
16:49:48.182 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:49:48.182 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:49:48.182 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:49:48.193 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:49:48.193 INFO  DAGScheduler - Registering RDD 3463 (mapToPair at SparkUtils.java:161) as input to shuffle 106
16:49:48.194 INFO  DAGScheduler - Got job 418 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:49:48.194 INFO  DAGScheduler - Final stage: ResultStage 743 (runJob at SparkHadoopWriter.scala:83)
16:49:48.194 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 742)
16:49:48.194 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 742)
16:49:48.194 INFO  DAGScheduler - Submitting ShuffleMapStage 742 (MapPartitionsRDD[3463] at mapToPair at SparkUtils.java:161), which has no missing parents
16:49:48.195 INFO  MemoryStore - Block broadcast_1363 stored as values in memory (estimated size 18.1 KiB, free 1900.2 MiB)
16:49:48.195 INFO  MemoryStore - Block broadcast_1363_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1900.2 MiB)
16:49:48.196 INFO  BlockManagerInfo - Added broadcast_1363_piece0 in memory on localhost:37833 (size: 9.5 KiB, free: 1918.1 MiB)
16:49:48.196 INFO  SparkContext - Created broadcast 1363 from broadcast at DAGScheduler.scala:1580
16:49:48.196 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 742 (MapPartitionsRDD[3463] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:49:48.196 INFO  TaskSchedulerImpl - Adding task set 742.0 with 1 tasks resource profile 0
16:49:48.196 INFO  TaskSetManager - Starting task 0.0 in stage 742.0 (TID 664) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:49:48.197 INFO  Executor - Running task 0.0 in stage 742.0 (TID 664)
16:49:48.198 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:48.198 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:48.200 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:48.201 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:48.202 INFO  Executor - Finished task 0.0 in stage 742.0 (TID 664). 1922 bytes result sent to driver
16:49:48.202 INFO  TaskSetManager - Finished task 0.0 in stage 742.0 (TID 664) in 6 ms on localhost (executor driver) (1/1)
16:49:48.202 INFO  TaskSchedulerImpl - Removed TaskSet 742.0, whose tasks have all completed, from pool 
16:49:48.202 INFO  DAGScheduler - ShuffleMapStage 742 (mapToPair at SparkUtils.java:161) finished in 0.008 s
16:49:48.202 INFO  DAGScheduler - looking for newly runnable stages
16:49:48.202 INFO  DAGScheduler - running: HashSet()
16:49:48.203 INFO  DAGScheduler - waiting: HashSet(ResultStage 743)
16:49:48.203 INFO  DAGScheduler - failed: HashSet()
16:49:48.203 INFO  DAGScheduler - Submitting ResultStage 743 (MapPartitionsRDD[3468] at mapToPair at BamSink.java:91), which has no missing parents
16:49:48.210 INFO  MemoryStore - Block broadcast_1364 stored as values in memory (estimated size 163.3 KiB, free 1900.0 MiB)
16:49:48.211 INFO  MemoryStore - Block broadcast_1364_piece0 stored as bytes in memory (estimated size 73.4 KiB, free 1899.9 MiB)
16:49:48.211 INFO  BlockManagerInfo - Added broadcast_1364_piece0 in memory on localhost:37833 (size: 73.4 KiB, free: 1918.1 MiB)
16:49:48.211 INFO  SparkContext - Created broadcast 1364 from broadcast at DAGScheduler.scala:1580
16:49:48.211 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 743 (MapPartitionsRDD[3468] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
16:49:48.211 INFO  TaskSchedulerImpl - Adding task set 743.0 with 1 tasks resource profile 0
16:49:48.212 INFO  TaskSetManager - Starting task 0.0 in stage 743.0 (TID 665) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:49:48.212 INFO  Executor - Running task 0.0 in stage 743.0 (TID 665)
16:49:48.216 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:49:48.216 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:49:48.219 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:49:48.219 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:49:48.219 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:49:48.219 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
16:49:48.219 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:49:48.219 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:49:48.230 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271649485338734155167380368_3468_r_000000_0' to file:/tmp/local17251393156006128657/markdups12768822936210503266.bam.parts/_temporary/0/task_202505271649485338734155167380368_3468_r_000000
16:49:48.230 INFO  SparkHadoopMapRedUtil - attempt_202505271649485338734155167380368_3468_r_000000_0: Committed. Elapsed time: 0 ms.
16:49:48.231 INFO  Executor - Finished task 0.0 in stage 743.0 (TID 665). 1858 bytes result sent to driver
16:49:48.231 INFO  TaskSetManager - Finished task 0.0 in stage 743.0 (TID 665) in 19 ms on localhost (executor driver) (1/1)
16:49:48.231 INFO  TaskSchedulerImpl - Removed TaskSet 743.0, whose tasks have all completed, from pool 
16:49:48.231 INFO  DAGScheduler - ResultStage 743 (runJob at SparkHadoopWriter.scala:83) finished in 0.028 s
16:49:48.231 INFO  DAGScheduler - Job 418 is finished. Cancelling potential speculative or zombie tasks for this job
16:49:48.231 INFO  TaskSchedulerImpl - Killing all running tasks in stage 743: Stage finished
16:49:48.231 INFO  DAGScheduler - Job 418 finished: runJob at SparkHadoopWriter.scala:83, took 0.038261 s
16:49:48.232 INFO  SparkHadoopWriter - Start to commit write Job job_202505271649485338734155167380368_3468.
16:49:48.235 INFO  SparkHadoopWriter - Write Job job_202505271649485338734155167380368_3468 committed. Elapsed time: 3 ms.
16:49:48.240 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local17251393156006128657/markdups12768822936210503266.bam
16:49:48.243 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local17251393156006128657/markdups12768822936210503266.bam done
16:49:48.243 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local17251393156006128657/markdups12768822936210503266.bam.parts/ to /tmp/local17251393156006128657/markdups12768822936210503266.bam.sbi
16:49:48.245 INFO  IndexFileMerger - Done merging .sbi files
16:49:48.245 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local17251393156006128657/markdups12768822936210503266.bam.parts/ to /tmp/local17251393156006128657/markdups12768822936210503266.bam.bai
16:49:48.248 INFO  IndexFileMerger - Done merging .bai files
16:49:48.248 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:49:48 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:48 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:48 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:49 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:49 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:50 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:50 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:51 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:52 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:53 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:53 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:53 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:54 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:54 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:55 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:57 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:57 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:57 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:58 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:58 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:58 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:49:59 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:01 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:01 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:01 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:02 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:02 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:02 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:04 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:05 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:05 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:05 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:06 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:06 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:06 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:08 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:09 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:09 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:10 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:12 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.03 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:13 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
16:50:13.060 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:50:13.060 INFO  MarkDuplicatesSpark - The Genome Analysis Toolkit (GATK) v113e5d7-SNAPSHOT
16:50:13.060 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:50:13.060 INFO  MarkDuplicatesSpark - Executing as root@1c4c36ffc4f6 on Linux v6.11.0-1014-azure amd64
16:50:13.060 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.12+7-Ubuntu-1ubuntu222.04
16:50:13.060 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:50:13 PM GMT
16:50:13.060 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:50:13.060 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:50:13.060 INFO  MarkDuplicatesSpark - HTSJDK Version: test_cram_wiring-SNAPSHOT
16:50:13.060 INFO  MarkDuplicatesSpark - Picard Version: 3.4.0
16:50:13.060 INFO  MarkDuplicatesSpark - Built for Spark Version: 3.5.0
16:50:13.060 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:50:13.060 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:50:13.060 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:50:13.060 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:50:13.060 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:50:13.060 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:50:13.060 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:50:13.061 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:50:13.061 INFO  MarkDuplicatesSpark - Initializing engine
16:50:13.061 INFO  MarkDuplicatesSpark - Done initializing engine
16:50:13.061 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:50:13.063 INFO  MemoryStore - Block broadcast_1941 stored as values in memory (estimated size 305.5 KiB, free 1916.5 MiB)
16:50:13.069 INFO  MemoryStore - Block broadcast_1941_piece0 stored as bytes in memory (estimated size 64.0 KiB, free 1916.5 MiB)
16:50:13.070 INFO  BlockManagerInfo - Added broadcast_1941_piece0 in memory on localhost:37833 (size: 64.0 KiB, free: 1919.2 MiB)
16:50:13.070 INFO  SparkContext - Created broadcast 1941 from newAPIHadoopFile at PathSplitSource.java:96
16:50:13.090 INFO  MemoryStore - Block broadcast_1942 stored as values in memory (estimated size 305.5 KiB, free 1916.2 MiB)
16:50:13.097 INFO  MemoryStore - Block broadcast_1942_piece0 stored as bytes in memory (estimated size 64.0 KiB, free 1916.1 MiB)
16:50:13.097 INFO  BlockManagerInfo - Added broadcast_1942_piece0 in memory on localhost:37833 (size: 64.0 KiB, free: 1919.1 MiB)
16:50:13.097 INFO  SparkContext - Created broadcast 1942 from newAPIHadoopFile at PathSplitSource.java:96
16:50:13.131 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:50:13.132 INFO  FileInputFormat - Total input files to process : 1
16:50:13.132 INFO  DAGScheduler - Registering RDD 5360 (mapToPair at SparkUtils.java:161) as input to shuffle 297
16:50:13.132 INFO  DAGScheduler - Got job 549 (collect at SparkUtils.java:205) with 1 output partitions
16:50:13.133 INFO  DAGScheduler - Final stage: ResultStage 1205 (collect at SparkUtils.java:205)
16:50:13.133 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1204)
16:50:13.133 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1204)
16:50:13.133 INFO  DAGScheduler - Submitting ShuffleMapStage 1204 (MapPartitionsRDD[5360] at mapToPair at SparkUtils.java:161), which has no missing parents
16:50:13.150 INFO  MemoryStore - Block broadcast_1943 stored as values in memory (estimated size 492.6 KiB, free 1915.6 MiB)
16:50:13.152 INFO  MemoryStore - Block broadcast_1943_piece0 stored as bytes in memory (estimated size 208.9 KiB, free 1915.4 MiB)
16:50:13.152 INFO  BlockManagerInfo - Added broadcast_1943_piece0 in memory on localhost:37833 (size: 208.9 KiB, free: 1918.9 MiB)
16:50:13.152 INFO  SparkContext - Created broadcast 1943 from broadcast at DAGScheduler.scala:1580
16:50:13.153 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1204 (MapPartitionsRDD[5360] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:50:13.153 INFO  TaskSchedulerImpl - Adding task set 1204.0 with 1 tasks resource profile 0
16:50:13.153 INFO  TaskSetManager - Starting task 0.0 in stage 1204.0 (TID 1036) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9674 bytes) 
16:50:13.153 INFO  Executor - Running task 0.0 in stage 1204.0 (TID 1036)
16:50:13.181 INFO  NewHadoopRDD - Input split: file:/gatkCloneMountPoint/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam:0+13412
16:50:13.187 INFO  Executor - Finished task 0.0 in stage 1204.0 (TID 1036). 1148 bytes result sent to driver
16:50:13.187 INFO  TaskSetManager - Finished task 0.0 in stage 1204.0 (TID 1036) in 34 ms on localhost (executor driver) (1/1)
16:50:13.187 INFO  TaskSchedulerImpl - Removed TaskSet 1204.0, whose tasks have all completed, from pool 
16:50:13.187 INFO  DAGScheduler - ShuffleMapStage 1204 (mapToPair at SparkUtils.java:161) finished in 0.054 s
16:50:13.187 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.187 INFO  DAGScheduler - running: HashSet()
16:50:13.187 INFO  DAGScheduler - waiting: HashSet(ResultStage 1205)
16:50:13.187 INFO  DAGScheduler - failed: HashSet()
16:50:13.187 INFO  DAGScheduler - Submitting ResultStage 1205 (MapPartitionsRDD[5363] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:50:13.188 INFO  MemoryStore - Block broadcast_1944 stored as values in memory (estimated size 7.4 KiB, free 1915.4 MiB)
16:50:13.188 INFO  MemoryStore - Block broadcast_1944_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1915.4 MiB)
16:50:13.188 INFO  BlockManagerInfo - Added broadcast_1944_piece0 in memory on localhost:37833 (size: 4.1 KiB, free: 1918.9 MiB)
16:50:13.189 INFO  SparkContext - Created broadcast 1944 from broadcast at DAGScheduler.scala:1580
16:50:13.189 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1205 (MapPartitionsRDD[5363] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:50:13.189 INFO  TaskSchedulerImpl - Adding task set 1205.0 with 1 tasks resource profile 0
16:50:13.189 INFO  TaskSetManager - Starting task 0.0 in stage 1205.0 (TID 1037) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:50:13.189 INFO  Executor - Running task 0.0 in stage 1205.0 (TID 1037)
16:50:13.190 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.190 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.193 INFO  Executor - Finished task 0.0 in stage 1205.0 (TID 1037). 2329 bytes result sent to driver
16:50:13.193 INFO  TaskSetManager - Finished task 0.0 in stage 1205.0 (TID 1037) in 4 ms on localhost (executor driver) (1/1)
16:50:13.193 INFO  TaskSchedulerImpl - Removed TaskSet 1205.0, whose tasks have all completed, from pool 
16:50:13.193 INFO  DAGScheduler - ResultStage 1205 (collect at SparkUtils.java:205) finished in 0.005 s
16:50:13.193 INFO  DAGScheduler - Job 549 is finished. Cancelling potential speculative or zombie tasks for this job
16:50:13.193 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1205: Stage finished
16:50:13.193 INFO  DAGScheduler - Job 549 finished: collect at SparkUtils.java:205, took 0.061884 s
16:50:13.199 INFO  MemoryStore - Block broadcast_1945 stored as values in memory (estimated size 7.8 KiB, free 1915.4 MiB)
16:50:13.199 INFO  MemoryStore - Block broadcast_1945_piece0 stored as bytes in memory (estimated size 540.0 B, free 1915.4 MiB)
16:50:13.199 INFO  BlockManagerInfo - Added broadcast_1945_piece0 in memory on localhost:37833 (size: 540.0 B, free: 1918.9 MiB)
16:50:13.200 INFO  SparkContext - Created broadcast 1945 from broadcast at MarkDuplicatesSparkUtils.java:126
16:50:13.200 INFO  MemoryStore - Block broadcast_1946 stored as values in memory (estimated size 2.6 KiB, free 1915.4 MiB)
16:50:13.200 INFO  MemoryStore - Block broadcast_1946_piece0 stored as bytes in memory (estimated size 209.0 B, free 1915.4 MiB)
16:50:13.200 INFO  BlockManagerInfo - Added broadcast_1946_piece0 in memory on localhost:37833 (size: 209.0 B, free: 1918.9 MiB)
16:50:13.201 INFO  SparkContext - Created broadcast 1946 from broadcast at MarkDuplicatesSparkUtils.java:127
16:50:13.220 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:50:13.220 INFO  DAGScheduler - Registering RDD 5372 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 300
16:50:13.220 INFO  DAGScheduler - Registering RDD 5376 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 299
16:50:13.220 INFO  DAGScheduler - Registering RDD 5380 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 298
16:50:13.221 INFO  DAGScheduler - Got job 550 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:50:13.221 INFO  DAGScheduler - Final stage: ResultStage 1210 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:50:13.221 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1209)
16:50:13.221 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1209)
16:50:13.221 INFO  DAGScheduler - Submitting ShuffleMapStage 1207 (MapPartitionsRDD[5372] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:50:13.222 INFO  MemoryStore - Block broadcast_1947 stored as values in memory (estimated size 57.6 KiB, free 1915.4 MiB)
16:50:13.222 INFO  MemoryStore - Block broadcast_1947_piece0 stored as bytes in memory (estimated size 19.8 KiB, free 1915.3 MiB)
16:50:13.223 INFO  BlockManagerInfo - Added broadcast_1947_piece0 in memory on localhost:37833 (size: 19.8 KiB, free: 1918.9 MiB)
16:50:13.223 INFO  SparkContext - Created broadcast 1947 from broadcast at DAGScheduler.scala:1580
16:50:13.223 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1207 (MapPartitionsRDD[5372] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:50:13.223 INFO  TaskSchedulerImpl - Adding task set 1207.0 with 1 tasks resource profile 0
16:50:13.223 INFO  TaskSetManager - Starting task 0.0 in stage 1207.0 (TID 1038) (localhost, executor driver, partition 0, NODE_LOCAL, 9813 bytes) 
16:50:13.223 INFO  Executor - Running task 0.0 in stage 1207.0 (TID 1038)
16:50:13.225 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.225 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.229 INFO  Executor - Finished task 0.0 in stage 1207.0 (TID 1038). 1922 bytes result sent to driver
16:50:13.229 INFO  TaskSetManager - Finished task 0.0 in stage 1207.0 (TID 1038) in 6 ms on localhost (executor driver) (1/1)
16:50:13.229 INFO  TaskSchedulerImpl - Removed TaskSet 1207.0, whose tasks have all completed, from pool 
16:50:13.229 INFO  DAGScheduler - ShuffleMapStage 1207 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.008 s
16:50:13.229 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.229 INFO  DAGScheduler - running: HashSet()
16:50:13.229 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1208, ShuffleMapStage 1209, ResultStage 1210)
16:50:13.229 INFO  DAGScheduler - failed: HashSet()
16:50:13.230 INFO  DAGScheduler - Submitting ShuffleMapStage 1208 (MapPartitionsRDD[5376] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:50:13.231 INFO  MemoryStore - Block broadcast_1948 stored as values in memory (estimated size 61.7 KiB, free 1915.3 MiB)
16:50:13.232 INFO  MemoryStore - Block broadcast_1948_piece0 stored as bytes in memory (estimated size 22.0 KiB, free 1915.3 MiB)
16:50:13.232 INFO  BlockManagerInfo - Added broadcast_1948_piece0 in memory on localhost:37833 (size: 22.0 KiB, free: 1918.8 MiB)
16:50:13.232 INFO  SparkContext - Created broadcast 1948 from broadcast at DAGScheduler.scala:1580
16:50:13.232 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1208 (MapPartitionsRDD[5376] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:50:13.232 INFO  TaskSchedulerImpl - Adding task set 1208.0 with 1 tasks resource profile 0
16:50:13.232 INFO  TaskSetManager - Starting task 0.0 in stage 1208.0 (TID 1039) (localhost, executor driver, partition 0, NODE_LOCAL, 9288 bytes) 
16:50:13.232 INFO  Executor - Running task 0.0 in stage 1208.0 (TID 1039)
16:50:13.234 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.0 KiB) non-empty blocks including 1 (2.0 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.234 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.240 INFO  Executor - Finished task 0.0 in stage 1208.0 (TID 1039). 1922 bytes result sent to driver
16:50:13.240 INFO  TaskSetManager - Finished task 0.0 in stage 1208.0 (TID 1039) in 8 ms on localhost (executor driver) (1/1)
16:50:13.240 INFO  TaskSchedulerImpl - Removed TaskSet 1208.0, whose tasks have all completed, from pool 
16:50:13.240 INFO  DAGScheduler - ShuffleMapStage 1208 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.010 s
16:50:13.240 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.240 INFO  DAGScheduler - running: HashSet()
16:50:13.240 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1209, ResultStage 1210)
16:50:13.240 INFO  DAGScheduler - failed: HashSet()
16:50:13.240 INFO  DAGScheduler - Submitting ShuffleMapStage 1209 (MapPartitionsRDD[5380] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:50:13.241 INFO  MemoryStore - Block broadcast_1949 stored as values in memory (estimated size 52.4 KiB, free 1915.2 MiB)
16:50:13.242 INFO  MemoryStore - Block broadcast_1949_piece0 stored as bytes in memory (estimated size 17.3 KiB, free 1915.2 MiB)
16:50:13.242 INFO  BlockManagerInfo - Added broadcast_1949_piece0 in memory on localhost:37833 (size: 17.3 KiB, free: 1918.8 MiB)
16:50:13.242 INFO  SparkContext - Created broadcast 1949 from broadcast at DAGScheduler.scala:1580
16:50:13.242 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1209 (MapPartitionsRDD[5380] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:50:13.243 INFO  TaskSchedulerImpl - Adding task set 1209.0 with 1 tasks resource profile 0
16:50:13.243 INFO  TaskSetManager - Starting task 0.0 in stage 1209.0 (TID 1040) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:50:13.243 INFO  Executor - Running task 0.0 in stage 1209.0 (TID 1040)
16:50:13.244 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.244 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.247 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.247 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.249 INFO  Executor - Finished task 0.0 in stage 1209.0 (TID 1040). 1922 bytes result sent to driver
16:50:13.249 INFO  TaskSetManager - Finished task 0.0 in stage 1209.0 (TID 1040) in 6 ms on localhost (executor driver) (1/1)
16:50:13.249 INFO  TaskSchedulerImpl - Removed TaskSet 1209.0, whose tasks have all completed, from pool 
16:50:13.249 INFO  DAGScheduler - ShuffleMapStage 1209 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.009 s
16:50:13.249 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.249 INFO  DAGScheduler - running: HashSet()
16:50:13.249 INFO  DAGScheduler - waiting: HashSet(ResultStage 1210)
16:50:13.249 INFO  DAGScheduler - failed: HashSet()
16:50:13.249 INFO  DAGScheduler - Submitting ResultStage 1210 (MapPartitionsRDD[5382] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:50:13.250 INFO  MemoryStore - Block broadcast_1950 stored as values in memory (estimated size 53.6 KiB, free 1915.1 MiB)
16:50:13.251 INFO  MemoryStore - Block broadcast_1950_piece0 stored as bytes in memory (estimated size 18.0 KiB, free 1915.1 MiB)
16:50:13.251 INFO  BlockManagerInfo - Added broadcast_1950_piece0 in memory on localhost:37833 (size: 18.0 KiB, free: 1918.8 MiB)
16:50:13.251 INFO  SparkContext - Created broadcast 1950 from broadcast at DAGScheduler.scala:1580
16:50:13.251 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1210 (MapPartitionsRDD[5382] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:50:13.251 INFO  TaskSchedulerImpl - Adding task set 1210.0 with 1 tasks resource profile 0
16:50:13.252 INFO  TaskSetManager - Starting task 0.0 in stage 1210.0 (TID 1041) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:50:13.252 INFO  Executor - Running task 0.0 in stage 1210.0 (TID 1041)
16:50:13.253 INFO  ShuffleBlockFetcherIterator - Getting 1 (251.0 B) non-empty blocks including 1 (251.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.253 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.255 INFO  Executor - Finished task 0.0 in stage 1210.0 (TID 1041). 2371 bytes result sent to driver
16:50:13.255 INFO  TaskSetManager - Finished task 0.0 in stage 1210.0 (TID 1041) in 4 ms on localhost (executor driver) (1/1)
16:50:13.255 INFO  TaskSchedulerImpl - Removed TaskSet 1210.0, whose tasks have all completed, from pool 
16:50:13.255 INFO  DAGScheduler - ResultStage 1210 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.006 s
16:50:13.255 INFO  DAGScheduler - Job 550 is finished. Cancelling potential speculative or zombie tasks for this job
16:50:13.255 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1210: Stage finished
16:50:13.255 INFO  DAGScheduler - Job 550 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.035545 s
16:50:13.271 INFO  MemoryStore - Block broadcast_1951 stored as values in memory (estimated size 179.4 KiB, free 1914.9 MiB)
16:50:13.271 INFO  MemoryStore - Block broadcast_1951_piece0 stored as bytes in memory (estimated size 7.9 KiB, free 1914.9 MiB)
16:50:13.271 INFO  BlockManagerInfo - Added broadcast_1951_piece0 in memory on localhost:37833 (size: 7.9 KiB, free: 1918.8 MiB)
16:50:13.272 INFO  SparkContext - Created broadcast 1951 from broadcast at ReadsSparkSink.java:133
16:50:13.275 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:50:13.275 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:50:13.275 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:50:13.286 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:50:13.287 INFO  DAGScheduler - Registering RDD 5384 (mapToPair at SparkUtils.java:161) as input to shuffle 301
16:50:13.287 INFO  DAGScheduler - Got job 551 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:50:13.287 INFO  DAGScheduler - Final stage: ResultStage 1215 (runJob at SparkHadoopWriter.scala:83)
16:50:13.287 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1214)
16:50:13.287 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1214)
16:50:13.287 INFO  DAGScheduler - Submitting ShuffleMapStage 1214 (MapPartitionsRDD[5384] at mapToPair at SparkUtils.java:161), which has no missing parents
16:50:13.288 INFO  MemoryStore - Block broadcast_1952 stored as values in memory (estimated size 51.5 KiB, free 1914.9 MiB)
16:50:13.289 INFO  MemoryStore - Block broadcast_1952_piece0 stored as bytes in memory (estimated size 16.8 KiB, free 1914.9 MiB)
16:50:13.289 INFO  BlockManagerInfo - Added broadcast_1952_piece0 in memory on localhost:37833 (size: 16.8 KiB, free: 1918.8 MiB)
16:50:13.289 INFO  SparkContext - Created broadcast 1952 from broadcast at DAGScheduler.scala:1580
16:50:13.289 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1214 (MapPartitionsRDD[5384] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:50:13.289 INFO  TaskSchedulerImpl - Adding task set 1214.0 with 1 tasks resource profile 0
16:50:13.289 INFO  TaskSetManager - Starting task 0.0 in stage 1214.0 (TID 1042) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:50:13.290 INFO  Executor - Running task 0.0 in stage 1214.0 (TID 1042)
16:50:13.291 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.291 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.294 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.294 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.296 INFO  Executor - Finished task 0.0 in stage 1214.0 (TID 1042). 1922 bytes result sent to driver
16:50:13.296 INFO  TaskSetManager - Finished task 0.0 in stage 1214.0 (TID 1042) in 7 ms on localhost (executor driver) (1/1)
16:50:13.296 INFO  TaskSchedulerImpl - Removed TaskSet 1214.0, whose tasks have all completed, from pool 
16:50:13.296 INFO  DAGScheduler - ShuffleMapStage 1214 (mapToPair at SparkUtils.java:161) finished in 0.009 s
16:50:13.296 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.296 INFO  DAGScheduler - running: HashSet()
16:50:13.296 INFO  DAGScheduler - waiting: HashSet(ResultStage 1215)
16:50:13.296 INFO  DAGScheduler - failed: HashSet()
16:50:13.296 INFO  DAGScheduler - Submitting ResultStage 1215 (MapPartitionsRDD[5390] at saveAsTextFile at SamSink.java:65), which has no missing parents
16:50:13.303 INFO  MemoryStore - Block broadcast_1953 stored as values in memory (estimated size 196.3 KiB, free 1914.7 MiB)
16:50:13.305 INFO  MemoryStore - Block broadcast_1953_piece0 stored as bytes in memory (estimated size 80.6 KiB, free 1914.6 MiB)
16:50:13.305 INFO  BlockManagerInfo - Added broadcast_1953_piece0 in memory on localhost:37833 (size: 80.6 KiB, free: 1918.7 MiB)
16:50:13.305 INFO  SparkContext - Created broadcast 1953 from broadcast at DAGScheduler.scala:1580
16:50:13.305 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1215 (MapPartitionsRDD[5390] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
16:50:13.305 INFO  TaskSchedulerImpl - Adding task set 1215.0 with 1 tasks resource profile 0
16:50:13.305 INFO  TaskSetManager - Starting task 0.0 in stage 1215.0 (TID 1043) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:50:13.305 INFO  Executor - Running task 0.0 in stage 1215.0 (TID 1043)
16:50:13.311 INFO  ShuffleBlockFetcherIterator - Getting 1 (14.8 KiB) non-empty blocks including 1 (14.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.311 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.314 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:50:13.314 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:50:13.314 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:50:13.321 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271650136266807371417881767_5390_m_000000_0' to file:/tmp/MarkDups2202294482584128653/MarkDups.sam.parts/_temporary/0/task_202505271650136266807371417881767_5390_m_000000
16:50:13.321 INFO  SparkHadoopMapRedUtil - attempt_202505271650136266807371417881767_5390_m_000000_0: Committed. Elapsed time: 0 ms.
16:50:13.321 INFO  Executor - Finished task 0.0 in stage 1215.0 (TID 1043). 1858 bytes result sent to driver
16:50:13.321 INFO  TaskSetManager - Finished task 0.0 in stage 1215.0 (TID 1043) in 16 ms on localhost (executor driver) (1/1)
16:50:13.321 INFO  TaskSchedulerImpl - Removed TaskSet 1215.0, whose tasks have all completed, from pool 
16:50:13.322 INFO  DAGScheduler - ResultStage 1215 (runJob at SparkHadoopWriter.scala:83) finished in 0.025 s
16:50:13.322 INFO  DAGScheduler - Job 551 is finished. Cancelling potential speculative or zombie tasks for this job
16:50:13.322 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1215: Stage finished
16:50:13.322 INFO  DAGScheduler - Job 551 finished: runJob at SparkHadoopWriter.scala:83, took 0.035379 s
16:50:13.322 INFO  SparkHadoopWriter - Start to commit write Job job_202505271650136266807371417881767_5390.
16:50:13.325 INFO  SparkHadoopWriter - Write Job job_202505271650136266807371417881767_5390 committed. Elapsed time: 2 ms.
16:50:13.329 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups2202294482584128653/MarkDups.sam
16:50:13.331 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups2202294482584128653/MarkDups.sam done
16:50:13.331 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:50:13 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
16:50:13.346 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:50:13.346 INFO  MarkDuplicatesSpark - The Genome Analysis Toolkit (GATK) v113e5d7-SNAPSHOT
16:50:13.346 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:50:13.347 INFO  MarkDuplicatesSpark - Executing as root@1c4c36ffc4f6 on Linux v6.11.0-1014-azure amd64
16:50:13.347 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.12+7-Ubuntu-1ubuntu222.04
16:50:13.347 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:50:13 PM GMT
16:50:13.347 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:50:13.347 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:50:13.347 INFO  MarkDuplicatesSpark - HTSJDK Version: test_cram_wiring-SNAPSHOT
16:50:13.347 INFO  MarkDuplicatesSpark - Picard Version: 3.4.0
16:50:13.347 INFO  MarkDuplicatesSpark - Built for Spark Version: 3.5.0
16:50:13.347 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:50:13.347 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:50:13.347 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:50:13.347 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:50:13.347 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:50:13.347 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:50:13.347 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:50:13.347 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:50:13.347 INFO  MarkDuplicatesSpark - Initializing engine
16:50:13.347 INFO  MarkDuplicatesSpark - Done initializing engine
16:50:13.347 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:50:13.349 INFO  MemoryStore - Block broadcast_1954 stored as values in memory (estimated size 305.5 KiB, free 1914.3 MiB)
16:50:13.356 INFO  MemoryStore - Block broadcast_1954_piece0 stored as bytes in memory (estimated size 64.0 KiB, free 1914.2 MiB)
16:50:13.356 INFO  BlockManagerInfo - Added broadcast_1954_piece0 in memory on localhost:37833 (size: 64.0 KiB, free: 1918.6 MiB)
16:50:13.356 INFO  SparkContext - Created broadcast 1954 from newAPIHadoopFile at PathSplitSource.java:96
16:50:13.376 INFO  MemoryStore - Block broadcast_1955 stored as values in memory (estimated size 305.5 KiB, free 1913.9 MiB)
16:50:13.383 INFO  MemoryStore - Block broadcast_1955_piece0 stored as bytes in memory (estimated size 64.0 KiB, free 1913.9 MiB)
16:50:13.383 INFO  BlockManagerInfo - Added broadcast_1955_piece0 in memory on localhost:37833 (size: 64.0 KiB, free: 1918.6 MiB)
16:50:13.383 INFO  SparkContext - Created broadcast 1955 from newAPIHadoopFile at PathSplitSource.java:96
16:50:13.415 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:50:13.415 INFO  FileInputFormat - Total input files to process : 1
16:50:13.415 INFO  DAGScheduler - Registering RDD 5404 (mapToPair at SparkUtils.java:161) as input to shuffle 302
16:50:13.415 INFO  DAGScheduler - Got job 552 (collect at SparkUtils.java:205) with 1 output partitions
16:50:13.415 INFO  DAGScheduler - Final stage: ResultStage 1217 (collect at SparkUtils.java:205)
16:50:13.415 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1216)
16:50:13.415 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1216)
16:50:13.416 INFO  DAGScheduler - Submitting ShuffleMapStage 1216 (MapPartitionsRDD[5404] at mapToPair at SparkUtils.java:161), which has no missing parents
16:50:13.432 INFO  MemoryStore - Block broadcast_1956 stored as values in memory (estimated size 456.1 KiB, free 1913.4 MiB)
16:50:13.435 INFO  MemoryStore - Block broadcast_1956_piece0 stored as bytes in memory (estimated size 200.0 KiB, free 1913.2 MiB)
16:50:13.435 INFO  BlockManagerInfo - Added broadcast_1956_piece0 in memory on localhost:37833 (size: 200.0 KiB, free: 1918.4 MiB)
16:50:13.435 INFO  SparkContext - Created broadcast 1956 from broadcast at DAGScheduler.scala:1580
16:50:13.435 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1216 (MapPartitionsRDD[5404] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:50:13.435 INFO  TaskSchedulerImpl - Adding task set 1216.0 with 1 tasks resource profile 0
16:50:13.435 INFO  TaskSetManager - Starting task 0.0 in stage 1216.0 (TID 1044) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9653 bytes) 
16:50:13.435 INFO  Executor - Running task 0.0 in stage 1216.0 (TID 1044)
16:50:13.464 INFO  NewHadoopRDD - Input split: file:/gatkCloneMountPoint/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam:0+678
16:50:13.468 INFO  Executor - Finished task 0.0 in stage 1216.0 (TID 1044). 1148 bytes result sent to driver
16:50:13.469 INFO  TaskSetManager - Finished task 0.0 in stage 1216.0 (TID 1044) in 34 ms on localhost (executor driver) (1/1)
16:50:13.469 INFO  TaskSchedulerImpl - Removed TaskSet 1216.0, whose tasks have all completed, from pool 
16:50:13.469 INFO  DAGScheduler - ShuffleMapStage 1216 (mapToPair at SparkUtils.java:161) finished in 0.053 s
16:50:13.469 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.469 INFO  DAGScheduler - running: HashSet()
16:50:13.469 INFO  DAGScheduler - waiting: HashSet(ResultStage 1217)
16:50:13.469 INFO  DAGScheduler - failed: HashSet()
16:50:13.469 INFO  DAGScheduler - Submitting ResultStage 1217 (MapPartitionsRDD[5407] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:50:13.469 INFO  MemoryStore - Block broadcast_1957 stored as values in memory (estimated size 7.4 KiB, free 1913.2 MiB)
16:50:13.470 INFO  MemoryStore - Block broadcast_1957_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1913.2 MiB)
16:50:13.470 INFO  BlockManagerInfo - Added broadcast_1957_piece0 in memory on localhost:37833 (size: 4.1 KiB, free: 1918.4 MiB)
16:50:13.470 INFO  SparkContext - Created broadcast 1957 from broadcast at DAGScheduler.scala:1580
16:50:13.470 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1217 (MapPartitionsRDD[5407] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:50:13.470 INFO  TaskSchedulerImpl - Adding task set 1217.0 with 1 tasks resource profile 0
16:50:13.471 INFO  TaskSetManager - Starting task 0.0 in stage 1217.0 (TID 1045) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:50:13.471 INFO  Executor - Running task 0.0 in stage 1217.0 (TID 1045)
16:50:13.472 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.472 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.473 INFO  Executor - Finished task 0.0 in stage 1217.0 (TID 1045). 2269 bytes result sent to driver
16:50:13.473 INFO  TaskSetManager - Finished task 0.0 in stage 1217.0 (TID 1045) in 2 ms on localhost (executor driver) (1/1)
16:50:13.473 INFO  TaskSchedulerImpl - Removed TaskSet 1217.0, whose tasks have all completed, from pool 
16:50:13.473 INFO  DAGScheduler - ResultStage 1217 (collect at SparkUtils.java:205) finished in 0.004 s
16:50:13.473 INFO  DAGScheduler - Job 552 is finished. Cancelling potential speculative or zombie tasks for this job
16:50:13.473 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1217: Stage finished
16:50:13.474 INFO  DAGScheduler - Job 552 finished: collect at SparkUtils.java:205, took 0.058973 s
16:50:13.479 INFO  MemoryStore - Block broadcast_1958 stored as values in memory (estimated size 136.0 B, free 1913.2 MiB)
16:50:13.479 INFO  MemoryStore - Block broadcast_1958_piece0 stored as bytes in memory (estimated size 24.0 B, free 1913.2 MiB)
16:50:13.479 INFO  BlockManagerInfo - Added broadcast_1958_piece0 in memory on localhost:37833 (size: 24.0 B, free: 1918.4 MiB)
16:50:13.479 INFO  SparkContext - Created broadcast 1958 from broadcast at MarkDuplicatesSparkUtils.java:126
16:50:13.480 INFO  MemoryStore - Block broadcast_1959 stored as values in memory (estimated size 136.0 B, free 1913.2 MiB)
16:50:13.480 INFO  MemoryStore - Block broadcast_1959_piece0 stored as bytes in memory (estimated size 21.0 B, free 1913.2 MiB)
16:50:13.480 INFO  BlockManagerInfo - Added broadcast_1959_piece0 in memory on localhost:37833 (size: 21.0 B, free: 1918.4 MiB)
16:50:13.480 INFO  SparkContext - Created broadcast 1959 from broadcast at MarkDuplicatesSparkUtils.java:127
16:50:13.491 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:50:13.492 INFO  DAGScheduler - Registering RDD 5416 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 305
16:50:13.492 INFO  DAGScheduler - Registering RDD 5420 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 304
16:50:13.492 INFO  DAGScheduler - Registering RDD 5424 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 303
16:50:13.492 INFO  DAGScheduler - Got job 553 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:50:13.492 INFO  DAGScheduler - Final stage: ResultStage 1222 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:50:13.492 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1221)
16:50:13.492 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1221)
16:50:13.492 INFO  DAGScheduler - Submitting ShuffleMapStage 1219 (MapPartitionsRDD[5416] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:50:13.493 INFO  MemoryStore - Block broadcast_1960 stored as values in memory (estimated size 21.1 KiB, free 1913.2 MiB)
16:50:13.493 INFO  MemoryStore - Block broadcast_1960_piece0 stored as bytes in memory (estimated size 10.9 KiB, free 1913.2 MiB)
16:50:13.493 INFO  BlockManagerInfo - Added broadcast_1960_piece0 in memory on localhost:37833 (size: 10.9 KiB, free: 1918.4 MiB)
16:50:13.493 INFO  SparkContext - Created broadcast 1960 from broadcast at DAGScheduler.scala:1580
16:50:13.494 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1219 (MapPartitionsRDD[5416] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:50:13.494 INFO  TaskSchedulerImpl - Adding task set 1219.0 with 1 tasks resource profile 0
16:50:13.494 INFO  TaskSetManager - Starting task 0.0 in stage 1219.0 (TID 1046) (localhost, executor driver, partition 0, NODE_LOCAL, 9813 bytes) 
16:50:13.494 INFO  Executor - Running task 0.0 in stage 1219.0 (TID 1046)
16:50:13.495 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.495 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.498 INFO  Executor - Finished task 0.0 in stage 1219.0 (TID 1046). 1879 bytes result sent to driver
16:50:13.498 INFO  TaskSetManager - Finished task 0.0 in stage 1219.0 (TID 1046) in 4 ms on localhost (executor driver) (1/1)
16:50:13.498 INFO  TaskSchedulerImpl - Removed TaskSet 1219.0, whose tasks have all completed, from pool 
16:50:13.498 INFO  DAGScheduler - ShuffleMapStage 1219 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.006 s
16:50:13.498 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.498 INFO  DAGScheduler - running: HashSet()
16:50:13.498 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1220, ShuffleMapStage 1221, ResultStage 1222)
16:50:13.498 INFO  DAGScheduler - failed: HashSet()
16:50:13.498 INFO  DAGScheduler - Submitting ShuffleMapStage 1220 (MapPartitionsRDD[5420] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:50:13.499 INFO  MemoryStore - Block broadcast_1961 stored as values in memory (estimated size 25.2 KiB, free 1913.2 MiB)
16:50:13.499 INFO  MemoryStore - Block broadcast_1961_piece0 stored as bytes in memory (estimated size 13.1 KiB, free 1913.2 MiB)
16:50:13.499 INFO  BlockManagerInfo - Added broadcast_1961_piece0 in memory on localhost:37833 (size: 13.1 KiB, free: 1918.4 MiB)
16:50:13.499 INFO  SparkContext - Created broadcast 1961 from broadcast at DAGScheduler.scala:1580
16:50:13.500 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1220 (MapPartitionsRDD[5420] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:50:13.500 INFO  TaskSchedulerImpl - Adding task set 1220.0 with 1 tasks resource profile 0
16:50:13.500 INFO  TaskSetManager - Starting task 0.0 in stage 1220.0 (TID 1047) (localhost, executor driver, partition 0, NODE_LOCAL, 9288 bytes) 
16:50:13.500 INFO  Executor - Running task 0.0 in stage 1220.0 (TID 1047)
16:50:13.501 INFO  ShuffleBlockFetcherIterator - Getting 1 (106.0 B) non-empty blocks including 1 (106.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.501 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.504 INFO  Executor - Finished task 0.0 in stage 1220.0 (TID 1047). 1879 bytes result sent to driver
16:50:13.504 INFO  TaskSetManager - Finished task 0.0 in stage 1220.0 (TID 1047) in 4 ms on localhost (executor driver) (1/1)
16:50:13.505 INFO  TaskSchedulerImpl - Removed TaskSet 1220.0, whose tasks have all completed, from pool 
16:50:13.505 INFO  DAGScheduler - ShuffleMapStage 1220 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.007 s
16:50:13.505 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.505 INFO  DAGScheduler - running: HashSet()
16:50:13.505 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1221, ResultStage 1222)
16:50:13.505 INFO  DAGScheduler - failed: HashSet()
16:50:13.505 INFO  DAGScheduler - Submitting ShuffleMapStage 1221 (MapPartitionsRDD[5424] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:50:13.505 INFO  MemoryStore - Block broadcast_1962 stored as values in memory (estimated size 15.9 KiB, free 1913.1 MiB)
16:50:13.506 INFO  MemoryStore - Block broadcast_1962_piece0 stored as bytes in memory (estimated size 8.3 KiB, free 1913.1 MiB)
16:50:13.506 INFO  BlockManagerInfo - Added broadcast_1962_piece0 in memory on localhost:37833 (size: 8.3 KiB, free: 1918.4 MiB)
16:50:13.506 INFO  SparkContext - Created broadcast 1962 from broadcast at DAGScheduler.scala:1580
16:50:13.506 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1221 (MapPartitionsRDD[5424] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:50:13.506 INFO  TaskSchedulerImpl - Adding task set 1221.0 with 1 tasks resource profile 0
16:50:13.507 INFO  TaskSetManager - Starting task 0.0 in stage 1221.0 (TID 1048) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:50:13.507 INFO  Executor - Running task 0.0 in stage 1221.0 (TID 1048)
16:50:13.508 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.508 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.510 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.510 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.511 INFO  Executor - Finished task 0.0 in stage 1221.0 (TID 1048). 1922 bytes result sent to driver
16:50:13.511 INFO  TaskSetManager - Finished task 0.0 in stage 1221.0 (TID 1048) in 5 ms on localhost (executor driver) (1/1)
16:50:13.511 INFO  TaskSchedulerImpl - Removed TaskSet 1221.0, whose tasks have all completed, from pool 
16:50:13.511 INFO  DAGScheduler - ShuffleMapStage 1221 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.006 s
16:50:13.512 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.512 INFO  DAGScheduler - running: HashSet()
16:50:13.512 INFO  DAGScheduler - waiting: HashSet(ResultStage 1222)
16:50:13.512 INFO  DAGScheduler - failed: HashSet()
16:50:13.512 INFO  DAGScheduler - Submitting ResultStage 1222 (MapPartitionsRDD[5426] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:50:13.512 INFO  MemoryStore - Block broadcast_1963 stored as values in memory (estimated size 17.1 KiB, free 1913.1 MiB)
16:50:13.513 INFO  MemoryStore - Block broadcast_1963_piece0 stored as bytes in memory (estimated size 9.0 KiB, free 1913.1 MiB)
16:50:13.513 INFO  BlockManagerInfo - Added broadcast_1963_piece0 in memory on localhost:37833 (size: 9.0 KiB, free: 1918.3 MiB)
16:50:13.513 INFO  SparkContext - Created broadcast 1963 from broadcast at DAGScheduler.scala:1580
16:50:13.513 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1222 (MapPartitionsRDD[5426] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:50:13.513 INFO  TaskSchedulerImpl - Adding task set 1222.0 with 1 tasks resource profile 0
16:50:13.513 INFO  TaskSetManager - Starting task 0.0 in stage 1222.0 (TID 1049) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:50:13.513 INFO  Executor - Running task 0.0 in stage 1222.0 (TID 1049)
16:50:13.514 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.514 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.515 INFO  Executor - Finished task 0.0 in stage 1222.0 (TID 1049). 1887 bytes result sent to driver
16:50:13.516 INFO  TaskSetManager - Finished task 0.0 in stage 1222.0 (TID 1049) in 3 ms on localhost (executor driver) (1/1)
16:50:13.516 INFO  TaskSchedulerImpl - Removed TaskSet 1222.0, whose tasks have all completed, from pool 
16:50:13.516 INFO  DAGScheduler - ResultStage 1222 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.004 s
16:50:13.516 INFO  DAGScheduler - Job 553 is finished. Cancelling potential speculative or zombie tasks for this job
16:50:13.516 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1222: Stage finished
16:50:13.516 INFO  DAGScheduler - Job 553 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.024635 s
16:50:13.525 INFO  MemoryStore - Block broadcast_1964 stored as values in memory (estimated size 5.6 KiB, free 1913.1 MiB)
16:50:13.525 INFO  MemoryStore - Block broadcast_1964_piece0 stored as bytes in memory (estimated size 500.0 B, free 1913.1 MiB)
16:50:13.525 INFO  BlockManagerInfo - Added broadcast_1964_piece0 in memory on localhost:37833 (size: 500.0 B, free: 1918.3 MiB)
16:50:13.526 INFO  SparkContext - Created broadcast 1964 from broadcast at ReadsSparkSink.java:133
16:50:13.529 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:50:13.529 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:50:13.529 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:50:13.540 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:50:13.540 INFO  DAGScheduler - Registering RDD 5428 (mapToPair at SparkUtils.java:161) as input to shuffle 306
16:50:13.540 INFO  DAGScheduler - Got job 554 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:50:13.540 INFO  DAGScheduler - Final stage: ResultStage 1227 (runJob at SparkHadoopWriter.scala:83)
16:50:13.540 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1226)
16:50:13.540 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1226)
16:50:13.540 INFO  DAGScheduler - Submitting ShuffleMapStage 1226 (MapPartitionsRDD[5428] at mapToPair at SparkUtils.java:161), which has no missing parents
16:50:13.541 INFO  MemoryStore - Block broadcast_1965 stored as values in memory (estimated size 15.0 KiB, free 1913.1 MiB)
16:50:13.541 INFO  MemoryStore - Block broadcast_1965_piece0 stored as bytes in memory (estimated size 7.8 KiB, free 1913.1 MiB)
16:50:13.541 INFO  BlockManagerInfo - Added broadcast_1965_piece0 in memory on localhost:37833 (size: 7.8 KiB, free: 1918.3 MiB)
16:50:13.542 INFO  SparkContext - Created broadcast 1965 from broadcast at DAGScheduler.scala:1580
16:50:13.542 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1226 (MapPartitionsRDD[5428] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:50:13.542 INFO  TaskSchedulerImpl - Adding task set 1226.0 with 1 tasks resource profile 0
16:50:13.542 INFO  TaskSetManager - Starting task 0.0 in stage 1226.0 (TID 1050) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:50:13.542 INFO  Executor - Running task 0.0 in stage 1226.0 (TID 1050)
16:50:13.543 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.543 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.545 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.545 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.546 INFO  Executor - Finished task 0.0 in stage 1226.0 (TID 1050). 1879 bytes result sent to driver
16:50:13.546 INFO  TaskSetManager - Finished task 0.0 in stage 1226.0 (TID 1050) in 4 ms on localhost (executor driver) (1/1)
16:50:13.546 INFO  TaskSchedulerImpl - Removed TaskSet 1226.0, whose tasks have all completed, from pool 
16:50:13.546 INFO  DAGScheduler - ShuffleMapStage 1226 (mapToPair at SparkUtils.java:161) finished in 0.005 s
16:50:13.546 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.546 INFO  DAGScheduler - running: HashSet()
16:50:13.546 INFO  DAGScheduler - waiting: HashSet(ResultStage 1227)
16:50:13.546 INFO  DAGScheduler - failed: HashSet()
16:50:13.546 INFO  DAGScheduler - Submitting ResultStage 1227 (MapPartitionsRDD[5434] at saveAsTextFile at SamSink.java:65), which has no missing parents
16:50:13.552 INFO  MemoryStore - Block broadcast_1966 stored as values in memory (estimated size 159.8 KiB, free 1912.9 MiB)
16:50:13.553 INFO  MemoryStore - Block broadcast_1966_piece0 stored as bytes in memory (estimated size 71.6 KiB, free 1912.9 MiB)
16:50:13.553 INFO  BlockManagerInfo - Added broadcast_1966_piece0 in memory on localhost:37833 (size: 71.6 KiB, free: 1918.3 MiB)
16:50:13.554 INFO  SparkContext - Created broadcast 1966 from broadcast at DAGScheduler.scala:1580
16:50:13.554 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1227 (MapPartitionsRDD[5434] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
16:50:13.554 INFO  TaskSchedulerImpl - Adding task set 1227.0 with 1 tasks resource profile 0
16:50:13.554 INFO  TaskSetManager - Starting task 0.0 in stage 1227.0 (TID 1051) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:50:13.554 INFO  Executor - Running task 0.0 in stage 1227.0 (TID 1051)
16:50:13.557 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.557 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.558 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:50:13.558 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:50:13.559 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:50:13.565 INFO  FileOutputCommitter - Saved output of task 'attempt_20250527165013140784430702453953_5434_m_000000_0' to file:/tmp/MarkDups6674715274264137310/MarkDups.sam.parts/_temporary/0/task_20250527165013140784430702453953_5434_m_000000
16:50:13.565 INFO  SparkHadoopMapRedUtil - attempt_20250527165013140784430702453953_5434_m_000000_0: Committed. Elapsed time: 0 ms.
16:50:13.565 INFO  Executor - Finished task 0.0 in stage 1227.0 (TID 1051). 1858 bytes result sent to driver
16:50:13.565 INFO  TaskSetManager - Finished task 0.0 in stage 1227.0 (TID 1051) in 11 ms on localhost (executor driver) (1/1)
16:50:13.565 INFO  TaskSchedulerImpl - Removed TaskSet 1227.0, whose tasks have all completed, from pool 
16:50:13.565 INFO  DAGScheduler - ResultStage 1227 (runJob at SparkHadoopWriter.scala:83) finished in 0.018 s
16:50:13.565 INFO  DAGScheduler - Job 554 is finished. Cancelling potential speculative or zombie tasks for this job
16:50:13.565 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1227: Stage finished
16:50:13.566 INFO  DAGScheduler - Job 554 finished: runJob at SparkHadoopWriter.scala:83, took 0.025633 s
16:50:13.566 INFO  SparkHadoopWriter - Start to commit write Job job_20250527165013140784430702453953_5434.
16:50:13.569 INFO  SparkHadoopWriter - Write Job job_20250527165013140784430702453953_5434 committed. Elapsed time: 2 ms.
16:50:13.573 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups6674715274264137310/MarkDups.sam
16:50:13.575 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups6674715274264137310/MarkDups.sam done
16:50:13.575 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:50:13 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
16:50:13.596 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:50:13.596 INFO  MarkDuplicatesSpark - The Genome Analysis Toolkit (GATK) v113e5d7-SNAPSHOT
16:50:13.596 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
16:50:13.596 INFO  MarkDuplicatesSpark - Executing as root@1c4c36ffc4f6 on Linux v6.11.0-1014-azure amd64
16:50:13.596 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.12+7-Ubuntu-1ubuntu222.04
16:50:13.596 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 4:50:13 PM GMT
16:50:13.596 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:50:13.596 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
16:50:13.597 INFO  MarkDuplicatesSpark - HTSJDK Version: test_cram_wiring-SNAPSHOT
16:50:13.597 INFO  MarkDuplicatesSpark - Picard Version: 3.4.0
16:50:13.597 INFO  MarkDuplicatesSpark - Built for Spark Version: 3.5.0
16:50:13.597 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
16:50:13.597 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
16:50:13.597 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
16:50:13.597 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
16:50:13.597 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
16:50:13.597 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
16:50:13.597 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
16:50:13.597 INFO  MarkDuplicatesSpark - Requester pays: disabled
16:50:13.597 INFO  MarkDuplicatesSpark - Initializing engine
16:50:13.597 INFO  MarkDuplicatesSpark - Done initializing engine
16:50:13.597 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
16:50:13.599 INFO  MemoryStore - Block broadcast_1967 stored as values in memory (estimated size 305.5 KiB, free 1912.6 MiB)
16:50:13.607 INFO  BlockManagerInfo - Removed broadcast_1943_piece0 on localhost:37833 in memory (size: 208.9 KiB, free: 1918.5 MiB)
16:50:13.607 INFO  BlockManagerInfo - Removed broadcast_1950_piece0 on localhost:37833 in memory (size: 18.0 KiB, free: 1918.5 MiB)
16:50:13.607 INFO  BlockManagerInfo - Removed broadcast_1954_piece0 on localhost:37833 in memory (size: 64.0 KiB, free: 1918.6 MiB)
16:50:13.608 INFO  BlockManagerInfo - Removed broadcast_1925_piece0 on localhost:37833 in memory (size: 663.0 B, free: 1918.6 MiB)
16:50:13.608 INFO  BlockManagerInfo - Removed broadcast_1916_piece0 on localhost:37833 in memory (size: 64.0 KiB, free: 1918.6 MiB)
16:50:13.609 INFO  BlockManagerInfo - Removed broadcast_1942_piece0 on localhost:37833 in memory (size: 64.0 KiB, free: 1918.7 MiB)
16:50:13.609 INFO  BlockManagerInfo - Removed broadcast_1957_piece0 on localhost:37833 in memory (size: 4.1 KiB, free: 1918.7 MiB)
16:50:13.609 INFO  BlockManagerInfo - Removed broadcast_1940_piece0 on localhost:37833 in memory (size: 71.6 KiB, free: 1918.8 MiB)
16:50:13.610 INFO  BlockManagerInfo - Removed broadcast_1963_piece0 on localhost:37833 in memory (size: 9.0 KiB, free: 1918.8 MiB)
16:50:13.610 INFO  BlockManagerInfo - Removed broadcast_1961_piece0 on localhost:37833 in memory (size: 13.1 KiB, free: 1918.8 MiB)
16:50:13.611 INFO  BlockManagerInfo - Removed broadcast_1928_piece0 on localhost:37833 in memory (size: 64.0 KiB, free: 1918.8 MiB)
16:50:13.611 INFO  BlockManagerInfo - Removed broadcast_1920_piece0 on localhost:37833 in memory (size: 31.0 B, free: 1918.8 MiB)
16:50:13.613 INFO  BlockManagerInfo - Removed broadcast_1959_piece0 on localhost:37833 in memory (size: 21.0 B, free: 1918.8 MiB)
16:50:13.613 INFO  BlockManagerInfo - Removed broadcast_1941_piece0 on localhost:37833 in memory (size: 64.0 KiB, free: 1918.9 MiB)
16:50:13.613 INFO  BlockManagerInfo - Removed broadcast_1949_piece0 on localhost:37833 in memory (size: 17.3 KiB, free: 1918.9 MiB)
16:50:13.614 INFO  BlockManagerInfo - Removed broadcast_1937_piece0 on localhost:37833 in memory (size: 9.1 KiB, free: 1918.9 MiB)
16:50:13.614 INFO  BlockManagerInfo - Removed broadcast_1934_piece0 on localhost:37833 in memory (size: 11.2 KiB, free: 1918.9 MiB)
16:50:13.614 INFO  BlockManagerInfo - Removed broadcast_1966_piece0 on localhost:37833 in memory (size: 71.6 KiB, free: 1919.0 MiB)
16:50:13.615 INFO  MemoryStore - Block broadcast_1967_piece0 stored as bytes in memory (estimated size 64.1 KiB, free 1915.7 MiB)
16:50:13.615 INFO  BlockManagerInfo - Added broadcast_1967_piece0 in memory on localhost:37833 (size: 64.1 KiB, free: 1918.9 MiB)
16:50:13.615 INFO  SparkContext - Created broadcast 1967 from newAPIHadoopFile at PathSplitSource.java:96
16:50:13.615 INFO  BlockManagerInfo - Removed broadcast_1946_piece0 on localhost:37833 in memory (size: 209.0 B, free: 1918.9 MiB)
16:50:13.615 INFO  BlockManagerInfo - Removed broadcast_1948_piece0 on localhost:37833 in memory (size: 22.0 KiB, free: 1919.0 MiB)
16:50:13.616 INFO  BlockManagerInfo - Removed broadcast_1919_piece0 on localhost:37833 in memory (size: 19.0 B, free: 1919.0 MiB)
16:50:13.616 INFO  BlockManagerInfo - Removed broadcast_1952_piece0 on localhost:37833 in memory (size: 16.8 KiB, free: 1919.0 MiB)
16:50:13.617 INFO  BlockManagerInfo - Removed broadcast_1955_piece0 on localhost:37833 in memory (size: 64.0 KiB, free: 1919.0 MiB)
16:50:13.617 INFO  BlockManagerInfo - Removed broadcast_1936_piece0 on localhost:37833 in memory (size: 8.5 KiB, free: 1919.1 MiB)
16:50:13.618 INFO  BlockManagerInfo - Removed broadcast_1944_piece0 on localhost:37833 in memory (size: 4.1 KiB, free: 1919.1 MiB)
16:50:13.618 INFO  BlockManagerInfo - Removed broadcast_1960_piece0 on localhost:37833 in memory (size: 10.9 KiB, free: 1919.1 MiB)
16:50:13.619 INFO  BlockManagerInfo - Removed broadcast_1929_piece0 on localhost:37833 in memory (size: 64.0 KiB, free: 1919.1 MiB)
16:50:13.619 INFO  BlockManagerInfo - Removed broadcast_1939_piece0 on localhost:37833 in memory (size: 8.0 KiB, free: 1919.1 MiB)
16:50:13.619 INFO  BlockManagerInfo - Removed broadcast_1951_piece0 on localhost:37833 in memory (size: 7.9 KiB, free: 1919.1 MiB)
16:50:13.619 INFO  BlockManagerInfo - Removed broadcast_1962_piece0 on localhost:37833 in memory (size: 8.3 KiB, free: 1919.2 MiB)
16:50:13.620 INFO  BlockManagerInfo - Removed broadcast_1931_piece0 on localhost:37833 in memory (size: 4.1 KiB, free: 1919.2 MiB)
16:50:13.620 INFO  BlockManagerInfo - Removed broadcast_1956_piece0 on localhost:37833 in memory (size: 200.0 KiB, free: 1919.4 MiB)
16:50:13.621 INFO  BlockManagerInfo - Removed broadcast_1958_piece0 on localhost:37833 in memory (size: 24.0 B, free: 1919.4 MiB)
16:50:13.621 INFO  BlockManagerInfo - Removed broadcast_1933_piece0 on localhost:37833 in memory (size: 31.0 B, free: 1919.4 MiB)
16:50:13.621 INFO  BlockManagerInfo - Removed broadcast_1953_piece0 on localhost:37833 in memory (size: 80.6 KiB, free: 1919.4 MiB)
16:50:13.622 INFO  BlockManagerInfo - Removed broadcast_1945_piece0 on localhost:37833 in memory (size: 540.0 B, free: 1919.4 MiB)
16:50:13.622 INFO  BlockManagerInfo - Removed broadcast_1965_piece0 on localhost:37833 in memory (size: 7.8 KiB, free: 1919.4 MiB)
16:50:13.622 INFO  BlockManagerInfo - Removed broadcast_1938_piece0 on localhost:37833 in memory (size: 663.0 B, free: 1919.4 MiB)
16:50:13.622 INFO  BlockManagerInfo - Removed broadcast_1935_piece0 on localhost:37833 in memory (size: 13.2 KiB, free: 1919.5 MiB)
16:50:13.623 INFO  BlockManagerInfo - Removed broadcast_1930_piece0 on localhost:37833 in memory (size: 200.3 KiB, free: 1919.6 MiB)
16:50:13.623 INFO  BlockManagerInfo - Removed broadcast_1964_piece0 on localhost:37833 in memory (size: 500.0 B, free: 1919.6 MiB)
16:50:13.624 INFO  BlockManagerInfo - Removed broadcast_1932_piece0 on localhost:37833 in memory (size: 19.0 B, free: 1919.6 MiB)
16:50:13.624 INFO  BlockManagerInfo - Removed broadcast_1947_piece0 on localhost:37833 in memory (size: 19.8 KiB, free: 1919.7 MiB)
16:50:13.625 INFO  BlockManagerInfo - Removed broadcast_1927_piece0 on localhost:37833 in memory (size: 71.6 KiB, free: 1919.7 MiB)
16:50:13.645 INFO  MemoryStore - Block broadcast_1968 stored as values in memory (estimated size 305.5 KiB, free 1918.5 MiB)
16:50:13.651 INFO  MemoryStore - Block broadcast_1968_piece0 stored as bytes in memory (estimated size 64.1 KiB, free 1918.5 MiB)
16:50:13.651 INFO  BlockManagerInfo - Added broadcast_1968_piece0 in memory on localhost:37833 (size: 64.1 KiB, free: 1919.7 MiB)
16:50:13.652 INFO  SparkContext - Created broadcast 1968 from newAPIHadoopFile at PathSplitSource.java:96
16:50:13.684 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
16:50:13.685 INFO  FileInputFormat - Total input files to process : 1
16:50:13.685 INFO  DAGScheduler - Registering RDD 5448 (mapToPair at SparkUtils.java:161) as input to shuffle 307
16:50:13.685 INFO  DAGScheduler - Got job 555 (collect at SparkUtils.java:205) with 1 output partitions
16:50:13.685 INFO  DAGScheduler - Final stage: ResultStage 1229 (collect at SparkUtils.java:205)
16:50:13.685 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1228)
16:50:13.685 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1228)
16:50:13.685 INFO  DAGScheduler - Submitting ShuffleMapStage 1228 (MapPartitionsRDD[5448] at mapToPair at SparkUtils.java:161), which has no missing parents
16:50:13.703 INFO  MemoryStore - Block broadcast_1969 stored as values in memory (estimated size 480.2 KiB, free 1918.0 MiB)
16:50:13.705 INFO  MemoryStore - Block broadcast_1969_piece0 stored as bytes in memory (estimated size 206.2 KiB, free 1917.8 MiB)
16:50:13.705 INFO  BlockManagerInfo - Added broadcast_1969_piece0 in memory on localhost:37833 (size: 206.2 KiB, free: 1919.5 MiB)
16:50:13.705 INFO  SparkContext - Created broadcast 1969 from broadcast at DAGScheduler.scala:1580
16:50:13.705 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1228 (MapPartitionsRDD[5448] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:50:13.706 INFO  TaskSchedulerImpl - Adding task set 1228.0 with 1 tasks resource profile 0
16:50:13.706 INFO  TaskSetManager - Starting task 0.0 in stage 1228.0 (TID 1052) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9673 bytes) 
16:50:13.706 INFO  Executor - Running task 0.0 in stage 1228.0 (TID 1052)
16:50:13.734 INFO  NewHadoopRDD - Input split: file:/gatkCloneMountPoint/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam:0+5416
16:50:13.739 INFO  Executor - Finished task 0.0 in stage 1228.0 (TID 1052). 1148 bytes result sent to driver
16:50:13.739 INFO  TaskSetManager - Finished task 0.0 in stage 1228.0 (TID 1052) in 33 ms on localhost (executor driver) (1/1)
16:50:13.739 INFO  TaskSchedulerImpl - Removed TaskSet 1228.0, whose tasks have all completed, from pool 
16:50:13.739 INFO  DAGScheduler - ShuffleMapStage 1228 (mapToPair at SparkUtils.java:161) finished in 0.053 s
16:50:13.739 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.739 INFO  DAGScheduler - running: HashSet()
16:50:13.739 INFO  DAGScheduler - waiting: HashSet(ResultStage 1229)
16:50:13.739 INFO  DAGScheduler - failed: HashSet()
16:50:13.740 INFO  DAGScheduler - Submitting ResultStage 1229 (MapPartitionsRDD[5451] at mapPartitions at SparkUtils.java:188), which has no missing parents
16:50:13.740 INFO  MemoryStore - Block broadcast_1970 stored as values in memory (estimated size 7.4 KiB, free 1917.8 MiB)
16:50:13.740 INFO  MemoryStore - Block broadcast_1970_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1917.8 MiB)
16:50:13.741 INFO  BlockManagerInfo - Added broadcast_1970_piece0 in memory on localhost:37833 (size: 4.1 KiB, free: 1919.5 MiB)
16:50:13.741 INFO  SparkContext - Created broadcast 1970 from broadcast at DAGScheduler.scala:1580
16:50:13.741 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1229 (MapPartitionsRDD[5451] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
16:50:13.741 INFO  TaskSchedulerImpl - Adding task set 1229.0 with 1 tasks resource profile 0
16:50:13.741 INFO  TaskSetManager - Starting task 0.0 in stage 1229.0 (TID 1053) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:50:13.741 INFO  Executor - Running task 0.0 in stage 1229.0 (TID 1053)
16:50:13.742 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.742 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.744 INFO  Executor - Finished task 0.0 in stage 1229.0 (TID 1053). 2039 bytes result sent to driver
16:50:13.744 INFO  TaskSetManager - Finished task 0.0 in stage 1229.0 (TID 1053) in 3 ms on localhost (executor driver) (1/1)
16:50:13.744 INFO  TaskSchedulerImpl - Removed TaskSet 1229.0, whose tasks have all completed, from pool 
16:50:13.744 INFO  DAGScheduler - ResultStage 1229 (collect at SparkUtils.java:205) finished in 0.004 s
16:50:13.744 INFO  DAGScheduler - Job 555 is finished. Cancelling potential speculative or zombie tasks for this job
16:50:13.744 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1229: Stage finished
16:50:13.744 INFO  DAGScheduler - Job 555 finished: collect at SparkUtils.java:205, took 0.059833 s
16:50:13.748 INFO  MemoryStore - Block broadcast_1971 stored as values in memory (estimated size 392.0 B, free 1917.8 MiB)
16:50:13.748 INFO  MemoryStore - Block broadcast_1971_piece0 stored as bytes in memory (estimated size 33.0 B, free 1917.8 MiB)
16:50:13.748 INFO  BlockManagerInfo - Added broadcast_1971_piece0 in memory on localhost:37833 (size: 33.0 B, free: 1919.5 MiB)
16:50:13.748 INFO  SparkContext - Created broadcast 1971 from broadcast at MarkDuplicatesSparkUtils.java:126
16:50:13.749 INFO  MemoryStore - Block broadcast_1972 stored as values in memory (estimated size 144.0 B, free 1917.8 MiB)
16:50:13.749 INFO  MemoryStore - Block broadcast_1972_piece0 stored as bytes in memory (estimated size 28.0 B, free 1917.8 MiB)
16:50:13.749 INFO  BlockManagerInfo - Added broadcast_1972_piece0 in memory on localhost:37833 (size: 28.0 B, free: 1919.5 MiB)
16:50:13.749 INFO  SparkContext - Created broadcast 1972 from broadcast at MarkDuplicatesSparkUtils.java:127
16:50:13.761 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
16:50:13.762 INFO  DAGScheduler - Registering RDD 5460 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 310
16:50:13.762 INFO  DAGScheduler - Registering RDD 5464 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 309
16:50:13.762 INFO  DAGScheduler - Registering RDD 5468 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 308
16:50:13.762 INFO  DAGScheduler - Got job 556 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
16:50:13.762 INFO  DAGScheduler - Final stage: ResultStage 1234 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
16:50:13.762 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1233)
16:50:13.762 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1233)
16:50:13.762 INFO  DAGScheduler - Submitting ShuffleMapStage 1231 (MapPartitionsRDD[5460] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
16:50:13.763 INFO  MemoryStore - Block broadcast_1973 stored as values in memory (estimated size 45.2 KiB, free 1917.8 MiB)
16:50:13.764 INFO  MemoryStore - Block broadcast_1973_piece0 stored as bytes in memory (estimated size 17.1 KiB, free 1917.7 MiB)
16:50:13.764 INFO  BlockManagerInfo - Added broadcast_1973_piece0 in memory on localhost:37833 (size: 17.1 KiB, free: 1919.5 MiB)
16:50:13.764 INFO  SparkContext - Created broadcast 1973 from broadcast at DAGScheduler.scala:1580
16:50:13.764 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1231 (MapPartitionsRDD[5460] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
16:50:13.764 INFO  TaskSchedulerImpl - Adding task set 1231.0 with 1 tasks resource profile 0
16:50:13.764 INFO  TaskSetManager - Starting task 0.0 in stage 1231.0 (TID 1054) (localhost, executor driver, partition 0, NODE_LOCAL, 9813 bytes) 
16:50:13.765 INFO  Executor - Running task 0.0 in stage 1231.0 (TID 1054)
16:50:13.766 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.766 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.768 INFO  Executor - Finished task 0.0 in stage 1231.0 (TID 1054). 1922 bytes result sent to driver
16:50:13.769 INFO  TaskSetManager - Finished task 0.0 in stage 1231.0 (TID 1054) in 5 ms on localhost (executor driver) (1/1)
16:50:13.769 INFO  TaskSchedulerImpl - Removed TaskSet 1231.0, whose tasks have all completed, from pool 
16:50:13.769 INFO  DAGScheduler - ShuffleMapStage 1231 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.006 s
16:50:13.769 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.769 INFO  DAGScheduler - running: HashSet()
16:50:13.769 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1232, ShuffleMapStage 1233, ResultStage 1234)
16:50:13.769 INFO  DAGScheduler - failed: HashSet()
16:50:13.769 INFO  DAGScheduler - Submitting ShuffleMapStage 1232 (MapPartitionsRDD[5464] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
16:50:13.770 INFO  MemoryStore - Block broadcast_1974 stored as values in memory (estimated size 49.3 KiB, free 1917.7 MiB)
16:50:13.770 INFO  MemoryStore - Block broadcast_1974_piece0 stored as bytes in memory (estimated size 19.3 KiB, free 1917.7 MiB)
16:50:13.770 INFO  BlockManagerInfo - Added broadcast_1974_piece0 in memory on localhost:37833 (size: 19.3 KiB, free: 1919.4 MiB)
16:50:13.771 INFO  SparkContext - Created broadcast 1974 from broadcast at DAGScheduler.scala:1580
16:50:13.771 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1232 (MapPartitionsRDD[5464] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
16:50:13.771 INFO  TaskSchedulerImpl - Adding task set 1232.0 with 1 tasks resource profile 0
16:50:13.771 INFO  TaskSetManager - Starting task 0.0 in stage 1232.0 (TID 1055) (localhost, executor driver, partition 0, NODE_LOCAL, 9288 bytes) 
16:50:13.771 INFO  Executor - Running task 0.0 in stage 1232.0 (TID 1055)
16:50:13.773 INFO  ShuffleBlockFetcherIterator - Getting 1 (593.0 B) non-empty blocks including 1 (593.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.773 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.776 INFO  Executor - Finished task 0.0 in stage 1232.0 (TID 1055). 1922 bytes result sent to driver
16:50:13.776 INFO  TaskSetManager - Finished task 0.0 in stage 1232.0 (TID 1055) in 5 ms on localhost (executor driver) (1/1)
16:50:13.776 INFO  TaskSchedulerImpl - Removed TaskSet 1232.0, whose tasks have all completed, from pool 
16:50:13.776 INFO  DAGScheduler - ShuffleMapStage 1232 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.007 s
16:50:13.776 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.776 INFO  DAGScheduler - running: HashSet()
16:50:13.776 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1233, ResultStage 1234)
16:50:13.776 INFO  DAGScheduler - failed: HashSet()
16:50:13.776 INFO  DAGScheduler - Submitting ShuffleMapStage 1233 (MapPartitionsRDD[5468] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
16:50:13.777 INFO  MemoryStore - Block broadcast_1975 stored as values in memory (estimated size 40.0 KiB, free 1917.6 MiB)
16:50:13.777 INFO  MemoryStore - Block broadcast_1975_piece0 stored as bytes in memory (estimated size 14.5 KiB, free 1917.6 MiB)
16:50:13.777 INFO  BlockManagerInfo - Added broadcast_1975_piece0 in memory on localhost:37833 (size: 14.5 KiB, free: 1919.4 MiB)
16:50:13.778 INFO  SparkContext - Created broadcast 1975 from broadcast at DAGScheduler.scala:1580
16:50:13.778 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1233 (MapPartitionsRDD[5468] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
16:50:13.778 INFO  TaskSchedulerImpl - Adding task set 1233.0 with 1 tasks resource profile 0
16:50:13.778 INFO  TaskSetManager - Starting task 0.0 in stage 1233.0 (TID 1056) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:50:13.778 INFO  Executor - Running task 0.0 in stage 1233.0 (TID 1056)
16:50:13.779 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.779 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.781 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.781 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.782 INFO  Executor - Finished task 0.0 in stage 1233.0 (TID 1056). 1879 bytes result sent to driver
16:50:13.782 INFO  TaskSetManager - Finished task 0.0 in stage 1233.0 (TID 1056) in 4 ms on localhost (executor driver) (1/1)
16:50:13.782 INFO  TaskSchedulerImpl - Removed TaskSet 1233.0, whose tasks have all completed, from pool 
16:50:13.782 INFO  DAGScheduler - ShuffleMapStage 1233 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.006 s
16:50:13.782 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.782 INFO  DAGScheduler - running: HashSet()
16:50:13.782 INFO  DAGScheduler - waiting: HashSet(ResultStage 1234)
16:50:13.782 INFO  DAGScheduler - failed: HashSet()
16:50:13.782 INFO  DAGScheduler - Submitting ResultStage 1234 (MapPartitionsRDD[5470] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
16:50:13.783 INFO  MemoryStore - Block broadcast_1976 stored as values in memory (estimated size 41.2 KiB, free 1917.6 MiB)
16:50:13.784 INFO  MemoryStore - Block broadcast_1976_piece0 stored as bytes in memory (estimated size 15.3 KiB, free 1917.6 MiB)
16:50:13.784 INFO  BlockManagerInfo - Added broadcast_1976_piece0 in memory on localhost:37833 (size: 15.3 KiB, free: 1919.4 MiB)
16:50:13.784 INFO  SparkContext - Created broadcast 1976 from broadcast at DAGScheduler.scala:1580
16:50:13.784 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1234 (MapPartitionsRDD[5470] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
16:50:13.784 INFO  TaskSchedulerImpl - Adding task set 1234.0 with 1 tasks resource profile 0
16:50:13.784 INFO  TaskSetManager - Starting task 0.0 in stage 1234.0 (TID 1057) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:50:13.784 INFO  Executor - Running task 0.0 in stage 1234.0 (TID 1057)
16:50:13.786 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.786 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.787 INFO  Executor - Finished task 0.0 in stage 1234.0 (TID 1057). 1944 bytes result sent to driver
16:50:13.787 INFO  TaskSetManager - Finished task 0.0 in stage 1234.0 (TID 1057) in 3 ms on localhost (executor driver) (1/1)
16:50:13.787 INFO  TaskSchedulerImpl - Removed TaskSet 1234.0, whose tasks have all completed, from pool 
16:50:13.787 INFO  DAGScheduler - ResultStage 1234 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.005 s
16:50:13.787 INFO  DAGScheduler - Job 556 is finished. Cancelling potential speculative or zombie tasks for this job
16:50:13.787 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1234: Stage finished
16:50:13.787 INFO  DAGScheduler - Job 556 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.026009 s
16:50:13.797 INFO  MemoryStore - Block broadcast_1977 stored as values in memory (estimated size 93.5 KiB, free 1917.5 MiB)
16:50:13.798 INFO  MemoryStore - Block broadcast_1977_piece0 stored as bytes in memory (estimated size 5.6 KiB, free 1917.5 MiB)
16:50:13.798 INFO  BlockManagerInfo - Added broadcast_1977_piece0 in memory on localhost:37833 (size: 5.6 KiB, free: 1919.4 MiB)
16:50:13.798 INFO  SparkContext - Created broadcast 1977 from broadcast at ReadsSparkSink.java:133
16:50:13.801 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:50:13.801 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:50:13.801 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:50:13.813 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
16:50:13.813 INFO  DAGScheduler - Registering RDD 5472 (mapToPair at SparkUtils.java:161) as input to shuffle 311
16:50:13.813 INFO  DAGScheduler - Got job 557 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
16:50:13.813 INFO  DAGScheduler - Final stage: ResultStage 1239 (runJob at SparkHadoopWriter.scala:83)
16:50:13.813 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1238)
16:50:13.813 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1238)
16:50:13.813 INFO  DAGScheduler - Submitting ShuffleMapStage 1238 (MapPartitionsRDD[5472] at mapToPair at SparkUtils.java:161), which has no missing parents
16:50:13.814 INFO  MemoryStore - Block broadcast_1978 stored as values in memory (estimated size 39.1 KiB, free 1917.4 MiB)
16:50:13.815 INFO  MemoryStore - Block broadcast_1978_piece0 stored as bytes in memory (estimated size 14.1 KiB, free 1917.4 MiB)
16:50:13.815 INFO  BlockManagerInfo - Added broadcast_1978_piece0 in memory on localhost:37833 (size: 14.1 KiB, free: 1919.4 MiB)
16:50:13.815 INFO  SparkContext - Created broadcast 1978 from broadcast at DAGScheduler.scala:1580
16:50:13.815 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1238 (MapPartitionsRDD[5472] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
16:50:13.815 INFO  TaskSchedulerImpl - Adding task set 1238.0 with 1 tasks resource profile 0
16:50:13.815 INFO  TaskSetManager - Starting task 0.0 in stage 1238.0 (TID 1058) (localhost, executor driver, partition 0, NODE_LOCAL, 9871 bytes) 
16:50:13.815 INFO  Executor - Running task 0.0 in stage 1238.0 (TID 1058)
16:50:13.817 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.817 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.818 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.818 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.820 INFO  Executor - Finished task 0.0 in stage 1238.0 (TID 1058). 1922 bytes result sent to driver
16:50:13.820 INFO  TaskSetManager - Finished task 0.0 in stage 1238.0 (TID 1058) in 5 ms on localhost (executor driver) (1/1)
16:50:13.820 INFO  TaskSchedulerImpl - Removed TaskSet 1238.0, whose tasks have all completed, from pool 
16:50:13.820 INFO  DAGScheduler - ShuffleMapStage 1238 (mapToPair at SparkUtils.java:161) finished in 0.006 s
16:50:13.820 INFO  DAGScheduler - looking for newly runnable stages
16:50:13.820 INFO  DAGScheduler - running: HashSet()
16:50:13.820 INFO  DAGScheduler - waiting: HashSet(ResultStage 1239)
16:50:13.820 INFO  DAGScheduler - failed: HashSet()
16:50:13.820 INFO  DAGScheduler - Submitting ResultStage 1239 (MapPartitionsRDD[5478] at saveAsTextFile at SamSink.java:65), which has no missing parents
16:50:13.826 INFO  MemoryStore - Block broadcast_1979 stored as values in memory (estimated size 183.9 KiB, free 1917.2 MiB)
16:50:13.827 INFO  MemoryStore - Block broadcast_1979_piece0 stored as bytes in memory (estimated size 77.8 KiB, free 1917.2 MiB)
16:50:13.827 INFO  BlockManagerInfo - Added broadcast_1979_piece0 in memory on localhost:37833 (size: 77.8 KiB, free: 1919.3 MiB)
16:50:13.827 INFO  SparkContext - Created broadcast 1979 from broadcast at DAGScheduler.scala:1580
16:50:13.828 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1239 (MapPartitionsRDD[5478] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
16:50:13.828 INFO  TaskSchedulerImpl - Adding task set 1239.0 with 1 tasks resource profile 0
16:50:13.828 INFO  TaskSetManager - Starting task 0.0 in stage 1239.0 (TID 1059) (localhost, executor driver, partition 0, NODE_LOCAL, 9299 bytes) 
16:50:13.828 INFO  Executor - Running task 0.0 in stage 1239.0 (TID 1059)
16:50:13.831 INFO  ShuffleBlockFetcherIterator - Getting 1 (3.9 KiB) non-empty blocks including 1 (3.9 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
16:50:13.831 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
16:50:13.833 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
16:50:13.833 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
16:50:13.833 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
16:50:13.838 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271650132946151908841940664_5478_m_000000_0' to file:/tmp/MarkDups3995958212395690460/MarkDups.sam.parts/_temporary/0/task_202505271650132946151908841940664_5478_m_000000
16:50:13.838 INFO  SparkHadoopMapRedUtil - attempt_202505271650132946151908841940664_5478_m_000000_0: Committed. Elapsed time: 0 ms.
16:50:13.839 INFO  Executor - Finished task 0.0 in stage 1239.0 (TID 1059). 1858 bytes result sent to driver
16:50:13.839 INFO  TaskSetManager - Finished task 0.0 in stage 1239.0 (TID 1059) in 11 ms on localhost (executor driver) (1/1)
16:50:13.839 INFO  TaskSchedulerImpl - Removed TaskSet 1239.0, whose tasks have all completed, from pool 
16:50:13.839 INFO  DAGScheduler - ResultStage 1239 (runJob at SparkHadoopWriter.scala:83) finished in 0.019 s
16:50:13.839 INFO  DAGScheduler - Job 557 is finished. Cancelling potential speculative or zombie tasks for this job
16:50:13.839 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1239: Stage finished
16:50:13.839 INFO  DAGScheduler - Job 557 finished: runJob at SparkHadoopWriter.scala:83, took 0.026275 s
16:50:13.839 INFO  SparkHadoopWriter - Start to commit write Job job_202505271650132946151908841940664_5478.
16:50:13.842 INFO  SparkHadoopWriter - Write Job job_202505271650132946151908841940664_5478 committed. Elapsed time: 2 ms.
16:50:13.846 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups3995958212395690460/MarkDups.sam
16:50:13.848 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups3995958212395690460/MarkDups.sam done
16:50:13.848 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 4:50:13 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:14 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:14 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:14 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:15 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:15 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:15 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:15 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:16 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:16 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:16 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:17 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:17 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:17 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:18 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:18 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:18 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:19 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:19 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
16:50:19.537 ERROR Executor - Exception in task 0.0 in stage 1459.0 (TID 1206)
org.broadinstitute.hellbender.exceptions.UserException$ReadMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 is missing the read group (RG) tag, which is required by the GATK.  Please use http://gatkforums.broadinstitute.org/discussion/59/companion-utilities-replacereadgroups to fix this problem
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:64) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:840) [?:?]
16:50:19.551 ERROR TaskSetManager - Task 0 in stage 1459.0 failed 1 times; aborting job
[May 27, 2025 at 4:50:19 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
16:50:19.800 ERROR Executor - Exception in task 0.0 in stage 1466.0 (TID 1209)
org.broadinstitute.hellbender.exceptions.UserException$HeaderMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 contains an (RG) tag with the group NotADuplicateGroup which is not found in the file header.
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:66) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [gatk-package-113e5d7-SNAPSHOT-local.jar:113e5d7-SNAPSHOT]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:840) [?:?]
16:50:19.802 ERROR TaskSetManager - Task 0 in stage 1466.0 failed 1 times; aborting job
[May 27, 2025 at 4:50:19 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:20 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:20 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:20 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:21 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:21 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:21 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:21 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:22 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:22 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:22 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:23 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:23 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:23 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:24 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:24 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:24 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:25 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:25 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:25 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:25 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:26 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:26 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:26 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:27 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:27 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:27 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:28 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:28 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:28 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:28 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:29 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:29 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:29 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:30 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:30 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:30 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:31 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:31 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:31 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:31 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:32 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:32 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:32 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:33 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:33 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:33 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:34 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:34 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:34 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:34 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:35 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:35 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:35 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:36 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:36 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:36 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:37 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:37 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:37 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:38 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:38 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:38 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:38 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:39 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:39 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:39 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:40 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:40 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:40 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:41 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:41 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:41 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:41 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:42 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:42 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:42 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:43 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:43 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:43 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:43 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424
[May 27, 2025 at 4:50:44 PM GMT] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1702887424