Class org.broadinstitute.hellbender.tools.spark.pipelines.MarkDuplicatesSparkIntegrationTest

152 tests, 0 failures, 0 ignored, 1m0.92s duration, 100% successful

Tests

Test Duration Result
assertThatExpectedOutputUpdateToggleIsDisabled 0s passed
testAssertAssumeUnsortedFilesAreQueryGroupedFiles 0.588s passed
testAssertCorrectSortOrderMultipleBams 0.130s passed
testAssertCorrectSortOrderMultipleBamsOverriding 0.509s passed
testBulkFragmentsNoDuplicates 0.900s passed
testBulkFragmentsWithDuplicates 2.047s passed
testDifferentChromosomesInOppositeOrder 0.309s passed
testDuplicateDetectionDataProviderWithMetrics[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.chr1.1-1K.unmarkedDups.markDuplicate.metrics) 0.274s passed
testDuplicateDetectionDataProviderWithMetrics[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.optical_dupes.markDuplicate.metrics) 0.252s passed
testDuplicateDetectionDataProviderWithMetrics[2](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.picard-2.15.0.sorted.inputSingleLibrarySolexa16404.metrics) 0.261s passed
testFlowModeFlag 1.679s passed
testHashCollisionHandling 1.132s passed
testMDOrder[0](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug.bam) 0.340s passed
testMDOrder[1](src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam, src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/expected.mdOrderBug2.bam) 0.315s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[0](true, true, true) 0.306s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[1](true, false, true) 0.318s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[2](false, true, true) 0.320s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[3](true, true, false) 0.315s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[4](true, false, false) 0.308s passed
testManyOpticalDuplicateClusterOneEndSamePositionOneCluster[5](false, true, false) 0.300s passed
testMappedFragmentAndMappedPairFirstOfPairNonPrimary 0.301s passed
testMappedFragmentAndMatePairFirstUnmapped 0.308s passed
testMappedFragmentAndMatePairSecondUnmapped 0.302s passed
testMappedPairAndMappedFragmentAndMatePairFirstUnmapped 0.303s passed
testMappedPairAndMappedFragmentAndMatePairSecondUnmapped 0.331s passed
testMappedPairAndMatePairFirstOppositeStrandSecondUnmapped 0.303s passed
testMappedPairAndMatePairFirstUnmapped 0.299s passed
testMappedPairAndMatePairSecondUnmapped 0.296s passed
testMappedPairWithFirstEndSamePositionAndOther 0.313s passed
testMappedPairWithSamePosition 0.294s passed
testMappedPairWithSamePositionSameCigar 0.301s passed
testMarkDuplicatesSparkDeletingDuplicateReads[0]([Ljava.io.File;@52596f2, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.370s passed
testMarkDuplicatesSparkDeletingDuplicateReads[1]([Ljava.io.File;@4c243e05, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.412s passed
testMarkDuplicatesSparkDeletingDuplicateReads[2]([Ljava.io.File;@5cf6592b, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.389s passed
testMarkDuplicatesSparkDeletingDuplicateReads[3]([Ljava.io.File;@4dfbd7b8, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.352s passed
testMarkDuplicatesSparkDeletingDuplicateReads[4]([Ljava.io.File;@37bba632, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.342s passed
testMarkDuplicatesSparkDeletingDuplicateReads[5]([Ljava.io.File;@12c7a28c, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.201s passed
testMarkDuplicatesSparkDeletingDuplicateReads[6]([Ljava.io.File;@b32c6eb, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.219s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[0]([Ljava.io.File;@7dd39595, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.362s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[1]([Ljava.io.File;@6fe2eb27, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.441s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[2]([Ljava.io.File;@27da7b5, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.411s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[3]([Ljava.io.File;@1bc8b55c, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.330s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[4]([Ljava.io.File;@5f4ae106, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.331s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[5]([Ljava.io.File;@7a02a376, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.207s passed
testMarkDuplicatesSparkDeletingOpticalDuplicateReads[6]([Ljava.io.File;@5311246, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.166s passed
testMarkDuplicatesSparkIntegrationTestLocal[0]([Ljava.io.File;@46d4e3de, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.365s passed
testMarkDuplicatesSparkIntegrationTestLocal[1]([Ljava.io.File;@346df8a, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.359s passed
testMarkDuplicatesSparkIntegrationTestLocal[2]([Ljava.io.File;@537a652e, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.358s passed
testMarkDuplicatesSparkIntegrationTestLocal[3]([Ljava.io.File;@4b145168, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.345s passed
testMarkDuplicatesSparkIntegrationTestLocal[4]([Ljava.io.File;@809d037, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.351s passed
testMarkDuplicatesSparkIntegrationTestLocal[5]([Ljava.io.File;@45c09e14, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.193s passed
testMarkDuplicatesSparkIntegrationTestLocal[6]([Ljava.io.File;@6b803e0, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.203s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[0]([Ljava.io.File;@10f905ab, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.364s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[1]([Ljava.io.File;@59700a95, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.357s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[2]([Ljava.io.File;@4a106c51, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.350s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[3]([Ljava.io.File;@1e5bf691, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.327s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[4]([Ljava.io.File;@73b8f68a, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.333s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[5]([Ljava.io.File;@1a1a7a77, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.200s passed
testMarkDuplicatesSparkMarkingAllDuplicatesWithTagging[6]([Ljava.io.File;@34c1baca, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.182s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[0]([Ljava.io.File;@65e7e93e, 20, 0, {Solexa-16419=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16416=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16404=[0, 3, 0, 0, 0, 0, 0.0, 0], Solexa-16406=[0, 1, 0, 0, 0, 0, 0.0, 0], Solexa-16412=[0, 1, 0, 0, 0, 0, 0.0, 0]}) 0.348s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[1]([Ljava.io.File;@492ffa28, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.374s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[2]([Ljava.io.File;@97aae39, 90, 6, {Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 0.364s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[3]([Ljava.io.File;@2f95e7f1, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.331s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[4]([Ljava.io.File;@7b082c6d, 4, 2, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0]}) 0.345s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[5]([Ljava.io.File;@bc6b92d, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.170s passed
testMarkDuplicatesSparkMarkingOpticalDuplicatesWithTagging[6]([Ljava.io.File;@154daf75, 94, 8, {mylib=[0, 2, 0, 0, 1, 1, 0.5, 0], Solexa-16419=[4, 4, 4, 0, 0, 0, 0.0, 0], Solexa-16416=[2, 2, 2, 0, 0, 0, 0.0, 0], Solexa-16404=[3, 9, 3, 0, 2, 0, 0.190476, 17], Solexa-16406=[1, 10, 1, 0, 0, 0, 0.0, 0], Solexa-16412=[3, 6, 3, 0, 1, 0, 0.133333, 15]}) 1.172s passed
testMatePairFirstUnmapped 0.307s passed
testMatePairSecondUnmapped 0.307s passed
testNoReadGroupInRead 0.229s passed
testNonExistantReadGroupInRead 0.216s passed
testNullOpticalDuplicates 0.325s passed
testOpticalDuplicateClusterOneEndSamePositionOneCluster 0.337s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicatesWithinPixelDistance 0.310s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.300s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.312s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.310s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.298s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.302s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.305s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.306s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.305s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.303s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.299s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.314s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.321s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.298s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.309s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.297s passed
testOpticalDuplicateClusterSamePositionNoOpticalDuplicates[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.300s passed
testOpticalDuplicateClusterSamePositionOneOpticalDuplicatesWithinPixelDistance 0.310s passed
testOpticalDuplicateClustersAddingSecondEndFirstSameCoordinate 0.301s passed
testOpticalDuplicateFinding 0.301s passed
testOpticalDuplicateFindingPxelDistance 0.303s passed
testOpticalDuplicatesAndPCRDuplicatesOrientationDifference 0.299s passed
testOpticalDuplicatesDifferentReadGroups 0.295s passed
testOpticalDuplicatesTheSameReadGroup 0.312s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[0](RUNID:7:1203:2886:82292, RUNID:7:1205:3886:16834) 0.297s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[10](RUNID:7:1203:6886:16756, RUNID:7:1203:4886:26756) 0.297s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[11](RUNID:7:1203:2886:34756, RUNID:7:1203:2886:36756) 0.339s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[12](RUNID:7:1203:2886:35756, RUNID:7:1203:2886:36756) 0.332s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[13](RUNID:7:1203:2886:37756, RUNID:7:1203:2886:36756) 0.302s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[14](RUNID:7:1203:2886:38756, RUNID:7:1203:2886:36756) 0.306s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[15](HK3T5CCXX160204:3:1112:11586:37067, HK3T5CCXX160204:3:1112:11586:32144) 0.302s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[1](RUNID:7:1203:2886:16756, RUNID:7:1205:3886:16756) 0.303s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[2](RUNID:7:1204:2886:16756, RUNID:7:1205:3886:16756) 0.299s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[3](RUNID:7:1205:2886:16756, RUNID:7:1205:3886:16756) 0.293s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[4](RUNID:7:1206:2886:16756, RUNID:7:1205:3886:16756) 0.296s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[5](RUNID:7:1207:2886:16756, RUNID:7:1205:3886:16756) 0.314s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[6](RUNID:7:1203:2886:16756, RUNID:7:1203:4886:26756) 0.296s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[7](RUNID:7:1203:3886:16756, RUNID:7:1203:4886:26756) 0.313s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[8](RUNID:7:1203:4886:16756, RUNID:7:1203:4886:26756) 0.303s passed
testOpticalDuplicatesTiebrokenByPhysicalLocationNotStartPosition[9](RUNID:7:1203:5886:16756, RUNID:7:1203:4886:26756) 0.301s passed
testPathologicalOrderingAtTheSamePosition 0.304s passed
testReadSameStartPositionOrientationOverride 0.308s passed
testReadsHaveSameStartPositionButDifferentChromosomeNonEquivalence 0.315s passed
testSecondEndIsBeforeFirstInCoordinate 0.290s passed
testSingleMappedFragment 0.301s passed
testSingleMappedFragmentAndSingleMappedPair 0.301s passed
testSingleMappedFragmentAndTwoMappedPairs 0.312s passed
testSingleMappedPair 0.301s passed
testSingleUnmappedFragment 0.315s passed
testSingleUnmappedPair 0.295s passed
testStackOverFlowPairSetSwap 0.302s passed
testSupplementaryReadUnmappedMate 0.300s passed
testThreeGroupsOnDifferentChromosomesOfThreeMappedPairs 0.298s passed
testThreeMappedPairs 0.298s passed
testThreeMappedPairsWithMatchingSecondMate 0.324s passed
testTwoGroupsOnDifferentChromosomesOfThreeMappedPairs 0.311s passed
testTwoGroupsOnDifferentChromosomesOfTwoFragments 0.302s passed
testTwoGroupsOnDifferentChromosomesOfTwoMappedPairs 0.303s passed
testTwoMappedFragments 0.320s passed
testTwoMappedPairWithSamePosition 0.291s passed
testTwoMappedPairWithSamePositionDifferentStrands 0.313s passed
testTwoMappedPairWithSamePositionDifferentStrands2 0.299s passed
testTwoMappedPairs 0.298s passed
testTwoMappedPairsAndMappedSecondaryFragment 0.302s passed
testTwoMappedPairsAndTerminalUnmappedFragment 0.302s passed
testTwoMappedPairsAndTerminalUnmappedPair 0.298s passed
testTwoMappedPairsMatesSoftClipped 0.312s passed
testTwoMappedPairsWithOppositeOrientations 0.299s passed
testTwoMappedPairsWithOppositeOrientationsNumberTwo 0.300s passed
testTwoMappedPairsWithSoftClipping 0.298s passed
testTwoMappedPairsWithSoftClippingBoth 0.298s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnly 0.294s passed
testTwoMappedPairsWithSoftClippingFirstOfPairOnlyNoMateCigar 0.311s passed
testTwoMappedPairsWithSupplementaryReads[0](true, true, true) 0.296s passed
testTwoMappedPairsWithSupplementaryReads[1](true, false, true) 0.298s passed
testTwoMappedPairsWithSupplementaryReads[2](false, true, true) 0.302s passed
testTwoMappedPairsWithSupplementaryReads[3](true, true, false) 0.302s passed
testTwoMappedPairsWithSupplementaryReads[4](true, false, false) 0.296s passed
testTwoMappedPairsWithSupplementaryReads[5](false, true, false) 0.317s passed
testTwoUnmappedFragments 0.292s passed

Standard error

[May 27, 2025 at 1:49:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:40 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.03 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:41 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
13:49:41.723 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:49:41.723 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
13:49:41.724 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
13:49:41.724 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
13:49:41.724 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
13:49:41.724 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 1:49:41 PM UTC
13:49:41.724 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:49:41.724 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:49:41.724 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
13:49:41.724 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
13:49:41.724 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
13:49:41.724 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
13:49:41.724 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
13:49:41.724 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
13:49:41.724 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
13:49:41.724 INFO  MarkDuplicatesSpark - Requester pays: disabled
13:49:41.724 INFO  MarkDuplicatesSpark - Initializing engine
13:49:41.724 INFO  MarkDuplicatesSpark - Done initializing engine
13:49:41.725 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
13:49:41.728 INFO  MemoryStore - Block broadcast_1445 stored as values in memory (estimated size 306.3 KiB, free 1899.3 MiB)
13:49:41.735 INFO  MemoryStore - Block broadcast_1445_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1899.2 MiB)
13:49:41.736 INFO  BlockManagerInfo - Added broadcast_1445_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1918.9 MiB)
13:49:41.736 INFO  SparkContext - Created broadcast 1445 from newAPIHadoopFile at PathSplitSource.java:96
13:49:41.757 INFO  MemoryStore - Block broadcast_1446 stored as values in memory (estimated size 306.3 KiB, free 1898.9 MiB)
13:49:41.764 INFO  MemoryStore - Block broadcast_1446_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1898.9 MiB)
13:49:41.764 INFO  BlockManagerInfo - Added broadcast_1446_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1918.9 MiB)
13:49:41.764 INFO  SparkContext - Created broadcast 1446 from newAPIHadoopFile at PathSplitSource.java:96
13:49:41.798 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
13:49:41.799 INFO  FileInputFormat - Total input files to process : 1
13:49:41.799 INFO  DAGScheduler - Registering RDD 3701 (mapToPair at SparkUtils.java:161) as input to shuffle 100
13:49:41.800 INFO  DAGScheduler - Got job 443 (collect at SparkUtils.java:205) with 1 output partitions
13:49:41.800 INFO  DAGScheduler - Final stage: ResultStage 755 (collect at SparkUtils.java:205)
13:49:41.800 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 754)
13:49:41.800 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 754)
13:49:41.800 INFO  DAGScheduler - Submitting ShuffleMapStage 754 (MapPartitionsRDD[3701] at mapToPair at SparkUtils.java:161), which has no missing parents
13:49:41.818 INFO  MemoryStore - Block broadcast_1447 stored as values in memory (estimated size 460.4 KiB, free 1898.4 MiB)
13:49:41.820 INFO  MemoryStore - Block broadcast_1447_piece0 stored as bytes in memory (estimated size 202.5 KiB, free 1898.2 MiB)
13:49:41.821 INFO  BlockManagerInfo - Added broadcast_1447_piece0 in memory on localhost:43843 (size: 202.5 KiB, free: 1918.7 MiB)
13:49:41.821 INFO  SparkContext - Created broadcast 1447 from broadcast at DAGScheduler.scala:1580
13:49:41.822 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 754 (MapPartitionsRDD[3701] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:49:41.822 INFO  TaskSchedulerImpl - Adding task set 754.0 with 1 tasks resource profile 0
13:49:41.822 INFO  TaskSetManager - Starting task 0.0 in stage 754.0 (TID 696) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9735 bytes) 
13:49:41.823 INFO  Executor - Running task 0.0 in stage 754.0 (TID 696)
13:49:41.855 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug.bam:0+1507
13:49:41.860 INFO  Executor - Finished task 0.0 in stage 754.0 (TID 696). 1148 bytes result sent to driver
13:49:41.861 INFO  TaskSetManager - Finished task 0.0 in stage 754.0 (TID 696) in 39 ms on localhost (executor driver) (1/1)
13:49:41.861 INFO  TaskSchedulerImpl - Removed TaskSet 754.0, whose tasks have all completed, from pool 
13:49:41.861 INFO  DAGScheduler - ShuffleMapStage 754 (mapToPair at SparkUtils.java:161) finished in 0.060 s
13:49:41.861 INFO  DAGScheduler - looking for newly runnable stages
13:49:41.861 INFO  DAGScheduler - running: HashSet()
13:49:41.861 INFO  DAGScheduler - waiting: HashSet(ResultStage 755)
13:49:41.862 INFO  DAGScheduler - failed: HashSet()
13:49:41.862 INFO  DAGScheduler - Submitting ResultStage 755 (MapPartitionsRDD[3704] at mapPartitions at SparkUtils.java:188), which has no missing parents
13:49:41.862 INFO  MemoryStore - Block broadcast_1448 stored as values in memory (estimated size 7.4 KiB, free 1898.2 MiB)
13:49:41.863 INFO  MemoryStore - Block broadcast_1448_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1898.2 MiB)
13:49:41.863 INFO  BlockManagerInfo - Added broadcast_1448_piece0 in memory on localhost:43843 (size: 4.1 KiB, free: 1918.7 MiB)
13:49:41.863 INFO  SparkContext - Created broadcast 1448 from broadcast at DAGScheduler.scala:1580
13:49:41.864 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 755 (MapPartitionsRDD[3704] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
13:49:41.864 INFO  TaskSchedulerImpl - Adding task set 755.0 with 1 tasks resource profile 0
13:49:41.864 INFO  TaskSetManager - Starting task 0.0 in stage 755.0 (TID 697) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:49:41.864 INFO  Executor - Running task 0.0 in stage 755.0 (TID 697)
13:49:41.866 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:41.866 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:41.869 INFO  Executor - Finished task 0.0 in stage 755.0 (TID 697). 2238 bytes result sent to driver
13:49:41.869 INFO  TaskSetManager - Finished task 0.0 in stage 755.0 (TID 697) in 5 ms on localhost (executor driver) (1/1)
13:49:41.870 INFO  DAGScheduler - ResultStage 755 (collect at SparkUtils.java:205) finished in 0.008 s
13:49:41.870 INFO  DAGScheduler - Job 443 is finished. Cancelling potential speculative or zombie tasks for this job
13:49:41.870 INFO  TaskSchedulerImpl - Removed TaskSet 755.0, whose tasks have all completed, from pool 
13:49:41.870 INFO  TaskSchedulerImpl - Killing all running tasks in stage 755: Stage finished
13:49:41.870 INFO  DAGScheduler - Job 443 finished: collect at SparkUtils.java:205, took 0.072033 s
13:49:41.876 INFO  MemoryStore - Block broadcast_1449 stored as values in memory (estimated size 1632.0 B, free 1898.2 MiB)
13:49:41.876 INFO  MemoryStore - Block broadcast_1449_piece0 stored as bytes in memory (estimated size 145.0 B, free 1898.2 MiB)
13:49:41.876 INFO  BlockManagerInfo - Added broadcast_1449_piece0 in memory on localhost:43843 (size: 145.0 B, free: 1918.7 MiB)
13:49:41.876 INFO  SparkContext - Created broadcast 1449 from broadcast at MarkDuplicatesSparkUtils.java:126
13:49:41.877 INFO  MemoryStore - Block broadcast_1450 stored as values in memory (estimated size 304.0 B, free 1898.2 MiB)
13:49:41.877 INFO  MemoryStore - Block broadcast_1450_piece0 stored as bytes in memory (estimated size 37.0 B, free 1898.2 MiB)
13:49:41.877 INFO  BlockManagerInfo - Added broadcast_1450_piece0 in memory on localhost:43843 (size: 37.0 B, free: 1918.7 MiB)
13:49:41.877 INFO  SparkContext - Created broadcast 1450 from broadcast at MarkDuplicatesSparkUtils.java:127
13:49:41.890 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
13:49:41.890 INFO  DAGScheduler - Registering RDD 3713 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 103
13:49:41.890 INFO  DAGScheduler - Registering RDD 3717 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 102
13:49:41.890 INFO  DAGScheduler - Registering RDD 3721 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 101
13:49:41.891 INFO  DAGScheduler - Got job 444 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
13:49:41.891 INFO  DAGScheduler - Final stage: ResultStage 760 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
13:49:41.891 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 759)
13:49:41.891 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 759)
13:49:41.891 INFO  DAGScheduler - Submitting ShuffleMapStage 757 (MapPartitionsRDD[3713] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
13:49:41.892 INFO  MemoryStore - Block broadcast_1451 stored as values in memory (estimated size 24.2 KiB, free 1898.2 MiB)
13:49:41.893 INFO  MemoryStore - Block broadcast_1451_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1898.2 MiB)
13:49:41.893 INFO  BlockManagerInfo - Added broadcast_1451_piece0 in memory on localhost:43843 (size: 12.6 KiB, free: 1918.7 MiB)
13:49:41.893 INFO  SparkContext - Created broadcast 1451 from broadcast at DAGScheduler.scala:1580
13:49:41.893 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 757 (MapPartitionsRDD[3713] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
13:49:41.893 INFO  TaskSchedulerImpl - Adding task set 757.0 with 1 tasks resource profile 0
13:49:41.894 INFO  TaskSetManager - Starting task 0.0 in stage 757.0 (TID 698) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
13:49:41.894 INFO  Executor - Running task 0.0 in stage 757.0 (TID 698)
13:49:41.896 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:41.896 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:41.900 INFO  Executor - Finished task 0.0 in stage 757.0 (TID 698). 1922 bytes result sent to driver
13:49:41.901 INFO  TaskSetManager - Finished task 0.0 in stage 757.0 (TID 698) in 7 ms on localhost (executor driver) (1/1)
13:49:41.901 INFO  TaskSchedulerImpl - Removed TaskSet 757.0, whose tasks have all completed, from pool 
13:49:41.901 INFO  DAGScheduler - ShuffleMapStage 757 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.010 s
13:49:41.901 INFO  DAGScheduler - looking for newly runnable stages
13:49:41.901 INFO  DAGScheduler - running: HashSet()
13:49:41.901 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 758, ShuffleMapStage 759, ResultStage 760)
13:49:41.901 INFO  DAGScheduler - failed: HashSet()
13:49:41.902 INFO  DAGScheduler - Submitting ShuffleMapStage 758 (MapPartitionsRDD[3717] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
13:49:41.903 INFO  MemoryStore - Block broadcast_1452 stored as values in memory (estimated size 28.3 KiB, free 1898.2 MiB)
13:49:41.903 INFO  MemoryStore - Block broadcast_1452_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1898.1 MiB)
13:49:41.904 INFO  BlockManagerInfo - Added broadcast_1452_piece0 in memory on localhost:43843 (size: 14.8 KiB, free: 1918.6 MiB)
13:49:41.904 INFO  SparkContext - Created broadcast 1452 from broadcast at DAGScheduler.scala:1580
13:49:41.904 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 758 (MapPartitionsRDD[3717] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
13:49:41.904 INFO  TaskSchedulerImpl - Adding task set 758.0 with 1 tasks resource profile 0
13:49:41.905 INFO  TaskSetManager - Starting task 0.0 in stage 758.0 (TID 699) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
13:49:41.905 INFO  Executor - Running task 0.0 in stage 758.0 (TID 699)
13:49:41.907 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:41.907 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:41.912 INFO  Executor - Finished task 0.0 in stage 758.0 (TID 699). 1922 bytes result sent to driver
13:49:41.913 INFO  TaskSetManager - Finished task 0.0 in stage 758.0 (TID 699) in 9 ms on localhost (executor driver) (1/1)
13:49:41.913 INFO  TaskSchedulerImpl - Removed TaskSet 758.0, whose tasks have all completed, from pool 
13:49:41.913 INFO  DAGScheduler - ShuffleMapStage 758 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.011 s
13:49:41.913 INFO  DAGScheduler - looking for newly runnable stages
13:49:41.913 INFO  DAGScheduler - running: HashSet()
13:49:41.913 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 759, ResultStage 760)
13:49:41.913 INFO  DAGScheduler - failed: HashSet()
13:49:41.913 INFO  DAGScheduler - Submitting ShuffleMapStage 759 (MapPartitionsRDD[3721] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
13:49:41.914 INFO  MemoryStore - Block broadcast_1453 stored as values in memory (estimated size 19.0 KiB, free 1898.1 MiB)
13:49:41.915 INFO  MemoryStore - Block broadcast_1453_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1898.1 MiB)
13:49:41.915 INFO  BlockManagerInfo - Added broadcast_1453_piece0 in memory on localhost:43843 (size: 9.7 KiB, free: 1918.6 MiB)
13:49:41.915 INFO  SparkContext - Created broadcast 1453 from broadcast at DAGScheduler.scala:1580
13:49:41.916 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 759 (MapPartitionsRDD[3721] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
13:49:41.916 INFO  TaskSchedulerImpl - Adding task set 759.0 with 1 tasks resource profile 0
13:49:41.916 INFO  TaskSetManager - Starting task 0.0 in stage 759.0 (TID 700) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:49:41.917 INFO  Executor - Running task 0.0 in stage 759.0 (TID 700)
13:49:41.919 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:41.919 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:41.922 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:41.922 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:41.923 INFO  Executor - Finished task 0.0 in stage 759.0 (TID 700). 1922 bytes result sent to driver
13:49:41.923 INFO  TaskSetManager - Finished task 0.0 in stage 759.0 (TID 700) in 7 ms on localhost (executor driver) (1/1)
13:49:41.924 INFO  TaskSchedulerImpl - Removed TaskSet 759.0, whose tasks have all completed, from pool 
13:49:41.924 INFO  DAGScheduler - ShuffleMapStage 759 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.010 s
13:49:41.924 INFO  DAGScheduler - looking for newly runnable stages
13:49:41.924 INFO  DAGScheduler - running: HashSet()
13:49:41.924 INFO  DAGScheduler - waiting: HashSet(ResultStage 760)
13:49:41.924 INFO  DAGScheduler - failed: HashSet()
13:49:41.924 INFO  DAGScheduler - Submitting ResultStage 760 (MapPartitionsRDD[3723] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
13:49:41.925 INFO  MemoryStore - Block broadcast_1454 stored as values in memory (estimated size 20.2 KiB, free 1898.1 MiB)
13:49:41.926 INFO  MemoryStore - Block broadcast_1454_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1898.1 MiB)
13:49:41.926 INFO  BlockManagerInfo - Added broadcast_1454_piece0 in memory on localhost:43843 (size: 10.6 KiB, free: 1918.6 MiB)
13:49:41.926 INFO  SparkContext - Created broadcast 1454 from broadcast at DAGScheduler.scala:1580
13:49:41.926 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 760 (MapPartitionsRDD[3723] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
13:49:41.926 INFO  TaskSchedulerImpl - Adding task set 760.0 with 1 tasks resource profile 0
13:49:41.927 INFO  TaskSetManager - Starting task 0.0 in stage 760.0 (TID 701) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:49:41.927 INFO  Executor - Running task 0.0 in stage 760.0 (TID 701)
13:49:41.929 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:41.929 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:41.932 INFO  Executor - Finished task 0.0 in stage 760.0 (TID 701). 1944 bytes result sent to driver
13:49:41.932 INFO  TaskSetManager - Finished task 0.0 in stage 760.0 (TID 701) in 5 ms on localhost (executor driver) (1/1)
13:49:41.933 INFO  TaskSchedulerImpl - Removed TaskSet 760.0, whose tasks have all completed, from pool 
13:49:41.933 INFO  DAGScheduler - ResultStage 760 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.009 s
13:49:41.933 INFO  DAGScheduler - Job 444 is finished. Cancelling potential speculative or zombie tasks for this job
13:49:41.933 INFO  TaskSchedulerImpl - Killing all running tasks in stage 760: Stage finished
13:49:41.933 INFO  DAGScheduler - Job 444 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.043358 s
13:49:41.948 INFO  MemoryStore - Block broadcast_1455 stored as values in memory (estimated size 20.3 KiB, free 1898.1 MiB)
13:49:41.949 INFO  MemoryStore - Block broadcast_1455_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1898.1 MiB)
13:49:41.949 INFO  BlockManagerInfo - Added broadcast_1455_piece0 in memory on localhost:43843 (size: 1850.0 B, free: 1918.6 MiB)
13:49:41.949 INFO  SparkContext - Created broadcast 1455 from broadcast at ReadsSparkSink.java:133
13:49:41.950 INFO  MemoryStore - Block broadcast_1456 stored as values in memory (estimated size 20.4 KiB, free 1898.0 MiB)
13:49:41.950 INFO  MemoryStore - Block broadcast_1456_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1898.0 MiB)
13:49:41.951 INFO  BlockManagerInfo - Added broadcast_1456_piece0 in memory on localhost:43843 (size: 1850.0 B, free: 1918.6 MiB)
13:49:41.951 INFO  SparkContext - Created broadcast 1456 from broadcast at BamSink.java:76
13:49:41.953 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
13:49:41.953 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:49:41.953 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:49:41.974 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
13:49:41.975 INFO  DAGScheduler - Registering RDD 3725 (mapToPair at SparkUtils.java:161) as input to shuffle 104
13:49:41.975 INFO  DAGScheduler - Got job 445 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
13:49:41.975 INFO  DAGScheduler - Final stage: ResultStage 765 (runJob at SparkHadoopWriter.scala:83)
13:49:41.975 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 764)
13:49:41.975 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 764)
13:49:41.975 INFO  DAGScheduler - Submitting ShuffleMapStage 764 (MapPartitionsRDD[3725] at mapToPair at SparkUtils.java:161), which has no missing parents
13:49:41.976 INFO  MemoryStore - Block broadcast_1457 stored as values in memory (estimated size 18.1 KiB, free 1898.0 MiB)
13:49:41.977 INFO  MemoryStore - Block broadcast_1457_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1898.0 MiB)
13:49:41.977 INFO  BlockManagerInfo - Added broadcast_1457_piece0 in memory on localhost:43843 (size: 9.5 KiB, free: 1918.6 MiB)
13:49:41.977 INFO  SparkContext - Created broadcast 1457 from broadcast at DAGScheduler.scala:1580
13:49:41.977 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 764 (MapPartitionsRDD[3725] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:49:41.977 INFO  TaskSchedulerImpl - Adding task set 764.0 with 1 tasks resource profile 0
13:49:41.978 INFO  TaskSetManager - Starting task 0.0 in stage 764.0 (TID 702) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:49:41.978 INFO  Executor - Running task 0.0 in stage 764.0 (TID 702)
13:49:41.980 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:41.980 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:41.983 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:41.983 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:41.985 INFO  Executor - Finished task 0.0 in stage 764.0 (TID 702). 1922 bytes result sent to driver
13:49:41.985 INFO  TaskSetManager - Finished task 0.0 in stage 764.0 (TID 702) in 7 ms on localhost (executor driver) (1/1)
13:49:41.985 INFO  DAGScheduler - ShuffleMapStage 764 (mapToPair at SparkUtils.java:161) finished in 0.009 s
13:49:41.985 INFO  DAGScheduler - looking for newly runnable stages
13:49:41.985 INFO  DAGScheduler - running: HashSet()
13:49:41.985 INFO  DAGScheduler - waiting: HashSet(ResultStage 765)
13:49:41.985 INFO  DAGScheduler - failed: HashSet()
13:49:41.986 INFO  DAGScheduler - Submitting ResultStage 765 (MapPartitionsRDD[3730] at mapToPair at BamSink.java:91), which has no missing parents
13:49:41.986 INFO  TaskSchedulerImpl - Removed TaskSet 764.0, whose tasks have all completed, from pool 
13:49:41.993 INFO  MemoryStore - Block broadcast_1458 stored as values in memory (estimated size 163.7 KiB, free 1897.9 MiB)
13:49:41.994 INFO  MemoryStore - Block broadcast_1458_piece0 stored as bytes in memory (estimated size 73.9 KiB, free 1897.8 MiB)
13:49:41.994 INFO  BlockManagerInfo - Added broadcast_1458_piece0 in memory on localhost:43843 (size: 73.9 KiB, free: 1918.5 MiB)
13:49:41.995 INFO  SparkContext - Created broadcast 1458 from broadcast at DAGScheduler.scala:1580
13:49:41.995 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 765 (MapPartitionsRDD[3730] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
13:49:41.995 INFO  TaskSchedulerImpl - Adding task set 765.0 with 1 tasks resource profile 0
13:49:41.995 INFO  TaskSetManager - Starting task 0.0 in stage 765.0 (TID 703) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:49:41.996 INFO  Executor - Running task 0.0 in stage 765.0 (TID 703)
13:49:42.002 INFO  ShuffleBlockFetcherIterator - Getting 1 (405.0 B) non-empty blocks including 1 (405.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.002 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.005 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
13:49:42.005 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:49:42.005 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:49:42.005 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
13:49:42.005 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:49:42.005 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:49:42.025 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271349415642730059884177934_3730_r_000000_0' to file:/tmp/local15243704403060221796/markdups9713771435829449223.bam.parts/_temporary/0/task_202505271349415642730059884177934_3730_r_000000
13:49:42.025 INFO  SparkHadoopMapRedUtil - attempt_202505271349415642730059884177934_3730_r_000000_0: Committed. Elapsed time: 0 ms.
13:49:42.026 INFO  Executor - Finished task 0.0 in stage 765.0 (TID 703). 1858 bytes result sent to driver
13:49:42.026 INFO  TaskSetManager - Finished task 0.0 in stage 765.0 (TID 703) in 31 ms on localhost (executor driver) (1/1)
13:49:42.026 INFO  TaskSchedulerImpl - Removed TaskSet 765.0, whose tasks have all completed, from pool 
13:49:42.026 INFO  DAGScheduler - ResultStage 765 (runJob at SparkHadoopWriter.scala:83) finished in 0.040 s
13:49:42.026 INFO  DAGScheduler - Job 445 is finished. Cancelling potential speculative or zombie tasks for this job
13:49:42.026 INFO  TaskSchedulerImpl - Killing all running tasks in stage 765: Stage finished
13:49:42.027 INFO  DAGScheduler - Job 445 finished: runJob at SparkHadoopWriter.scala:83, took 0.052137 s
13:49:42.027 INFO  SparkHadoopWriter - Start to commit write Job job_202505271349415642730059884177934_3730.
13:49:42.032 INFO  SparkHadoopWriter - Write Job job_202505271349415642730059884177934_3730 committed. Elapsed time: 4 ms.
13:49:42.042 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local15243704403060221796/markdups9713771435829449223.bam
13:49:42.046 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local15243704403060221796/markdups9713771435829449223.bam done
13:49:42.046 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local15243704403060221796/markdups9713771435829449223.bam.parts/ to /tmp/local15243704403060221796/markdups9713771435829449223.bam.sbi
13:49:42.050 INFO  IndexFileMerger - Done merging .sbi files
13:49:42.050 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local15243704403060221796/markdups9713771435829449223.bam.parts/ to /tmp/local15243704403060221796/markdups9713771435829449223.bam.bai
13:49:42.055 INFO  IndexFileMerger - Done merging .bai files
13:49:42.055 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 1:49:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
13:49:42.063 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:49:42.063 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
13:49:42.063 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
13:49:42.063 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
13:49:42.063 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
13:49:42.064 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 1:49:42 PM UTC
13:49:42.064 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:49:42.064 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:49:42.064 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
13:49:42.064 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
13:49:42.064 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
13:49:42.064 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
13:49:42.064 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
13:49:42.064 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
13:49:42.064 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
13:49:42.064 INFO  MarkDuplicatesSpark - Requester pays: disabled
13:49:42.064 INFO  MarkDuplicatesSpark - Initializing engine
13:49:42.064 INFO  MarkDuplicatesSpark - Done initializing engine
13:49:42.064 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
13:49:42.066 INFO  MemoryStore - Block broadcast_1459 stored as values in memory (estimated size 306.3 KiB, free 1897.5 MiB)
13:49:42.073 INFO  MemoryStore - Block broadcast_1459_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1897.4 MiB)
13:49:42.073 INFO  BlockManagerInfo - Added broadcast_1459_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1918.5 MiB)
13:49:42.074 INFO  SparkContext - Created broadcast 1459 from newAPIHadoopFile at PathSplitSource.java:96
13:49:42.104 INFO  MemoryStore - Block broadcast_1460 stored as values in memory (estimated size 306.3 KiB, free 1897.1 MiB)
13:49:42.111 INFO  MemoryStore - Block broadcast_1460_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1897.1 MiB)
13:49:42.111 INFO  BlockManagerInfo - Added broadcast_1460_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1918.4 MiB)
13:49:42.112 INFO  SparkContext - Created broadcast 1460 from newAPIHadoopFile at PathSplitSource.java:96
13:49:42.144 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
13:49:42.145 INFO  FileInputFormat - Total input files to process : 1
13:49:42.145 INFO  DAGScheduler - Registering RDD 3744 (mapToPair at SparkUtils.java:161) as input to shuffle 105
13:49:42.145 INFO  DAGScheduler - Got job 446 (collect at SparkUtils.java:205) with 1 output partitions
13:49:42.146 INFO  DAGScheduler - Final stage: ResultStage 767 (collect at SparkUtils.java:205)
13:49:42.146 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 766)
13:49:42.146 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 766)
13:49:42.146 INFO  DAGScheduler - Submitting ShuffleMapStage 766 (MapPartitionsRDD[3744] at mapToPair at SparkUtils.java:161), which has no missing parents
13:49:42.166 INFO  MemoryStore - Block broadcast_1461 stored as values in memory (estimated size 460.4 KiB, free 1896.6 MiB)
13:49:42.168 INFO  MemoryStore - Block broadcast_1461_piece0 stored as bytes in memory (estimated size 202.5 KiB, free 1896.4 MiB)
13:49:42.168 INFO  BlockManagerInfo - Added broadcast_1461_piece0 in memory on localhost:43843 (size: 202.5 KiB, free: 1918.2 MiB)
13:49:42.168 INFO  SparkContext - Created broadcast 1461 from broadcast at DAGScheduler.scala:1580
13:49:42.169 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 766 (MapPartitionsRDD[3744] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:49:42.169 INFO  TaskSchedulerImpl - Adding task set 766.0 with 1 tasks resource profile 0
13:49:42.169 INFO  TaskSetManager - Starting task 0.0 in stage 766.0 (TID 704) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9736 bytes) 
13:49:42.169 INFO  Executor - Running task 0.0 in stage 766.0 (TID 704)
13:49:42.199 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/mdOrderBug2.bam:0+1856
13:49:42.204 INFO  Executor - Finished task 0.0 in stage 766.0 (TID 704). 1148 bytes result sent to driver
13:49:42.204 INFO  TaskSetManager - Finished task 0.0 in stage 766.0 (TID 704) in 35 ms on localhost (executor driver) (1/1)
13:49:42.204 INFO  TaskSchedulerImpl - Removed TaskSet 766.0, whose tasks have all completed, from pool 
13:49:42.204 INFO  DAGScheduler - ShuffleMapStage 766 (mapToPair at SparkUtils.java:161) finished in 0.058 s
13:49:42.204 INFO  DAGScheduler - looking for newly runnable stages
13:49:42.204 INFO  DAGScheduler - running: HashSet()
13:49:42.205 INFO  DAGScheduler - waiting: HashSet(ResultStage 767)
13:49:42.205 INFO  DAGScheduler - failed: HashSet()
13:49:42.205 INFO  DAGScheduler - Submitting ResultStage 767 (MapPartitionsRDD[3747] at mapPartitions at SparkUtils.java:188), which has no missing parents
13:49:42.205 INFO  MemoryStore - Block broadcast_1462 stored as values in memory (estimated size 7.4 KiB, free 1896.4 MiB)
13:49:42.206 INFO  MemoryStore - Block broadcast_1462_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1896.4 MiB)
13:49:42.206 INFO  BlockManagerInfo - Added broadcast_1462_piece0 in memory on localhost:43843 (size: 4.1 KiB, free: 1918.2 MiB)
13:49:42.206 INFO  SparkContext - Created broadcast 1462 from broadcast at DAGScheduler.scala:1580
13:49:42.206 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 767 (MapPartitionsRDD[3747] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
13:49:42.206 INFO  TaskSchedulerImpl - Adding task set 767.0 with 1 tasks resource profile 0
13:49:42.207 INFO  TaskSetManager - Starting task 0.0 in stage 767.0 (TID 705) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:49:42.207 INFO  Executor - Running task 0.0 in stage 767.0 (TID 705)
13:49:42.208 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.209 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.211 INFO  Executor - Finished task 0.0 in stage 767.0 (TID 705). 2495 bytes result sent to driver
13:49:42.211 INFO  TaskSetManager - Finished task 0.0 in stage 767.0 (TID 705) in 4 ms on localhost (executor driver) (1/1)
13:49:42.211 INFO  TaskSchedulerImpl - Removed TaskSet 767.0, whose tasks have all completed, from pool 
13:49:42.211 INFO  DAGScheduler - ResultStage 767 (collect at SparkUtils.java:205) finished in 0.006 s
13:49:42.211 INFO  DAGScheduler - Job 446 is finished. Cancelling potential speculative or zombie tasks for this job
13:49:42.211 INFO  TaskSchedulerImpl - Killing all running tasks in stage 767: Stage finished
13:49:42.212 INFO  DAGScheduler - Job 446 finished: collect at SparkUtils.java:205, took 0.067099 s
13:49:42.216 INFO  MemoryStore - Block broadcast_1463 stored as values in memory (estimated size 1632.0 B, free 1896.4 MiB)
13:49:42.216 INFO  MemoryStore - Block broadcast_1463_piece0 stored as bytes in memory (estimated size 145.0 B, free 1896.4 MiB)
13:49:42.216 INFO  BlockManagerInfo - Added broadcast_1463_piece0 in memory on localhost:43843 (size: 145.0 B, free: 1918.2 MiB)
13:49:42.216 INFO  SparkContext - Created broadcast 1463 from broadcast at MarkDuplicatesSparkUtils.java:126
13:49:42.217 INFO  MemoryStore - Block broadcast_1464 stored as values in memory (estimated size 304.0 B, free 1896.4 MiB)
13:49:42.217 INFO  MemoryStore - Block broadcast_1464_piece0 stored as bytes in memory (estimated size 37.0 B, free 1896.4 MiB)
13:49:42.217 INFO  BlockManagerInfo - Added broadcast_1464_piece0 in memory on localhost:43843 (size: 37.0 B, free: 1918.2 MiB)
13:49:42.217 INFO  SparkContext - Created broadcast 1464 from broadcast at MarkDuplicatesSparkUtils.java:127
13:49:42.229 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
13:49:42.230 INFO  DAGScheduler - Registering RDD 3756 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 108
13:49:42.230 INFO  DAGScheduler - Registering RDD 3760 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 107
13:49:42.230 INFO  DAGScheduler - Registering RDD 3764 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 106
13:49:42.230 INFO  DAGScheduler - Got job 447 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
13:49:42.230 INFO  DAGScheduler - Final stage: ResultStage 772 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
13:49:42.230 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 771)
13:49:42.231 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 771)
13:49:42.231 INFO  DAGScheduler - Submitting ShuffleMapStage 769 (MapPartitionsRDD[3756] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
13:49:42.232 INFO  MemoryStore - Block broadcast_1465 stored as values in memory (estimated size 24.2 KiB, free 1896.4 MiB)
13:49:42.232 INFO  MemoryStore - Block broadcast_1465_piece0 stored as bytes in memory (estimated size 12.6 KiB, free 1896.4 MiB)
13:49:42.232 INFO  BlockManagerInfo - Added broadcast_1465_piece0 in memory on localhost:43843 (size: 12.6 KiB, free: 1918.2 MiB)
13:49:42.233 INFO  SparkContext - Created broadcast 1465 from broadcast at DAGScheduler.scala:1580
13:49:42.233 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 769 (MapPartitionsRDD[3756] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
13:49:42.233 INFO  TaskSchedulerImpl - Adding task set 769.0 with 1 tasks resource profile 0
13:49:42.233 INFO  TaskSetManager - Starting task 0.0 in stage 769.0 (TID 706) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
13:49:42.234 INFO  Executor - Running task 0.0 in stage 769.0 (TID 706)
13:49:42.235 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.235 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.239 INFO  Executor - Finished task 0.0 in stage 769.0 (TID 706). 1922 bytes result sent to driver
13:49:42.240 INFO  TaskSetManager - Finished task 0.0 in stage 769.0 (TID 706) in 7 ms on localhost (executor driver) (1/1)
13:49:42.240 INFO  TaskSchedulerImpl - Removed TaskSet 769.0, whose tasks have all completed, from pool 
13:49:42.240 INFO  DAGScheduler - ShuffleMapStage 769 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.009 s
13:49:42.240 INFO  DAGScheduler - looking for newly runnable stages
13:49:42.240 INFO  DAGScheduler - running: HashSet()
13:49:42.240 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 770, ShuffleMapStage 771, ResultStage 772)
13:49:42.240 INFO  DAGScheduler - failed: HashSet()
13:49:42.241 INFO  DAGScheduler - Submitting ShuffleMapStage 770 (MapPartitionsRDD[3760] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
13:49:42.241 INFO  MemoryStore - Block broadcast_1466 stored as values in memory (estimated size 28.3 KiB, free 1896.3 MiB)
13:49:42.242 INFO  MemoryStore - Block broadcast_1466_piece0 stored as bytes in memory (estimated size 14.8 KiB, free 1896.3 MiB)
13:49:42.242 INFO  BlockManagerInfo - Added broadcast_1466_piece0 in memory on localhost:43843 (size: 14.8 KiB, free: 1918.2 MiB)
13:49:42.242 INFO  SparkContext - Created broadcast 1466 from broadcast at DAGScheduler.scala:1580
13:49:42.243 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 770 (MapPartitionsRDD[3760] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
13:49:42.243 INFO  TaskSchedulerImpl - Adding task set 770.0 with 1 tasks resource profile 0
13:49:42.243 INFO  TaskSetManager - Starting task 0.0 in stage 770.0 (TID 707) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
13:49:42.243 INFO  Executor - Running task 0.0 in stage 770.0 (TID 707)
13:49:42.245 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.245 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.249 INFO  Executor - Finished task 0.0 in stage 770.0 (TID 707). 1922 bytes result sent to driver
13:49:42.250 INFO  TaskSetManager - Finished task 0.0 in stage 770.0 (TID 707) in 7 ms on localhost (executor driver) (1/1)
13:49:42.250 INFO  TaskSchedulerImpl - Removed TaskSet 770.0, whose tasks have all completed, from pool 
13:49:42.250 INFO  DAGScheduler - ShuffleMapStage 770 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.009 s
13:49:42.250 INFO  DAGScheduler - looking for newly runnable stages
13:49:42.250 INFO  DAGScheduler - running: HashSet()
13:49:42.250 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 771, ResultStage 772)
13:49:42.250 INFO  DAGScheduler - failed: HashSet()
13:49:42.250 INFO  DAGScheduler - Submitting ShuffleMapStage 771 (MapPartitionsRDD[3764] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
13:49:42.251 INFO  MemoryStore - Block broadcast_1467 stored as values in memory (estimated size 19.0 KiB, free 1896.3 MiB)
13:49:42.251 INFO  MemoryStore - Block broadcast_1467_piece0 stored as bytes in memory (estimated size 9.7 KiB, free 1896.3 MiB)
13:49:42.252 INFO  BlockManagerInfo - Added broadcast_1467_piece0 in memory on localhost:43843 (size: 9.7 KiB, free: 1918.2 MiB)
13:49:42.252 INFO  SparkContext - Created broadcast 1467 from broadcast at DAGScheduler.scala:1580
13:49:42.252 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 771 (MapPartitionsRDD[3764] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
13:49:42.252 INFO  TaskSchedulerImpl - Adding task set 771.0 with 1 tasks resource profile 0
13:49:42.252 INFO  TaskSetManager - Starting task 0.0 in stage 771.0 (TID 708) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:49:42.253 INFO  Executor - Running task 0.0 in stage 771.0 (TID 708)
13:49:42.255 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.255 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.257 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.258 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.259 INFO  Executor - Finished task 0.0 in stage 771.0 (TID 708). 1922 bytes result sent to driver
13:49:42.259 INFO  TaskSetManager - Finished task 0.0 in stage 771.0 (TID 708) in 7 ms on localhost (executor driver) (1/1)
13:49:42.260 INFO  TaskSchedulerImpl - Removed TaskSet 771.0, whose tasks have all completed, from pool 
13:49:42.260 INFO  DAGScheduler - ShuffleMapStage 771 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.009 s
13:49:42.260 INFO  DAGScheduler - looking for newly runnable stages
13:49:42.260 INFO  DAGScheduler - running: HashSet()
13:49:42.260 INFO  DAGScheduler - waiting: HashSet(ResultStage 772)
13:49:42.260 INFO  DAGScheduler - failed: HashSet()
13:49:42.260 INFO  DAGScheduler - Submitting ResultStage 772 (MapPartitionsRDD[3766] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
13:49:42.261 INFO  MemoryStore - Block broadcast_1468 stored as values in memory (estimated size 20.2 KiB, free 1896.3 MiB)
13:49:42.261 INFO  MemoryStore - Block broadcast_1468_piece0 stored as bytes in memory (estimated size 10.6 KiB, free 1896.3 MiB)
13:49:42.261 INFO  BlockManagerInfo - Added broadcast_1468_piece0 in memory on localhost:43843 (size: 10.6 KiB, free: 1918.2 MiB)
13:49:42.261 INFO  SparkContext - Created broadcast 1468 from broadcast at DAGScheduler.scala:1580
13:49:42.262 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 772 (MapPartitionsRDD[3766] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
13:49:42.262 INFO  TaskSchedulerImpl - Adding task set 772.0 with 1 tasks resource profile 0
13:49:42.262 INFO  TaskSetManager - Starting task 0.0 in stage 772.0 (TID 709) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:49:42.262 INFO  Executor - Running task 0.0 in stage 772.0 (TID 709)
13:49:42.264 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.264 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.266 INFO  Executor - Finished task 0.0 in stage 772.0 (TID 709). 2053 bytes result sent to driver
13:49:42.266 INFO  TaskSetManager - Finished task 0.0 in stage 772.0 (TID 709) in 4 ms on localhost (executor driver) (1/1)
13:49:42.266 INFO  TaskSchedulerImpl - Removed TaskSet 772.0, whose tasks have all completed, from pool 
13:49:42.266 INFO  DAGScheduler - ResultStage 772 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.006 s
13:49:42.266 INFO  DAGScheduler - Job 447 is finished. Cancelling potential speculative or zombie tasks for this job
13:49:42.266 INFO  TaskSchedulerImpl - Killing all running tasks in stage 772: Stage finished
13:49:42.266 INFO  DAGScheduler - Job 447 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.037004 s
13:49:42.277 INFO  MemoryStore - Block broadcast_1469 stored as values in memory (estimated size 20.3 KiB, free 1896.2 MiB)
13:49:42.278 INFO  MemoryStore - Block broadcast_1469_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1896.2 MiB)
13:49:42.278 INFO  BlockManagerInfo - Added broadcast_1469_piece0 in memory on localhost:43843 (size: 1850.0 B, free: 1918.2 MiB)
13:49:42.278 INFO  SparkContext - Created broadcast 1469 from broadcast at ReadsSparkSink.java:133
13:49:42.279 INFO  MemoryStore - Block broadcast_1470 stored as values in memory (estimated size 20.4 KiB, free 1896.2 MiB)
13:49:42.279 INFO  MemoryStore - Block broadcast_1470_piece0 stored as bytes in memory (estimated size 1850.0 B, free 1896.2 MiB)
13:49:42.279 INFO  BlockManagerInfo - Added broadcast_1470_piece0 in memory on localhost:43843 (size: 1850.0 B, free: 1918.2 MiB)
13:49:42.279 INFO  SparkContext - Created broadcast 1470 from broadcast at BamSink.java:76
13:49:42.281 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
13:49:42.281 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:49:42.281 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:49:42.298 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
13:49:42.298 INFO  DAGScheduler - Registering RDD 3768 (mapToPair at SparkUtils.java:161) as input to shuffle 109
13:49:42.299 INFO  DAGScheduler - Got job 448 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
13:49:42.299 INFO  DAGScheduler - Final stage: ResultStage 777 (runJob at SparkHadoopWriter.scala:83)
13:49:42.299 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 776)
13:49:42.299 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 776)
13:49:42.299 INFO  DAGScheduler - Submitting ShuffleMapStage 776 (MapPartitionsRDD[3768] at mapToPair at SparkUtils.java:161), which has no missing parents
13:49:42.300 INFO  MemoryStore - Block broadcast_1471 stored as values in memory (estimated size 18.1 KiB, free 1896.2 MiB)
13:49:42.300 INFO  MemoryStore - Block broadcast_1471_piece0 stored as bytes in memory (estimated size 9.5 KiB, free 1896.2 MiB)
13:49:42.301 INFO  BlockManagerInfo - Added broadcast_1471_piece0 in memory on localhost:43843 (size: 9.5 KiB, free: 1918.2 MiB)
13:49:42.301 INFO  SparkContext - Created broadcast 1471 from broadcast at DAGScheduler.scala:1580
13:49:42.301 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 776 (MapPartitionsRDD[3768] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:49:42.301 INFO  TaskSchedulerImpl - Adding task set 776.0 with 1 tasks resource profile 0
13:49:42.302 INFO  TaskSetManager - Starting task 0.0 in stage 776.0 (TID 710) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:49:42.302 INFO  Executor - Running task 0.0 in stage 776.0 (TID 710)
13:49:42.303 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.303 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.305 INFO  ShuffleBlockFetcherIterator - Getting 1 (117.0 B) non-empty blocks including 1 (117.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.305 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.307 INFO  Executor - Finished task 0.0 in stage 776.0 (TID 710). 1922 bytes result sent to driver
13:49:42.307 INFO  TaskSetManager - Finished task 0.0 in stage 776.0 (TID 710) in 6 ms on localhost (executor driver) (1/1)
13:49:42.307 INFO  TaskSchedulerImpl - Removed TaskSet 776.0, whose tasks have all completed, from pool 
13:49:42.307 INFO  DAGScheduler - ShuffleMapStage 776 (mapToPair at SparkUtils.java:161) finished in 0.008 s
13:49:42.307 INFO  DAGScheduler - looking for newly runnable stages
13:49:42.307 INFO  DAGScheduler - running: HashSet()
13:49:42.307 INFO  DAGScheduler - waiting: HashSet(ResultStage 777)
13:49:42.307 INFO  DAGScheduler - failed: HashSet()
13:49:42.308 INFO  DAGScheduler - Submitting ResultStage 777 (MapPartitionsRDD[3773] at mapToPair at BamSink.java:91), which has no missing parents
13:49:42.315 INFO  MemoryStore - Block broadcast_1472 stored as values in memory (estimated size 163.7 KiB, free 1896.0 MiB)
13:49:42.316 INFO  MemoryStore - Block broadcast_1472_piece0 stored as bytes in memory (estimated size 73.9 KiB, free 1896.0 MiB)
13:49:42.316 INFO  BlockManagerInfo - Added broadcast_1472_piece0 in memory on localhost:43843 (size: 73.9 KiB, free: 1918.1 MiB)
13:49:42.316 INFO  SparkContext - Created broadcast 1472 from broadcast at DAGScheduler.scala:1580
13:49:42.316 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 777 (MapPartitionsRDD[3773] at mapToPair at BamSink.java:91) (first 15 tasks are for partitions Vector(0))
13:49:42.316 INFO  TaskSchedulerImpl - Adding task set 777.0 with 1 tasks resource profile 0
13:49:42.317 INFO  TaskSetManager - Starting task 0.0 in stage 777.0 (TID 711) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:49:42.317 INFO  Executor - Running task 0.0 in stage 777.0 (TID 711)
13:49:42.321 INFO  ShuffleBlockFetcherIterator - Getting 1 (868.0 B) non-empty blocks including 1 (868.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:49:42.321 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:49:42.324 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
13:49:42.324 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:49:42.324 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:49:42.324 INFO  PathOutputCommitterFactory - No output committer factory defined, defaulting to FileOutputCommitterFactory
13:49:42.324 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:49:42.324 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:49:42.343 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271349429171202871234177313_3773_r_000000_0' to file:/tmp/local15243704403060221796/markdups1368780207861754654.bam.parts/_temporary/0/task_202505271349429171202871234177313_3773_r_000000
13:49:42.343 INFO  SparkHadoopMapRedUtil - attempt_202505271349429171202871234177313_3773_r_000000_0: Committed. Elapsed time: 0 ms.
13:49:42.343 INFO  Executor - Finished task 0.0 in stage 777.0 (TID 711). 1858 bytes result sent to driver
13:49:42.344 INFO  TaskSetManager - Finished task 0.0 in stage 777.0 (TID 711) in 26 ms on localhost (executor driver) (1/1)
13:49:42.344 INFO  TaskSchedulerImpl - Removed TaskSet 777.0, whose tasks have all completed, from pool 
13:49:42.344 INFO  DAGScheduler - ResultStage 777 (runJob at SparkHadoopWriter.scala:83) finished in 0.036 s
13:49:42.344 INFO  DAGScheduler - Job 448 is finished. Cancelling potential speculative or zombie tasks for this job
13:49:42.344 INFO  TaskSchedulerImpl - Killing all running tasks in stage 777: Stage finished
13:49:42.344 INFO  DAGScheduler - Job 448 finished: runJob at SparkHadoopWriter.scala:83, took 0.045951 s
13:49:42.344 INFO  SparkHadoopWriter - Start to commit write Job job_202505271349429171202871234177313_3773.
13:49:42.348 INFO  SparkHadoopWriter - Write Job job_202505271349429171202871234177313_3773 committed. Elapsed time: 4 ms.
13:49:42.358 INFO  HadoopFileSystemWrapper - Concatenating 3 parts to /tmp/local15243704403060221796/markdups1368780207861754654.bam
13:49:42.362 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/local15243704403060221796/markdups1368780207861754654.bam done
13:49:42.362 INFO  IndexFileMerger - Merging .sbi files in temp directory /tmp/local15243704403060221796/markdups1368780207861754654.bam.parts/ to /tmp/local15243704403060221796/markdups1368780207861754654.bam.sbi
13:49:42.367 INFO  IndexFileMerger - Done merging .sbi files
13:49:42.367 INFO  IndexFileMerger - Merging .bai files in temp directory /tmp/local15243704403060221796/markdups1368780207861754654.bam.parts/ to /tmp/local15243704403060221796/markdups1368780207861754654.bam.bai
13:49:42.371 INFO  IndexFileMerger - Done merging .bai files
13:49:42.371 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 1:49:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:42 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:43 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:43 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:43 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:44 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:45 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:46 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:47 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:47 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:48 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:50 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:51 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:52 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:53 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:54 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:55 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:55 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:56 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:57 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:58 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:59 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:49:59 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:00 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:01 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:02 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:03 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.02 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:03 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:04 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.01 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:06 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.03 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
13:50:07.231 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:50:07.231 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
13:50:07.231 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
13:50:07.231 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
13:50:07.231 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
13:50:07.231 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 1:50:07 PM UTC
13:50:07.231 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:50:07.231 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:50:07.231 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
13:50:07.231 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
13:50:07.231 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
13:50:07.231 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
13:50:07.231 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
13:50:07.231 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
13:50:07.231 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
13:50:07.231 INFO  MarkDuplicatesSpark - Requester pays: disabled
13:50:07.231 INFO  MarkDuplicatesSpark - Initializing engine
13:50:07.231 INFO  MarkDuplicatesSpark - Done initializing engine
13:50:07.231 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
13:50:07.234 INFO  MemoryStore - Block broadcast_2049 stored as values in memory (estimated size 306.3 KiB, free 1912.6 MiB)
13:50:07.240 INFO  MemoryStore - Block broadcast_2049_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1912.5 MiB)
13:50:07.241 INFO  BlockManagerInfo - Added broadcast_2049_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1919.2 MiB)
13:50:07.241 INFO  SparkContext - Created broadcast 2049 from newAPIHadoopFile at PathSplitSource.java:96
13:50:07.262 INFO  MemoryStore - Block broadcast_2050 stored as values in memory (estimated size 306.3 KiB, free 1912.2 MiB)
13:50:07.268 INFO  MemoryStore - Block broadcast_2050_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1912.1 MiB)
13:50:07.269 INFO  BlockManagerInfo - Added broadcast_2050_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1919.1 MiB)
13:50:07.269 INFO  SparkContext - Created broadcast 2050 from newAPIHadoopFile at PathSplitSource.java:96
13:50:07.302 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
13:50:07.303 INFO  FileInputFormat - Total input files to process : 1
13:50:07.303 INFO  DAGScheduler - Registering RDD 5665 (mapToPair at SparkUtils.java:161) as input to shuffle 300
13:50:07.303 INFO  DAGScheduler - Got job 579 (collect at SparkUtils.java:205) with 1 output partitions
13:50:07.303 INFO  DAGScheduler - Final stage: ResultStage 1239 (collect at SparkUtils.java:205)
13:50:07.303 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1238)
13:50:07.304 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1238)
13:50:07.304 INFO  DAGScheduler - Submitting ShuffleMapStage 1238 (MapPartitionsRDD[5665] at mapToPair at SparkUtils.java:161), which has no missing parents
13:50:07.321 INFO  MemoryStore - Block broadcast_2051 stored as values in memory (estimated size 493.8 KiB, free 1911.7 MiB)
13:50:07.323 INFO  MemoryStore - Block broadcast_2051_piece0 stored as bytes in memory (estimated size 210.1 KiB, free 1911.5 MiB)
13:50:07.323 INFO  BlockManagerInfo - Added broadcast_2051_piece0 in memory on localhost:43843 (size: 210.1 KiB, free: 1918.9 MiB)
13:50:07.324 INFO  SparkContext - Created broadcast 2051 from broadcast at DAGScheduler.scala:1580
13:50:07.324 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1238 (MapPartitionsRDD[5665] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:50:07.324 INFO  TaskSchedulerImpl - Adding task set 1238.0 with 1 tasks resource profile 0
13:50:07.324 INFO  TaskSetManager - Starting task 0.0 in stage 1238.0 (TID 1082) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9758 bytes) 
13:50:07.324 INFO  Executor - Running task 0.0 in stage 1238.0 (TID 1082)
13:50:07.354 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/example.chr1.1-1K.unmarkedDups.bam:0+13412
13:50:07.359 INFO  Executor - Finished task 0.0 in stage 1238.0 (TID 1082). 1148 bytes result sent to driver
13:50:07.359 INFO  TaskSetManager - Finished task 0.0 in stage 1238.0 (TID 1082) in 35 ms on localhost (executor driver) (1/1)
13:50:07.359 INFO  TaskSchedulerImpl - Removed TaskSet 1238.0, whose tasks have all completed, from pool 
13:50:07.360 INFO  DAGScheduler - ShuffleMapStage 1238 (mapToPair at SparkUtils.java:161) finished in 0.056 s
13:50:07.360 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.360 INFO  DAGScheduler - running: HashSet()
13:50:07.360 INFO  DAGScheduler - waiting: HashSet(ResultStage 1239)
13:50:07.360 INFO  DAGScheduler - failed: HashSet()
13:50:07.360 INFO  DAGScheduler - Submitting ResultStage 1239 (MapPartitionsRDD[5668] at mapPartitions at SparkUtils.java:188), which has no missing parents
13:50:07.361 INFO  MemoryStore - Block broadcast_2052 stored as values in memory (estimated size 7.4 KiB, free 1911.4 MiB)
13:50:07.361 INFO  MemoryStore - Block broadcast_2052_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1911.4 MiB)
13:50:07.361 INFO  BlockManagerInfo - Added broadcast_2052_piece0 in memory on localhost:43843 (size: 4.1 KiB, free: 1918.9 MiB)
13:50:07.361 INFO  SparkContext - Created broadcast 2052 from broadcast at DAGScheduler.scala:1580
13:50:07.361 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1239 (MapPartitionsRDD[5668] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
13:50:07.362 INFO  TaskSchedulerImpl - Adding task set 1239.0 with 1 tasks resource profile 0
13:50:07.362 INFO  TaskSetManager - Starting task 0.0 in stage 1239.0 (TID 1083) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:50:07.362 INFO  Executor - Running task 0.0 in stage 1239.0 (TID 1083)
13:50:07.363 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.363 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.365 INFO  Executor - Finished task 0.0 in stage 1239.0 (TID 1083). 2329 bytes result sent to driver
13:50:07.366 INFO  TaskSetManager - Finished task 0.0 in stage 1239.0 (TID 1083) in 4 ms on localhost (executor driver) (1/1)
13:50:07.366 INFO  TaskSchedulerImpl - Removed TaskSet 1239.0, whose tasks have all completed, from pool 
13:50:07.366 INFO  DAGScheduler - ResultStage 1239 (collect at SparkUtils.java:205) finished in 0.006 s
13:50:07.366 INFO  DAGScheduler - Job 579 is finished. Cancelling potential speculative or zombie tasks for this job
13:50:07.366 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1239: Stage finished
13:50:07.366 INFO  DAGScheduler - Job 579 finished: collect at SparkUtils.java:205, took 0.063544 s
13:50:07.370 INFO  MemoryStore - Block broadcast_2053 stored as values in memory (estimated size 7.8 KiB, free 1911.4 MiB)
13:50:07.370 INFO  MemoryStore - Block broadcast_2053_piece0 stored as bytes in memory (estimated size 540.0 B, free 1911.4 MiB)
13:50:07.371 INFO  BlockManagerInfo - Added broadcast_2053_piece0 in memory on localhost:43843 (size: 540.0 B, free: 1918.9 MiB)
13:50:07.371 INFO  SparkContext - Created broadcast 2053 from broadcast at MarkDuplicatesSparkUtils.java:126
13:50:07.371 INFO  MemoryStore - Block broadcast_2054 stored as values in memory (estimated size 2.6 KiB, free 1911.4 MiB)
13:50:07.371 INFO  MemoryStore - Block broadcast_2054_piece0 stored as bytes in memory (estimated size 209.0 B, free 1911.4 MiB)
13:50:07.371 INFO  BlockManagerInfo - Added broadcast_2054_piece0 in memory on localhost:43843 (size: 209.0 B, free: 1918.9 MiB)
13:50:07.372 INFO  SparkContext - Created broadcast 2054 from broadcast at MarkDuplicatesSparkUtils.java:127
13:50:07.385 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
13:50:07.385 INFO  DAGScheduler - Registering RDD 5677 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 303
13:50:07.385 INFO  DAGScheduler - Registering RDD 5681 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 302
13:50:07.386 INFO  DAGScheduler - Registering RDD 5685 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 301
13:50:07.386 INFO  DAGScheduler - Got job 580 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
13:50:07.386 INFO  DAGScheduler - Final stage: ResultStage 1244 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
13:50:07.386 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1243)
13:50:07.386 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1243)
13:50:07.386 INFO  DAGScheduler - Submitting ShuffleMapStage 1241 (MapPartitionsRDD[5677] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
13:50:07.387 INFO  MemoryStore - Block broadcast_2055 stored as values in memory (estimated size 57.6 KiB, free 1911.4 MiB)
13:50:07.387 INFO  MemoryStore - Block broadcast_2055_piece0 stored as bytes in memory (estimated size 19.8 KiB, free 1911.4 MiB)
13:50:07.387 INFO  BlockManagerInfo - Added broadcast_2055_piece0 in memory on localhost:43843 (size: 19.8 KiB, free: 1918.9 MiB)
13:50:07.388 INFO  SparkContext - Created broadcast 2055 from broadcast at DAGScheduler.scala:1580
13:50:07.388 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1241 (MapPartitionsRDD[5677] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
13:50:07.388 INFO  TaskSchedulerImpl - Adding task set 1241.0 with 1 tasks resource profile 0
13:50:07.388 INFO  TaskSetManager - Starting task 0.0 in stage 1241.0 (TID 1084) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
13:50:07.388 INFO  Executor - Running task 0.0 in stage 1241.0 (TID 1084)
13:50:07.390 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.390 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.394 INFO  Executor - Finished task 0.0 in stage 1241.0 (TID 1084). 1922 bytes result sent to driver
13:50:07.394 INFO  TaskSetManager - Finished task 0.0 in stage 1241.0 (TID 1084) in 6 ms on localhost (executor driver) (1/1)
13:50:07.394 INFO  TaskSchedulerImpl - Removed TaskSet 1241.0, whose tasks have all completed, from pool 
13:50:07.394 INFO  DAGScheduler - ShuffleMapStage 1241 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.008 s
13:50:07.394 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.394 INFO  DAGScheduler - running: HashSet()
13:50:07.394 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1242, ShuffleMapStage 1243, ResultStage 1244)
13:50:07.394 INFO  DAGScheduler - failed: HashSet()
13:50:07.394 INFO  DAGScheduler - Submitting ShuffleMapStage 1242 (MapPartitionsRDD[5681] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
13:50:07.395 INFO  MemoryStore - Block broadcast_2056 stored as values in memory (estimated size 61.7 KiB, free 1911.3 MiB)
13:50:07.396 INFO  MemoryStore - Block broadcast_2056_piece0 stored as bytes in memory (estimated size 22.0 KiB, free 1911.3 MiB)
13:50:07.396 INFO  BlockManagerInfo - Added broadcast_2056_piece0 in memory on localhost:43843 (size: 22.0 KiB, free: 1918.9 MiB)
13:50:07.396 INFO  SparkContext - Created broadcast 2056 from broadcast at DAGScheduler.scala:1580
13:50:07.396 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1242 (MapPartitionsRDD[5681] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
13:50:07.397 INFO  TaskSchedulerImpl - Adding task set 1242.0 with 1 tasks resource profile 0
13:50:07.397 INFO  TaskSetManager - Starting task 0.0 in stage 1242.0 (TID 1085) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
13:50:07.397 INFO  Executor - Running task 0.0 in stage 1242.0 (TID 1085)
13:50:07.399 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.0 KiB) non-empty blocks including 1 (2.0 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.399 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.404 INFO  Executor - Finished task 0.0 in stage 1242.0 (TID 1085). 1922 bytes result sent to driver
13:50:07.404 INFO  TaskSetManager - Finished task 0.0 in stage 1242.0 (TID 1085) in 7 ms on localhost (executor driver) (1/1)
13:50:07.404 INFO  TaskSchedulerImpl - Removed TaskSet 1242.0, whose tasks have all completed, from pool 
13:50:07.404 INFO  DAGScheduler - ShuffleMapStage 1242 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.009 s
13:50:07.404 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.405 INFO  DAGScheduler - running: HashSet()
13:50:07.405 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1243, ResultStage 1244)
13:50:07.405 INFO  DAGScheduler - failed: HashSet()
13:50:07.405 INFO  DAGScheduler - Submitting ShuffleMapStage 1243 (MapPartitionsRDD[5685] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
13:50:07.406 INFO  MemoryStore - Block broadcast_2057 stored as values in memory (estimated size 52.4 KiB, free 1911.2 MiB)
13:50:07.406 INFO  MemoryStore - Block broadcast_2057_piece0 stored as bytes in memory (estimated size 17.3 KiB, free 1911.2 MiB)
13:50:07.406 INFO  BlockManagerInfo - Added broadcast_2057_piece0 in memory on localhost:43843 (size: 17.3 KiB, free: 1918.9 MiB)
13:50:07.406 INFO  SparkContext - Created broadcast 2057 from broadcast at DAGScheduler.scala:1580
13:50:07.406 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1243 (MapPartitionsRDD[5685] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
13:50:07.406 INFO  TaskSchedulerImpl - Adding task set 1243.0 with 1 tasks resource profile 0
13:50:07.407 INFO  TaskSetManager - Starting task 0.0 in stage 1243.0 (TID 1086) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:50:07.407 INFO  Executor - Running task 0.0 in stage 1243.0 (TID 1086)
13:50:07.408 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.408 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.411 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.411 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.412 INFO  Executor - Finished task 0.0 in stage 1243.0 (TID 1086). 1922 bytes result sent to driver
13:50:07.412 INFO  TaskSetManager - Finished task 0.0 in stage 1243.0 (TID 1086) in 5 ms on localhost (executor driver) (1/1)
13:50:07.412 INFO  TaskSchedulerImpl - Removed TaskSet 1243.0, whose tasks have all completed, from pool 
13:50:07.412 INFO  DAGScheduler - ShuffleMapStage 1243 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.007 s
13:50:07.412 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.412 INFO  DAGScheduler - running: HashSet()
13:50:07.412 INFO  DAGScheduler - waiting: HashSet(ResultStage 1244)
13:50:07.413 INFO  DAGScheduler - failed: HashSet()
13:50:07.413 INFO  DAGScheduler - Submitting ResultStage 1244 (MapPartitionsRDD[5687] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
13:50:07.414 INFO  MemoryStore - Block broadcast_2058 stored as values in memory (estimated size 53.6 KiB, free 1911.2 MiB)
13:50:07.414 INFO  MemoryStore - Block broadcast_2058_piece0 stored as bytes in memory (estimated size 18.0 KiB, free 1911.1 MiB)
13:50:07.414 INFO  BlockManagerInfo - Added broadcast_2058_piece0 in memory on localhost:43843 (size: 18.0 KiB, free: 1918.8 MiB)
13:50:07.414 INFO  SparkContext - Created broadcast 2058 from broadcast at DAGScheduler.scala:1580
13:50:07.414 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1244 (MapPartitionsRDD[5687] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
13:50:07.414 INFO  TaskSchedulerImpl - Adding task set 1244.0 with 1 tasks resource profile 0
13:50:07.415 INFO  TaskSetManager - Starting task 0.0 in stage 1244.0 (TID 1087) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:50:07.415 INFO  Executor - Running task 0.0 in stage 1244.0 (TID 1087)
13:50:07.416 INFO  ShuffleBlockFetcherIterator - Getting 1 (251.0 B) non-empty blocks including 1 (251.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.416 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.417 INFO  Executor - Finished task 0.0 in stage 1244.0 (TID 1087). 2371 bytes result sent to driver
13:50:07.418 INFO  TaskSetManager - Finished task 0.0 in stage 1244.0 (TID 1087) in 3 ms on localhost (executor driver) (1/1)
13:50:07.418 INFO  TaskSchedulerImpl - Removed TaskSet 1244.0, whose tasks have all completed, from pool 
13:50:07.418 INFO  DAGScheduler - ResultStage 1244 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.005 s
13:50:07.418 INFO  DAGScheduler - Job 580 is finished. Cancelling potential speculative or zombie tasks for this job
13:50:07.418 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1244: Stage finished
13:50:07.418 INFO  DAGScheduler - Job 580 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.033038 s
13:50:07.429 INFO  MemoryStore - Block broadcast_2059 stored as values in memory (estimated size 179.4 KiB, free 1911.0 MiB)
13:50:07.430 INFO  MemoryStore - Block broadcast_2059_piece0 stored as bytes in memory (estimated size 7.9 KiB, free 1911.0 MiB)
13:50:07.430 INFO  BlockManagerInfo - Added broadcast_2059_piece0 in memory on localhost:43843 (size: 7.9 KiB, free: 1918.8 MiB)
13:50:07.430 INFO  SparkContext - Created broadcast 2059 from broadcast at ReadsSparkSink.java:133
13:50:07.433 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
13:50:07.433 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:50:07.433 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:50:07.449 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
13:50:07.450 INFO  DAGScheduler - Registering RDD 5689 (mapToPair at SparkUtils.java:161) as input to shuffle 304
13:50:07.450 INFO  DAGScheduler - Got job 581 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
13:50:07.450 INFO  DAGScheduler - Final stage: ResultStage 1249 (runJob at SparkHadoopWriter.scala:83)
13:50:07.450 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1248)
13:50:07.450 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1248)
13:50:07.450 INFO  DAGScheduler - Submitting ShuffleMapStage 1248 (MapPartitionsRDD[5689] at mapToPair at SparkUtils.java:161), which has no missing parents
13:50:07.451 INFO  MemoryStore - Block broadcast_2060 stored as values in memory (estimated size 51.5 KiB, free 1910.9 MiB)
13:50:07.451 INFO  MemoryStore - Block broadcast_2060_piece0 stored as bytes in memory (estimated size 16.8 KiB, free 1910.9 MiB)
13:50:07.452 INFO  BlockManagerInfo - Added broadcast_2060_piece0 in memory on localhost:43843 (size: 16.8 KiB, free: 1918.8 MiB)
13:50:07.452 INFO  SparkContext - Created broadcast 2060 from broadcast at DAGScheduler.scala:1580
13:50:07.452 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1248 (MapPartitionsRDD[5689] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:50:07.452 INFO  TaskSchedulerImpl - Adding task set 1248.0 with 1 tasks resource profile 0
13:50:07.452 INFO  TaskSetManager - Starting task 0.0 in stage 1248.0 (TID 1088) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:50:07.452 INFO  Executor - Running task 0.0 in stage 1248.0 (TID 1088)
13:50:07.454 INFO  ShuffleBlockFetcherIterator - Getting 1 (16.3 KiB) non-empty blocks including 1 (16.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.454 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.457 INFO  ShuffleBlockFetcherIterator - Getting 1 (2.7 KiB) non-empty blocks including 1 (2.7 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.457 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.458 INFO  Executor - Finished task 0.0 in stage 1248.0 (TID 1088). 1922 bytes result sent to driver
13:50:07.458 INFO  TaskSetManager - Finished task 0.0 in stage 1248.0 (TID 1088) in 6 ms on localhost (executor driver) (1/1)
13:50:07.458 INFO  TaskSchedulerImpl - Removed TaskSet 1248.0, whose tasks have all completed, from pool 
13:50:07.459 INFO  DAGScheduler - ShuffleMapStage 1248 (mapToPair at SparkUtils.java:161) finished in 0.009 s
13:50:07.459 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.459 INFO  DAGScheduler - running: HashSet()
13:50:07.459 INFO  DAGScheduler - waiting: HashSet(ResultStage 1249)
13:50:07.459 INFO  DAGScheduler - failed: HashSet()
13:50:07.459 INFO  DAGScheduler - Submitting ResultStage 1249 (MapPartitionsRDD[5695] at saveAsTextFile at SamSink.java:65), which has no missing parents
13:50:07.465 INFO  MemoryStore - Block broadcast_2061 stored as values in memory (estimated size 196.7 KiB, free 1910.7 MiB)
13:50:07.467 INFO  MemoryStore - Block broadcast_2061_piece0 stored as bytes in memory (estimated size 80.9 KiB, free 1910.6 MiB)
13:50:07.467 INFO  BlockManagerInfo - Added broadcast_2061_piece0 in memory on localhost:43843 (size: 80.9 KiB, free: 1918.7 MiB)
13:50:07.467 INFO  SparkContext - Created broadcast 2061 from broadcast at DAGScheduler.scala:1580
13:50:07.467 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1249 (MapPartitionsRDD[5695] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
13:50:07.467 INFO  TaskSchedulerImpl - Adding task set 1249.0 with 1 tasks resource profile 0
13:50:07.467 INFO  TaskSetManager - Starting task 0.0 in stage 1249.0 (TID 1089) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:50:07.467 INFO  Executor - Running task 0.0 in stage 1249.0 (TID 1089)
13:50:07.471 INFO  ShuffleBlockFetcherIterator - Getting 1 (14.8 KiB) non-empty blocks including 1 (14.8 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.471 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.473 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
13:50:07.473 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:50:07.473 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:50:07.484 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271350076386563487132680748_5695_m_000000_0' to file:/tmp/MarkDups7565437022154810113/MarkDups.sam.parts/_temporary/0/task_202505271350076386563487132680748_5695_m_000000
13:50:07.484 INFO  SparkHadoopMapRedUtil - attempt_202505271350076386563487132680748_5695_m_000000_0: Committed. Elapsed time: 0 ms.
13:50:07.484 INFO  Executor - Finished task 0.0 in stage 1249.0 (TID 1089). 1858 bytes result sent to driver
13:50:07.484 INFO  TaskSetManager - Finished task 0.0 in stage 1249.0 (TID 1089) in 17 ms on localhost (executor driver) (1/1)
13:50:07.484 INFO  TaskSchedulerImpl - Removed TaskSet 1249.0, whose tasks have all completed, from pool 
13:50:07.484 INFO  DAGScheduler - ResultStage 1249 (runJob at SparkHadoopWriter.scala:83) finished in 0.025 s
13:50:07.484 INFO  DAGScheduler - Job 581 is finished. Cancelling potential speculative or zombie tasks for this job
13:50:07.484 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1249: Stage finished
13:50:07.484 INFO  DAGScheduler - Job 581 finished: runJob at SparkHadoopWriter.scala:83, took 0.035097 s
13:50:07.485 INFO  SparkHadoopWriter - Start to commit write Job job_202505271350076386563487132680748_5695.
13:50:07.489 INFO  SparkHadoopWriter - Write Job job_202505271350076386563487132680748_5695 committed. Elapsed time: 4 ms.
13:50:07.496 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups7565437022154810113/MarkDups.sam
13:50:07.500 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups7565437022154810113/MarkDups.sam done
13:50:07.500 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 1:50:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
13:50:07.505 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:50:07.505 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
13:50:07.505 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
13:50:07.505 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
13:50:07.505 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
13:50:07.505 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 1:50:07 PM UTC
13:50:07.505 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:50:07.505 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:50:07.505 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
13:50:07.505 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
13:50:07.505 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
13:50:07.505 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
13:50:07.506 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
13:50:07.506 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
13:50:07.506 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
13:50:07.506 INFO  MarkDuplicatesSpark - Requester pays: disabled
13:50:07.506 INFO  MarkDuplicatesSpark - Initializing engine
13:50:07.506 INFO  MarkDuplicatesSpark - Done initializing engine
13:50:07.506 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
13:50:07.508 INFO  MemoryStore - Block broadcast_2062 stored as values in memory (estimated size 306.3 KiB, free 1910.3 MiB)
13:50:07.514 INFO  MemoryStore - Block broadcast_2062_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1910.3 MiB)
13:50:07.514 INFO  BlockManagerInfo - Added broadcast_2062_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1918.7 MiB)
13:50:07.515 INFO  SparkContext - Created broadcast 2062 from newAPIHadoopFile at PathSplitSource.java:96
13:50:07.534 INFO  MemoryStore - Block broadcast_2063 stored as values in memory (estimated size 306.3 KiB, free 1910.0 MiB)
13:50:07.541 INFO  MemoryStore - Block broadcast_2063_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1909.9 MiB)
13:50:07.541 INFO  BlockManagerInfo - Added broadcast_2063_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1918.6 MiB)
13:50:07.541 INFO  SparkContext - Created broadcast 2063 from newAPIHadoopFile at PathSplitSource.java:96
13:50:07.573 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
13:50:07.573 INFO  FileInputFormat - Total input files to process : 1
13:50:07.574 INFO  DAGScheduler - Registering RDD 5709 (mapToPair at SparkUtils.java:161) as input to shuffle 305
13:50:07.574 INFO  DAGScheduler - Got job 582 (collect at SparkUtils.java:205) with 1 output partitions
13:50:07.574 INFO  DAGScheduler - Final stage: ResultStage 1251 (collect at SparkUtils.java:205)
13:50:07.574 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1250)
13:50:07.574 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1250)
13:50:07.574 INFO  DAGScheduler - Submitting ShuffleMapStage 1250 (MapPartitionsRDD[5709] at mapToPair at SparkUtils.java:161), which has no missing parents
13:50:07.591 INFO  MemoryStore - Block broadcast_2064 stored as values in memory (estimated size 457.4 KiB, free 1909.4 MiB)
13:50:07.593 INFO  MemoryStore - Block broadcast_2064_piece0 stored as bytes in memory (estimated size 201.1 KiB, free 1909.3 MiB)
13:50:07.593 INFO  BlockManagerInfo - Added broadcast_2064_piece0 in memory on localhost:43843 (size: 201.1 KiB, free: 1918.4 MiB)
13:50:07.594 INFO  SparkContext - Created broadcast 2064 from broadcast at DAGScheduler.scala:1580
13:50:07.594 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1250 (MapPartitionsRDD[5709] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:50:07.594 INFO  TaskSchedulerImpl - Adding task set 1250.0 with 1 tasks resource profile 0
13:50:07.594 INFO  TaskSetManager - Starting task 0.0 in stage 1250.0 (TID 1090) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9738 bytes) 
13:50:07.594 INFO  Executor - Running task 0.0 in stage 1250.0 (TID 1090)
13:50:07.625 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/optical_dupes.bam:0+678
13:50:07.629 INFO  Executor - Finished task 0.0 in stage 1250.0 (TID 1090). 1148 bytes result sent to driver
13:50:07.629 INFO  TaskSetManager - Finished task 0.0 in stage 1250.0 (TID 1090) in 35 ms on localhost (executor driver) (1/1)
13:50:07.629 INFO  TaskSchedulerImpl - Removed TaskSet 1250.0, whose tasks have all completed, from pool 
13:50:07.629 INFO  DAGScheduler - ShuffleMapStage 1250 (mapToPair at SparkUtils.java:161) finished in 0.055 s
13:50:07.629 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.629 INFO  DAGScheduler - running: HashSet()
13:50:07.629 INFO  DAGScheduler - waiting: HashSet(ResultStage 1251)
13:50:07.629 INFO  DAGScheduler - failed: HashSet()
13:50:07.629 INFO  DAGScheduler - Submitting ResultStage 1251 (MapPartitionsRDD[5712] at mapPartitions at SparkUtils.java:188), which has no missing parents
13:50:07.630 INFO  MemoryStore - Block broadcast_2065 stored as values in memory (estimated size 7.4 KiB, free 1909.2 MiB)
13:50:07.630 INFO  MemoryStore - Block broadcast_2065_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1909.2 MiB)
13:50:07.630 INFO  BlockManagerInfo - Added broadcast_2065_piece0 in memory on localhost:43843 (size: 4.1 KiB, free: 1918.4 MiB)
13:50:07.631 INFO  SparkContext - Created broadcast 2065 from broadcast at DAGScheduler.scala:1580
13:50:07.631 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1251 (MapPartitionsRDD[5712] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
13:50:07.631 INFO  TaskSchedulerImpl - Adding task set 1251.0 with 1 tasks resource profile 0
13:50:07.631 INFO  TaskSetManager - Starting task 0.0 in stage 1251.0 (TID 1091) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:50:07.631 INFO  Executor - Running task 0.0 in stage 1251.0 (TID 1091)
13:50:07.632 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.632 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.634 INFO  Executor - Finished task 0.0 in stage 1251.0 (TID 1091). 2269 bytes result sent to driver
13:50:07.634 INFO  TaskSetManager - Finished task 0.0 in stage 1251.0 (TID 1091) in 3 ms on localhost (executor driver) (1/1)
13:50:07.634 INFO  TaskSchedulerImpl - Removed TaskSet 1251.0, whose tasks have all completed, from pool 
13:50:07.634 INFO  DAGScheduler - ResultStage 1251 (collect at SparkUtils.java:205) finished in 0.004 s
13:50:07.634 INFO  DAGScheduler - Job 582 is finished. Cancelling potential speculative or zombie tasks for this job
13:50:07.634 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1251: Stage finished
13:50:07.634 INFO  DAGScheduler - Job 582 finished: collect at SparkUtils.java:205, took 0.061233 s
13:50:07.638 INFO  MemoryStore - Block broadcast_2066 stored as values in memory (estimated size 136.0 B, free 1909.2 MiB)
13:50:07.638 INFO  MemoryStore - Block broadcast_2066_piece0 stored as bytes in memory (estimated size 24.0 B, free 1909.2 MiB)
13:50:07.638 INFO  BlockManagerInfo - Added broadcast_2066_piece0 in memory on localhost:43843 (size: 24.0 B, free: 1918.4 MiB)
13:50:07.639 INFO  SparkContext - Created broadcast 2066 from broadcast at MarkDuplicatesSparkUtils.java:126
13:50:07.639 INFO  MemoryStore - Block broadcast_2067 stored as values in memory (estimated size 136.0 B, free 1909.2 MiB)
13:50:07.639 INFO  MemoryStore - Block broadcast_2067_piece0 stored as bytes in memory (estimated size 21.0 B, free 1909.2 MiB)
13:50:07.639 INFO  BlockManagerInfo - Added broadcast_2067_piece0 in memory on localhost:43843 (size: 21.0 B, free: 1918.4 MiB)
13:50:07.639 INFO  SparkContext - Created broadcast 2067 from broadcast at MarkDuplicatesSparkUtils.java:127
13:50:07.651 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
13:50:07.651 INFO  DAGScheduler - Registering RDD 5721 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 308
13:50:07.651 INFO  DAGScheduler - Registering RDD 5725 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 307
13:50:07.651 INFO  DAGScheduler - Registering RDD 5729 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 306
13:50:07.651 INFO  DAGScheduler - Got job 583 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
13:50:07.651 INFO  DAGScheduler - Final stage: ResultStage 1256 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
13:50:07.651 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1255)
13:50:07.652 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1255)
13:50:07.652 INFO  DAGScheduler - Submitting ShuffleMapStage 1253 (MapPartitionsRDD[5721] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
13:50:07.652 INFO  MemoryStore - Block broadcast_2068 stored as values in memory (estimated size 21.1 KiB, free 1909.2 MiB)
13:50:07.653 INFO  MemoryStore - Block broadcast_2068_piece0 stored as bytes in memory (estimated size 10.9 KiB, free 1909.2 MiB)
13:50:07.653 INFO  BlockManagerInfo - Added broadcast_2068_piece0 in memory on localhost:43843 (size: 10.9 KiB, free: 1918.4 MiB)
13:50:07.653 INFO  SparkContext - Created broadcast 2068 from broadcast at DAGScheduler.scala:1580
13:50:07.653 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1253 (MapPartitionsRDD[5721] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
13:50:07.653 INFO  TaskSchedulerImpl - Adding task set 1253.0 with 1 tasks resource profile 0
13:50:07.654 INFO  TaskSetManager - Starting task 0.0 in stage 1253.0 (TID 1092) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
13:50:07.654 INFO  Executor - Running task 0.0 in stage 1253.0 (TID 1092)
13:50:07.656 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.656 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.659 INFO  Executor - Finished task 0.0 in stage 1253.0 (TID 1092). 1922 bytes result sent to driver
13:50:07.659 INFO  TaskSetManager - Finished task 0.0 in stage 1253.0 (TID 1092) in 5 ms on localhost (executor driver) (1/1)
13:50:07.659 INFO  TaskSchedulerImpl - Removed TaskSet 1253.0, whose tasks have all completed, from pool 
13:50:07.659 INFO  DAGScheduler - ShuffleMapStage 1253 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.007 s
13:50:07.659 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.659 INFO  DAGScheduler - running: HashSet()
13:50:07.659 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1254, ShuffleMapStage 1255, ResultStage 1256)
13:50:07.659 INFO  DAGScheduler - failed: HashSet()
13:50:07.659 INFO  DAGScheduler - Submitting ShuffleMapStage 1254 (MapPartitionsRDD[5725] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
13:50:07.660 INFO  MemoryStore - Block broadcast_2069 stored as values in memory (estimated size 25.2 KiB, free 1909.2 MiB)
13:50:07.660 INFO  MemoryStore - Block broadcast_2069_piece0 stored as bytes in memory (estimated size 13.1 KiB, free 1909.2 MiB)
13:50:07.661 INFO  BlockManagerInfo - Added broadcast_2069_piece0 in memory on localhost:43843 (size: 13.1 KiB, free: 1918.4 MiB)
13:50:07.661 INFO  SparkContext - Created broadcast 2069 from broadcast at DAGScheduler.scala:1580
13:50:07.661 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1254 (MapPartitionsRDD[5725] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
13:50:07.661 INFO  TaskSchedulerImpl - Adding task set 1254.0 with 1 tasks resource profile 0
13:50:07.661 INFO  TaskSetManager - Starting task 0.0 in stage 1254.0 (TID 1093) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
13:50:07.661 INFO  Executor - Running task 0.0 in stage 1254.0 (TID 1093)
13:50:07.663 INFO  ShuffleBlockFetcherIterator - Getting 1 (106.0 B) non-empty blocks including 1 (106.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.663 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.666 INFO  Executor - Finished task 0.0 in stage 1254.0 (TID 1093). 1922 bytes result sent to driver
13:50:07.667 INFO  TaskSetManager - Finished task 0.0 in stage 1254.0 (TID 1093) in 6 ms on localhost (executor driver) (1/1)
13:50:07.667 INFO  TaskSchedulerImpl - Removed TaskSet 1254.0, whose tasks have all completed, from pool 
13:50:07.667 INFO  DAGScheduler - ShuffleMapStage 1254 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.007 s
13:50:07.667 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.667 INFO  DAGScheduler - running: HashSet()
13:50:07.667 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1255, ResultStage 1256)
13:50:07.667 INFO  DAGScheduler - failed: HashSet()
13:50:07.667 INFO  DAGScheduler - Submitting ShuffleMapStage 1255 (MapPartitionsRDD[5729] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
13:50:07.668 INFO  MemoryStore - Block broadcast_2070 stored as values in memory (estimated size 15.9 KiB, free 1909.2 MiB)
13:50:07.668 INFO  MemoryStore - Block broadcast_2070_piece0 stored as bytes in memory (estimated size 8.3 KiB, free 1909.1 MiB)
13:50:07.668 INFO  BlockManagerInfo - Added broadcast_2070_piece0 in memory on localhost:43843 (size: 8.3 KiB, free: 1918.4 MiB)
13:50:07.668 INFO  SparkContext - Created broadcast 2070 from broadcast at DAGScheduler.scala:1580
13:50:07.668 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1255 (MapPartitionsRDD[5729] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
13:50:07.668 INFO  TaskSchedulerImpl - Adding task set 1255.0 with 1 tasks resource profile 0
13:50:07.669 INFO  TaskSetManager - Starting task 0.0 in stage 1255.0 (TID 1094) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:50:07.669 INFO  Executor - Running task 0.0 in stage 1255.0 (TID 1094)
13:50:07.670 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.670 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.671 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.671 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.672 INFO  Executor - Finished task 0.0 in stage 1255.0 (TID 1094). 1879 bytes result sent to driver
13:50:07.672 INFO  TaskSetManager - Finished task 0.0 in stage 1255.0 (TID 1094) in 3 ms on localhost (executor driver) (1/1)
13:50:07.672 INFO  TaskSchedulerImpl - Removed TaskSet 1255.0, whose tasks have all completed, from pool 
13:50:07.672 INFO  DAGScheduler - ShuffleMapStage 1255 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.005 s
13:50:07.672 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.672 INFO  DAGScheduler - running: HashSet()
13:50:07.672 INFO  DAGScheduler - waiting: HashSet(ResultStage 1256)
13:50:07.672 INFO  DAGScheduler - failed: HashSet()
13:50:07.672 INFO  DAGScheduler - Submitting ResultStage 1256 (MapPartitionsRDD[5731] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
13:50:07.673 INFO  MemoryStore - Block broadcast_2071 stored as values in memory (estimated size 17.1 KiB, free 1909.1 MiB)
13:50:07.673 INFO  MemoryStore - Block broadcast_2071_piece0 stored as bytes in memory (estimated size 9.0 KiB, free 1909.1 MiB)
13:50:07.674 INFO  BlockManagerInfo - Added broadcast_2071_piece0 in memory on localhost:43843 (size: 9.0 KiB, free: 1918.4 MiB)
13:50:07.674 INFO  SparkContext - Created broadcast 2071 from broadcast at DAGScheduler.scala:1580
13:50:07.674 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1256 (MapPartitionsRDD[5731] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
13:50:07.674 INFO  TaskSchedulerImpl - Adding task set 1256.0 with 1 tasks resource profile 0
13:50:07.674 INFO  TaskSetManager - Starting task 0.0 in stage 1256.0 (TID 1095) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:50:07.674 INFO  Executor - Running task 0.0 in stage 1256.0 (TID 1095)
13:50:07.676 INFO  ShuffleBlockFetcherIterator - Getting 1 (129.0 B) non-empty blocks including 1 (129.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.676 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.677 INFO  Executor - Finished task 0.0 in stage 1256.0 (TID 1095). 1887 bytes result sent to driver
13:50:07.677 INFO  TaskSetManager - Finished task 0.0 in stage 1256.0 (TID 1095) in 3 ms on localhost (executor driver) (1/1)
13:50:07.677 INFO  TaskSchedulerImpl - Removed TaskSet 1256.0, whose tasks have all completed, from pool 
13:50:07.677 INFO  DAGScheduler - ResultStage 1256 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.004 s
13:50:07.677 INFO  DAGScheduler - Job 583 is finished. Cancelling potential speculative or zombie tasks for this job
13:50:07.677 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1256: Stage finished
13:50:07.677 INFO  DAGScheduler - Job 583 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.026781 s
13:50:07.687 INFO  MemoryStore - Block broadcast_2072 stored as values in memory (estimated size 5.6 KiB, free 1909.1 MiB)
13:50:07.687 INFO  MemoryStore - Block broadcast_2072_piece0 stored as bytes in memory (estimated size 500.0 B, free 1909.1 MiB)
13:50:07.688 INFO  BlockManagerInfo - Added broadcast_2072_piece0 in memory on localhost:43843 (size: 500.0 B, free: 1918.4 MiB)
13:50:07.688 INFO  SparkContext - Created broadcast 2072 from broadcast at ReadsSparkSink.java:133
13:50:07.691 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
13:50:07.691 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:50:07.691 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:50:07.707 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
13:50:07.708 INFO  DAGScheduler - Registering RDD 5733 (mapToPair at SparkUtils.java:161) as input to shuffle 309
13:50:07.708 INFO  DAGScheduler - Got job 584 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
13:50:07.708 INFO  DAGScheduler - Final stage: ResultStage 1261 (runJob at SparkHadoopWriter.scala:83)
13:50:07.708 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1260)
13:50:07.708 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1260)
13:50:07.708 INFO  DAGScheduler - Submitting ShuffleMapStage 1260 (MapPartitionsRDD[5733] at mapToPair at SparkUtils.java:161), which has no missing parents
13:50:07.709 INFO  MemoryStore - Block broadcast_2073 stored as values in memory (estimated size 15.0 KiB, free 1909.1 MiB)
13:50:07.709 INFO  MemoryStore - Block broadcast_2073_piece0 stored as bytes in memory (estimated size 7.8 KiB, free 1909.1 MiB)
13:50:07.709 INFO  BlockManagerInfo - Added broadcast_2073_piece0 in memory on localhost:43843 (size: 7.8 KiB, free: 1918.4 MiB)
13:50:07.709 INFO  SparkContext - Created broadcast 2073 from broadcast at DAGScheduler.scala:1580
13:50:07.710 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1260 (MapPartitionsRDD[5733] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:50:07.710 INFO  TaskSchedulerImpl - Adding task set 1260.0 with 1 tasks resource profile 0
13:50:07.710 INFO  TaskSetManager - Starting task 0.0 in stage 1260.0 (TID 1096) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:50:07.710 INFO  Executor - Running task 0.0 in stage 1260.0 (TID 1096)
13:50:07.711 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.711 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.713 INFO  ShuffleBlockFetcherIterator - Getting 1 (60.0 B) non-empty blocks including 1 (60.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.713 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.714 INFO  Executor - Finished task 0.0 in stage 1260.0 (TID 1096). 1879 bytes result sent to driver
13:50:07.714 INFO  TaskSetManager - Finished task 0.0 in stage 1260.0 (TID 1096) in 4 ms on localhost (executor driver) (1/1)
13:50:07.714 INFO  TaskSchedulerImpl - Removed TaskSet 1260.0, whose tasks have all completed, from pool 
13:50:07.714 INFO  DAGScheduler - ShuffleMapStage 1260 (mapToPair at SparkUtils.java:161) finished in 0.006 s
13:50:07.714 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.714 INFO  DAGScheduler - running: HashSet()
13:50:07.714 INFO  DAGScheduler - waiting: HashSet(ResultStage 1261)
13:50:07.714 INFO  DAGScheduler - failed: HashSet()
13:50:07.714 INFO  DAGScheduler - Submitting ResultStage 1261 (MapPartitionsRDD[5739] at saveAsTextFile at SamSink.java:65), which has no missing parents
13:50:07.720 INFO  MemoryStore - Block broadcast_2074 stored as values in memory (estimated size 160.3 KiB, free 1908.9 MiB)
13:50:07.721 INFO  MemoryStore - Block broadcast_2074_piece0 stored as bytes in memory (estimated size 72.1 KiB, free 1908.9 MiB)
13:50:07.721 INFO  BlockManagerInfo - Added broadcast_2074_piece0 in memory on localhost:43843 (size: 72.1 KiB, free: 1918.3 MiB)
13:50:07.721 INFO  SparkContext - Created broadcast 2074 from broadcast at DAGScheduler.scala:1580
13:50:07.721 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1261 (MapPartitionsRDD[5739] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
13:50:07.721 INFO  TaskSchedulerImpl - Adding task set 1261.0 with 1 tasks resource profile 0
13:50:07.722 INFO  TaskSetManager - Starting task 0.0 in stage 1261.0 (TID 1097) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:50:07.722 INFO  Executor - Running task 0.0 in stage 1261.0 (TID 1097)
13:50:07.725 INFO  ShuffleBlockFetcherIterator - Getting 1 (490.0 B) non-empty blocks including 1 (490.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.725 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.726 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
13:50:07.726 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:50:07.726 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:50:07.737 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271350075689873693648358048_5739_m_000000_0' to file:/tmp/MarkDups1346607179402067339/MarkDups.sam.parts/_temporary/0/task_202505271350075689873693648358048_5739_m_000000
13:50:07.737 INFO  SparkHadoopMapRedUtil - attempt_202505271350075689873693648358048_5739_m_000000_0: Committed. Elapsed time: 0 ms.
13:50:07.737 INFO  Executor - Finished task 0.0 in stage 1261.0 (TID 1097). 1858 bytes result sent to driver
13:50:07.738 INFO  TaskSetManager - Finished task 0.0 in stage 1261.0 (TID 1097) in 16 ms on localhost (executor driver) (1/1)
13:50:07.738 INFO  TaskSchedulerImpl - Removed TaskSet 1261.0, whose tasks have all completed, from pool 
13:50:07.738 INFO  DAGScheduler - ResultStage 1261 (runJob at SparkHadoopWriter.scala:83) finished in 0.024 s
13:50:07.738 INFO  DAGScheduler - Job 584 is finished. Cancelling potential speculative or zombie tasks for this job
13:50:07.738 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1261: Stage finished
13:50:07.738 INFO  DAGScheduler - Job 584 finished: runJob at SparkHadoopWriter.scala:83, took 0.030413 s
13:50:07.738 INFO  SparkHadoopWriter - Start to commit write Job job_202505271350075689873693648358048_5739.
13:50:07.742 INFO  SparkHadoopWriter - Write Job job_202505271350075689873693648358048_5739 committed. Elapsed time: 3 ms.
13:50:07.749 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups1346607179402067339/MarkDups.sam
13:50:07.752 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups1346607179402067339/MarkDups.sam done
13:50:07.752 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 1:50:07 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
13:50:07.760 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:50:07.760 INFO  MarkDuplicatesSpark - org.broadinstitute.hellbender.tools.spark.transforms.markduplicates vUnavailable
13:50:07.760 INFO  MarkDuplicatesSpark - For support and documentation go to https://software.broadinstitute.org/gatk/
13:50:07.760 INFO  MarkDuplicatesSpark - Executing as runner@pkrvmf6wy0o8zjz on Linux v6.11.0-1014-azure amd64
13:50:07.760 INFO  MarkDuplicatesSpark - Java runtime: OpenJDK 64-Bit Server VM v17.0.6+10
13:50:07.760 INFO  MarkDuplicatesSpark - Start Date/Time: May 27, 2025 at 1:50:07 PM UTC
13:50:07.760 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:50:07.760 INFO  MarkDuplicatesSpark - ------------------------------------------------------------
13:50:07.760 INFO  MarkDuplicatesSpark - HTSJDK Defaults.COMPRESSION_LEVEL : 2
13:50:07.760 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : false
13:50:07.760 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : true
13:50:07.760 INFO  MarkDuplicatesSpark - HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : false
13:50:07.760 INFO  MarkDuplicatesSpark - Deflater: IntelDeflater
13:50:07.760 INFO  MarkDuplicatesSpark - Inflater: IntelInflater
13:50:07.760 INFO  MarkDuplicatesSpark - GCS max retries/reopens: 20
13:50:07.760 INFO  MarkDuplicatesSpark - Requester pays: disabled
13:50:07.760 INFO  MarkDuplicatesSpark - Initializing engine
13:50:07.760 INFO  MarkDuplicatesSpark - Done initializing engine
13:50:07.760 INFO  MarkDuplicatesSpark - Spark verbosity set to INFO (see --spark-verbosity argument)
13:50:07.763 INFO  MemoryStore - Block broadcast_2075 stored as values in memory (estimated size 306.3 KiB, free 1908.6 MiB)
13:50:07.772 INFO  MemoryStore - Block broadcast_2075_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1908.5 MiB)
13:50:07.772 INFO  BlockManagerInfo - Added broadcast_2075_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1918.2 MiB)
13:50:07.772 INFO  SparkContext - Created broadcast 2075 from newAPIHadoopFile at PathSplitSource.java:96
13:50:07.792 INFO  MemoryStore - Block broadcast_2076 stored as values in memory (estimated size 306.3 KiB, free 1908.2 MiB)
13:50:07.798 INFO  MemoryStore - Block broadcast_2076_piece0 stored as bytes in memory (estimated size 64.4 KiB, free 1908.1 MiB)
13:50:07.799 INFO  BlockManagerInfo - Added broadcast_2076_piece0 in memory on localhost:43843 (size: 64.4 KiB, free: 1918.2 MiB)
13:50:07.799 INFO  SparkContext - Created broadcast 2076 from newAPIHadoopFile at PathSplitSource.java:96
13:50:07.831 INFO  SparkContext - Starting job: collect at SparkUtils.java:205
13:50:07.831 INFO  FileInputFormat - Total input files to process : 1
13:50:07.832 INFO  DAGScheduler - Registering RDD 5753 (mapToPair at SparkUtils.java:161) as input to shuffle 310
13:50:07.832 INFO  DAGScheduler - Got job 585 (collect at SparkUtils.java:205) with 1 output partitions
13:50:07.832 INFO  DAGScheduler - Final stage: ResultStage 1263 (collect at SparkUtils.java:205)
13:50:07.832 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1262)
13:50:07.832 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1262)
13:50:07.832 INFO  DAGScheduler - Submitting ShuffleMapStage 1262 (MapPartitionsRDD[5753] at mapToPair at SparkUtils.java:161), which has no missing parents
13:50:07.849 INFO  MemoryStore - Block broadcast_2077 stored as values in memory (estimated size 481.4 KiB, free 1907.7 MiB)
13:50:07.851 INFO  MemoryStore - Block broadcast_2077_piece0 stored as bytes in memory (estimated size 207.4 KiB, free 1907.5 MiB)
13:50:07.851 INFO  BlockManagerInfo - Added broadcast_2077_piece0 in memory on localhost:43843 (size: 207.4 KiB, free: 1918.0 MiB)
13:50:07.851 INFO  SparkContext - Created broadcast 2077 from broadcast at DAGScheduler.scala:1580
13:50:07.852 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1262 (MapPartitionsRDD[5753] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:50:07.852 INFO  TaskSchedulerImpl - Adding task set 1262.0 with 1 tasks resource profile 0
13:50:07.852 INFO  TaskSetManager - Starting task 0.0 in stage 1262.0 (TID 1098) (localhost, executor driver, partition 0, PROCESS_LOCAL, 9757 bytes) 
13:50:07.852 INFO  Executor - Running task 0.0 in stage 1262.0 (TID 1098)
13:50:07.882 INFO  NewHadoopRDD - Input split: file:/home/runner/work/gatk/gatk/src/test/resources/org/broadinstitute/hellbender/tools/walkers/MarkDuplicatesGATK/inputSingleLibrarySolexa16404.bam:0+5416
13:50:07.887 INFO  Executor - Finished task 0.0 in stage 1262.0 (TID 1098). 1148 bytes result sent to driver
13:50:07.887 INFO  TaskSetManager - Finished task 0.0 in stage 1262.0 (TID 1098) in 35 ms on localhost (executor driver) (1/1)
13:50:07.887 INFO  TaskSchedulerImpl - Removed TaskSet 1262.0, whose tasks have all completed, from pool 
13:50:07.887 INFO  DAGScheduler - ShuffleMapStage 1262 (mapToPair at SparkUtils.java:161) finished in 0.055 s
13:50:07.887 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.887 INFO  DAGScheduler - running: HashSet()
13:50:07.887 INFO  DAGScheduler - waiting: HashSet(ResultStage 1263)
13:50:07.887 INFO  DAGScheduler - failed: HashSet()
13:50:07.888 INFO  DAGScheduler - Submitting ResultStage 1263 (MapPartitionsRDD[5756] at mapPartitions at SparkUtils.java:188), which has no missing parents
13:50:07.888 INFO  MemoryStore - Block broadcast_2078 stored as values in memory (estimated size 7.4 KiB, free 1907.5 MiB)
13:50:07.888 INFO  MemoryStore - Block broadcast_2078_piece0 stored as bytes in memory (estimated size 4.1 KiB, free 1907.5 MiB)
13:50:07.889 INFO  BlockManagerInfo - Added broadcast_2078_piece0 in memory on localhost:43843 (size: 4.1 KiB, free: 1918.0 MiB)
13:50:07.889 INFO  SparkContext - Created broadcast 2078 from broadcast at DAGScheduler.scala:1580
13:50:07.889 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1263 (MapPartitionsRDD[5756] at mapPartitions at SparkUtils.java:188) (first 15 tasks are for partitions Vector(0))
13:50:07.889 INFO  TaskSchedulerImpl - Adding task set 1263.0 with 1 tasks resource profile 0
13:50:07.889 INFO  TaskSetManager - Starting task 0.0 in stage 1263.0 (TID 1099) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:50:07.889 INFO  Executor - Running task 0.0 in stage 1263.0 (TID 1099)
13:50:07.890 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.890 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.892 INFO  Executor - Finished task 0.0 in stage 1263.0 (TID 1099). 2039 bytes result sent to driver
13:50:07.892 INFO  TaskSetManager - Finished task 0.0 in stage 1263.0 (TID 1099) in 3 ms on localhost (executor driver) (1/1)
13:50:07.892 INFO  TaskSchedulerImpl - Removed TaskSet 1263.0, whose tasks have all completed, from pool 
13:50:07.892 INFO  DAGScheduler - ResultStage 1263 (collect at SparkUtils.java:205) finished in 0.004 s
13:50:07.892 INFO  DAGScheduler - Job 585 is finished. Cancelling potential speculative or zombie tasks for this job
13:50:07.892 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1263: Stage finished
13:50:07.892 INFO  DAGScheduler - Job 585 finished: collect at SparkUtils.java:205, took 0.061191 s
13:50:07.896 INFO  MemoryStore - Block broadcast_2079 stored as values in memory (estimated size 392.0 B, free 1907.5 MiB)
13:50:07.896 INFO  MemoryStore - Block broadcast_2079_piece0 stored as bytes in memory (estimated size 33.0 B, free 1907.5 MiB)
13:50:07.897 INFO  BlockManagerInfo - Added broadcast_2079_piece0 in memory on localhost:43843 (size: 33.0 B, free: 1918.0 MiB)
13:50:07.897 INFO  SparkContext - Created broadcast 2079 from broadcast at MarkDuplicatesSparkUtils.java:126
13:50:07.897 INFO  MemoryStore - Block broadcast_2080 stored as values in memory (estimated size 144.0 B, free 1907.5 MiB)
13:50:07.897 INFO  MemoryStore - Block broadcast_2080_piece0 stored as bytes in memory (estimated size 28.0 B, free 1907.5 MiB)
13:50:07.897 INFO  BlockManagerInfo - Added broadcast_2080_piece0 in memory on localhost:43843 (size: 28.0 B, free: 1918.0 MiB)
13:50:07.897 INFO  SparkContext - Created broadcast 2080 from broadcast at MarkDuplicatesSparkUtils.java:127
13:50:07.910 INFO  SparkContext - Starting job: collectAsMap at MarkDuplicatesSparkUtils.java:542
13:50:07.910 INFO  DAGScheduler - Registering RDD 5765 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) as input to shuffle 313
13:50:07.910 INFO  DAGScheduler - Registering RDD 5769 (mapToPair at MarkDuplicatesSpark.java:217) as input to shuffle 312
13:50:07.910 INFO  DAGScheduler - Registering RDD 5773 (mapToPair at MarkDuplicatesSparkUtils.java:497) as input to shuffle 311
13:50:07.910 INFO  DAGScheduler - Got job 586 (collectAsMap at MarkDuplicatesSparkUtils.java:542) with 1 output partitions
13:50:07.910 INFO  DAGScheduler - Final stage: ResultStage 1268 (collectAsMap at MarkDuplicatesSparkUtils.java:542)
13:50:07.910 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1267)
13:50:07.910 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1267)
13:50:07.911 INFO  DAGScheduler - Submitting ShuffleMapStage 1265 (MapPartitionsRDD[5765] at flatMapToPair at MarkDuplicatesSparkUtils.java:130), which has no missing parents
13:50:07.911 INFO  MemoryStore - Block broadcast_2081 stored as values in memory (estimated size 45.2 KiB, free 1907.4 MiB)
13:50:07.912 INFO  MemoryStore - Block broadcast_2081_piece0 stored as bytes in memory (estimated size 17.1 KiB, free 1907.4 MiB)
13:50:07.912 INFO  BlockManagerInfo - Added broadcast_2081_piece0 in memory on localhost:43843 (size: 17.1 KiB, free: 1917.9 MiB)
13:50:07.912 INFO  SparkContext - Created broadcast 2081 from broadcast at DAGScheduler.scala:1580
13:50:07.912 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1265 (MapPartitionsRDD[5765] at flatMapToPair at MarkDuplicatesSparkUtils.java:130) (first 15 tasks are for partitions Vector(0))
13:50:07.912 INFO  TaskSchedulerImpl - Adding task set 1265.0 with 1 tasks resource profile 0
13:50:07.913 INFO  TaskSetManager - Starting task 0.0 in stage 1265.0 (TID 1100) (localhost, executor driver, partition 0, NODE_LOCAL, 9890 bytes) 
13:50:07.913 INFO  Executor - Running task 0.0 in stage 1265.0 (TID 1100)
13:50:07.914 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.914 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.916 INFO  Executor - Finished task 0.0 in stage 1265.0 (TID 1100). 1922 bytes result sent to driver
13:50:07.917 INFO  TaskSetManager - Finished task 0.0 in stage 1265.0 (TID 1100) in 5 ms on localhost (executor driver) (1/1)
13:50:07.917 INFO  TaskSchedulerImpl - Removed TaskSet 1265.0, whose tasks have all completed, from pool 
13:50:07.917 INFO  DAGScheduler - ShuffleMapStage 1265 (flatMapToPair at MarkDuplicatesSparkUtils.java:130) finished in 0.006 s
13:50:07.917 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.917 INFO  DAGScheduler - running: HashSet()
13:50:07.917 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1266, ShuffleMapStage 1267, ResultStage 1268)
13:50:07.917 INFO  DAGScheduler - failed: HashSet()
13:50:07.917 INFO  DAGScheduler - Submitting ShuffleMapStage 1266 (MapPartitionsRDD[5769] at mapToPair at MarkDuplicatesSpark.java:217), which has no missing parents
13:50:07.918 INFO  MemoryStore - Block broadcast_2082 stored as values in memory (estimated size 49.3 KiB, free 1907.3 MiB)
13:50:07.918 INFO  MemoryStore - Block broadcast_2082_piece0 stored as bytes in memory (estimated size 19.3 KiB, free 1907.3 MiB)
13:50:07.918 INFO  BlockManagerInfo - Added broadcast_2082_piece0 in memory on localhost:43843 (size: 19.3 KiB, free: 1917.9 MiB)
13:50:07.918 INFO  SparkContext - Created broadcast 2082 from broadcast at DAGScheduler.scala:1580
13:50:07.918 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1266 (MapPartitionsRDD[5769] at mapToPair at MarkDuplicatesSpark.java:217) (first 15 tasks are for partitions Vector(0))
13:50:07.918 INFO  TaskSchedulerImpl - Adding task set 1266.0 with 1 tasks resource profile 0
13:50:07.919 INFO  TaskSetManager - Starting task 0.0 in stage 1266.0 (TID 1101) (localhost, executor driver, partition 0, NODE_LOCAL, 9365 bytes) 
13:50:07.919 INFO  Executor - Running task 0.0 in stage 1266.0 (TID 1101)
13:50:07.920 INFO  ShuffleBlockFetcherIterator - Getting 1 (593.0 B) non-empty blocks including 1 (593.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.920 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.924 INFO  Executor - Finished task 0.0 in stage 1266.0 (TID 1101). 1922 bytes result sent to driver
13:50:07.924 INFO  TaskSetManager - Finished task 0.0 in stage 1266.0 (TID 1101) in 5 ms on localhost (executor driver) (1/1)
13:50:07.924 INFO  TaskSchedulerImpl - Removed TaskSet 1266.0, whose tasks have all completed, from pool 
13:50:07.924 INFO  DAGScheduler - ShuffleMapStage 1266 (mapToPair at MarkDuplicatesSpark.java:217) finished in 0.007 s
13:50:07.924 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.924 INFO  DAGScheduler - running: HashSet()
13:50:07.924 INFO  DAGScheduler - waiting: HashSet(ShuffleMapStage 1267, ResultStage 1268)
13:50:07.924 INFO  DAGScheduler - failed: HashSet()
13:50:07.925 INFO  DAGScheduler - Submitting ShuffleMapStage 1267 (MapPartitionsRDD[5773] at mapToPair at MarkDuplicatesSparkUtils.java:497), which has no missing parents
13:50:07.925 INFO  MemoryStore - Block broadcast_2083 stored as values in memory (estimated size 40.0 KiB, free 1907.3 MiB)
13:50:07.926 INFO  MemoryStore - Block broadcast_2083_piece0 stored as bytes in memory (estimated size 14.5 KiB, free 1907.3 MiB)
13:50:07.926 INFO  BlockManagerInfo - Added broadcast_2083_piece0 in memory on localhost:43843 (size: 14.5 KiB, free: 1917.9 MiB)
13:50:07.926 INFO  SparkContext - Created broadcast 2083 from broadcast at DAGScheduler.scala:1580
13:50:07.926 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1267 (MapPartitionsRDD[5773] at mapToPair at MarkDuplicatesSparkUtils.java:497) (first 15 tasks are for partitions Vector(0))
13:50:07.926 INFO  TaskSchedulerImpl - Adding task set 1267.0 with 1 tasks resource profile 0
13:50:07.926 INFO  TaskSetManager - Starting task 0.0 in stage 1267.0 (TID 1102) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:50:07.926 INFO  Executor - Running task 0.0 in stage 1267.0 (TID 1102)
13:50:07.928 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.928 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.929 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.929 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.930 INFO  Executor - Finished task 0.0 in stage 1267.0 (TID 1102). 1879 bytes result sent to driver
13:50:07.930 INFO  TaskSetManager - Finished task 0.0 in stage 1267.0 (TID 1102) in 4 ms on localhost (executor driver) (1/1)
13:50:07.930 INFO  TaskSchedulerImpl - Removed TaskSet 1267.0, whose tasks have all completed, from pool 
13:50:07.930 INFO  DAGScheduler - ShuffleMapStage 1267 (mapToPair at MarkDuplicatesSparkUtils.java:497) finished in 0.005 s
13:50:07.930 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.930 INFO  DAGScheduler - running: HashSet()
13:50:07.930 INFO  DAGScheduler - waiting: HashSet(ResultStage 1268)
13:50:07.930 INFO  DAGScheduler - failed: HashSet()
13:50:07.930 INFO  DAGScheduler - Submitting ResultStage 1268 (MapPartitionsRDD[5775] at mapValues at MarkDuplicatesSparkUtils.java:519), which has no missing parents
13:50:07.931 INFO  MemoryStore - Block broadcast_2084 stored as values in memory (estimated size 41.2 KiB, free 1907.2 MiB)
13:50:07.931 INFO  MemoryStore - Block broadcast_2084_piece0 stored as bytes in memory (estimated size 15.3 KiB, free 1907.2 MiB)
13:50:07.931 INFO  BlockManagerInfo - Added broadcast_2084_piece0 in memory on localhost:43843 (size: 15.3 KiB, free: 1917.9 MiB)
13:50:07.931 INFO  SparkContext - Created broadcast 2084 from broadcast at DAGScheduler.scala:1580
13:50:07.931 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1268 (MapPartitionsRDD[5775] at mapValues at MarkDuplicatesSparkUtils.java:519) (first 15 tasks are for partitions Vector(0))
13:50:07.931 INFO  TaskSchedulerImpl - Adding task set 1268.0 with 1 tasks resource profile 0
13:50:07.932 INFO  TaskSetManager - Starting task 0.0 in stage 1268.0 (TID 1103) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:50:07.932 INFO  Executor - Running task 0.0 in stage 1268.0 (TID 1103)
13:50:07.933 INFO  ShuffleBlockFetcherIterator - Getting 1 (142.0 B) non-empty blocks including 1 (142.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.933 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.934 INFO  Executor - Finished task 0.0 in stage 1268.0 (TID 1103). 1901 bytes result sent to driver
13:50:07.934 INFO  TaskSetManager - Finished task 0.0 in stage 1268.0 (TID 1103) in 2 ms on localhost (executor driver) (1/1)
13:50:07.934 INFO  TaskSchedulerImpl - Removed TaskSet 1268.0, whose tasks have all completed, from pool 
13:50:07.934 INFO  DAGScheduler - ResultStage 1268 (collectAsMap at MarkDuplicatesSparkUtils.java:542) finished in 0.004 s
13:50:07.935 INFO  DAGScheduler - Job 586 is finished. Cancelling potential speculative or zombie tasks for this job
13:50:07.935 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1268: Stage finished
13:50:07.935 INFO  DAGScheduler - Job 586 finished: collectAsMap at MarkDuplicatesSparkUtils.java:542, took 0.024788 s
13:50:07.945 INFO  MemoryStore - Block broadcast_2085 stored as values in memory (estimated size 93.5 KiB, free 1907.1 MiB)
13:50:07.946 INFO  MemoryStore - Block broadcast_2085_piece0 stored as bytes in memory (estimated size 5.6 KiB, free 1907.1 MiB)
13:50:07.946 INFO  BlockManagerInfo - Added broadcast_2085_piece0 in memory on localhost:43843 (size: 5.6 KiB, free: 1917.9 MiB)
13:50:07.946 INFO  SparkContext - Created broadcast 2085 from broadcast at ReadsSparkSink.java:133
13:50:07.949 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
13:50:07.949 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:50:07.949 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:50:07.966 INFO  SparkContext - Starting job: runJob at SparkHadoopWriter.scala:83
13:50:07.967 INFO  DAGScheduler - Registering RDD 5777 (mapToPair at SparkUtils.java:161) as input to shuffle 314
13:50:07.967 INFO  DAGScheduler - Got job 587 (runJob at SparkHadoopWriter.scala:83) with 1 output partitions
13:50:07.967 INFO  DAGScheduler - Final stage: ResultStage 1273 (runJob at SparkHadoopWriter.scala:83)
13:50:07.967 INFO  DAGScheduler - Parents of final stage: List(ShuffleMapStage 1272)
13:50:07.967 INFO  DAGScheduler - Missing parents: List(ShuffleMapStage 1272)
13:50:07.967 INFO  DAGScheduler - Submitting ShuffleMapStage 1272 (MapPartitionsRDD[5777] at mapToPair at SparkUtils.java:161), which has no missing parents
13:50:07.968 INFO  MemoryStore - Block broadcast_2086 stored as values in memory (estimated size 39.1 KiB, free 1907.1 MiB)
13:50:07.968 INFO  MemoryStore - Block broadcast_2086_piece0 stored as bytes in memory (estimated size 14.1 KiB, free 1907.1 MiB)
13:50:07.968 INFO  BlockManagerInfo - Added broadcast_2086_piece0 in memory on localhost:43843 (size: 14.1 KiB, free: 1917.9 MiB)
13:50:07.969 INFO  SparkContext - Created broadcast 2086 from broadcast at DAGScheduler.scala:1580
13:50:07.969 INFO  DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 1272 (MapPartitionsRDD[5777] at mapToPair at SparkUtils.java:161) (first 15 tasks are for partitions Vector(0))
13:50:07.969 INFO  TaskSchedulerImpl - Adding task set 1272.0 with 1 tasks resource profile 0
13:50:07.969 INFO  TaskSetManager - Starting task 0.0 in stage 1272.0 (TID 1104) (localhost, executor driver, partition 0, NODE_LOCAL, 9948 bytes) 
13:50:07.969 INFO  Executor - Running task 0.0 in stage 1272.0 (TID 1104)
13:50:07.971 INFO  ShuffleBlockFetcherIterator - Getting 1 (4.3 KiB) non-empty blocks including 1 (4.3 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.971 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.972 INFO  ShuffleBlockFetcherIterator - Getting 1 (652.0 B) non-empty blocks including 1 (652.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.972 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.973 INFO  Executor - Finished task 0.0 in stage 1272.0 (TID 1104). 1922 bytes result sent to driver
13:50:07.973 INFO  TaskSetManager - Finished task 0.0 in stage 1272.0 (TID 1104) in 4 ms on localhost (executor driver) (1/1)
13:50:07.973 INFO  TaskSchedulerImpl - Removed TaskSet 1272.0, whose tasks have all completed, from pool 
13:50:07.974 INFO  DAGScheduler - ShuffleMapStage 1272 (mapToPair at SparkUtils.java:161) finished in 0.007 s
13:50:07.974 INFO  DAGScheduler - looking for newly runnable stages
13:50:07.974 INFO  DAGScheduler - running: HashSet()
13:50:07.974 INFO  DAGScheduler - waiting: HashSet(ResultStage 1273)
13:50:07.974 INFO  DAGScheduler - failed: HashSet()
13:50:07.974 INFO  DAGScheduler - Submitting ResultStage 1273 (MapPartitionsRDD[5783] at saveAsTextFile at SamSink.java:65), which has no missing parents
13:50:07.980 INFO  MemoryStore - Block broadcast_2087 stored as values in memory (estimated size 184.3 KiB, free 1906.9 MiB)
13:50:07.981 INFO  MemoryStore - Block broadcast_2087_piece0 stored as bytes in memory (estimated size 78.3 KiB, free 1906.8 MiB)
13:50:07.981 INFO  BlockManagerInfo - Added broadcast_2087_piece0 in memory on localhost:43843 (size: 78.3 KiB, free: 1917.8 MiB)
13:50:07.982 INFO  SparkContext - Created broadcast 2087 from broadcast at DAGScheduler.scala:1580
13:50:07.982 INFO  DAGScheduler - Submitting 1 missing tasks from ResultStage 1273 (MapPartitionsRDD[5783] at saveAsTextFile at SamSink.java:65) (first 15 tasks are for partitions Vector(0))
13:50:07.982 INFO  TaskSchedulerImpl - Adding task set 1273.0 with 1 tasks resource profile 0
13:50:07.982 INFO  TaskSetManager - Starting task 0.0 in stage 1273.0 (TID 1105) (localhost, executor driver, partition 0, NODE_LOCAL, 9376 bytes) 
13:50:07.982 INFO  Executor - Running task 0.0 in stage 1273.0 (TID 1105)
13:50:07.986 INFO  ShuffleBlockFetcherIterator - Getting 1 (3.9 KiB) non-empty blocks including 1 (3.9 KiB) local and 0 (0.0 B) host-local and 0 (0.0 B) push-merged-local and 0 (0.0 B) remote blocks
13:50:07.986 INFO  ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
13:50:07.987 INFO  HadoopMapRedCommitProtocol - Using output committer class org.apache.hadoop.mapred.FileOutputCommitter
13:50:07.987 INFO  FileOutputCommitter - File Output Committer Algorithm version is 1
13:50:07.987 INFO  FileOutputCommitter - FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
13:50:07.998 INFO  FileOutputCommitter - Saved output of task 'attempt_202505271350076918801472630259621_5783_m_000000_0' to file:/tmp/MarkDups11431211842882307584/MarkDups.sam.parts/_temporary/0/task_202505271350076918801472630259621_5783_m_000000
13:50:07.998 INFO  SparkHadoopMapRedUtil - attempt_202505271350076918801472630259621_5783_m_000000_0: Committed. Elapsed time: 0 ms.
13:50:07.998 INFO  Executor - Finished task 0.0 in stage 1273.0 (TID 1105). 1858 bytes result sent to driver
13:50:07.998 INFO  TaskSetManager - Finished task 0.0 in stage 1273.0 (TID 1105) in 16 ms on localhost (executor driver) (1/1)
13:50:07.998 INFO  TaskSchedulerImpl - Removed TaskSet 1273.0, whose tasks have all completed, from pool 
13:50:07.998 INFO  DAGScheduler - ResultStage 1273 (runJob at SparkHadoopWriter.scala:83) finished in 0.024 s
13:50:07.998 INFO  DAGScheduler - Job 587 is finished. Cancelling potential speculative or zombie tasks for this job
13:50:07.998 INFO  TaskSchedulerImpl - Killing all running tasks in stage 1273: Stage finished
13:50:07.999 INFO  DAGScheduler - Job 587 finished: runJob at SparkHadoopWriter.scala:83, took 0.032301 s
13:50:07.999 INFO  SparkHadoopWriter - Start to commit write Job job_202505271350076918801472630259621_5783.
13:50:08.003 INFO  SparkHadoopWriter - Write Job job_202505271350076918801472630259621_5783 committed. Elapsed time: 3 ms.
13:50:08.010 INFO  HadoopFileSystemWrapper - Concatenating 2 parts to /tmp/MarkDups11431211842882307584/MarkDups.sam
13:50:08.013 INFO  HadoopFileSystemWrapper - Concatenating to /tmp/MarkDups11431211842882307584/MarkDups.sam done
13:50:08.013 INFO  MarkDuplicatesSpark - Shutting down engine
[May 27, 2025 at 1:50:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:08 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:09 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:10 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:11 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:12 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:13 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:13 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
13:50:13.733 ERROR Executor - Exception in task 0.0 in stage 1493.0 (TID 1252)
org.broadinstitute.hellbender.exceptions.UserException$ReadMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 is missing the read group (RG) tag, which is required by the GATK.  Please use http://gatkforums.broadinstitute.org/discussion/59/companion-utilities-replacereadgroups to fix this problem
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:64) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[main/:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[main/:?]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[main/:?]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[scala-library-2.13.14.jar:?]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[scala-library-2.13.14.jar:?]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
13:50:13.748 ERROR TaskSetManager - Task 0 in stage 1493.0 failed 1 times; aborting job
[May 27, 2025 at 1:50:13 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
13:50:13.965 ERROR Executor - Exception in task 0.0 in stage 1500.0 (TID 1255)
org.broadinstitute.hellbender.exceptions.UserException$HeaderMissingReadGroup: SAM/BAM/CRAM file (unknown) is malformed: Read RUNID:7:1203:2886:82292 contains an (RG) tag with the group NotADuplicateGroup which is not found in the file header.
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.getLibraryForRead(MarkDuplicatesSparkUtils.java:66) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.EmptyFragment.<init>(EmptyFragment.java:36) ~[main/:?]
	at org.broadinstitute.hellbender.utils.read.markduplicates.sparkrecords.MarkDuplicatesSparkRecord.newEmptyFragment(MarkDuplicatesSparkRecord.java:44) ~[main/:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$0(MarkDuplicatesSparkUtils.java:141) ~[main/:?]
	at java.base/java.util.stream.ReferencePipeline$15$1.accept(ReferencePipeline.java:540) ~[?:?]
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
	at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) ~[?:?]
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
	at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) ~[?:?]
	at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSparkUtils.lambda$transformToDuplicateNames$5ac2632f$1(MarkDuplicatesSparkUtils.java:150) ~[main/:?]
	at org.apache.spark.api.java.JavaRDDLike.$anonfun$flatMapToPair$1(JavaRDDLike.scala:143) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at scala.collection.Iterator$$anon$10.nextCur(Iterator.scala:594) ~[scala-library-2.13.14.jar:?]
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:608) ~[scala-library-2.13.14.jar:?]
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:140) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:104) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:54) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.scheduler.Task.run(Task.scala:141) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61) ~[spark-common-utils_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94) ~[spark-core_2.13-3.5.0.jar:3.5.0]
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623) [spark-core_2.13-3.5.0.jar:3.5.0]
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [?:?]
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [?:?]
	at java.base/java.lang.Thread.run(Thread.java:833) [?:?]
13:50:13.967 ERROR TaskSetManager - Task 0 in stage 1500.0 failed 1 times; aborting job
[May 27, 2025 at 1:50:13 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:14 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:15 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:16 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:17 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:18 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:19 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:19 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:19 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:20 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:20 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:20 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:21 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:22 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:23 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:24 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:24 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:24 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:24 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:25 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:26 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:27 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:27 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:27 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:28 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:28 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:28 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:28 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:29 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:30 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:31 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:32 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:33 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:34 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:35 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:36 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:37 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000
[May 27, 2025 at 1:50:38 PM UTC] org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark done. Elapsed time: 0.00 minutes.
Runtime.totalMemory()=1835008000